text_prompt
stringlengths
157
13.1k
code_prompt
stringlengths
7
19.8k
def _find_channel(connection, name, ctype, dtype, sample_rate, unique=False):
    """Internal method to find a single channel

    Parameters
    ----------
    connection : `nds2.connection`, optional
        open NDS2 connection to use for query
    name : `str`
        the name of the channel to find
    ctype : `int`
        the NDS2 channel type to match
    dtype : `int`
        the NDS2 data type to match
    sample_rate : `tuple`
        a pre-formatted rate tuple (see `find_channels`)
    unique : `bool`, optional, default: `False`
        require one (and only one) match per channel

    Returns
    -------
    channels : `list` of `nds2.channel`
        list of NDS2 channel objects, if `unique=True` is given the list
        is guaranteed to have only one element.

    See also
    --------
    nds2.connection.find_channels
        for documentation on the underlying query method
    """
    # strip any trailing ',<ctype>' tag from the name, updating ctype,
    # e.g. 'L1:GDS-CALIB_STRAIN,reduced' -> 'L1:GDS-CALIB_STRAIN', 'reduced'
    name, ctype = _strip_ctype(name, ctype, connection.get_protocol())

    # run the NDS2 query
    matches = connection.find_channels(name, ctype, dtype, *sample_rate)

    # caller accepts any number of results
    if not unique:
        return matches

    # exactly two matches usually means a raw/online duplicate pair;
    # drop the 'online' copy (a no-op when neither match is online)
    if len(matches) == 2:
        matches = [chan for chan in matches
                   if chan.channel_type != Nds2ChannelType.ONLINE.value]

    # a unique match was requested, so anything else is an error
    if len(matches) != 1:
        raise ValueError("unique NDS2 channel match not found for %r" % name)
    return matches
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _strip_ctype(name, ctype, protocol=2): """Strip the ctype from a channel name for the given nds server version This is needed because NDS1 servers store trend channels _including_ the suffix, but not raw channels, and NDS2 doesn't do this. """
# parse channel type from name (e.g. 'L1:GDS-CALIB_STRAIN,reduced') try: name, ctypestr = name.rsplit(',', 1) except ValueError: pass else: ctype = Nds2ChannelType.find(ctypestr).value # NDS1 stores channels with trend suffix, so we put it back: if protocol == 1 and ctype in ( Nds2ChannelType.STREND.value, Nds2ChannelType.MTREND.value ): name += ',{0}'.format(ctypestr) return name, ctype
def get_availability(channels, start, end,
                     connection=None, host=None, port=None):
    # pylint: disable=unused-argument
    """Query an NDS2 server for data availability

    Parameters
    ----------
    channels : `list` of `str`
        list of channel names to query; this list is mapped to NDS channel
        names using :func:`find_channels`.
    start : `int`
        GPS start time of query
    end : `int`
        GPS end time of query
    connection : `nds2.connection`, optional
        open NDS2 connection to use for query
    host : `str`, optional
        name of NDS2 server to query, required if ``connection`` is not given
    port : `int`, optional
        port number on host to use for NDS2 connection

    Returns
    -------
    segdict : `~gwpy.segments.SegmentListDict`
        dict of ``(name, SegmentList)`` pairs

    Raises
    ------
    ValueError
        if the given channel name cannot be mapped uniquely to a name
        in the NDS server database.

    See also
    --------
    nds2.connection.get_availability
        for documentation on the underlying query method
    """
    from ..segments import (Segment, SegmentList, SegmentListDict)
    connection.set_epoch(start, end)
    # map user-given real names to NDS names
    names = list(map(
        _get_nds2_name,
        find_channels(channels, epoch=(start, end), connection=connection,
                      unique=True),
    ))
    # query for availability
    # (fix: don't reuse ``result`` as the loop variable below — the
    # original shadowed the availability list while iterating over it)
    results = connection.get_availability(names)
    # map to segment types, keyed by the user-given channel names
    out = SegmentListDict()
    for name, available in zip(channels, results):
        out[name] = SegmentList([
            Segment(seg.gps_start, seg.gps_stop)
            for seg in available.simple_list()
        ])
    return out
def minute_trend_times(start, end):
    """Expand a [start, end) interval for use in querying for minute trends

    NDS2 requires start and end times for minute trends to be a multiple of
    60 (to exactly match the time of a minute-trend sample), so this function
    expands the given ``[start, end)`` interval to the nearest multiples.

    Parameters
    ----------
    start : `int`
        GPS start time of query
    end : `int`
        GPS end time of query

    Returns
    -------
    mstart : `int`
        ``start`` rounded down to nearest multiple of 60
    mend : `int`
        ``end`` rounded up to nearest multiple of 60
    """
    mstart = int(start)
    mend = int(end)
    # round start down to the previous minute boundary
    if start % 60:
        mstart = mstart // 60 * 60
    # round end up to the next minute boundary
    if end % 60:
        mend = mend // 60 * 60 + 60
    return mstart, mend
def find(cls, name):
    """Returns the NDS2 channel type corresponding to the given name
    """
    try:
        # direct lookup by enum member name (the common, fast path)
        return cls._member_map_[name]
    except KeyError:
        # fall back to scanning members for a matching ``.name``
        # NOTE(review): as written this re-checks the same keys held by
        # ``_member_map_``, so it can never find anything new — presumably
        # the intended comparison is against an NDS-specific alias on each
        # member; confirm against the enum definition
        for ctype in cls._member_map_.values():
            if ctype.name == name:
                return ctype
        raise ValueError('%s is not a valid %s' % (name, cls.__name__))
def find(cls, dtype):
    """Returns the NDS2 type corresponding to the given python type
    """
    try:
        # direct lookup (e.g. by member name string)
        return cls._member_map_[dtype]
    except KeyError:
        try:
            # normalise the input to a numpy scalar type, if possible
            dtype = numpy.dtype(dtype).type
        except TypeError:
            # not dtype-like: compare against raw member values by identity
            for ndstype in cls._member_map_.values():
                if ndstype.value is dtype:
                    return ndstype
        else:
            # dtype-like: compare against each member's numpy type
            # (skipping falsy/unset member values)
            for ndstype in cls._member_map_.values():
                if ndstype.value and ndstype.numpy_dtype is dtype:
                    return ndstype
        raise ValueError('%s is not a valid %s' % (dtype, cls.__name__))
def reconnect(connection):
    """Open a new datafind connection based on an existing connection

    This is required because of https://git.ligo.org/lscsoft/glue/issues/1

    Parameters
    ----------
    connection : :class:`~gwdatafind.http.HTTPConnection` or `FflConnection`
        a connection object (doesn't need to be open)

    Returns
    -------
    newconn : :class:`~gwdatafind.http.HTTPConnection` or `FflConnection`
        the new open connection to the same `host:port` server
    """
    # FFL connections are rebuilt from their directory alone
    if isinstance(connection, FflConnection):
        return type(connection)(connection.ffldir)
    # preserve the SSL context for anything that isn't plain HTTP (port 80)
    kwargs = {}
    if connection.port != 80:
        kwargs['context'] = connection._context
    return connection.__class__(connection.host, port=connection.port,
                                **kwargs)
def _type_priority(ifo, ftype, trend=None):
    """Prioritise the given GWF type based on its name or trend status.

    This is essentially an ad-hoc ordering function based on internal
    knowledge of how LIGO does GWF type naming.
    """
    # a type matching the requested trend flavour always wins outright
    for trendname, trend_regex in (
            ('m-trend', MINUTE_TREND_TYPE),
            ('s-trend', SECOND_TREND_TYPE),
    ):
        if trend == trendname and trend_regex.match(ftype):
            return 0, len(ftype)

    # otherwise take the first matching priority bucket, in order
    buckets = (
        (HIGH_PRIORITY_TYPE, 1),
        (re.compile(r'[A-Z]\d_C'), 6),
        (LOW_PRIORITY_TYPE, 10),
        (MINUTE_TREND_TYPE, 10),
        (SECOND_TREND_TYPE, 10),
    )
    for regex, priority in buckets:
        if regex.search(ftype):
            return priority, len(ftype)

    # unrecognised types sit in the middle of the pack
    return 5, len(ftype)
def on_tape(*files):
    """Determine whether any of the given files are on tape

    Parameters
    ----------
    *files : `str`
        one or more paths to GWF files

    Returns
    -------
    True/False : `bool`
        `True` if any of the files are determined to be on tape,
        otherwise `False`
    """
    for path in files:
        try:
            nblocks = os.stat(path).st_blocks
        except AttributeError:
            # windows doesn't have st_blocks, so tape status is unknowable
            return False
        # zero allocated blocks means the file content lives on tape
        if nblocks == 0:
            return True
    return False
def with_connection(func):
    """Decorate a function to open a new datafind connection if required

    This method will inspect the ``connection`` keyword, and if `None`
    (or missing), will use the ``host`` and ``port`` keywords to open
    a new connection and pass it as ``connection=<new>`` to ``func``.
    """
    @wraps(func)
    def wrapper(*args, **kwargs):
        # open a connection on-the-fly when the caller didn't supply one
        if kwargs.get('connection') is None:
            kwargs['connection'] = _choose_connection(
                host=kwargs.get('host'),
                port=kwargs.get('port'),
            )
        try:
            return func(*args, **kwargs)
        except HTTPException:
            # stale/broken connection: reopen it and retry exactly once
            kwargs['connection'] = reconnect(kwargs['connection'])
            return func(*args, **kwargs)
    return wrapper
def find_best_frametype(channel, start, end,
                        frametype_match=None, allow_tape=True,
                        connection=None, host=None, port=None):
    """Intelligently select the best frametype from which to read this channel

    Parameters
    ----------
    channel : `str`, `~gwpy.detector.Channel`
        the channel to be found
    start : `~gwpy.time.LIGOTimeGPS`, `float`, `str`
        GPS start time of period of interest
    end : `~gwpy.time.LIGOTimeGPS`, `float`, `str`
        GPS end time of period of interest
    host : `str`, optional
        name of datafind host to use
    port : `int`, optional
        port on datafind host to use
    frametype_match : `str`, optional
        regular expression to use for frametype `str` matching
    allow_tape : `bool`, optional
        do not test types whose frame files are stored on tape (not on
        spinning disk)

    Returns
    -------
    frametype : `str`
        the best matching frametype for the ``channel`` in the
        ``[start, end)`` interval

    Raises
    ------
    ValueError
        if no valid frametypes are found
    """
    # first try for a single full-coverage type
    try:
        return find_frametype(channel, gpstime=(start, end),
                              frametype_match=frametype_match,
                              allow_tape=allow_tape, on_gaps='error',
                              connection=connection, host=host, port=port)
    except RuntimeError:
        # gaps (or something else went wrong): re-query for all candidate
        # types (ignoring gaps) and take the highest-priority one
        ftout = find_frametype(channel, gpstime=(start, end),
                               frametype_match=frametype_match,
                               return_all=True, allow_tape=allow_tape,
                               on_gaps='ignore', connection=connection,
                               host=host, port=port)
        try:
            if isinstance(ftout, dict):
                # one best type per channel
                return {key: types[0] for key, types in ftout.items()}
            return ftout[0]
        except IndexError:
            raise ValueError("Cannot find any valid frametypes for "
                             "channel(s)")
def find_types(observatory, match=None, trend=None, connection=None,
               **connection_kw):
    """Find the available data types for a given observatory.

    See also
    --------
    gwdatafind.http.HTTPConnection.find_types
    FflConnection.find_types
        for details on the underlying method(s)
    """
    # rank each returned type by ad-hoc priority (see _type_priority)
    def _priority(ftype):
        return _type_priority(observatory, ftype, trend=trend)

    return sorted(connection.find_types(observatory, match=match),
                  key=_priority)
def find_urls(observatory, frametype, start, end, on_gaps='error',
              connection=None, **connection_kw):
    """Find the URLs of files of a given data type in a GPS interval.

    See also
    --------
    gwdatafind.http.HTTPConnection.find_urls
    FflConnection.find_urls
        for details on the underlying method(s)
    """
    # delegate straight to the connection object
    urls = connection.find_urls(observatory, frametype, start, end,
                                on_gaps=on_gaps)
    return urls
def ffl_path(self, site, frametype):
    """Returns the path of the FFL file for the given site and frametype

    Examples
    --------
    /virgoData/ffl/V1Online.ffl
    """
    key = (site, frametype)
    if key not in self.paths:
        # refresh the path cache and retry; a second miss raises KeyError
        self._find_paths()
    return self.paths[key]
def find_types(self, site=None, match=r'^(?!lastfile|spectro|\.).*'):
    """Return the list of known data types.

    This is just the basename of each FFL file found in the FFL
    directory (minus the ``.ffl`` extension)
    """
    # make sure the (site, tag) -> path map is populated
    self._find_paths()
    tags = [tag for (site_, tag) in self.paths
            if site in (None, site_)]
    if match is None:
        return tags
    # by default, hide service files ('lastfile', 'spectro', dotfiles)
    regex = re.compile(match)
    return [tag for tag in tags if regex.search(tag)]
def find_urls(self, site, frametype, gpsstart, gpsend, match=None,
              on_gaps='warn'):
    """Find all files of the given type in the [start, end) GPS interval.
    """
    span = Segment(gpsstart, gpsend)
    # select cache entries of the right type that overlap the request
    cache = [entry for entry in self._read_ffl_cache(site, frametype)
             if entry.observatory == site
             and entry.description == frametype
             and entry.segment.intersects(span)]
    urls = [entry.path for entry in cache]

    # compute coverage gaps before any name filtering
    missing = SegmentList([span]) - cache_segments(cache)

    # apply the optional filename filter
    if match:
        regex = re.compile(match)
        urls = [url for url in urls if regex.search(url)]

    # full coverage, or caller doesn't care: done
    if on_gaps == 'ignore' or not missing:
        return urls

    # otherwise warn or raise about the missing data
    msg = 'Missing segments: \n{0}'.format('\n'.join(map(str, missing)))
    if on_gaps == 'warn':
        warnings.warn(msg)
        return urls
    raise RuntimeError(msg)
def read_series(source, name, match=None):
    """Read a `Series` from LIGO_LW-XML

    Parameters
    ----------
    source : `file`, `str`, :class:`~ligo.lw.ligolw.Document`
        file path or open LIGO_LW-format XML file

    name : `str`
        name of the relevant `LIGO_LW` element to read

    match : `dict`, optional
        dict of (key, value) `Param` pairs to match correct LIGO_LW element,
        this is useful if a single file contains multiple `LIGO_LW` elements
        with the same name
    """
    from ligo.lw.ligolw import (LIGO_LW, Time, Array, Dim)
    from ligo.lw.param import get_param

    # read document
    xmldoc = read_ligolw(source, contenthandler=series_contenthandler())

    # parse match dict
    if match is None:
        match = dict()

    def _is_match(elem):
        # reject elements with the wrong (or no) Name
        try:
            if elem.Name != name:
                return False
        except AttributeError:  # Name is not set
            return False
        # reject elements whose Params don't match the user's criteria
        for key, value in match.items():
            try:
                if get_param(elem, key).pcdata != value:
                    return False
            except ValueError:  # no Param with this Name
                return False
        return True

    # parse out correct element
    # (fix: materialize the filter — a `filter` iterator is always truthy,
    #  so the original `if not matches` branch could never be selected,
    #  and the "no elements found" message was unreachable)
    matches = list(filter(_is_match,
                          xmldoc.getElementsByTagName(LIGO_LW.tagName)))
    try:
        elem, = matches
    except ValueError as exc:
        if not matches:
            exc.args = ("no LIGO_LW elements found matching request",)
        else:
            exc.args = ('multiple LIGO_LW elements found matching request, '
                        'please consider using `match=` to select the '
                        'correct element',)
        raise

    # get data
    array, = elem.getElementsByTagName(Array.tagName)

    # parse dimensions
    dims = array.getElementsByTagName(Dim.tagName)
    xdim = dims[0]
    x0 = xdim.Start
    dx = xdim.Scale
    xunit = xdim.Unit
    try:
        ndim = dims[1].n
    except IndexError:
        pass
    else:
        if ndim > 2:
            raise ValueError("Cannot parse LIGO_LW Array with {} "
                             "dimensions".format(ndim))

    # parse metadata
    array_kw = {
        'name': array.Name,
        'unit': array.Unit,
        'xunit': xunit,
    }
    try:
        array_kw['epoch'] = to_gps(
            elem.getElementsByTagName(Time.tagName)[0].pcdata)
    except IndexError:
        pass
    for key in ('channel',):
        try:
            array_kw[key] = get_param(elem, key)
        except ValueError:
            pass

    # build Series
    try:
        xindex, value = array.array
    except ValueError:  # not two dimensions stored
        return Series(array.array[0], x0=x0, dx=dx, **array_kw)
    return Series(value, xindex=xindex, **array_kw)
def make_plot(self):
    """Generate the coherence plot from all time series
    """
    args = self.args

    fftlength = float(args.secpfft)
    # overlap is given as a fraction of the FFT length
    overlap = args.overlap
    self.log(2, "Calculating spectrum secpfft: %s, overlap: %s" %
             (fftlength, overlap))
    if overlap is not None:
        # convert fractional overlap to seconds
        overlap *= fftlength

    self.log(3, 'Reference channel: ' + self.ref_chan)

    # group data by segment, then by channel name within each segment
    groups = OrderedDict()
    for series in self.timeseries:
        seg = series.span
        try:
            groups[seg][series.channel.name] = series
        except KeyError:
            groups[seg] = OrderedDict()
            groups[seg][series.channel.name] = series

    # -- plot

    plot = Plot(figsize=self.figsize, dpi=self.dpi)
    ax = plot.gca()
    self.spectra = []

    # calculate coherence of every channel against the reference,
    # one segment at a time
    for seg in groups:
        # remove the reference channel from the group so it isn't
        # plotted against itself
        refts = groups[seg].pop(self.ref_chan)
        for name in groups[seg]:
            series = groups[seg][name]
            coh = series.coherence(refts, fftlength=fftlength,
                                   overlap=overlap, window=args.window)

            label = name
            # disambiguate identical channels from different epochs
            if len(self.start_list) > 1:
                label += ', {0}'.format(series.epoch.gps)
            if self.usetex:
                label = label_to_latex(label)

            ax.plot(coh, label=label)
            self.spectra.append(coh)

    # default the log-scale x-axis lower bound to the frequency resolution
    if args.xscale == 'log' and not args.xmin:
        args.xmin = 1/fftlength

    return plot
def set_legend(self):
    """Create a legend for this product
    """
    # build the standard legend, then retitle it for coherence
    legend = super(Coherence, self).set_legend()
    if legend is not None:
        legend.set_title('Coherence with:')
    return legend
def parse_unit(name, parse_strict='warn', format='gwpy'):
    """Attempt to intelligently parse a `str` as a `~astropy.units.Unit`

    Parameters
    ----------
    name : `str`
        unit name to parse

    parse_strict : `str`
        one of 'silent', 'warn', or 'raise' depending on how pedantic
        you want the parser to be

    format : `~astropy.units.format.Base`
        the formatter class to use when parsing the unit string

    Returns
    -------
    unit : `~astropy.units.UnitBase`
        the unit parsed by `~astropy.units.Unit`

    Raises
    ------
    ValueError
        if the unit cannot be parsed and `parse_strict='raise'`
    """
    # pass through `None` and anything that is already a unit
    if name is None or isinstance(name, units.UnitBase):
        return name

    try:  # have we already identified this unit as unrecognised?
        return UNRECOGNIZED_UNITS[name]
    except KeyError:  # no, this is new
        # pylint: disable=unexpected-keyword-arg
        try:
            # always attempt a strict parse first, whatever `parse_strict`
            return units.Unit(name, parse_strict='raise')
        except ValueError as exc:
            # re-raise unless the caller is lenient AND this is the
            # specific "did not parse" failure we know how to recover from
            if (parse_strict == 'raise' or
                    'did not parse as unit' not in str(exc)):
                raise
            # try again using our own lenient parser;
            # GWpyFormat.warn controls whether that parser warns
            GWpyFormat.warn = parse_strict != 'silent'
            return units.Unit(name, parse_strict='silent', format=format)
        finally:
            # always restore warning behaviour for subsequent calls
            GWpyFormat.warn = True
def _row_from_frevent(frevent, columns, selection):
    """Generate a table row from an FrEvent

    Filtering (``selection``) is done here, rather than in the table reader,
    to enable filtering on columns that aren't being returned.
    """
    # collect the named params plus the standard FrEvent attributes
    params = dict(frevent.GetParam())
    params['time'] = float(LIGOTimeGPS(*frevent.GetGTime()))
    params['amplitude'] = frevent.GetAmplitude()
    params['probability'] = frevent.GetProbability()
    params['timeBefore'] = frevent.GetTimeBefore()
    params['timeAfter'] = frevent.GetTimeAfter()
    params['comment'] = frevent.GetComment()
    # apply the selection filters; any failed test drops the event
    for col, op_, threshold in selection:
        if not op_(params[col], threshold):
            return None
    # project out only the requested columns, in order
    return [params[col] for col in columns]
def table_to_gwf(table, filename, name, **kwargs):
    """Create a new `~frameCPP.FrameH` and fill it with data

    Parameters
    ----------
    table : `~astropy.table.Table`
        the data to write

    filename : `str`
        the name of the file to write into

    **kwargs
        other keyword arguments (see below for references)

    See Also
    --------
    gwpy.io.gwf.create_frame
    gwpy.io.gwf.write_frames
        for documentation of keyword arguments
    """
    from LDAStools.frameCPP import (FrEvent, GPSTime)

    # create frame
    # split out the write-time keywords before building the frame
    write_kw = {key: kwargs.pop(key) for
                key in ('compression', 'compression_level') if key in kwargs}
    frame = io_gwf.create_frame(name=name, **kwargs)

    # append row by row
    names = table.dtype.names
    for row in table:
        rowd = dict((n, row[n]) for n in names)
        # the GPS time is stored on the FrEvent itself, not as a param
        gps = LIGOTimeGPS(rowd.pop('time', 0))
        frame.AppendFrEvent(FrEvent(
            str(name),
            str(rowd.pop('comment', '')),
            str(rowd.pop('inputs', '')),
            GPSTime(gps.gpsSeconds, gps.gpsNanoSeconds),
            float(rowd.pop('timeBefore', 0)),
            float(rowd.pop('timeAfter', 0)),
            int(rowd.pop('eventStatus', 0)),
            float(rowd.pop('amplitude', 0)),
            float(rowd.pop('probability', -1)),
            str(rowd.pop('statistics', '')),
            list(rowd.items()),  # remaining columns are stored as params
        ))

    # write frame to file
    io_gwf.write_frames(filename, [frame], **write_kw)
def read(cls, source, *args, **kwargs):
    """Read data into a `FrequencySeries`

    Arguments and keywords depend on the output format, see the
    online documentation for full details for each format, the parameters
    below are common to most formats.

    Parameters
    ----------
    source : `str`, `list`
        Source of data, any of the following:

        - `str` path of single data file,
        - `str` path of LAL-format cache file,
        - `list` of paths.

    *args
        Other arguments are (in general) specific to the given ``format``.

    format : `str`, optional
        Source format identifier. If not given, the format will be
        detected if possible. See below for list of acceptable formats.

    **kwargs
        Other keywords are (in general) specific to the given ``format``.

    Notes
    -----"""
    # delegate to the unified I/O registry, which dispatches on ``format``
    return io_registry.read(cls, source, *args, **kwargs)
def ifft(self):
    """Compute the one-dimensional discrete inverse Fourier transform of
    this `FrequencySeries`.

    Returns
    -------
    out : :class:`~gwpy.timeseries.TimeSeries`
        the normalised, real-valued `TimeSeries`.

    See Also
    --------
    :mod:`scipy.fftpack`
        for the definition of the DFT and conventions used.

    Notes
    -----
    This method applies the necessary normalisation such that the
    condition holds:
    """
    from ..timeseries import TimeSeries
    # number of samples in the (real-valued) output series
    nsamp = (self.size - 1) * 2
    # undo the normalisation applied by `TimeSeries.fft`; the DC bin was
    # not doubled there, hence the single factor-of-two division here
    data = npfft.irfft(self.value * nsamp) / 2
    return TimeSeries(data, epoch=self.epoch, channel=self.channel,
                      unit=self.unit, dx=1/self.dx/nsamp)
def interpolate(self, df):
    """Interpolate this `FrequencySeries` to a new resolution.

    Parameters
    ----------
    df : `float`
        desired frequency resolution of the interpolated
        `FrequencySeries`, in Hz

    Returns
    -------
    out : `FrequencySeries`
        the interpolated version of the input `FrequencySeries`

    See Also
    --------
    numpy.interp
        for the underlying 1-D linear interpolation scheme
    """
    f0 = self.f0.decompose().value
    # number of samples needed to span the same band at the new resolution
    nsamp = (self.size - 1) * (self.df.decompose().value / df) + 1
    freqs = numpy.arange(0, numpy.rint(nsamp), dtype=self.dtype) * df + f0
    interped = type(self)(numpy.interp(freqs, self.frequencies.value,
                                       self.value))
    # copy metadata from this series, then fix the frequency axis
    interped.__array_finalize__(self)
    interped.f0 = f0
    interped.df = df
    return interped
def from_lal(cls, lalfs, copy=True):
    """Generate a new `FrequencySeries` from a LAL `FrequencySeries`
    of any type
    """
    from ..utils.lal import from_lal_unit
    try:
        unit = from_lal_unit(lalfs.sampleUnits)
    except TypeError:
        # the LAL units could not be translated; carry on without a unit
        unit = None
    channel = Channel(lalfs.name, unit=unit,
                      dtype=lalfs.data.data.dtype)
    return cls(lalfs.data.data, channel=channel, f0=lalfs.f0,
               df=lalfs.deltaF, epoch=float(lalfs.epoch),
               dtype=lalfs.data.data.dtype, copy=copy)
def from_pycbc(cls, fs, copy=True):
    """Convert a `pycbc.types.frequencyseries.FrequencySeries` into
    a `FrequencySeries`

    Parameters
    ----------
    fs : `pycbc.types.frequencyseries.FrequencySeries`
        the input PyCBC `~pycbc.types.frequencyseries.FrequencySeries` array

    copy : `bool`, optional, default: `True`
        if `True`, copy these data to a new array

    Returns
    -------
    spectrum : `FrequencySeries`
        a GWpy version of the input frequency series
    """
    # PyCBC frequency series always start at 0 Hz
    metadata = {'f0': 0, 'df': fs.delta_f, 'epoch': fs.epoch, 'copy': copy}
    return cls(fs.data, **metadata)
def to_pycbc(self, copy=True):
    """Convert this `FrequencySeries` into a
    `~pycbc.types.frequencyseries.FrequencySeries`

    Parameters
    ----------
    copy : `bool`, optional, default: `True`
        if `True`, copy these data to a new array

    Returns
    -------
    frequencyseries : `pycbc.types.frequencyseries.FrequencySeries`
        a PyCBC representation of this `FrequencySeries`
    """
    from pycbc import types
    # PyCBC wants a plain GPS number (or None) for the epoch
    epoch = None if self.epoch is None else self.epoch.gps
    return types.FrequencySeries(self.value,
                                 delta_f=self.df.to('Hz').value,
                                 epoch=epoch, copy=copy)
def _fetch_losc_data_file(url, *args, **kwargs):
    """Internal function for fetching a single LOSC file and returning
    a Series

    Positional ``args`` are forwarded to ``cls.read`` (for GWF files the
    first argument is the channel name, auto-detected when not given).
    """
    cls = kwargs.pop('cls', TimeSeries)
    cache = kwargs.pop('cache', None)
    verbose = kwargs.pop('verbose', False)

    # match file format from the URL extension (ignoring any .gz suffix)
    if url.endswith('.gz'):
        ext = os.path.splitext(url[:-3])[-1]
    else:
        ext = os.path.splitext(url)[-1]
    if ext == '.hdf5':
        kwargs.setdefault('format', 'hdf5.losc')
    elif ext == '.txt':
        kwargs.setdefault('format', 'ascii.losc')
    elif ext == '.gwf':
        kwargs.setdefault('format', 'gwf')

    with _download_file(url, cache, verbose=verbose) as rem:
        # get channel for GWF if not given
        if ext == ".gwf" and (not args or args[0] is None):
            args = (_gwf_channel(rem, cls, kwargs.get("verbose")),)

        if verbose:
            print('Reading data...', end=' ')
        try:
            series = cls.read(rem, *args, **kwargs)
        except Exception as exc:
            if verbose:
                print('')
            # re-raise with the URL attached for a more useful message
            exc.args = ("Failed to read LOSC data from %r: %s"
                        % (url, str(exc)),)
            raise
        else:
            # parse bits from unit in GWF: LOSC DQ frames encode the bit
            # definitions as 'index:name' pairs in the unit string
            if ext == '.gwf' and isinstance(series, StateVector):
                try:
                    bits = {}
                    for bit in str(series.unit).split():
                        a, b = bit.split(':', 1)
                        bits[int(a)] = b
                    series.bits = bits
                    series.override_unit('')
                except (TypeError, ValueError):
                    # don't care, bad LOSC
                    pass
            if verbose:
                print('[Done]')
            return series
def _overlapping(files):
    """Quick method to see if a file list contains overlapping files

    Returns `True` as soon as any pair of file segments intersects,
    otherwise `False`.
    """
    seen = set()
    for path in files:
        segment = file_segment(path)
        if any(segment.intersects(other) for other in seen):
            return True
        seen.add(segment)
    return False
def fetch_losc_data(detector, start, end, cls=TimeSeries, **kwargs):
    """Fetch LOSC data for a given detector

    This function is for internal purposes only, all users should instead
    use the interface provided by `TimeSeries.fetch_open_data` (and similar
    for `StateVector.fetch_open_data`).
    """
    # format arguments
    start = to_gps(start)
    end = to_gps(end)
    span = Segment(start, end)
    kwargs.update({
        'start': start,
        'end': end,
    })

    # find URLs (requires gwopensci)
    url_kw = {key: kwargs.pop(key) for key in GWOSC_LOCATE_KWARGS
              if key in kwargs}
    if 'sample_rate' in url_kw:
        # format as Hertz
        url_kw['sample_rate'] = Quantity(url_kw['sample_rate'], 'Hz').value
    cache = get_urls(detector, int(start), int(ceil(end)), **url_kw)

    # if event dataset, pick shortest file that covers the request
    # -- this is a bit hacky, and presumes that only an event dataset
    # -- would be produced with overlapping files.
    # -- This should probably be improved to use dataset information
    if len(cache) and _overlapping(cache):
        # shortest first, then take the first file covering the full span
        cache.sort(key=lambda x: abs(file_segment(x)))
        for url in cache:
            a, b = file_segment(url)
            if a <= start and b >= end:
                cache = [url]
                break

    if kwargs.get('verbose', False):
        # get_urls() guarantees len(cache) >= 1
        host = urlparse(cache[0]).netloc
        print("Fetched {0} URLs from {1} for [{2} .. {3}))".format(
            len(cache), host, int(start), int(ceil(end))))

    is_gwf = cache[0].endswith('.gwf')
    if is_gwf and len(cache):
        # channel name (may be None -> auto-detected on first read)
        args = (kwargs.pop('channel', None),)
    else:
        args = ()

    # read data, cropping each file to the requested span and appending
    out = None
    kwargs['cls'] = cls
    for url in cache:
        keep = file_segment(url) & span
        new = _fetch_losc_data_file(url, *args, **kwargs).crop(
            *keep, copy=False)
        if is_gwf and (not args or args[0] is None):
            # reuse the channel detected from the first file
            args = (new.name,)
        if out is None:
            out = new.copy()
        else:
            out.append(new, resize=True)
    return out
def read_losc_hdf5(h5f, path='strain/Strain',
                   start=None, end=None, copy=False):
    """Read a `TimeSeries` from a LOSC-format HDF file.

    Parameters
    ----------
    h5f : `str`, `h5py.HLObject`
        path of HDF5 file, or open `H5File`

    path : `str`
        name of HDF5 dataset to read.

    start : `~gwpy.time.LIGOTimeGPS`, optional
        start GPS time of desired data

    end : `~gwpy.time.LIGOTimeGPS`, optional
        end GPS time of desired data

    copy : `bool`, optional, default: `False`
        create a fresh-memory copy of the underlying array

    Returns
    -------
    data : `~gwpy.timeseries.TimeSeries`
        a new `TimeSeries` containing the data read from disk
    """
    dataset = io_hdf5.find_dataset(h5f, path)
    # read data
    nddata = dataset[()]
    # read metadata
    xunit = parse_unit(dataset.attrs['Xunits'])
    epoch = dataset.attrs['Xstart']
    dt = Quantity(dataset.attrs['Xspacing'], xunit)
    unit = dataset.attrs['Yunits']
    # build and return, cropping to the requested [start, end) span
    return TimeSeries(nddata, epoch=epoch, sample_rate=(1/dt).to('Hertz'),
                      unit=unit, name=path.rsplit('/', 1)[1],
                      copy=copy).crop(start=start, end=end)
def read_losc_hdf5_state(f, path='quality/simple', start=None, end=None,
                         copy=False):
    """Read a `StateVector` from a LOSC-format HDF file.

    Parameters
    ----------
    f : `str`, `h5py.HLObject`
        path of HDF5 file, or open `H5File`

    path : `str`
        path of HDF5 dataset to read.

    start : `Time`, `~gwpy.time.LIGOTimeGPS`, optional
        start GPS time of desired data

    end : `Time`, `~gwpy.time.LIGOTimeGPS`, optional
        end GPS time of desired data

    copy : `bool`, default: `False`
        create a fresh-memory copy of the underlying array

    Returns
    -------
    data : `~gwpy.timeseries.StateVector`
        a new `StateVector` containing the data read from disk
    """
    # find data: the mask values and the bit descriptions are stored
    # in sibling datasets
    dataset = io_hdf5.find_dataset(f, '%s/DQmask' % path)
    maskset = io_hdf5.find_dataset(f, '%s/DQDescriptions' % path)
    # read data
    nddata = dataset[()]
    # decode the bit names from raw bytes to str
    bits = [bytes.decode(bytes(b), 'utf-8') for b in maskset[()]]
    # read metadata; Xspacing may be missing, in which case 1 second
    # is assumed
    epoch = dataset.attrs['Xstart']
    try:
        dt = dataset.attrs['Xspacing']
    except KeyError:
        dt = Quantity(1, 's')
    else:
        xunit = parse_unit(dataset.attrs['Xunits'])
        dt = Quantity(dt, xunit)
    return StateVector(nddata, bits=bits, t0=epoch, name='Data quality',
                       dx=dt, copy=copy).crop(start=start, end=end)
def _gwf_channel(path, series_class=TimeSeries, verbose=False):
    """Find the right channel name for a LOSC GWF file

    Scans the channel table of contents and returns the single channel
    matching the strain (or data-quality) naming pattern; raises
    `ValueError` if there is not exactly one match.
    """
    names = list(io_gwf.iter_channel_names(file_path(path)))
    # data-quality files use a different channel-name pattern to strain
    if issubclass(series_class, StateVector):
        pattern = DQMASK_CHANNEL_REGEX
    else:
        pattern = STRAIN_CHANNEL_REGEX
    matches = [name for name in names if pattern.match(name)]
    # unpacking enforces exactly one match
    found, = matches
    if verbose:
        print("Using channel {0!r}".format(found))
    return found
def from_segwizard(source, gpstype=LIGOTimeGPS, strict=True):
    """Read segments from a segwizard format file into a `SegmentList`

    Parameters
    ----------
    source : `file`, `str`
        An open file, or file path, from which to read

    gpstype : `type`, optional
        The numeric type to which to cast times (from `str`) when reading.

    strict : `bool`, optional
        Check that recorded duration matches ``end-start`` for all segments;
        only used when reading from a 3+-column file.

    Returns
    -------
    segments : `~gwpy.segments.SegmentList`
        The list of segments as parsed from the file.

    Notes
    -----
    This method is adapted from original code written by Kipp Cannon and
    distributed under GPLv3.
    """
    # read file path: open it and recurse with the file object
    if isinstance(source, string_types):
        with open(source, 'r') as fobj:
            return from_segwizard(fobj, gpstype=gpstype, strict=strict)

    # read file object
    out = SegmentList()
    fmt_pat = None
    for line in source:
        if line.startswith(('#', ';')):  # comment
            continue
        # determine line format from the first data line, then assume
        # the rest of the file uses the same column layout
        if fmt_pat is None:
            fmt_pat = _line_format(line)
        # parse line; the last three tokens are (start, end, duration)
        tokens, = fmt_pat.findall(line)
        out.append(_format_segment(tokens[-3:], gpstype=gpstype,
                                   strict=strict))
    return out
def _line_format(line):
    """Determine the column format pattern for a line in an ASCII
    segment file.

    Tries the four-, three-, and two-column patterns in turn and returns
    the first that matches; raises `ValueError` if none do.
    """
    candidates = (FOUR_COL_REGEX, THREE_COL_REGEX, TWO_COL_REGEX)
    try:
        return next(pat for pat in candidates if pat.match(line))
    except StopIteration:
        raise ValueError(
            "unable to parse segment from line {!r}".format(line))
def _format_segment(tokens, strict=True, gpstype=LIGOTimeGPS):
    """Format a list of tokens parsed from an ASCII file into a segment.

    ``tokens`` is either ``(start, end)`` or ``(start, end, duration)``;
    in the latter case ``duration`` is checked against ``end - start``
    when ``strict`` is enabled.
    """
    try:
        start, end, dur = tokens
    except ValueError:
        # only two columns: no duration to validate
        return Segment(*map(gpstype, tokens))
    segment = Segment(gpstype(start), gpstype(end))
    if strict and float(abs(segment)) != float(dur):
        raise ValueError(
            "segment {0!r} has incorrect duration {1!r}".format(
                segment, dur),
        )
    return segment
def to_segwizard(segs, target, header=True, coltype=LIGOTimeGPS):
    """Write the given `SegmentList` to a file in SegWizard format.

    Parameters
    ----------
    segs : :class:`~gwpy.segments.SegmentList`
        The list of segments to write.

    target : `file`, `str`
        An open file, or file path, to which to write.

    header : `bool`, optional
        Print a column header into the file, default: `True`.

    coltype : `type`, optional
        The numerical type in which to cast times before printing.

    Notes
    -----
    This method is adapted from original code written by Kipp Cannon and
    distributed under GPLv3.
    """
    # given a path: open it and recurse with the file object
    if isinstance(target, string_types):
        with open(target, 'w') as fileobj:
            return to_segwizard(segs, fileobj, header=header,
                                coltype=coltype)

    # write file object
    if header:
        print('# seg\tstart\tstop\tduration', file=target)
    for num, seg in enumerate(segs):
        tstart = coltype(seg[0])
        tstop = coltype(seg[1])
        row = (num, tstart, tstop, float(tstop - tstart))
        print('\t'.join(map(str, row)), file=target)
def gopen(name, *args, **kwargs):
    """Open a file handling optional gzipping

    If ``name`` endswith ``'.gz'``, or if the GZIP file signature is
    found at the beginning of the file, the file will be opened with
    `gzip.open`, otherwise a regular file will be returned from `open`.

    Parameters
    ----------
    name : `str`
        path (name) of file to open.

    *args, **kwargs
        other arguments to pass to either `open` for regular files, or
        `gzip.open` for gzipped files.

    Returns
    -------
    file : `io.TextIoBase`, `file`, `gzip.GzipFile`
        the open file object
    """
    # filename declares gzip
    if name.endswith('.gz'):
        return gzip.open(name, *args, **kwargs)

    # determine the requested mode (mirrors open()'s default of 'r')
    mode = kwargs.get('mode', args[0] if args else 'r')

    # sniff the signature in a separate binary-mode handle: reading the
    # user-mode handle is wrong in text mode (read() returns str, which
    # can never equal the bytes signature, and may raise
    # UnicodeDecodeError on gzipped content) and fails outright in
    # write-only modes
    if 'r' in mode:
        with open(name, 'rb') as sniffer:
            if sniffer.read(3) == GZIP_SIGNATURE:
                # file signature declares gzip
                return gzip.open(name, *args, **kwargs)

    return open(name, *args, **kwargs)
def file_list(flist):
    """Parse a number of possible input types into a list of filepaths.

    Parameters
    ----------
    flist : `file-like` or `list-like` iterable
        the input data container, normally just a single file path, or a
        list of paths, but can generally be any of the following

        - `str` representing a single file path (or comma-separated
          collection)
        - open `file` or `~gzip.GzipFile` object
        - :class:`~lal.utils.CacheEntry`
        - `str` with ``.cache`` or ``.lcf`` extension
        - simple `list` or `tuple` of `str` paths

    Returns
    -------
    files : `list`
        `list` of `str` file paths

    Raises
    ------
    ValueError
        if the input `flist` cannot be interpreted as any of the above
        inputs
    """
    # open a cache file and return list of paths
    if (isinstance(flist, string_types) and
            flist.endswith(('.cache', '.lcf', '.ffl'))):
        from .cache import read_cache
        return read_cache(flist)

    # separate comma-separate list of names
    if isinstance(flist, string_types):
        return flist.split(',')

    # parse list of entries (of some format)
    if isinstance(flist, (list, tuple)):
        return list(map(file_path, flist))

    # otherwise parse a single entry
    try:
        return [file_path(flist)]
    except ValueError as exc:
        # re-raise with a message describing the original input
        exc.args = (
            "Could not parse input {!r} as one or more "
            "file-like objects".format(flist),
        )
        raise
def file_path(fobj):
    """Determine the path of a file.

    This doesn't do any sanity checking to check that the file
    actually exists, or is readable.

    Parameters
    ----------
    fobj : `file`, `str`, `CacheEntry`, ...
        the file object or path to parse

    Returns
    -------
    path : `str`
        the path of the underlying file

    Raises
    ------
    ValueError
        if a file path cannot be determined

    Examples
    --------
    >>> file_path("test.txt")
    'test.txt'
    >>> file_path(open("test.txt", "r"))
    'test.txt'
    >>> file_path("file:///home/user/test.txt")
    '/home/user/test.txt'
    """
    # 'file:' URL -> extract the path component
    if isinstance(fobj, string_types) and fobj.startswith("file:"):
        return urlparse(fobj).path
    # plain string -> already a path
    if isinstance(fobj, string_types):
        return fobj
    # open file object -> use its name
    if (isinstance(fobj, FILE_LIKE) and hasattr(fobj, "name")):
        return fobj.name
    # anything else (e.g. CacheEntry) -> try its .path attribute
    try:
        return fobj.path
    except AttributeError:
        raise ValueError("Cannot parse file name for {!r}".format(fobj))
def process_in_out_queues(func, q_in, q_out):
    """Iterate through a Queue, call ``func``, and Queue the result

    Parameters
    ----------
    func : `callable`
        any function that can take an element of the input `Queue` as
        the only argument

    q_in : `multiprocessing.queue.Queue`
        the input `Queue`

    q_out : `multiprocessing.queue.Queue`
        the output `Queue`

    Notes
    -----
    To close the input `Queue`, add ``(None, None)`` as the last item
    """
    while True:
        index, item = q_in.get()
        # a None index is the end-of-queue sentinel
        if index is None:
            break
        result = func(item)
        q_out.put((index, result))
def multiprocess_with_queues(nproc, func, inputs, verbose=False,
                             **progress_kw):
    """Map a function over a list of inputs using multiprocess

    This essentially duplicates `multiprocess.map` but allows for
    arbitrary functions (that aren't necessarily importable)

    Parameters
    ----------
    nproc : `int`
        number of processes to use, if ``1`` is given, the current process
        is used, and no child processes are forked

    func : `callable`
        the function to call in each iteration, should take a single
        argument that is the next element from ``inputs``

    inputs : `iterable`
        iterable (e.g. `list`) of inputs, each element of which is
        passed to ``func`` in one of the child processes

    verbose : `bool`, `str`, optional
        if `True`, print progress to the console as a bar, pass a
        `str` to customise the heading for the progress bar, default:
        `False` (default heading ``'Processing:'`` if ``verbose=True``)

    Returns
    -------
    outputs : `list`
        the `list` of results from calling ``func(x)`` for each element
        of ``inputs``
    """
    # multiprocessing is not supported on Windows, force serial mode
    if nproc != 1 and os.name == 'nt':
        warnings.warn(
            "multiprocessing is currently not supported on Windows, see "
            "https://github.com/gwpy/gwpy/issues/880, will continue with "
            "serial procesing (nproc=1)")
        nproc = 1

    # `raise_exceptions` is deprecated and ignored
    if progress_kw.pop('raise_exceptions', None) is not None:
        warnings.warn("the `raise_exceptions` keyword to "
                      "multiprocess_with_queues is deprecated, and will be "
                      "removed in a future release, all exceptions will be "
                      "raised if they occur", DeprecationWarning)

    # create progress bar for verbose output
    if bool(verbose):
        if not isinstance(verbose, bool):
            progress_kw['desc'] = str(verbose)
        if isinstance(inputs, (list, tuple)):
            progress_kw.setdefault('total', len(inputs))
        pbar = progress_bar(**progress_kw)
    else:
        pbar = None

    # -------------------------------------------

    def _inner(x):
        """Run function capturing errors

        In multi-process mode exceptions are returned (not raised) so
        they can be transported through the output queue and re-raised
        in the parent process.
        """
        try:
            return func(x)
        except Exception as exc:  # pylint: disable=broad-except
            if nproc == 1:
                raise
            return exc
        finally:
            # in serial mode update the bar here; in parallel mode the
            # parent updates it as results arrive
            if pbar and nproc == 1:
                pbar.update()

    # -------------------------------------------

    # shortcut single process
    if nproc == 1:
        return list(map(_inner, inputs))

    # -------------------------------------------

    # create input and output queues
    q_in = Queue()
    q_out = Queue()

    # create child processes and start
    proclist = [Process(target=process_in_out_queues,
                        args=(_inner, q_in, q_out)) for _ in range(nproc)]
    for proc in proclist:
        proc.daemon = True
        proc.start()

    # populate queue (no need to block in serial put())
    # each item is (index, value) so results can be re-ordered later
    sent = [q_in.put(x, block=False) for x in enumerate(inputs)]
    for _ in range(nproc):  # add sentinel for each process
        q_in.put((None, None))

    # get results (arrival order is arbitrary)
    res = []
    for _ in range(len(sent)):
        x = q_out.get()
        if pbar:
            pbar.update()
        res.append(x)

    # close processes and unwrap results
    for proc in proclist:
        proc.join()

    if pbar:
        pbar.close()

    # unwrap results in order
    results = [out for _, out in sorted(res, key=itemgetter(0))]

    # raise exceptions here
    for res in results:
        if isinstance(res, Exception):
            raise res

    return results
def epoch(self):
    """GPS epoch associated with these data

    :type: `~astropy.time.Time`
    """
    try:
        if self._epoch is None:
            return None
        # modf splits the float GPS into (frac, int); reversing gives
        # (int, frac) for the two-part Time constructor, preserving
        # sub-second precision
        return Time(*modf(self._epoch)[::-1], format='gps', scale='utc')
    except AttributeError:
        # _epoch never set: initialise to None
        self._epoch = None
        return self._epoch
def override_unit(self, unit, parse_strict='raise'):
    """Forcefully reset the unit of these data

    Use of this method is discouraged in favour of `to()`, which performs
    accurate conversions from one unit to another. The method should
    really only be used when the original unit of the array is plain
    wrong.

    Parameters
    ----------
    unit : `~astropy.units.Unit`, `str`
        the unit to force onto this array

    parse_strict : `str`, optional
        how to handle errors in the unit parsing, default is to
        raise the underlying exception from `astropy.units`

    Raises
    ------
    ValueError
        if a `str` cannot be parsed as a valid unit
    """
    # no conversion: just overwrite the stored unit
    self._unit = parse_unit(unit, parse_strict=parse_strict)
def flatten(self, order='C'):
    """Return a copy of the array collapsed into one dimension.

    Any index information is removed as part of the flattening, and the
    result is returned as a `~astropy.units.Quantity` array.

    Parameters
    ----------
    order : {'C', 'F', 'A', 'K'}, optional
        'C' means to flatten in row-major (C-style) order.
        'F' means to flatten in column-major (Fortran-style) order.
        'A' means to flatten in column-major order if `a` is Fortran
        *contiguous* in memory, row-major order otherwise.
        'K' means to flatten `a` in the order the elements occur in
        memory.  The default is 'C'.

    Returns
    -------
    y : `~astropy.units.Quantity`
        A copy of the input array, flattened to one dimension.

    See Also
    --------
    ravel : Return a flattened array.
    flat : A 1-D flat iterator over the array.
    """
    # view as plain Quantity to drop the (now-meaningless) index metadata
    return super(Array, self).flatten(order=order).view(Quantity)
def from_timeseries(ts1, ts2, stride, fftlength=None, overlap=None,
                    window=None, nproc=1, **kwargs):
    """Calculate the coherence `Spectrogram` between two `TimeSeries`.

    Parameters
    ----------
    timeseries : :class:`~gwpy.timeseries.TimeSeries`
        input time-series to process.

    stride : `float`
        number of seconds in single PSD (column of spectrogram).

    fftlength : `float`
        number of seconds in single FFT.

    overlap : `int`, optional, default: fftlength
        number of seconds of overlap between FFTs, defaults to no overlap

    window : `timeseries.window.Window`, optional, default: `None`
        window function to apply to timeseries prior to FFT.

    nproc : `int`, default: ``1``
        maximum number of independent frame reading processes, default
        is set to single-process file reading.

    Returns
    -------
    spectrogram : :class:`~gwpy.spectrogram.Spectrogram`
        time-frequency power spectrogram as generated from the
        input time-series.
    """
    # format FFT parameters
    if fftlength is None:
        fftlength = stride / 2.

    # get size of spectrogram
    nsteps = int(ts1.size // (stride * ts1.sample_rate.value))
    nproc = min(nsteps, nproc)

    # single-process return
    if nsteps == 0 or nproc == 1:
        return _from_timeseries(ts1, ts2, stride, fftlength=fftlength,
                                overlap=overlap, window=window, **kwargs)

    # wrap spectrogram generator: results (or exceptions) are passed
    # back through the queue
    def _specgram(queue_, tsa, tsb):
        try:
            queue_.put(_from_timeseries(tsa, tsb, stride,
                                        fftlength=fftlength,
                                        overlap=overlap, window=window,
                                        **kwargs))
        except Exception as exc:  # pylint: disable=broad-except
            queue_.put(exc)

    # otherwise build process list, splitting each series into nproc
    # contiguous sample chunks
    stepperproc = int(ceil(nsteps / nproc))
    nsamp = [stepperproc * ts.sample_rate.value * stride
             for ts in (ts1, ts2)]

    queue = ProcessQueue(nproc)
    processlist = []
    for i in range(nproc):
        process = Process(target=_specgram,
                          args=(queue,
                                ts1[i * nsamp[0]:(i + 1) * nsamp[0]],
                                ts2[i * nsamp[1]:(i + 1) * nsamp[1]]))
        process.daemon = True
        processlist.append(process)
        process.start()
        if ((i + 1) * nsamp[0]) >= ts1.size:
            # input exhausted, no more workers needed
            break

    # get data
    data = []
    for process in processlist:
        result = queue.get()
        if isinstance(result, Exception):
            # re-raise child-process errors in the parent
            raise result
        else:
            data.append(result)

    # and block
    for process in processlist:
        process.join()

    # format and return: chunks arrive in arbitrary order, so sort by
    # epoch before joining
    out = SpectrogramList(*data)
    out.sort(key=lambda spec: spec.epoch.gps)
    return out.join()
def read_channel_list_file(*source):
    """Read a `~gwpy.detector.ChannelList` from a Channel List File
    """
    # read file(s)
    config = configparser.ConfigParser(dict_type=OrderedDict)
    source = file_list(source)
    success_ = config.read(*source)
    if len(success_) != len(source):
        raise IOError("Failed to read one or more CLF files")
    # create channel list
    out = ChannelList()
    out.source = source
    append = out.append
    # loop over all groups and channels
    for group in config.sections():
        params = OrderedDict(config.items(group))
        channels = params.pop('channels').strip('\n').split('\n')
        # parse the optional frequency range for this group;
        # 'Nyquist' maps to an unbounded upper limit
        if 'flow' in params or 'fhigh' in params:
            low = params.pop('flow', 0)
            high = params.pop('fhigh', inf)
            if isinstance(high, string_types) and high.lower() == 'nyquist':
                high = inf
            frange = float(low), float(high)
        else:
            frange = None
        for channel in channels:
            try:
                match = CHANNEL_DEFINITION.match(channel).groupdict()
            except AttributeError as exc:
                # .match() returned None: entry doesn't fit the pattern
                exc.args = ('Cannot parse %r as channel list entry'
                            % channel,)
                raise
            # remove Nones from match
            match = dict((k, v) for k, v in match.items() if v is not None)
            match.setdefault('safe', 'safe')
            match.setdefault('fidelity', 'clean')
            # create channel and copy group params
            safe = match.get('safe', 'safe').lower() != 'unsafe'
            channel = Channel(match.pop('name'), frequency_range=frange,
                              safe=safe,
                              sample_rate=match.pop('sample_rate'))
            channel.params = params.copy()
            channel.params.update(match)
            channel.group = group
            # extract those params for which the Channel has an attribute
            for key in ['frametype']:
                setattr(channel, key, channel.params.pop(key, None))
            append(channel)
    return out
def write_channel_list_file(channels, fobj):
    """Write a `~gwpy.detector.ChannelList` to a INI-format channel
    list file
    """
    # given a path: open it and recurse with the file object
    if not isinstance(fobj, FILE_LIKE):
        with open(fobj, "w") as fobj:
            return write_channel_list_file(channels, fobj)
    out = configparser.ConfigParser(dict_type=OrderedDict)
    for channel in channels:
        group = channel.group
        if not out.has_section(group):
            out.add_section(group)
        for param, value in channel.params.items():
            out.set(group, param, value)
        # format the channel entry as 'name [rate] safety fidelity'
        if channel.sample_rate is not None:
            entry = '%s %s' % (str(channel),
                               str(channel.sample_rate.to('Hz').value))
        else:
            entry = str(channel)
        entry += ' %s' % channel.params.get('safe', 'safe')
        entry += ' %s' % channel.params.get('fidelity', 'clean')
        # append to the group's 'channels' option, one entry per line
        try:
            clist = out.get(group, 'channels')
        except configparser.NoOptionError:
            out.set(group, 'channels', '\n%s' % entry)
        else:
            out.set(group, 'channels', clist + '\n%s' % entry)
    out.write(fobj)
def register_method(func, name=None, deprecated=False):
    """Register a method of calculating an average spectrogram.

    Parameters
    ----------
    func : `callable`
        function to execute

    name : `str`, optional
        name of the method, defaults to ``func.__name__``

    deprecated : `bool`, optional
        whether this method is deprecated (`True`) or not (`False`)

    Returns
    -------
    name : `str`
        the registered name of the function, which may differ
        pedantically from what was given by the user.
    """
    # wrap deprecated methods so that use emits a warning pointing at
    # the replacement name
    if deprecated:
        func = deprecated_function(
            func,
            "the {0!r} PSD methods is deprecated, and will be removed "
            "in a future release, please consider using {1!r} instead".format(
                name, name.split('-', 1)[1],
            ),
        )
    if name is None:
        name = func.__name__
    key = _format_name(name)
    METHODS[key] = func
    return key
def get_method(name):
    """Return the PSD method registered with the given name.
    """
    key = _format_name(name)
    try:
        return METHODS[key]
    except KeyError as exc:
        # rewrite the error with the normalised name for clarity
        exc.args = ("no PSD method registered with name {0!r}".format(key),)
        raise
def inspiral_range_psd(psd, snr=8, mass1=1.4, mass2=1.4, horizon=False):
    """Compute the inspiral sensitive distance PSD from a GW strain PSD

    Parameters
    ----------
    psd : `~gwpy.frequencyseries.FrequencySeries`
        the instrumental power-spectral-density data

    snr : `float`, optional
        the signal-to-noise ratio for which to calculate range,
        default: `8`

    mass1 : `float`, `~astropy.units.Quantity`, optional
        the mass (`float` assumed in solar masses) of the first binary
        component, default: `1.4`

    mass2 : `float`, `~astropy.units.Quantity`, optional
        the mass (`float` assumed in solar masses) of the second binary
        component, default: `1.4`

    horizon : `bool`, optional
        if `True`, return the maximal 'horizon' sensitive distance,
        otherwise return the angle-averaged range, default: `False`
    """
    # compute chirp mass and symmetric mass ratio
    mass1 = units.Quantity(mass1, 'solMass').to('kg')
    mass2 = units.Quantity(mass2, 'solMass').to('kg')
    mtotal = mass1 + mass2
    mchirp = (mass1 * mass2) ** (3/5.) / mtotal ** (1/5.)

    # compute ISCO (innermost stable circular orbit) frequency
    fisco = (constants.c ** 3 / (constants.G * 6**1.5 * pi * mtotal)).to(
        'Hz')

    # calculate integral pre-factor
    # NOTE(review): 1.77 appears to be the angle-average factor from the
    # sensemon range derivation -- confirm against LIGO-T030276
    prefactor = (
        (1.77**2 * 5 * constants.c ** (1/3.) *
         (mchirp * constants.G / constants.c ** 2) ** (5/3.)) /
        (96 * pi ** (4/3.) * snr ** 2)
    )

    # calculate inspiral range ASD in m^2/Hz
    integrand = 1 / psd * psd.frequencies ** (-7/3.) * prefactor

    # restrict to ISCO
    integrand = integrand[psd.frequencies.value < fisco.value]

    # normalize and return
    if integrand.f0.value == 0.0:
        # f^(-7/3) diverges at 0 Hz: zero the DC bin
        integrand[0] = 0.0
    if horizon:
        # horizon distance is 2.26x the angle-averaged range
        integrand *= 2.26 ** 2
    return integrand.to('Mpc^2 / Hz')
def inspiral_range(psd, snr=8, mass1=1.4, mass2=1.4, fmin=None, fmax=None,
                   horizon=False):
    """Calculate the inspiral sensitive distance from a GW strain PSD

    The method returns the distance (in megaparsecs) to which an compact
    binary inspiral with the given component masses would be detectable
    given the instrumental PSD. The calculation is as defined in:

    https://dcc.ligo.org/LIGO-T030276/public

    Parameters
    ----------
    psd : `~gwpy.frequencyseries.FrequencySeries`
        the instrumental power-spectral-density data

    snr : `float`, optional
        the signal-to-noise ratio for which to calculate range,
        default: `8`

    mass1 : `float`, `~astropy.units.Quantity`, optional
        the mass (`float` assumed in solar masses) of the first binary
        component, default: `1.4`

    mass2 : `float`, `~astropy.units.Quantity`, optional
        the mass (`float` assumed in solar masses) of the second binary
        component, default: `1.4`

    fmin : `float`, optional
        the lower frequency cut-off of the integral, default: `psd.df`

    fmax : `float`, optional
        the maximum frequency limit of the integral, defaults to
        innermost stable circular orbit (ISCO) frequency

    horizon : `bool`, optional
        if `True`, return the maximal 'horizon' sensitive distance,
        otherwise return the angle-averaged range, default: `False`

    Returns
    -------
    range : `~astropy.units.Quantity`
        the calculated inspiral range [Mpc]
    """
    mass1 = units.Quantity(mass1, 'solMass').to('kg')
    mass2 = units.Quantity(mass2, 'solMass').to('kg')
    mtotal = mass1 + mass2

    # compute ISCO
    fisco = (constants.c ** 3 / (constants.G * 6**1.5 * pi * mtotal)).to(
        'Hz')

    # format frequency limits: cap the upper bound at ISCO
    fmax = units.Quantity(fmax or fisco, 'Hz')
    if fmax > fisco:
        warnings.warn("Upper frequency bound greater than %s-%s ISCO "
                      "frequency of %s, using ISCO" % (mass1, mass2, fisco))
        fmax = fisco
    if fmin is None:
        fmin = psd.df  # avoid using 0 as lower limit
    fmin = units.Quantity(fmin, 'Hz')

    # integrate the range PSD over [fmin, fmax)
    f = psd.frequencies.to('Hz')
    condition = (f >= fmin) & (f < fmax)
    integrand = inspiral_range_psd(psd[condition], snr=snr, mass1=mass1,
                                   mass2=mass2, horizon=horizon)
    result = units.Quantity(
        integrate.trapz(integrand.value, f.value[condition]),
        unit=integrand.unit * units.Hertz)

    # integrand is distance-squared per Hz, so take the square root
    return (result ** (1/2.)).to('Mpc')
def burst_range_spectrum(psd, snr=8, energy=1e-2):
    """Calculate the frequency-dependent GW burst range from a strain PSD

    Parameters
    ----------
    psd : `~gwpy.frequencyseries.FrequencySeries`
        the instrumental power-spectral-density data

    snr : `float`, optional
        the signal-to-noise ratio for which to calculate range,
        default: `8`

    energy : `float`, optional
        the relative energy output of the GW burst, default: `0.01`
        (GRB-like burst)

    Returns
    -------
    rangespec : `~gwpy.frequencyseries.FrequencySeries`
        the burst range `FrequencySeries` [Mpc (default)]
    """
    # calculate frequency dependent range in parsecs
    a = (constants.G * energy * constants.M_sun * 0.4 /
         (pi**2 * constants.c))**(1/2.)
    dspec = psd ** (-1/2.) * a / (snr * psd.frequencies)

    # convert to output unit
    rspec = dspec.to('Mpc')

    # rescale 0 Hertz (which has 0 range always)
    if rspec.f0.value == 0.0:
        rspec[0] = 0.0

    return rspec
def burst_range(psd, snr=8, energy=1e-2, fmin=100, fmax=500):
    """Calculate the integrated GRB-like GW burst range from a strain PSD

    Parameters
    ----------
    psd : `~gwpy.frequencyseries.FrequencySeries`
        the instrumental power-spectral-density data

    snr : `float`, optional
        the signal-to-noise ratio for which to calculate range,
        default: ``8``

    energy : `float`, optional
        the relative energy output of the GW burst, defaults to ``1e-2``
        for a GRB-like burst

    fmin : `float`, optional
        the lower frequency cutoff of the burst range integral,
        default: ``100 Hz``

    fmax : `float`, optional
        the upper frequency cutoff of the burst range integral,
        default: ``500 Hz``

    Returns
    -------
    range : `~astropy.units.Quantity`
        the GRB-like-burst sensitive range [Mpc (default)]
    """
    freqs = psd.frequencies.value
    # restrict integral: falsy bounds fall back to the PSD extent
    if not fmin:
        fmin = psd.f0
    if not fmax:
        fmax = psd.span[1]
    condition = (freqs >= fmin) & (freqs < fmax)

    # calculate integrand and integrate (cube then cube-root gives the
    # volume-averaged range)
    integrand = burst_range_spectrum(
        psd[condition], snr=snr, energy=energy) ** 3
    result = integrate.trapz(integrand.value, freqs[condition])

    # normalize and return
    r = units.Quantity(result / (fmax - fmin),
                       unit=integrand.unit) ** (1/3.)
    return r.to('Mpc')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def format_norm(kwargs, current=None): """Format a `~matplotlib.colors.Normalize` from a set of kwargs Returns ------- norm, kwargs the formatted `Normalize` instance, and the remaining keywords """
norm = kwargs.pop('norm', current) or 'linear' vmin = kwargs.pop('vmin', None) vmax = kwargs.pop('vmax', None) clim = kwargs.pop('clim', (vmin, vmax)) or (None, None) clip = kwargs.pop('clip', None) if norm == 'linear': norm = colors.Normalize() elif norm == 'log': norm = colors.LogNorm() elif not isinstance(norm, colors.Normalize): raise ValueError("unrecognised value for norm {!r}".format(norm)) for attr, value in (('vmin', clim[0]), ('vmax', clim[1]), ('clip', clip)): if value is not None: setattr(norm, attr, value) return norm, kwargs
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def identify_gwf(origin, filepath, fileobj, *args, **kwargs): """Identify a filename or file object as GWF This function is overloaded in that it will also identify a cache file as 'gwf' if the first entry in the cache contains a GWF file extension """
# pylint: disable=unused-argument # try and read file descriptor if fileobj is not None: loc = fileobj.tell() fileobj.seek(0) try: if fileobj.read(4) == GWF_SIGNATURE: return True finally: fileobj.seek(loc) if filepath is not None: if filepath.endswith('.gwf'): return True if filepath.endswith(('.lcf', '.cache')): try: cache = read_cache(filepath) except IOError: return False else: if cache[0].path.endswith('.gwf'): return True
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def open_gwf(filename, mode='r'): """Open a filename for reading or writing GWF format data Parameters filename : `str` the path to read from, or write to mode : `str`, optional either ``'r'`` (read) or ``'w'`` (write) Returns ------- `LDAStools.frameCPP.IFrameFStream` the input frame stream (if `mode='r'`), or `LDAStools.frameCPP.IFrameFStream` the output frame stream (if `mode='w'`) """
if mode not in ('r', 'w'): raise ValueError("mode must be either 'r' or 'w'") from LDAStools import frameCPP filename = urlparse(filename).path # strip file://localhost or similar if mode == 'r': return frameCPP.IFrameFStream(str(filename)) return frameCPP.OFrameFStream(str(filename))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def write_frames(filename, frames, compression=257, compression_level=6): """Write a list of frame objects to a file **Requires:** |LDAStools.frameCPP|_ Parameters filename : `str` path to write into frames : `list` of `LDAStools.frameCPP.FrameH` list of frames to write into file compression : `int`, optional enum value for compression scheme, default is ``GZIP`` compression_level : `int`, optional compression level for given scheme """
# Body of write_frames(filename, frames, compression=257,
#                      compression_level=6):
# write one or more FrameH objects into a GWF file.
from LDAStools import frameCPP

# open output stream
stream = open_gwf(filename, 'w')
# accept a single frame as well as a list of frames
if isinstance(frames, frameCPP.FrameH):
    frames = [frames]
# write frames one-by-one
for frame in frames:
    stream.WriteFrame(frame, compression, compression_level)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_channel_type(channel, framefile): """Find the channel type in a given GWF file **Requires:** |LDAStools.frameCPP|_ Parameters channel : `str`, `~gwpy.detector.Channel` name of data channel to find framefile : `str` path of GWF file in which to search Returns ------- ctype : `str` the type of the channel ('adc', 'sim', or 'proc') Raises ------ ValueError if the channel is not found in the table-of-contents """
channel = str(channel) for name, type_ in _iter_channels(framefile): if channel == name: return type_ raise ValueError("%s not found in table-of-contents for %s" % (channel, framefile))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def channel_in_frame(channel, framefile): """Determine whether a channel is stored in this framefile **Requires:** |LDAStools.frameCPP|_ Parameters channel : `str` name of channel to find framefile : `str` path of GWF file to test Returns ------- inframe : `bool` whether this channel is included in the table of contents for the given framefile """
channel = str(channel) for name in iter_channel_names(framefile): if channel == name: return True return False
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _iter_channels(framefile): """Yields the name and type of each channel in a GWF file TOC **Requires:** |LDAStools.frameCPP|_ Parameters framefile : `str`, `LDAStools.frameCPP.IFrameFStream` path of GWF file, or open file stream, to read """
from LDAStools import frameCPP if not isinstance(framefile, frameCPP.IFrameFStream): framefile = open_gwf(framefile, 'r') toc = framefile.GetTOC() for typename in ('Sim', 'Proc', 'ADC'): typen = typename.lower() for name in getattr(toc, 'Get{0}'.format(typename))(): yield name, typen
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def data_segments(paths, channel, warn=True): """Returns the segments containing data for a channel **Requires:** |LDAStools.frameCPP|_ A frame is considered to contain data if a valid FrData structure (of any type) exists for the channel in that frame. No checks are directly made against the underlying FrVect structures. Parameters paths : `list` of `str` a list of GWF file paths channel : `str` the name to check in each frame warn : `bool`, optional emit a `UserWarning` when a channel is not found in a frame Returns ------- segments : `~gwpy.segments.SegmentList` the list of segments containing data """
segments = SegmentList() for path in paths: segments.extend(_gwf_channel_segments(path, channel, warn=warn)) return segments.coalesce()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _gwf_channel_segments(path, channel, warn=True): """Yields the segments containing data for ``channel`` in this GWF path """
stream = open_gwf(path) # get segments for frames toc = stream.GetTOC() secs = toc.GetGTimeS() nano = toc.GetGTimeN() dur = toc.GetDt() readers = [getattr(stream, 'ReadFr{0}Data'.format(type_.title())) for type_ in ("proc", "sim", "adc")] # for each segment, try and read the data for this channel for i, (s, ns, dt) in enumerate(zip(secs, nano, dur)): for read in readers: try: read(i, channel) except (IndexError, ValueError): continue readers = [read] # use this one from now on epoch = LIGOTimeGPS(s, ns) yield Segment(epoch, epoch + dt) break else: # none of the readers worked for this channel, warn if warn: warnings.warn( "{0!r} not found in frame {1} of {2}".format( channel, i, path), )
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def fdfilter(data, *filt, **kwargs): """Filter a frequency-domain data object See Also -------- gwpy.frequencyseries.FrequencySeries.filter gwpy.spectrogram.Spectrogram.filter """
# parse keyword args inplace = kwargs.pop('inplace', False) analog = kwargs.pop('analog', False) fs = kwargs.pop('sample_rate', None) if kwargs: raise TypeError("filter() got an unexpected keyword argument '%s'" % list(kwargs.keys())[0]) # parse filter if fs is None: fs = 2 * (data.shape[-1] * data.df).to('Hz').value form, filt = parse_filter(filt, analog=analog, sample_rate=fs) lti = signal.lti(*filt) # generate frequency response freqs = data.frequencies.value.copy() fresp = numpy.nan_to_num(abs(lti.freqresp(w=freqs)[1])) # apply to array if inplace: data *= fresp return data new = data * fresp return new
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def main(args=None): """Parse command-line arguments, tconvert inputs, and print """
# Body of main(args=None):
# command-line entry point: parse arguments, tconvert, and print.
# define command line arguments
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument("-V", "--version", action="version",
                    version=__version__,
                    help="show version number and exit")
parser.add_argument("-l", "--local", action="store_true", default=False,
                    help="print datetimes in local timezone")
parser.add_argument("-f", "--format", type=str, action="store",
                    default=r"%Y-%m-%d %H:%M:%S.%f %Z",
                    help="output datetime format (default: %(default)r)")
parser.add_argument("input", help="GPS or datetime string to convert",
                    nargs="*")
# parse and convert (multiple positional words form one input string)
args = parser.parse_args(args)
input_ = " ".join(args.input)
output = tconvert(input_)
# print (now with timezones!)
if isinstance(output, datetime.datetime):
    output = output.replace(tzinfo=tz.tzutc())
    if args.local:
        output = output.astimezone(tz.tzlocal())
    print(output.strftime(args.format))
else:
    print(output)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def timer(func): """Time a method and print its duration after return """
name = func.__name__ @wraps(func) def timed_func(self, *args, **kwargs): # pylint: disable=missing-docstring _start = time.time() out = func(self, *args, **kwargs) self.log(2, '{0} took {1:.1f} sec'.format(name, time.time() - _start)) return out return timed_func
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def to_float(unit): """Factory to build a converter from quantity string to float Examples -------- """
def converter(x): """Convert the input to a `float` in %s """ return Quantity(x, unit).value converter.__doc__ %= str(unit) # pylint: disable=no-member return converter
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def log_norm(func): """Wrap ``func`` to handle custom gwpy keywords for a LogNorm colouring """
@wraps(func) def decorated_func(*args, **kwargs): norm, kwargs = format_norm(kwargs) kwargs['norm'] = norm return func(*args, **kwargs) return decorated_func
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def xlim_as_gps(func): """Wrap ``func`` to handle pass limit inputs through `gwpy.time.to_gps` """
@wraps(func) def wrapped_func(self, left=None, right=None, **kw): if right is None and numpy.iterable(left): left, right = left kw['left'] = left kw['right'] = right gpsscale = self.get_xscale() in GPS_SCALES for key in ('left', 'right'): if gpsscale: try: kw[key] = numpy.longdouble(str(to_gps(kw[key]))) except TypeError: pass return func(self, **kw) return wrapped_func
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def restore_grid(func): """Wrap ``func`` to preserve the Axes current grid settings. """
@wraps(func) def wrapped_func(self, *args, **kwargs): grid = (self.xaxis._gridOnMinor, self.xaxis._gridOnMajor, self.yaxis._gridOnMinor, self.yaxis._gridOnMajor) try: return func(self, *args, **kwargs) finally: # reset grid self.xaxis.grid(grid[0], which="minor") self.xaxis.grid(grid[1], which="major") self.yaxis.grid(grid[2], which="minor") self.yaxis.grid(grid[3], which="major") return wrapped_func
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def set_epoch(self, epoch): """Set the epoch for the current GPS scale. This method will fail if the current X-axis scale isn't one of the GPS scales. See :ref:`gwpy-plot-gps` for more details. Parameters epoch : `float`, `str` GPS-compatible time or date object, anything parseable by :func:`~gwpy.time.to_gps` is fine. """
scale = self.get_xscale() return self.set_xscale(scale, epoch=epoch)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def imshow(self, array, *args, **kwargs): """Display an image, i.e. data on a 2D regular raster. If ``array`` is a :class:`~gwpy.types.Array2D` (e.g. a :class:`~gwpy.spectrogram.Spectrogram`), then the defaults are _different_ to those in the upstream :meth:`~matplotlib.axes.Axes.imshow` method. Namely, the defaults are - ``origin='lower'`` (coordinates start in lower-left corner) - ``aspect='auto'`` (pixels are not forced to be square) - ``interpolation='none'`` (no image interpolation is used) In all other usage, the defaults from the upstream matplotlib method are unchanged. Parameters array : array-like or PIL image The image data. *args, **kwargs All arguments and keywords are passed to the inherited :meth:`~matplotlib.axes.Axes.imshow` method. See Also -------- matplotlib.axes.Axes.imshow for details of the image rendering """
if isinstance(array, Array2D): return self._imshow_array2d(array, *args, **kwargs) image = super(Axes, self).imshow(array, *args, **kwargs) self.autoscale(enable=None, axis='both', tight=None) return image
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _imshow_array2d(self, array, origin='lower', interpolation='none', aspect='auto', **kwargs): """Render an `~gwpy.types.Array2D` using `Axes.imshow` """
# NOTE: If you change the defaults for this method, please update # the docstring for `imshow` above. # calculate extent extent = tuple(array.xspan) + tuple(array.yspan) if self.get_xscale() == 'log' and extent[0] == 0.: extent = (1e-300,) + extent[1:] if self.get_yscale() == 'log' and extent[2] == 0.: extent = extent[:2] + (1e-300,) + extent[3:] kwargs.setdefault('extent', extent) return self.imshow(array.value.T, origin=origin, aspect=aspect, interpolation=interpolation, **kwargs)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def pcolormesh(self, *args, **kwargs): """Create a pseudocolor plot with a non-regular rectangular grid. When using GWpy, this method can be called with a single argument that is an :class:`~gwpy.types.Array2D`, for which the ``X`` and ``Y`` coordinate arrays will be determined from the indexing. In all other usage, all ``args`` and ``kwargs`` are passed directly to :meth:`~matplotlib.axes.Axes.pcolormesh`. Notes ----- Unlike the upstream :meth:`matplotlib.axes.Axes.pcolormesh`, this method respects the current grid settings. See Also -------- matplotlib.axes.Axes.pcolormesh """
if len(args) == 1 and isinstance(args[0], Array2D): return self._pcolormesh_array2d(*args, **kwargs) return super(Axes, self).pcolormesh(*args, **kwargs)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _pcolormesh_array2d(self, array, *args, **kwargs): """Render an `~gwpy.types.Array2D` using `Axes.pcolormesh` """
x = numpy.concatenate((array.xindex.value, array.xspan[-1:])) y = numpy.concatenate((array.yindex.value, array.yspan[-1:])) xcoord, ycoord = numpy.meshgrid(x, y, copy=False, sparse=True) return self.pcolormesh(xcoord, ycoord, array.value.T, *args, **kwargs)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def plot_mmm(self, data, lower=None, upper=None, **kwargs): """Plot a `Series` as a line, with a shaded region around it. The ``data`` `Series` is drawn, while the ``lower`` and ``upper`` `Series` are plotted lightly below and above, with a fill between them and the ``data``. All three `Series` should have the same `~Series.index` array. Parameters data : `~gwpy.types.Series` Data to plot normally. lower : `~gwpy.types.Series` Lower boundary (on Y-axis) for shade. upper : `~gwpy.types.Series` Upper boundary (on Y-axis) for shade. **kwargs Any other keyword arguments acceptable for :meth:`~matplotlib.Axes.plot`. Returns ------- artists : `tuple` All of the drawn artists: - `~matplotlib.lines.Line2d` for ``data``, - `~matplotlib.lines.Line2D` for ``lower``, if given - `~matplotlib.lines.Line2D` for ``upper``, if given - `~matplitlib.collections.PolyCollection` for shading See Also -------- matplotlib.axes.Axes.plot for a full description of acceptable ``*args`` and ``**kwargs`` """
alpha = kwargs.pop('alpha', .1) # plot mean line, = self.plot(data, **kwargs) out = [line] # modify keywords for shading kwargs.update({ 'label': '', 'linewidth': line.get_linewidth() / 2, 'color': line.get_color(), 'alpha': alpha * 2, }) # plot lower and upper Series fill = [data.xindex.value, data.value, data.value] for i, bound in enumerate((lower, upper)): if bound is not None: out.extend(self.plot(bound, **kwargs)) fill[i+1] = bound.value # fill between out.append(self.fill_between( *fill, alpha=alpha, color=kwargs['color'], rasterized=kwargs.get('rasterized', True))) return out
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def tile(self, x, y, w, h, color=None, anchor='center', edgecolors='face', linewidth=0.8, **kwargs): """Plot rectanguler tiles based onto these `Axes`. ``x`` and ``y`` give the anchor point for each tile, with ``w`` and ``h`` giving the extent in the X and Y axis respectively. Parameters x, y, w, h : `array_like`, shape (n, ) Input data color : `array_like`, shape (n, ) Array of amplitudes for tile color anchor : `str`, optional Anchor point for tiles relative to ``(x, y)`` coordinates, one of - ``'center'`` - center tile on ``(x, y)`` - ``'ll'`` - ``(x, y)`` defines lower-left corner of tile - ``'lr'`` - ``(x, y)`` defines lower-right corner of tile - ``'ul'`` - ``(x, y)`` defines upper-left corner of tile - ``'ur'`` - ``(x, y)`` defines upper-right corner of tile **kwargs Other keywords are passed to :meth:`~matplotlib.collections.PolyCollection` Returns ------- collection : `~matplotlib.collections.PolyCollection` the collection of tiles drawn Examples -------- """
# get color and sort if color is not None and kwargs.get('c_sort', True): sortidx = color.argsort() x = x[sortidx] y = y[sortidx] w = w[sortidx] h = h[sortidx] color = color[sortidx] # define how to make a polygon for each tile if anchor == 'll': def _poly(x, y, w, h): return ((x, y), (x, y+h), (x+w, y+h), (x+w, y)) elif anchor == 'lr': def _poly(x, y, w, h): return ((x-w, y), (x-w, y+h), (x, y+h), (x, y)) elif anchor == 'ul': def _poly(x, y, w, h): return ((x, y-h), (x, y), (x+w, y), (x+w, y-h)) elif anchor == 'ur': def _poly(x, y, w, h): return ((x-w, y-h), (x-w, y), (x, y), (x, y-h)) elif anchor == 'center': def _poly(x, y, w, h): return ((x-w/2., y-h/2.), (x-w/2., y+h/2.), (x+w/2., y+h/2.), (x+w/2., y-h/2.)) else: raise ValueError("Unrecognised tile anchor {!r}".format(anchor)) # build collection cmap = kwargs.pop('cmap', rcParams['image.cmap']) coll = PolyCollection((_poly(*tile) for tile in zip(x, y, w, h)), edgecolors=edgecolors, linewidth=linewidth, **kwargs) if color is not None: coll.set_array(color) coll.set_cmap(cmap) out = self.add_collection(coll) self.autoscale_view() return out
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def colorbar(self, mappable=None, **kwargs): """Add a `~matplotlib.colorbar.Colorbar` to these `Axes` Parameters mappable : matplotlib data collection, optional collection against which to map the colouring, default will be the last added mappable artist (collection or image) fraction : `float`, optional fraction of space to steal from these `Axes` to make space for the new axes, default is ``0.`` if ``use_axesgrid=True`` is given (default), otherwise default is ``.15`` to match the upstream matplotlib default. **kwargs other keyword arguments to be passed to the :meth:`Plot.colorbar` generator Returns ------- cbar : `~matplotlib.colorbar.Colorbar` the newly added `Colorbar` See Also -------- Plot.colorbar """
fig = self.get_figure() if kwargs.get('use_axesgrid', True): kwargs.setdefault('fraction', 0.) if kwargs.get('fraction', 0.) == 0.: kwargs.setdefault('use_axesgrid', True) mappable, kwargs = gcbar.process_colorbar_kwargs( fig, mappable=mappable, ax=self, **kwargs) if isinstance(fig, Plot): # either we have created colorbar Axes using axesgrid1, or # the user already gave use_axesgrid=False, so we forcefully # disable axesgrid here in case fraction == 0., which causes # gridspec colorbars to fail. kwargs['use_axesgrid'] = False return fig.colorbar(mappable, **kwargs)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def tconvert(gpsordate='now'): """Convert GPS times to ISO-format date-times and vice-versa. Parameters input gps or date to convert, many input types are supported Returns ------- date : `datetime.datetime` or `LIGOTimeGPS` converted gps or date Notes ----- If the input object is a `float` or `LIGOTimeGPS`, it will get converted from GPS format into a `datetime.datetime`, otherwise the input will be converted into `LIGOTimeGPS`. Examples -------- Integers and floats are automatically converted from GPS to `datetime.datetime`: datetime.datetime(1980, 1, 6, 0, 0) datetime.datetime(2015, 9, 14, 9, 50, 45, 391000) while strings are automatically converted to `~gwpy.time.LIGOTimeGPS`: LIGOTimeGPS(1126259462, 391000000) Additionally, a few special-case words as supported, which all return `~gwpy.time.LIGOTimeGPS`: """
# convert from GPS into datetime try: float(gpsordate) # if we can 'float' it, then its probably a GPS time except (TypeError, ValueError): return to_gps(gpsordate) return from_gps(gpsordate)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def from_gps(gps): """Convert a GPS time into a `datetime.datetime`. Parameters gps : `LIGOTimeGPS`, `int`, `float` GPS time to convert Returns ------- datetime : `datetime.datetime` ISO-format datetime equivalent of input GPS time Examples -------- datetime.datetime(2017, 1, 1, 0, 0) datetime.datetime(2015, 9, 14, 9, 50, 45, 391000) """
try: gps = LIGOTimeGPS(gps) except (ValueError, TypeError, RuntimeError): gps = LIGOTimeGPS(float(gps)) sec, nano = gps.gpsSeconds, gps.gpsNanoSeconds date = Time(sec, format='gps', scale='utc').datetime return date + datetime.timedelta(microseconds=nano*1e-3)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _str_to_datetime(datestr): """Convert `str` to `datetime.datetime`. """
# try known string try: return DATE_STRINGS[str(datestr).lower()]() except KeyError: # any other string pass # use maya try: import maya return maya.when(datestr).datetime() except ImportError: pass # use dateutil.parse with warnings.catch_warnings(): # don't allow lazy passing of time-zones warnings.simplefilter("error", RuntimeWarning) try: return dateparser.parse(datestr) except RuntimeWarning: raise ValueError("Cannot parse date string with timezone " "without maya, please install maya") except (ValueError, TypeError) as exc: # improve error reporting exc.args = ("Cannot parse date string {0!r}: {1}".format( datestr, exc.args[0]),) raise
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _time_to_gps(time): """Convert a `Time` into `LIGOTimeGPS`. This method uses `datetime.datetime` underneath, which restricts Parameters time : `~astropy.time.Time` formatted `Time` object to convert Returns ------- gps : `LIGOTimeGPS` Nano-second precision `LIGOTimeGPS` time """
time = time.utc date = time.datetime micro = date.microsecond if isinstance(date, datetime.datetime) else 0 return LIGOTimeGPS(int(time.gps), int(micro*1e3))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def with_read_hdf5(func): """Decorate an HDF5-reading function to open a filepath if needed ``func`` should be written to presume an `h5py.Group` as the first positional argument. """
@wraps(func) def decorated_func(fobj, *args, **kwargs): # pylint: disable=missing-docstring if not isinstance(fobj, h5py.HLObject): if isinstance(fobj, FILE_LIKE): fobj = fobj.name with h5py.File(fobj, 'r') as h5f: return func(h5f, *args, **kwargs) return func(fobj, *args, **kwargs) return decorated_func
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def find_dataset(h5o, path=None): """Find and return the relevant dataset inside the given H5 object If ``path=None`` is given, and ``h5o`` contains a single dataset, that will be returned Parameters h5o : `h5py.File`, `h5py.Group` the HDF5 object in which to search path : `str`, optional the path (relative to ``h5o``) of the desired data set Returns ------- dset : `h5py.Dataset` the recovered dataset object Raises ------ ValueError if ``path=None`` and the HDF5 object contains multiple datasets KeyError if ``path`` is given but is not found within the HDF5 object """
# find dataset if isinstance(h5o, h5py.Dataset): return h5o elif path is None and len(h5o) == 1: path = list(h5o.keys())[0] elif path is None: raise ValueError("Please specify the HDF5 path via the " "``path=`` keyword argument") return h5o[path]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def with_write_hdf5(func): """Decorate an HDF5-writing function to open a filepath if needed ``func`` should be written to take the object to be written as the first argument, and then presume an `h5py.Group` as the second. This method uses keywords ``append`` and ``overwrite`` as follows if the output file already exists: - ``append=False, overwrite=False``: raise `~exceptions.IOError` - ``append=True``: open in mode ``a`` - ``append=False, overwrite=True``: open in mode ``w`` """
@wraps(func) def decorated_func(obj, fobj, *args, **kwargs): # pylint: disable=missing-docstring if not isinstance(fobj, h5py.HLObject): append = kwargs.get('append', False) overwrite = kwargs.get('overwrite', False) if os.path.exists(fobj) and not (overwrite or append): raise IOError("File exists: %s" % fobj) with h5py.File(fobj, 'a' if append else 'w') as h5f: return func(obj, h5f, *args, **kwargs) return func(obj, fobj, *args, **kwargs) return decorated_func
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def create_dataset(parent, path, overwrite=False, **kwargs): """Create a new dataset inside the parent HDF5 object Parameters parent : `h5py.Group`, `h5py.File` the object in which to create a new dataset path : `str` the path at which to create the new dataset overwrite : `bool` if `True`, delete any existing dataset at the desired path, default: `False` **kwargs other arguments are passed directly to :meth:`h5py.Group.create_dataset` Returns ------- dataset : `h5py.Dataset` the newly created dataset """
# force deletion of existing dataset if path in parent and overwrite: del parent[path] # create new dataset with improved error handling try: return parent.create_dataset(path, **kwargs) except RuntimeError as exc: if str(exc) == 'Unable to create link (Name already exists)': exc.args = ('{0}: {1!r}, pass overwrite=True ' 'to ignore existing datasets'.format(str(exc), path),) raise
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def format_db_selection(selection, engine=None): """Format a column filter selection as a SQL database WHERE string """
# parse selection for SQL query if selection is None: return '' selections = [] for col, op_, value in parse_column_filters(selection): if engine and engine.name == 'postgresql': col = '"%s"' % col try: opstr = [key for key in OPERATORS if OPERATORS[key] is op_][0] except KeyError: raise ValueError("Cannot format database 'WHERE' command with " "selection operator %r" % op_) selections.append('{0} {1} {2!r}'.format(col, opstr, value)) if selections: return 'WHERE %s' % ' AND '.join(selections) return ''
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def fetch(engine, tablename, columns=None, selection=None, **kwargs): """Fetch data from an SQL table into an `EventTable` Parameters engine : `sqlalchemy.engine.Engine` the database engine to use when connecting table : `str`, The name of table you are attempting to receive triggers from. selection other filters you would like to supply underlying reader method for the given format .. note:: For now it will attempt to automatically connect you to a specific DB. In the future, this may be an input argument. Returns ------- table : `GravitySpyTable` """
import pandas as pd # parse columns for SQL query if columns is None: columnstr = '*' else: columnstr = ', '.join('"%s"' % c for c in columns) # parse selection for SQL query selectionstr = format_db_selection(selection, engine=engine) # build SQL query qstr = 'SELECT %s FROM %s %s' % (columnstr, tablename, selectionstr) # perform query tab = pd.read_sql(qstr, engine, **kwargs) # Convert unicode columns to string types = tab.apply(lambda x: pd.api.types.infer_dtype(x.values)) if not tab.empty: for col in types[types == 'unicode'].index: tab[col] = tab[col].astype(str) return Table.from_pandas(tab).filled()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def q_scan(data, mismatch=DEFAULT_MISMATCH, qrange=DEFAULT_QRANGE, frange=DEFAULT_FRANGE, duration=None, sampling=None, **kwargs): """Transform data by scanning over a `QTiling` This utility is provided mainly to allow direct manipulation of the `QTiling.transform` output. Most users probably just want to use :meth:`~gwpy.timeseries.TimeSeries.q_transform`, which wraps around this. Parameters data : `~gwpy.timeseries.TimeSeries` or `ndarray` the time- or frequency-domain input data mismatch : `float`, optional maximum allowed fractional mismatch between neighbouring tiles qrange : `tuple` of `float`, optional `(low, high)` range of Qs to scan frange : `tuple` of `float`, optional `(low, high)` range of frequencies to scan duration : `float`, optional duration (seconds) of input, required if `data` is not a `TimeSeries` sampling : `float`, optional sample rate (Hertz) of input, required if `data` is not a `TimeSeries` **kwargs other keyword arguments to be passed to :meth:`QTiling.transform`, including ``'epoch'`` and ``'search'`` Returns ------- qgram : `QGram` the raw output of :meth:`QTiling.transform` far : `float` expected false alarm rate (Hertz) of white Gaussian noise with the same peak energy and total duration as `qgram` """
from gwpy.timeseries import TimeSeries # prepare input if isinstance(data, TimeSeries): duration = abs(data.span) sampling = data.sample_rate.to('Hz').value kwargs.update({'epoch': data.t0.value}) data = data.fft().value # return a raw Q-transform and its significance qgram, N = QTiling(duration, sampling, mismatch=mismatch, qrange=qrange, frange=frange).transform(data, **kwargs) far = 1.5 * N * numpy.exp(-qgram.peak['energy']) / duration return (qgram, far)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _iter_qs(self): """Iterate over the Q values """
# work out how many Qs we need cumum = log(self.qrange[1] / self.qrange[0]) / 2**(1/2.) nplanes = int(max(ceil(cumum / self.deltam), 1)) dq = cumum / nplanes # pylint: disable=invalid-name for i in xrange(nplanes): yield self.qrange[0] * exp(2**(1/2.) * dq * (i + .5))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def transform(self, fseries, **kwargs): """Compute the time-frequency plane at fixed Q with the most significant tile Parameters fseries : `~gwpy.timeseries.FrequencySeries` the complex FFT of a time-series data set **kwargs other keyword arguments to pass to `QPlane.transform` Returns ------- out : `QGram` signal energies over the time-frequency plane containing the most significant tile N : `int` estimated number of statistically independent tiles See Also -------- QPlane.transform compute the Q-transform over a single time-frequency plane """
# Body of `QTiling.transform`: scan every Q plane, keep the one containing
# the most significant tile, and estimate the number of independent tiles.
weight = 1 + numpy.log10(self.qrange[1]/self.qrange[0]) / numpy.sqrt(2)
# nind: running count of tiles, nplanes: planes scanned,
# peak: loudest tile energy seen so far
nind, nplanes, peak, result = (0, 0, 0, None)
# identify the plane with the loudest tile
for plane in self:
    nplanes += 1
    nind += sum([1 + row.ntiles * row.deltam for row in plane])
    result = plane.transform(fseries, **kwargs)
    if result.peak['energy'] > peak:
        out = result
        peak = out.peak['energy']
# second element is the weighted average tile count across planes
return (out, nind * weight / nplanes)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def farray(self): """Array of frequencies for the lower-edge of each frequency bin :type: `numpy.ndarray` """
# Body of `farray`: lower-edge frequency of each bin, i.e. the bin centre
# minus half the Q-dependent bandwidth (2*sqrt(pi)*f/Q).
bandwidths = 2 * pi ** (1/2.) * self.frequencies / self.q
return self.frequencies - bandwidths / 2.
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def ntiles(self): """The number of tiles in this row :type: `int` """
# Body of `ntiles`: number of time tiles in this frequency row, rounded up
# to a power of two for FFT efficiency.
tcum_mismatch = self.duration * 2 * pi * self.frequency / self.q
return next_power_of_two(tcum_mismatch / self.deltam)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_window(self): """Generate the bi-square window for this row Returns ------- window : `numpy.ndarray` """
# real frequencies wfrequencies = self._get_indices() / self.duration # dimensionless frequencies xfrequencies = wfrequencies * self.qprime / self.frequency # normalize and generate bi-square window norm = self.ntiles / (self.duration * self.sampling) * ( 315 * self.qprime / (128 * self.frequency)) ** (1/2.) return (1 - xfrequencies ** 2) ** 2 * norm
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_data_indices(self): """Returns the index array of interesting frequencies for this row """
# Body of `get_data_indices`: frequency-domain sample indices for this row,
# offsetting the window indices by the row's central-frequency bin.
return numpy.round(self._get_indices() + 1 +
                   self.frequency * self.duration).astype(int)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def interpolate(self, tres="<default>", fres="<default>", logf=False, outseg=None): """Interpolate this `QGram` over a regularly-gridded spectrogram Parameters tres : `float`, optional desired time resolution (seconds) of output `Spectrogram`, default is `abs(outseg) / 1000.` fres : `float`, `int`, `None`, optional desired frequency resolution (Hertz) of output `Spectrogram`, or, if ``logf=True``, the number of frequency samples; give `None` to skip this step and return the original resolution, default is 0.5 Hz or 500 frequency samples logf : `bool`, optional boolean switch to enable (`True`) or disable (`False`) use of log-sampled frequencies in the output `Spectrogram` outseg : `~gwpy.segments.Segment`, optional GPS `[start, stop)` segment for output `Spectrogram`, default is the full duration of the input Returns ------- out : `~gwpy.spectrogram.Spectrogram` output `Spectrogram` of normalised Q energy See Also -------- scipy.interpolate this method uses `~scipy.interpolate.InterpolatedUnivariateSpline` to cast all frequency rows to a common time-axis, and then `~scipy.interpolate.interp2d` to apply the desired frequency resolution across the band Notes ----- This method will return a `Spectrogram` of dtype ``float32`` if ``norm`` is given, and ``float64`` otherwise. To optimize plot rendering with `~matplotlib.axes.Axes.pcolormesh`, the output `~gwpy.spectrogram.Spectrogram` can be given a log-sampled frequency axis by passing `logf=True` at runtime. The `fres` argument is then the number of points on the frequency axis. Note, this is incompatible with `~matplotlib.axes.Axes.imshow`. It is also highly recommended to use the `outseg` keyword argument when only a small window around a given GPS time is of interest. """
# Body of `QGram.interpolate`: resample the per-row energy series onto a
# regular time grid, then (optionally) onto a finer linear or log-sampled
# frequency grid, returning a Spectrogram.
# NOTE(review): `interp2d` is deprecated/removed in modern SciPy — confirm
# the pinned SciPy version supports it.
from scipy.interpolate import (interp2d, InterpolatedUnivariateSpline)
from ..spectrogram import Spectrogram
if outseg is None:
    # default to the full span of the transform
    outseg = self.energies[0].span
frequencies = self.plane.frequencies
dtype = self.energies[0].dtype
# build regular Spectrogram from peak-Q data by interpolating each
# (Q, frequency) `TimeSeries` to have the same time resolution
if tres == "<default>":
    tres = abs(Segment(outseg)) / 1000.
xout = numpy.arange(*outseg, step=tres)
nx = xout.size
ny = frequencies.size
out = Spectrogram(numpy.empty((nx, ny), dtype=dtype),
                  t0=outseg[0], dt=tres, frequencies=frequencies)
# record Q in output
out.q = self.plane.q
# interpolate rows: spline each row onto the common time axis
for i, row in enumerate(self.energies):
    xrow = numpy.arange(row.x0.value, (row.x0 + row.duration).value,
                        row.dx.value)
    interp = InterpolatedUnivariateSpline(xrow, row.value)
    out[:, i] = interp(xout).astype(dtype, casting="same_kind",
                                    copy=False)
if fres is None:
    # caller asked to keep the native frequency resolution
    return out
# interpolate the spectrogram to increase its frequency resolution
# --- this is done because Duncan doesn't like interpolated images
# since they don't support log scaling
interp = interp2d(xout, frequencies, out.value.T, kind='cubic')
if not logf:
    # linear frequency axis at `fres` Hz spacing
    if fres == "<default>":
        fres = .5
    outfreq = numpy.arange(
        self.plane.frange[0], self.plane.frange[1], fres,
        dtype=dtype)
else:
    # log-sampled frequency axis with `fres` points
    if fres == "<default>":
        fres = 500
    # using `~numpy.logspace` here to support numpy-1.7.1 for EPEL7,
    # but numpy-1.12.0 introduced the function `~numpy.geomspace`
    logfmin = numpy.log10(self.plane.frange[0])
    logfmax = numpy.log10(self.plane.frange[1])
    outfreq = numpy.logspace(logfmin, logfmax, num=int(fres))
new = type(out)(
    interp(xout, outfreq).T.astype(
        dtype, casting="same_kind", copy=False),
    t0=outseg[0], dt=tres, frequencies=outfreq,
)
# preserve the Q attribute on the resampled output
new.q = self.plane.q
return new
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def table(self, snrthresh=5.5): """Represent this `QPlane` as an `EventTable` Parameters snrthresh : `float`, optional lower inclusive threshold on individual tile SNR to keep in the table, default: 5.5 Returns ------- out : `~gwpy.table.EventTable` a table of time-frequency tiles on this `QPlane` Notes ----- Only tiles with signal energy greater than or equal to `snrthresh ** 2 / 2` will be stored in the output `EventTable`. """
from ..table import EventTable # get plane properties freqs = self.plane.frequencies bws = 2 * (freqs - self.plane.farray) # collect table data as a recarray names = ('time', 'frequency', 'duration', 'bandwidth', 'energy') rec = numpy.recarray((0,), names=names, formats=['f8'] * len(names)) for f, bw, row in zip(freqs, bws, self.energies): ind, = (row.value >= snrthresh ** 2 / 2.).nonzero() new = ind.size if new > 0: rec.resize((rec.size + new,), refcheck=False) rec['time'][-new:] = row.times.value[ind] rec['frequency'][-new:] = f rec['duration'][-new:] = row.dt.to('s').value rec['bandwidth'][-new:] = bw rec['energy'][-new:] = row.value[ind] # save to a table out = EventTable(rec, copy=False) out.meta['q'] = self.plane.q return out
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def define(cls, start, step, num, dtype=None): """Define a new `Index`. The output is basically:: start + numpy.arange(num) * step Parameters start : `Number` The starting value of the index. step : `Number` The step size of the index. num : `int` The size of the index (number of samples). dtype : `numpy.dtype`, `None`, optional The desired dtype of the index, if not given, defaults to the higher-precision dtype from ``start`` and ``step``. Returns ------- index : `Index` A new `Index` created from the given parameters. """
# Body of classmethod `Index.define`: start + arange(num) * step, cast to
# the requested (or inferred higher-precision) dtype.
if dtype is None:
    # pick the higher-precision dtype of start and step
    dtype = max(
        numpy.array(start, subok=True, copy=False).dtype,
        numpy.array(step, subok=True, copy=False).dtype,
    )
# NOTE(review): assumes start/step expose `.astype` (numpy scalars or
# quantities, not plain Python numbers) — confirm against callers
start = start.astype(dtype, copy=False)
step = step.astype(dtype, copy=False)
return cls(start + numpy.arange(num, dtype=dtype) * step, copy=False)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def regular(self): """`True` if this index is linearly increasing """
# Body of `regular`: cached check that the index increases linearly; the
# result is memoised in self.info.meta['regular'].
try:
    return self.info.meta['regular']
except (TypeError, KeyError):
    # TypeError: meta is None; KeyError: not yet computed
    if self.info.meta is None:
        self.info.meta = {}
    self.info.meta['regular'] = self.is_regular()
    return self.info.meta['regular']