idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
22,500
def truncate_transfer(transfer, ncorner=None):
    """Smoothly zero the edges of a frequency-domain transfer function.

    The first ``ncorner`` samples are zeroed outright, and the remainder
    is tapered with a Planck window (5 samples on each side).
    """
    ncorner = ncorner or 0
    nsamp = transfer.size
    out = transfer.copy()
    out[:ncorner] = 0
    out[ncorner:nsamp] *= planck(nsamp - ncorner, nleft=5, nright=5)
    return out
Smoothly zero the edges of a frequency domain transfer function
22,501
def truncate_impulse(impulse, ntaps, window='hanning'):
    """Smoothly truncate a time-domain impulse response.

    Keeps ``ntaps`` samples split between the two ends of the response,
    tapered by the two halves of ``window``, and zeros everything
    in between.
    """
    half = int(ntaps / 2)
    out = impulse.copy()
    size = out.size
    taper = signal.get_window(window, ntaps)
    # taper the head with the falling half of the window ...
    out[:half] *= taper[half:ntaps]
    # ... and the tail with the rising half
    out[size - half:size] *= taper[:half]
    out[half:size - half] = 0
    return out
Smoothly truncate a time domain impulse response
22,502
def fir_from_transfer(transfer, ntaps, window='hanning', ncorner=None):
    """Design a Type II FIR filter given an arbitrary transfer function.

    The transfer function is edge-smoothed, inverse-FFTed to the time
    domain, truncated to ``ntaps`` taps, and re-centred.
    """
    clean = truncate_transfer(transfer, ncorner=ncorner)
    impulse = npfft.irfft(clean)
    impulse = truncate_impulse(impulse, ntaps=ntaps, window=window)
    # roll the two tapered ends together into a contiguous FIR kernel
    return numpy.roll(impulse, int(ntaps / 2 - 1))[:ntaps]
Design a Type II FIR filter given an arbitrary transfer function
22,503
def bilinear_zpk(zeros, poles, gain, fs=1.0, unit='Hz'):
    """Convert an analogue ZPK filter to digital using a bilinear transform.

    ``zeros`` and ``poles`` are interpreted in ``unit`` ('Hz' or
    'rad/s'); ``fs`` is the sampling frequency in Hz.  Returns the
    digital ``(zeros, poles, gain)`` tuple.

    Raises
    ------
    ValueError
        If ``unit`` is neither 'Hz' nor 'rad/s'.
    """
    zeros = numpy.array(zeros, dtype=float, copy=False)
    # NOTE(review): only zeros are filtered for finiteness, poles are
    # not -- confirm this asymmetry is intended
    zeros = zeros[numpy.isfinite(zeros)]
    poles = numpy.array(poles, dtype=float, copy=False)
    gain = gain
    # convert ordinary frequency (Hz) to negative angular frequency
    unit = Unit(unit)
    if unit == Unit('Hz'):
        zeros *= -2 * pi
        poles *= -2 * pi
    elif unit != Unit('rad/s'):
        raise ValueError("zpk can only be given with unit='Hz' "
                         "or 'rad/s'")
    # bilinear transform with pre-factor 2/T
    fs = 2 * Quantity(fs, 'Hz').value
    dpoles = (1 + poles / fs) / (1 - poles / fs)
    dzeros = (1 + zeros / fs) / (1 - zeros / fs)
    # pad the zeros with z = -1 so both sets have the same length
    dzeros = numpy.concatenate((
        dzeros,
        -numpy.ones(len(dpoles) - len(dzeros)),
    ))
    dgain = gain * numpy.prod(fs - zeros) / numpy.prod(fs - poles)
    return dzeros, dpoles, dgain
Convert an analogue ZPK filter to digital using a bilinear transform
22,504
def parse_filter(args, analog=False, sample_rate=None):
    """Parse arbitrary input args into a TF or ZPK filter definition.

    Returns a ``(form, filter)`` tuple where ``form`` is ``'ba'`` or
    ``'zpk'``.  If ``analog=True`` the filter is converted to digital,
    which requires ``sample_rate``.
    """
    if analog and not sample_rate:
        raise ValueError("Must give sample_rate frequency to convert "
                         "analog filter to digital")
    # unwrap a single-element tuple
    if isinstance(args, tuple) and len(args) == 1:
        args = args[0]
    # a 1-D array is a transfer-function numerator (denominator = [1.])
    if isinstance(args, numpy.ndarray) and args.ndim == 1:
        b, a = args, [1.]
        if analog:
            return 'ba', signal.bilinear(b, a)
        return 'ba', (b, a)
    if isinstance(args, LinearTimeInvariant):
        lti = args
    elif (isinstance(args, numpy.ndarray) and args.ndim == 2
            and args.shape[1] == 6):
        # an (n, 6) array is second-order sections
        lti = signal.lti(*signal.sos2zpk(args))
    else:
        lti = signal.lti(*args)
    try:
        lti = lti.to_zpk()
    except AttributeError:
        # presumably already a zpk representation -- TODO confirm
        pass
    if analog:
        return 'zpk', bilinear_zpk(lti.zeros, lti.poles, lti.gain,
                                   fs=sample_rate)
    return 'zpk', (lti.zeros, lti.poles, lti.gain)
Parse arbitrary input args into a TF or ZPK filter definition
22,505
def lowpass(frequency, sample_rate, fstop=None, gpass=2, gstop=30,
            type='iir', **kwargs):
    """Design a low-pass filter for the given cutoff frequency."""
    sample_rate = _as_float(sample_rate)
    frequency = _as_float(frequency)
    if fstop is None:
        # default stop band: 1.5x the cutoff, capped at Nyquist
        fstop = min(frequency * 1.5, sample_rate / 2.)
    designer = _design_iir if type == 'iir' else _design_fir
    return designer(frequency, fstop, sample_rate, gpass, gstop, **kwargs)
Design a low-pass filter for the given cutoff frequency
22,506
def highpass(frequency, sample_rate, fstop=None, gpass=2, gstop=30,
             type='iir', **kwargs):
    """Design a high-pass filter for the given cutoff frequency."""
    sample_rate = _as_float(sample_rate)
    frequency = _as_float(frequency)
    if fstop is None:
        # default stop band: two thirds of the cutoff
        fstop = frequency * 2 / 3.
    designer = _design_iir if type == 'iir' else _design_fir
    return designer(frequency, fstop, sample_rate, gpass, gstop, **kwargs)
Design a high-pass filter for the given cutoff frequency
22,507
def bandpass(flow, fhigh, sample_rate, fstop=None, gpass=2, gstop=30,
             type='iir', **kwargs):
    """Design a band-pass filter for the given cutoff frequencies."""
    sample_rate = _as_float(sample_rate)
    flow = _as_float(flow)
    fhigh = _as_float(fhigh)
    band = (flow, fhigh)
    if fstop is None:
        # default stop band: 2/3 of the low cut, 1.5x the high cut
        # (capped at Nyquist)
        fstop = (flow * 2 / 3., min(fhigh * 1.5, sample_rate / 2.))
    fstop = (_as_float(fstop[0]), _as_float(fstop[1]))
    if type == 'iir':
        return _design_iir(band, fstop, sample_rate, gpass, gstop, **kwargs)
    return _design_fir(band, fstop, sample_rate, gpass, gstop,
                       pass_zero=False, **kwargs)
Design a band-pass filter for the given cutoff frequencies
22,508
def notch(frequency, sample_rate, type='iir', **kwargs):
    """Design a ZPK notch filter for the given frequency and sampling rate."""
    frequency = Quantity(frequency, 'Hz').value
    sample_rate = Quantity(sample_rate, 'Hz').value
    nyq = 0.5 * sample_rate
    df = 1.0   # pass-band half-width (Hz)
    df2 = 0.1  # stop-band half-width (Hz)
    passband = [(frequency - df) / nyq, (frequency + df) / nyq]
    stopband = [(frequency - df2) / nyq, (frequency + df2) / nyq]
    if type != 'iir':
        raise NotImplementedError("Generating %r notch filters has not been "
                                  "implemented yet" % type)
    kwargs.setdefault('gpass', 1)
    kwargs.setdefault('gstop', 10)
    kwargs.setdefault('ftype', 'ellip')
    return signal.iirdesign(passband, stopband, output='zpk', **kwargs)
Design a ZPK notch filter for the given frequency and sampling rate
22,509
def welch(timeseries, segmentlength, noverlap=None, **kwargs):
    """Calculate a PSD of this TimeSeries using Welch's method."""
    freqs, psd_ = scipy.signal.welch(
        timeseries.value,
        noverlap=noverlap,
        fs=timeseries.sample_rate.decompose().value,
        nperseg=segmentlength,
        **kwargs)
    # carry the physical unit through the PSD scaling
    unit = scale_timeseries_unit(timeseries.unit,
                                 kwargs.get('scaling', 'density'))
    return FrequencySeries(psd_, unit=unit, frequencies=freqs,
                           name=timeseries.name, epoch=timeseries.epoch,
                           channel=timeseries.channel)
Calculate a PSD of this TimeSeries using Welch's method.
22,510
def bartlett(timeseries, segmentlength, **kwargs):
    """Calculate a PSD using Bartlett's method (Welch with zero overlap)."""
    # Bartlett's method is defined with no overlap between segments
    kwargs.pop('noverlap', None)
    return welch(timeseries, segmentlength, noverlap=0, **kwargs)
Calculate a PSD using Bartlett's method
22,511
def median(timeseries, segmentlength, **kwargs):
    """Calculate a PSD using Welch's method with a median average."""
    # the ``average`` keyword to scipy.signal.welch is new in scipy 1.2.0
    if scipy_version <= '1.1.9999':
        raise ValueError(
            "median average PSD estimation requires scipy >= 1.2.0",
        )
    kwargs.setdefault('average', 'median')
    return welch(timeseries, segmentlength, **kwargs)
Calculate a PSD using Welch's method with a median average
22,512
def rayleigh(timeseries, segmentlength, noverlap=0):
    """Calculate a Rayleigh statistic spectrum.

    Computes a Welch PSD for each segment, then returns the ratio of
    the per-frequency standard deviation to the mean across segments.
    """
    stepsize = segmentlength - noverlap
    if noverlap:
        # NOTE(review): the divisor here is ``noverlap`` rather than
        # ``stepsize`` -- confirm this segment count is intended
        numsegs = 1 + int((timeseries.size - segmentlength) / float(noverlap))
    else:
        numsegs = int(timeseries.size // segmentlength)
    # one PSD (of length segmentlength//2 + 1) per segment
    tmpdata = numpy.ndarray((numsegs, int(segmentlength // 2 + 1)))
    for i in range(numsegs):
        tmpdata[i, :] = welch(
            timeseries[i * stepsize:i * stepsize + segmentlength],
            segmentlength)
    std = tmpdata.std(axis=0)
    mean = tmpdata.mean(axis=0)
    return FrequencySeries(std / mean, unit='', copy=False, f0=0,
                           epoch=timeseries.epoch,
                           df=timeseries.sample_rate.value / segmentlength,
                           channel=timeseries.channel,
                           name='Rayleigh spectrum of %s' % timeseries.name)
Calculate a Rayleigh statistic spectrum
22,513
def csd(timeseries, other, segmentlength, noverlap=None, **kwargs):
    """Calculate the CSD of two TimeSeries using Welch's method."""
    try:
        freqs, csd_ = scipy.signal.csd(
            timeseries.value,
            other.value,
            noverlap=noverlap,
            fs=timeseries.sample_rate.decompose().value,
            nperseg=segmentlength,
            **kwargs)
    except AttributeError as exc:
        # scipy.signal.csd only exists in scipy >= 0.16
        exc.args = ('{}, scipy>=0.16 is required'.format(str(exc)),)
        raise
    unit = scale_timeseries_unit(timeseries.unit,
                                 kwargs.get('scaling', 'density'))
    name = str(timeseries.name) + '---' + str(other.name)
    return FrequencySeries(csd_, unit=unit, frequencies=freqs, name=name,
                           epoch=timeseries.epoch,
                           channel=timeseries.channel)
Calculate the CSD of two TimeSeries using Welch's method
22,514
def duration(self):
    """Duration of this series in seconds."""
    start, stop = self.span
    return units.Quantity(stop - start, self.xunit, dtype=float)
Duration of this series in seconds
22,515
def read(cls, source, *args, **kwargs):
    """Read data into a TimeSeries via the unified I/O registry."""
    # deferred import to avoid a circular dependency at module load
    from .io.core import read as timeseries_reader
    return timeseries_reader(cls, source, *args, **kwargs)
Read data into a TimeSeries
22,516
def fetch(cls, channel, start, end, host=None, port=None, verbose=False,
          connection=None, verify=False, pad=None, allow_tape=None,
          scaled=None, type=None, dtype=None):
    """Fetch data for a single channel from NDS.

    Delegates to the dict-class ``fetch`` then extracts the one channel.
    """
    data = cls.DictClass.fetch(
        [channel], start, end, host=host, port=port, verbose=verbose,
        connection=connection, verify=verify, pad=pad, scaled=scaled,
        allow_tape=allow_tape, type=type, dtype=dtype)
    return data[str(channel)]
Fetch data from NDS
22,517
def fetch_open_data(cls, ifo, start, end, sample_rate=4096, tag=None,
                    version=None, format='hdf5', host=GWOSC_DEFAULT_HOST,
                    verbose=False, cache=None, **kwargs):
    """Fetch open-access data from the LIGO Open Science Center."""
    # deferred import to keep module import light
    from .io.losc import fetch_losc_data
    return fetch_losc_data(ifo, start, end, sample_rate=sample_rate,
                           tag=tag, version=version, format=format,
                           verbose=verbose, cache=cache, host=host,
                           cls=cls, **kwargs)
Fetch open - access data from the LIGO Open Science Center
22,518
def find(cls, channel, start, end, frametype=None, pad=None, scaled=None,
         dtype=None, nproc=1, verbose=False, **readargs):
    """Find and read data from frames for a single channel."""
    data = cls.DictClass.find(
        [channel], start, end, frametype=frametype, verbose=verbose,
        pad=pad, scaled=scaled, dtype=dtype, nproc=nproc, **readargs)
    return data[str(channel)]
Find and read data from frames for a channel
22,519
def plot(self, method='plot', figsize=(12, 4), xscale='auto-gps', **kwargs):
    """Plot the data for this timeseries."""
    # force our defaults over anything in kwargs
    kwargs['figsize'] = figsize
    kwargs['xscale'] = xscale
    return super(TimeSeriesBase, self).plot(method=method, **kwargs)
Plot the data for this timeseries
22,520
def from_nds2_buffer(cls, buffer_, scaled=None, copy=True, **metadata):
    """Construct a new series from an ``nds2.buffer`` object.

    Metadata (channel, epoch, sample_rate, unit, name) default to the
    values carried by the buffer, but may be overridden via keyword
    arguments.  If scaling is enabled (per ``_dynamic_scaled``) and the
    channel defines a non-trivial slope/offset, the raw data are
    converted to calibrated units.
    """
    channel = Channel.from_nds2(buffer_.channel)
    metadata.setdefault('channel', channel)
    metadata.setdefault('epoch', LIGOTimeGPS(buffer_.gps_seconds,
                                             buffer_.gps_nanoseconds))
    metadata.setdefault('sample_rate', channel.sample_rate)
    metadata.setdefault('unit', channel.unit)
    metadata.setdefault('name', buffer_.name)
    scaled = _dynamic_scaled(scaled, channel.name)
    slope = buffer_.signal_slope
    offset = buffer_.signal_offset
    null_scaling = slope == 1. and offset == 0.
    if scaled and not null_scaling:
        # the arithmetic already allocates a fresh array, so the
        # explicit ``.copy()`` the original made here was redundant
        data = buffer_.data * slope + offset
        copy = False
    else:
        data = buffer_.data
    return cls(data, copy=copy, **metadata)
Construct a new series from an nds2 . buffer object
22,521
def from_lal(cls, lalts, copy=True):
    """Generate a new TimeSeries from a LAL TimeSeries of any type."""
    from ..utils.lal import from_lal_unit
    try:
        unit = from_lal_unit(lalts.sampleUnits)
    except (TypeError, ValueError) as exc:
        # unit could not be translated; warn and continue unitless
        warnings.warn("%s, defaulting to 'dimensionless'" % str(exc))
        unit = None
    channel = Channel(lalts.name, sample_rate=1 / lalts.deltaT, unit=unit,
                      dtype=lalts.data.data.dtype)
    out = cls(lalts.data.data, channel=channel, t0=lalts.epoch,
              dt=lalts.deltaT, unit=unit, name=lalts.name, copy=False)
    return out.copy() if copy else out
Generate a new TimeSeries from a LAL TimeSeries of any type .
22,522
def to_lal(self):
    """Convert this TimeSeries into a LAL TimeSeries."""
    import lal
    from ..utils.lal import (find_typed_function, to_lal_unit)
    try:
        unit = to_lal_unit(self.unit)
    except ValueError as e:
        # unit has no LAL equivalent; warn and fall back to dimensionless
        warnings.warn("%s, defaulting to lal.DimensionlessUnit" % str(e))
        unit = lal.DimensionlessUnit
    # pick the Create<TYPE>TimeSeries constructor matching our dtype
    create = find_typed_function(self.dtype, 'Create', 'TimeSeries')
    lalts = create(self.name, lal.LIGOTimeGPS(self.epoch.gps), 0,
                   self.dt.value, unit, self.shape[0])
    lalts.data.data = self.value
    return lalts
Convert this TimeSeries into a LAL TimeSeries .
22,523
def from_pycbc(cls, pycbcseries, copy=True):
    """Convert a ``pycbc.types.timeseries.TimeSeries`` into a TimeSeries."""
    return cls(pycbcseries.data,
               t0=pycbcseries.start_time,
               dt=pycbcseries.delta_t,
               copy=copy)
Convert a pycbc . types . timeseries . TimeSeries into a TimeSeries
22,524
def to_pycbc(self, copy=True):
    """Convert this TimeSeries into a PyCBC TimeSeries."""
    from pycbc import types
    return types.TimeSeries(self.value,
                            delta_t=self.dt.to('s').value,
                            epoch=self.epoch.gps,
                            copy=copy)
Convert this TimeSeries into a PyCBC ~pycbc . types . timeseries . TimeSeries
22,525
def coalesce(self):
    """Merge contiguous elements of this list into single objects.

    Sorts the list by start time, then joins runs of contiguous series
    in place, shrinking the list to the merged entries.
    """
    self.sort(key=lambda ts: ts.t0.value)
    i = j = 0
    N = len(self)
    while j < N:
        this = self[j]
        j += 1
        if j < N and this.is_contiguous(self[j]) == 1:
            # absorb every following contiguous series into ``this``
            while j < N and this.is_contiguous(self[j]):
                try:
                    this = self[i] = this.append(self[j])
                except ValueError as exc:
                    if 'cannot resize this array' in str(exc):
                        # in-place append failed on a read-only buffer;
                        # copy first, then retry
                        this = this.copy()
                        this = self[i] = this.append(self[j])
                    else:
                        raise
                j += 1
        else:
            self[i] = this
        i += 1
    # drop the now-stale tail entries
    del self[i:]
    return self
Merge contiguous elements of this list into single objects
22,526
def join(self, pad=None, gap=None):
    """Concatenate all of the elements of this list into a single object."""
    if not self:
        # empty list: return an empty instance of the entry type
        return self.EntryClass(numpy.empty((0,) * self.EntryClass._ndim))
    self.sort(key=lambda t: t.epoch.gps)
    entries = iter(self)
    out = next(entries).copy()
    for series in entries:
        out.append(series, gap=gap, pad=pad)
    return out
Concatenate all of the elements of this list into a single object
22,527
def copy(self):
    """Return a copy of this list with each element copied to new memory."""
    duplicate = type(self)()
    for entry in self:
        duplicate.append(entry.copy())
    return duplicate
Return a copy of this list with each element copied to new memory
22,528
def to_lal_type_str(pytype):
    """Convert the input python type to a LAL type string.

    Accepts a LAL type string (returned unchanged), a LAL type integer,
    or anything understood by `numpy.dtype`.

    Raises
    ------
    ValueError
        If the input cannot be mapped to a LAL type string.
    """
    # already a LAL type string
    if pytype in LAL_TYPE_FROM_STR:
        return pytype
    # a LAL type integer
    if pytype in LAL_TYPE_STR:
        return LAL_TYPE_STR[pytype]
    # anything numpy understands
    try:
        dtype = numpy.dtype(pytype)
        return LAL_TYPE_STR_FROM_NUMPY[dtype.type]
    except (TypeError, KeyError):
        # bug fix: the original never interpolated ``pytype``, so users
        # saw a literal '{!r}' placeholder in the error message
        raise ValueError(
            "Failed to map {!r} to LAL type string".format(pytype))
Convert the input python type to a LAL type string
22,529
def find_typed_function(pytype, prefix, suffix, module=lal):
    """Return the LAL function of the correct type for ``pytype``."""
    laltype = to_lal_type_str(pytype)
    funcname = '{0}{1}{2}'.format(prefix, laltype, suffix)
    return getattr(module, funcname)
Returns the lal method for the correct type
22,530
def to_lal_unit(aunit):
    """Convert the input unit into a LALUnit.

    Raises
    ------
    ValueError
        If LAL has no unit corresponding to one of the decomposed bases.
    """
    if isinstance(aunit, string_types):
        aunit = units.Unit(aunit)
    aunit = aunit.decompose()
    lunit = lal.Unit()
    for base, power in zip(aunit.bases, aunit.powers):
        try:
            lalbase = LAL_UNIT_FROM_ASTROPY[base]
        except KeyError:
            # try units equivalent to this base
            # NOTE(review): there is no ``break`` on a successful match,
            # so the LAST matching equivalent wins -- confirm intended
            lalbase = None
            for eqbase in base.find_equivalent_units():
                try:
                    lalbase = LAL_UNIT_FROM_ASTROPY[eqbase]
                except KeyError:
                    continue
        if lalbase is None:
            raise ValueError("LAL has no unit corresponding to %r" % base)
        lunit *= lalbase ** power
    return lunit
Convert the input unit into a LALUnit
22,531
def from_lal_unit(lunit):
    """Convert a LALUnit into an `~astropy.units.Unit`."""
    # multiply together each indexed base unit raised to its exponent
    factors = (units.Unit(str(LAL_UNIT_INDEX[i])) ** exp
               for i, exp in enumerate(lunit.unitNumerator))
    return reduce(operator.mul, factors)
Convert a LALUnit into a ~astropy . units . Unit
22,532
def to_lal_ligotimegps(gps):
    """Convert the given GPS time to a ``lal.LIGOTimeGPS`` object."""
    gps = to_gps(gps)
    seconds, nanoseconds = gps.gpsSeconds, gps.gpsNanoSeconds
    return lal.LIGOTimeGPS(seconds, nanoseconds)
Convert the given GPS time to a lal . LIGOTimeGPS object
22,533
def _get_property_columns(tabletype, columns):
    """Return the GPS columns needed to read gpsproperty values for a table.

    Maps each requested gps-property column to its (seconds,
    nanoseconds) source column names.
    """
    from ligo.lw.lsctables import gpsproperty as GpsProperty
    rowvars = vars(tabletype.RowType)
    return {
        key: (rowvars[key].s_name, rowvars[key].ns_name)
        for key in columns
        if isinstance(rowvars[key], GpsProperty)
    }
Returns list of GPS columns required to read gpsproperties for a table
22,534
def _get_column_dtype(llwcol):
    """Get the data type of a LIGO_LW Column.

    Tries, in order: a numpy-style ``dtype`` attribute, the parent
    table's ``validcolumns`` declaration, and finally the type of the
    first element.  Returns ``None`` for an empty untyped column.
    """
    try:
        dtype = llwcol.dtype
        if dtype is numpy.dtype('O'):
            # object dtype tells us nothing; fall through to other methods
            raise AttributeError
        return dtype
    except AttributeError:
        try:
            # look up the declared LIGO_LW type for this column
            llwtype = llwcol.parentNode.validcolumns[llwcol.Name]
        except AttributeError:
            # not attached to a table; sniff the first element
            try:
                return type(llwcol[0])
            except IndexError:
                return None
        else:
            # map the LIGO_LW type name to a numpy (or python) type
            from ligo.lw.types import (ToPyType, ToNumPyType)
            try:
                return ToNumPyType[llwtype]
            except KeyError:
                return ToPyType[llwtype]
Get the data type of a LIGO_LW Column
22,535
def read_table(source, tablename=None, **kwargs):
    """Read a Table from one or more LIGO_LW XML documents.

    Splits ``kwargs`` into reading keywords (passed to the LIGO_LW
    reader) and conversion keywords (used when converting the LIGO_LW
    table into an astropy Table).
    """
    from ligo.lw import table as ligolw_table
    from ligo.lw.lsctables import TableByName
    # separate the keywords for reading from those for conversion
    read_kw = kwargs
    convert_kw = {
        'rename': None,
        'use_numpy_dtypes': False,
    }
    for key in filter(kwargs.__contains__, convert_kw):
        convert_kw[key] = kwargs.pop(key)
    if convert_kw['rename'] is None:
        convert_kw['rename'] = {}
    # ``columns`` drives conversion; ``ligolw_columns`` (falling back
    # to ``columns``) drives reading
    try:
        columns = list(kwargs.pop('columns'))
    except KeyError:
        columns = None
    try:
        read_kw['columns'] = list(kwargs.pop('ligolw_columns'))
    except KeyError:
        read_kw['columns'] = columns
    convert_kw['columns'] = columns or read_kw['columns']
    if tablename:
        tableclass = TableByName[ligolw_table.Table.TableName(tablename)]
        if convert_kw['columns'] is not None:
            # replace gps-property columns with the real (seconds,
            # nanoseconds) columns they are computed from
            readcols = set(read_kw['columns'])
            propcols = _get_property_columns(tableclass,
                                             convert_kw['columns'])
            for col in propcols:
                try:
                    readcols.remove(col)
                except KeyError:
                    continue
                readcols.update(propcols[col])
            read_kw['columns'] = list(readcols)
    return Table(read_ligolw_table(source, tablename=tablename, **read_kw),
                 **convert_kw)
Read a Table from one or more LIGO_LW XML documents
22,536
def write_table(table, target, tablename=None, ilwdchar_compat=None,
                **kwargs):
    """Write a `~astropy.table.Table` to file in LIGO_LW XML format."""
    if tablename is None:
        # fall back to the tablename recorded in the table metadata
        tablename = table.meta.get('tablename', None)
    if tablename is None:
        raise ValueError("please pass ``tablename=`` to specify the target "
                         "LIGO_LW Table Name")
    try:
        llwtable = table_to_ligolw(
            table,
            tablename,
            ilwdchar_compat=ilwdchar_compat or False,
        )
    except LigolwElementError as exc:
        # conversion failed; if the caller explicitly chose a compat
        # mode there is nothing more to try
        if ilwdchar_compat is not None:
            raise
        # otherwise retry with ilwdchar compatibility enabled,
        # re-raising the ORIGINAL error if that also fails
        try:
            llwtable = table_to_ligolw(table, tablename,
                                      ilwdchar_compat=True)
        except Exception:
            raise exc
    return write_ligolw_tables(target, [llwtable], **kwargs)
Write a ~astropy . table . Table to file in LIGO_LW XML format
22,537
def read_ascii_series(input_, array_type=Series, unpack=True, **kwargs):
    """Read a Series from an ASCII file with (x, y) columns."""
    xcol, ycol = loadtxt(input_, unpack=unpack, **kwargs)
    return array_type(ycol, xindex=xcol)
Read a Series from an ASCII file
22,538
def write_ascii_series(series, output, **kwargs):
    """Write a Series to a file in ASCII format as (x, y) columns."""
    data = column_stack((series.xindex.value, series.value))
    return savetxt(output, data, **kwargs)
Write a Series to a file in ASCII format
22,539
def channel_dict_kwarg(value, channels, types=None, astype=None):
    """Format the given kwarg value into a dict with one value per channel.

    Returns `None` if ``value`` cannot be interpreted.
    """
    if types is not None and isinstance(value, tuple(types)):
        # a single scalar value applies to every channel
        out = {chan: value for chan in channels}
    elif isinstance(value, (tuple, list)):
        # one value per channel, in order
        out = dict(zip(channels, value))
    elif value is None:
        out = {}
    elif isinstance(value, dict):
        out = value.copy()
    else:
        return None
    if astype is not None:
        return {key: astype(val) for key, val in out.items()}
    return out
Format the given kwarg value in a dict with one value per channel
22,540
def import_gwf_library ( library , package = __package__ ) : try : return importlib . import_module ( '.%s' % library , package = package ) except ImportError as exc : exc . args = ( 'Cannot import %s frame API: %s' % ( library , str ( exc ) ) , ) raise
Utility method to import the relevant timeseries . io . gwf frame API
22,541
def get_default_gwf_api():
    """Return the preferred (first importable) GWF library API."""
    for lib in APIS:
        try:
            import_gwf_library(lib)
        except ImportError:
            continue
        return lib
    raise ImportError("no GWF API available, please install a third-party GWF "
                      "library ({}) and try again".format(', '.join(APIS)))
Return the preferred GWF library
22,542
def print_verbose(*args, **kwargs):
    """Print something only if ``verbose=True`` is given."""
    verbose = kwargs.pop('verbose', False)
    if verbose is True:
        gprint(*args, **kwargs)
Utility to print something only if verbose = True is given
22,543
def set_parameter(connection, parameter, value, verbose=False):
    """Set a parameter on an NDS2 connection, reporting failure as a warning."""
    value = str(value)
    try:
        if not connection.set_parameter(parameter, value):
            raise ValueError("invalid parameter or value")
    except (AttributeError, ValueError) as exc:
        warnings.warn('failed to set {}={!r}: {}'.format(parameter, value,
                                                         str(exc)),
                      io_nds2.NDSWarning)
    else:
        print_verbose(' [{}] set {}={!r}'.format(connection.get_host(),
                                                 parameter, value),
                      verbose=verbose)
Set a parameter for the connection handling errors as warnings
22,544
def _pad_series(ts, pad, start, end):
    """Pad a timeseries with ``pad`` to cover the [start, end) interval."""
    span = ts.span
    rate = ts.sample_rate.value
    nleft = max(int((span[0] - start) * rate), 0)
    nright = max(int((end - span[1]) * rate), 0)
    if not (nleft or nright):
        return ts
    return ts.pad((nleft, nright), mode='constant', constant_values=(pad,))
Pad a timeseries to match the specified [ start end ) limits
22,545
def _create_series(ndschan, value, start, end, series_class=TimeSeries):
    """Create a constant-valued series covering the [start, end) interval."""
    channel = Channel.from_nds2(ndschan)
    nsamp = int((end - start) * channel.sample_rate.value)
    data = numpy_ones(nsamp) * value
    return series_class(data, t0=start, sample_rate=channel.sample_rate,
                        unit=channel.unit, channel=channel)
Create a timeseries to cover the specified [ start end ) limits
22,546
def _get_data_segments(channels, start, end, connection):
    """Get available data segments for the given channels."""
    availability = io_nds2.get_availability(channels, start, end,
                                            connection=connection)
    # intersect the per-channel segment lists
    return availability.intersection(availability.keys())
Get available data segments for the given channels
22,547
def in_git_clone():
    """Return True if the current directory is a git repository."""
    gitdir = '.git'
    if not os.path.isdir(gitdir):
        return False
    # a real repository has objects/, refs/ and a HEAD file
    return (os.path.isdir(os.path.join(gitdir, 'objects'))
            and os.path.isdir(os.path.join(gitdir, 'refs'))
            and os.path.exists(os.path.join(gitdir, 'HEAD')))
Returns True if the current directory is a git repository
22,548
def reuse_dist_file(filename):
    """Return True if an existing distribution file can be reused."""
    if not os.path.isfile(filename):
        return False
    try:
        import git
    except ImportError:
        # no GitPython: cannot regenerate, so reuse the existing file
        return True
    try:
        git.Repo().tags
    except (TypeError, git.GitError):
        # not a usable git repo: reuse the existing file
        return True
    return False
Returns True if a distribution file can be reused
22,549
def get_gitpython_version():
    """Determine the required version of GitPython.

    git >= 2.15 requires GitPython >= 2.1.8; outside a git clone the
    unversioned requirement suffices.
    """
    if not in_git_clone():
        return 'GitPython'
    git_version = '0.0.0'
    try:
        # NOTE: shell=True on a fixed command string (no user input)
        gitv = subprocess.check_output('git --version', shell=True)
    except (OSError, IOError, subprocess.CalledProcessError):
        pass
    else:
        if isinstance(gitv, bytes):
            gitv = gitv.decode('utf-8')
        git_version = gitv.strip().split()[2]
    if LooseVersion(git_version) >= '2.15':
        return 'GitPython>=2.1.8'
    return 'GitPython'
Determine the required version of GitPython
22,550
def get_setup_requires():
    """Return the list of packages required for this setup.py run."""
    # no dependencies needed just to print help
    if {'--help', '--help-commands'}.intersection(sys.argv):
        return list()
    return [dep
            for cmd, dependencies in SETUP_REQUIRES.items()
            if cmd in sys.argv
            for dep in dependencies]
Return the list of packages required for this setup . py run
22,551
def get_scripts(scripts_dir='bin'):
    """Get relative file paths for all files under ``scripts_dir``."""
    found = []
    for dirname, _, filenames in os.walk(scripts_dir):
        found += [os.path.join(dirname, name) for name in filenames]
    return found
Get relative file paths for all files under the scripts_dir
22,552
def _parse_years(years):
    """Parse a comma-separated string of years/ranges into a list of ints.

    e.g. ``'2008-2010,2012' -> [2008, 2009, 2010, 2012]``
    """
    out = []
    for token in years.split(','):
        if '-' in token:
            first, last = map(int, token.split('-'))
            out.extend(range(first, last + 1))
        else:
            out.append(int(token))
    return out
Parse string of ints include ranges into a list of int
22,553
def _format_years(years):
    """Format a list of ints into a string with collapsed ranges.

    e.g. ``[2008, 2009, 2010, 2012] -> '2008-2010, 2012'``
    """
    def _run_key(pair):
        # consecutive years share the same (value - index) key
        return pair[1] - pair[0]

    parts = []
    for _, grouped in groupby(enumerate(sorted(years)), _run_key):
        run = list(grouped)
        if len(run) == 1:
            parts.append(str(run[0][1]))
        else:
            parts.append("{}-{}".format(run[0][1], run[-1][1]))
    return ", ".join(parts)
Format a list of ints into a string including ranges
22,554
def update_copyright(path, year):
    """Update a file's copyright statement to include the given year."""
    with open(path, "r") as fobj:
        text = fobj.read().rstrip()
    match = COPYRIGHT_REGEX.search(text)
    start, stop = match.start("years"), match.end("years")
    # don't swallow a trailing space inside the matched span
    if text[stop - 1] == " ":
        stop -= 1
    years = set(_parse_years(match.group("years"))) | {year}
    with open(path, "w") as fobj:
        print(text[:start] + _format_years(years) + text[stop:], file=fobj)
Update a file s copyright statement to include the given year
22,555
def percentile(self, percentile):
    """Calculate a given spectral percentile for this SpectralVariance."""
    nrows, _ = self.shape
    out = numpy.zeros(nrows)
    for i in range(nrows):
        # pick the bin whose cumulative sum is closest to the
        # requested percentile
        cumsum = numpy.cumsum(self.value[i, :])
        nearest = numpy.abs(cumsum - percentile).argmin()
        out[i] = self.bins[nearest]
    name = '%s %s%% percentile' % (self.name, percentile)
    return FrequencySeries(out, epoch=self.epoch, channel=self.channel,
                           frequencies=self.bins[:-1], name=name)
Calculate a given spectral percentile for this SpectralVariance
22,556
def ndstype(self):
    """NDS type integer for this channel (None if no type is set)."""
    if self.type is None:
        return None
    return io_nds2.Nds2ChannelType.find(self.type).value
NDS type integer for this channel .
22,557
def ndsname(self):
    """Name of this channel as stored in the NDS database."""
    # raw-like types are stored under the plain name; anything else
    # gets a ',<type>' suffix
    if self.type in (None, 'raw', 'reduced', 'online'):
        return self.name
    return '%s,%s' % (self.name, self.type)
Name of this channel as stored in the NDS database
22,558
def query(cls, name, use_kerberos=None, debug=False):
    """Query the LIGO Channel Information System for a single Channel."""
    matches = ChannelList.query(name, use_kerberos=use_kerberos, debug=debug)
    if not matches:
        raise ValueError("No channels found matching '%s'" % name)
    if len(matches) > 1:
        raise ValueError("%d channels found matching '%s', please refine "
                         "search, or use `ChannelList.query` to return "
                         "all results" % (len(matches), name))
    return matches[0]
Query the LIGO Channel Information System for the Channel matching the given name
22,559
def from_nds2(cls, nds2channel):
    """Generate a new channel from an existing ``nds2.channel`` object."""
    # empty unit strings become None
    unit = nds2channel.signal_units or None
    ctype = nds2channel.channel_type_to_string(nds2channel.channel_type)
    # map the NDS2 data-type code onto a numpy type
    dtype = {
        nds2channel.DATA_TYPE_INT16: numpy.int16,
        nds2channel.DATA_TYPE_INT32: numpy.int32,
        nds2channel.DATA_TYPE_INT64: numpy.int64,
        nds2channel.DATA_TYPE_FLOAT32: numpy.float32,
        nds2channel.DATA_TYPE_FLOAT64: numpy.float64,
        nds2channel.DATA_TYPE_COMPLEX32: numpy.complex64,
    }.get(nds2channel.data_type)
    return cls(nds2channel.name, sample_rate=nds2channel.sample_rate,
               unit=unit, dtype=dtype, type=ctype)
Generate a new channel using an existing nds2 . channel object
22,560
def parse_channel_name(cls, name, strict=True):
    """Decompose a channel name string into its components."""
    match = cls.MATCH.search(name)
    # in strict mode the pattern must cover the whole string
    covers_all = (match is not None and match.start() == 0
                  and match.end() == len(name))
    if match is None or (strict and not covers_all):
        raise ValueError("Cannot parse channel name according to LIGO "
                         "channel-naming convention T990033")
    return match.groupdict()
Decompose a channel name string into its components
22,561
def copy(self):
    """Return a copy of this channel."""
    duplicate = type(self)(str(self))
    duplicate._init_from_channel(self)
    return duplicate
Returns a copy of this channel
22,562
def from_names(cls, *names):
    """Create a new ChannelList from one or more name strings."""
    out = cls()
    for namestr in names:
        # each string may itself contain several channel names
        for name in cls._split_names(namestr):
            out.append(Channel(name))
    return out
Create a new ChannelList from a list of names
22,563
def _split_names(namestr):
    """Split a comma-separated list of channel names.

    Channel names may themselves carry a comma-separated NDS2 type
    suffix (e.g. ``'X1:TEST,m-trend'``), so a plain ``str.split(',')``
    is not enough: known type suffixes are kept attached to their
    channel name.
    """
    out = []
    namestr = QUOTE_REGEX.sub('', namestr)
    while True:
        namestr = namestr.strip('\' \n')
        if ',' not in namestr:
            break
        for nds2type in io_nds2.Nds2ChannelType.names() + ['']:
            if nds2type and ',%s' % nds2type in namestr:
                # this entry carries a type suffix; keep it attached
                try:
                    channel, ctype, namestr = namestr.split(',', 2)
                except ValueError:
                    # the suffix was at the very end of the string
                    channel, ctype = namestr.split(',')
                    namestr = ''
                out.append('%s,%s' % (channel, ctype))
                break
            elif nds2type == '' and ',' in namestr:
                # plain comma separator between two channel names
                channel, namestr = namestr.split(',', 1)
                out.append(channel)
                break
    if namestr:
        out.append(namestr)
    return out
Split a comma - separated list of channel names .
22,564
def find(self, name):
    """Find the index of the Channel with the given name in this list.

    Raises `ValueError` if no match is found.
    """
    for index, channel in enumerate(self):
        if channel.name == name:
            return index
    raise ValueError(name)
Find the Channel with a specific name in this ChannelList .
22,565
def query(cls, name, use_kerberos=None, debug=False):
    """Query the LIGO Channel Information System for a ChannelList."""
    # deferred import to avoid a circular dependency at module load
    from .io import cis
    return cis.query(name, use_kerberos=use_kerberos, debug=debug)
Query the LIGO Channel Information System a ChannelList .
22,566
def query_nds2_availability(cls, channels, start, end, ctype=126,
                            connection=None, host=None, port=None):
    """Query NDS2 for when data are available for these channels."""
    start = int(to_gps(start))
    end = int(ceil(to_gps(end)))
    found = io_nds2.find_channels(channels, connection=connection,
                                  unique=True, epoch=(start, end),
                                  type=ctype)
    availability = io_nds2.get_availability(found, start, end,
                                            connection=connection)
    # re-key the result on the originally requested channel names
    return type(availability)(zip(channels, availability.values()))
Query for when data are available for these channels in NDS2
22,567
def get_gravityspy_triggers(tablename, engine=None, **kwargs):
    """Fetch data into an GravitySpyTable.

    If no SQL ``engine`` is given, one is created from the connection
    keywords (``db``, ``host``, ``user``, ``passwd``) in ``kwargs``.
    """
    from sqlalchemy.engine import create_engine
    from sqlalchemy.exc import ProgrammingError
    if engine is None:
        # pull connection keywords out of kwargs and build an engine
        conn_kw = {}
        for key in ('db', 'host', 'user', 'passwd'):
            try:
                conn_kw[key] = kwargs.pop(key)
            except KeyError:
                pass
        engine = create_engine(get_connection_str(**conn_kw))
    try:
        return GravitySpyTable(fetch(engine, tablename, **kwargs))
    except ProgrammingError as exc:
        if 'relation "%s" does not exist' % tablename in str(exc):
            # unknown table: augment the error with the list of
            # acceptable table names
            msg = exc.args[0]
            msg = msg.replace('does not exist',
                              'does not exist, the following tablenames are '
                              'acceptable:\n %s\n'
                              % '\n '.join(engine.table_names()))
            exc.args = (msg,)
        raise
Fetch data into an GravitySpyTable
22,568
def get_connection_str(db='gravityspy',
                       host='gravityspy.ciera.northwestern.edu',
                       user=None, passwd=None):
    """Create a PostgreSQL connection string to pass to ``create_engine``."""
    if (not user) or (not passwd):
        # fall back to environment variables for credentials
        user = os.getenv('GRAVITYSPY_DATABASE_USER', None)
        passwd = os.getenv('GRAVITYSPY_DATABASE_PASSWD', None)
    if (not user) or (not passwd):
        raise ValueError('Remember to either pass '
                         'or export GRAVITYSPY_DATABASE_USER '
                         'and export GRAVITYSPY_DATABASE_PASSWD in order '
                         'to access the Gravity Spy Data: '
                         'https://secrets.ligo.org/secrets/144/'
                         ' description is username and secret is password.')
    return 'postgresql://{0}:{1}@{2}:5432/{3}'.format(user, passwd, host, db)
Create string to pass to create_engine
22,569
def get_timezone_offset(ifo, dt=None):
    """Return the offset in seconds between UTC and the given interferometer."""
    import pytz
    dt = dt or datetime.datetime.now()
    offset = pytz.timezone(get_timezone(ifo)).utcoffset(dt)
    # equivalent to days*86400 + seconds + microseconds*1e-6
    return offset.total_seconds()
Return the offset in seconds between UTC and the given interferometer
22,570
def normalize_fft_params(series, kwargs=None, func=None):
    """Normalize a set of FFT parameters for processing

    Converts ``fftlength`` and ``overlap`` (seconds) into ``nfft`` and
    ``noverlap`` (samples), normalizes the ``window`` specification, and
    attaches an FFT ``plan`` for LAL methods.  The ``kwargs`` dict is
    modified in place and also returned.

    Parameters
    ----------
    series : `TimeSeries`-like
        provides ``sample_rate``, ``duration`` and ``dtype``

    kwargs : `dict`, optional
        FFT keyword arguments to normalize (modified in place)

    func : callable, optional
        the FFT method that will consume these parameters; its name and
        home library select method-specific defaults

    Returns
    -------
    kwargs : `dict`
        the same dict, updated with ``nfft``, ``noverlap``, and
        (where appropriate) ``window`` and ``plan``
    """
    if kwargs is None:
        kwargs = dict()
    samp = series.sample_rate
    # default fftlength to the full duration of the series
    fftlength = kwargs.pop('fftlength', None) or series.duration
    overlap = kwargs.pop('overlap', None)
    window = kwargs.pop('window', None)
    if func is None:
        method = library = None
    else:
        method = func.__name__
        library = _fft_library(func)
    # convert fftlength (seconds) to nfft (samples)
    nfft = seconds_to_samples(fftlength, samp)
    noverlap = _normalize_overlap(overlap, window, nfft, samp, method=method)
    window = _normalize_window(window, nfft, library, series.dtype)
    if window is not None:
        kwargs['window'] = window
    # LAL methods need a pre-computed FFT plan
    # NOTE(review): the tag here is 'lal' but _normalize_window compares
    # against '_lal' -- confirm what _fft_library() actually returns
    if library == 'lal' and kwargs.get('plan', None) is None:
        from ._lal import generate_fft_plan
        kwargs['plan'] = generate_fft_plan(nfft, dtype=series.dtype)
    kwargs.update({
        'nfft': nfft,
        'noverlap': noverlap,
    })
    return kwargs
Normalize a set of FFT parameters for processing
22,571
def _normalize_overlap(overlap, window, nfft, samp, method='welch'):
    """Normalise an overlap in physical units to a number of samples

    Parameters
    ----------
    overlap : `float`, `None`
        the overlap in seconds, or `None` to select a default

    window : `str`, other
        the window specification; a recommended overlap exists only for
        named windows

    nfft : `int`
        the FFT length, in samples

    samp : sample rate
        the sample rate of the data

    method : `str`, optional
        the PSD method name

    Returns
    -------
    noverlap : `int`
        the overlap, in samples
    """
    # Bartlett's method is defined with zero overlap
    if method == 'bartlett':
        return 0
    # an explicit overlap just needs converting to samples
    if overlap is not None:
        return seconds_to_samples(overlap, samp)
    # otherwise use the recommended overlap for a named window ...
    if isinstance(window, string_types):
        return recommended_overlap(window, nfft)
    # ... or none at all
    return 0
Normalise an overlap in physical units to a number of samples
22,572
def _normalize_window(window, nfft, library, dtype):
    """Normalise a window specification for a PSD calculation

    Parameters
    ----------
    window : `str`, `tuple`, `numpy.ndarray`, `None`
        the window specification given by the user

    nfft : `int`
        the FFT length, in samples

    library : `str`, `None`
        tag for the library providing the FFT method

    dtype : `numpy.dtype`
        the dtype of the data being transformed

    Returns
    -------
    a window object appropriate for the target library, or `None` when
    nothing needs to be set
    """
    # LAL methods take a lal window object
    # NOTE(review): tag compared here is '_lal' while normalize_fft_params
    # checks 'lal' -- confirm against _fft_library()
    if library == '_lal' and isinstance(window, numpy.ndarray):
        from ._lal import window_from_array
        return window_from_array(window)
    if library == '_lal':
        from ._lal import generate_window
        return generate_window(nfft, window=window, dtype=dtype)
    # map a window name to its canonical (scipy) name
    if isinstance(window, string_types):
        window = canonical_name(window)
    # build the window array for a name or parameter tuple
    if isinstance(window, string_types + (tuple,)):
        return get_window(window, nfft)
    # anything else (e.g. a pre-built array, or None): leave unset
    return None
Normalise a window specification for a PSD calculation
22,573
def set_fft_params(func):
    """Decorate a method to automatically convert quantities to samples

    The wrapped function receives the same arguments, but its keyword
    arguments are normalized in place (via `normalize_fft_params`)
    before the call.
    """
    @wraps(func)
    def wrapper(series, method_func, *args, **kwargs):
        """Normalize FFT keywords, then execute the wrapped function
        """
        # for a (series, other) tuple, normalize against the first series
        ref = series[0] if isinstance(series, tuple) else series
        normalize_fft_params(ref, kwargs=kwargs, func=method_func)
        return func(series, method_func, *args, **kwargs)

    return wrapper
Decorate a method to automatically convert quantities to samples
22,574
def psd(timeseries, method_func, *args, **kwargs):
    """Generate a PSD using a method function

    All arguments are presumed to be given in physical units, and are
    normalized to samples before delegating to `_psdn`.

    Parameters
    ----------
    timeseries : `TimeSeries`, `tuple`
        the data to process, or a 2-tuple of series to correlate

    method_func : `callable`
        the function that will be called to perform the calculation

    *args, **kwargs
        other arguments to pass to ``method_func`` when processing
    """
    return _psdn(timeseries, method_func, *args, **kwargs)
Generate a PSD using a method function
22,575
def _psdn(timeseries, method_func, *args, **kwargs):
    """Generate a PSD using a method function with FFT arguments in samples

    Parameters
    ----------
    timeseries : `TimeSeries`, `tuple`
        the data to process, or a 2-tuple of series to cross-correlate

    method_func : `callable`
        the function that will be called to perform the calculation

    *args, **kwargs
        other arguments to pass to ``method_func``; must include an
        ``nfft`` keyword (in samples), which is popped here

    Returns
    -------
    the output of ``method_func``
    """
    # a 2-tuple indicates a cross-density-style calculation between two
    # series; anything else is treated as a single series
    try:
        timeseries, other = timeseries
    except ValueError:
        return method_func(timeseries, kwargs.pop('nfft'), *args, **kwargs)
    else:
        return method_func(timeseries, other, kwargs.pop('nfft'),
                           *args, **kwargs)
Generate a PSD using a method function with FFT arguments in samples
22,576
def average_spectrogram(timeseries, method_func, stride, *args, **kwargs):
    """Generate an average spectrogram using a method function

    Each time bin of the output is a PSD estimate made from a single
    chunk of data of duration ``stride``.

    Parameters
    ----------
    timeseries : `TimeSeries`, `tuple`
        the data to process, or a 2-tuple of series to cross-correlate

    method_func : `callable`
        the function that will be called to calculate each PSD

    stride : `float`
        the duration (seconds) of each spectrogram time bin

    *args, **kwargs
        other arguments for ``method_func``; ``nproc`` selects the number
        of parallel processes

    Returns
    -------
    spectrogram : `~gwpy.spectrogram.Spectrogram`
    """
    # unpack CSD-style 2-tuple of (series, other); otherwise single series
    try:
        timeseries, other = timeseries
    except ValueError:
        timeseries = timeseries  # no-op, keeps single-series input as-is
        other = None

    from ...spectrogram import Spectrogram

    nproc = kwargs.pop('nproc', 1)

    # convert stride (seconds) to samples, and default fftlength to stride
    epoch = timeseries.t0.value
    nstride = seconds_to_samples(stride, timeseries.sample_rate)
    kwargs['fftlength'] = kwargs.pop('fftlength', stride) or stride
    normalize_fft_params(timeseries, kwargs=kwargs, func=method_func)
    nfft = kwargs['nfft']
    noverlap = kwargs['noverlap']

    # sanity-check sizes
    if nstride > timeseries.size:
        raise ValueError("stride cannot be greater than the duration of "
                         "this TimeSeries")
    if nfft > nstride:
        raise ValueError("fftlength cannot be greater than stride")
    if noverlap >= nfft:
        raise ValueError("overlap must be less than fftlength")

    def _psd(series):
        """Calculate one PSD for one stride of data
        """
        psd_ = _psdn(series, method_func, *args, **kwargs)
        # drop the per-chunk epoch; the spectrogram epoch is set below
        del psd_.epoch
        return psd_

    # chunk the input into strides, pairing chunks for a CSD calculation
    tschunks = _chunk_timeseries(timeseries, nstride, noverlap)
    if other is not None:
        otherchunks = _chunk_timeseries(other, nstride, noverlap)
        tschunks = zip(tschunks, otherchunks)

    # calculate PSDs (in parallel when nproc > 1)
    psds = mp_utils.multiprocess_with_queues(nproc, _psd, tschunks)

    # recombine the individual PSDs into a spectrogram
    return Spectrogram.from_spectra(*psds, epoch=epoch, dt=stride)
Generate an average spectrogram using a method function
22,577
def spectrogram(timeseries, method_func, **kwargs):
    """Generate a spectrogram using a method function

    PSDs are computed over overlapping FFT-length chunks, then averaged
    into each output time bin with a triangular weighting.

    Parameters
    ----------
    timeseries : `TimeSeries`
        the data to process

    method_func : `callable`
        the PSD method; must return a 2-tuple whose second element is
        the PSD array

    **kwargs
        other arguments for ``method_func``; must include ``nfft`` and
        ``noverlap`` (in samples); ``nproc`` selects the number of
        parallel processes

    Returns
    -------
    spectrogram : `~gwpy.spectrogram.Spectrogram`
    """
    from ...spectrogram import Spectrogram

    # get parameters (all in samples)
    sampling = timeseries.sample_rate.to('Hz').value
    nproc = kwargs.pop('nproc', 1)
    nfft = kwargs.pop('nfft')
    noverlap = kwargs.pop('noverlap')
    nstride = nfft - noverlap

    # sanity check parameters
    if noverlap >= nfft:
        raise ValueError("overlap must be less than fftlength")

    def _psd(series):
        """Calculate one PSD, keeping only the second element of the
        method's return (the PSD array)
        """
        return method_func(series, nfft=nfft, **kwargs)[1]

    # compute the (start, stop) sample indices of each FFT chunk
    chunks = []
    x = 0
    while x + nfft <= timeseries.size:
        y = min(timeseries.size, x + nfft)
        chunks.append((x, y))
        x += nstride

    tschunks = (timeseries.value[i:j] for i, j in chunks)

    # calculate PSDs (in parallel when nproc > 1)
    psds = mp_utils.multiprocess_with_queues(nproc, _psd, tschunks)

    # pack the PSDs into a 2-D array, one row per chunk
    numtimes = 1 + int((timeseries.size - nstride) / nstride)
    numfreqs = int(nfft / 2 + 1)
    data = numpy.zeros((numtimes, numfreqs), dtype=timeseries.dtype)
    data[:len(psds)] = psds

    # create the output spectrogram (values filled in below)
    unit = fft_utils.scale_timeseries_unit(
        timeseries.unit, scaling=kwargs.get('scaling', 'density'))
    out = Spectrogram(numpy.empty((numtimes, numfreqs),
                                  dtype=timeseries.dtype),
                      copy=False, dt=nstride * timeseries.dt,
                      t0=timeseries.t0, f0=0, df=sampling / nfft,
                      unit=unit, name=timeseries.name,
                      channel=timeseries.channel)

    # average the overlapping chunk-PSDs into each output bin using a
    # triangular weighting over `density` chunks
    density = nfft // nstride
    weights = get_window('triangle', density)
    for i in range(numtimes):
        # [x, y) is the range of chunk-PSDs overlapping output bin i
        x = max(0, i + 1 - density)
        y = min(i + 1, numtimes - density + 1)
        if x == 0:
            # early bins only see the tail of the weights
            wgt = weights[-y:]
        elif y == numtimes - density + 1:
            # late bins only see the head of the weights
            wgt = weights[:y - x]
        else:
            wgt = weights
        # calculate the weighted average for this bin
        out.value[i, :] = numpy.average(data[x:y], axis=0, weights=wgt)

    return out
Generate a spectrogram using a method function
22,578
def get_color_label(self):
    """Text for colorbar label

    Returns the normalization reference when ``norm`` was requested,
    otherwise an 'ASD' label carrying the data unit (LaTeX-rendered
    when ``usetex`` is enabled); mixed units defer to the parent class.
    """
    if self.args.norm:
        return 'Normalized to {}'.format(self.args.norm)
    if len(self.units) == 1 and self.usetex:
        return r'ASD $\left({0}\right)$'.format(
            self.units[0].to_string('latex').strip('$'))
    elif len(self.units) == 1:
        return 'ASD ({0})'.format(self.units[0].to_string('generic'))
    # mixed units: fall back to the parent implementation
    return super(Spectrogram, self).get_color_label()
Text for colorbar label
22,579
def get_stride(self):
    """Calculate the stride for the spectrogram

    Returns
    -------
    stride : `float` or `None`
        the length (seconds) of each spectrogram column, or `None` to
        indicate that a high-resolution spectrogram should be used
        instead
    """
    fftlength = float(self.args.secpfft)
    overlap = fftlength * self.args.overlap
    stride = fftlength - overlap
    # number of FFTs across the full duration
    nfft = self.duration / stride
    # FFTs per pixel of plot width (0.8 presumably accounts for axes
    # margins -- TODO confirm)
    ffps = int(nfft / (self.width * 0.8))
    if ffps > 3:
        return max(2 * fftlength, ffps * stride + fftlength - 1)
    # too few FFTs per pixel: use the high-resolution path
    return None
Calculate the stride for the spectrogram
22,580
def get_spectrogram(self):
    """Calculate the spectrogram to be plotted

    Returns the amplitude spectral density (square root of the PSD
    spectrogram) of the first time series.
    """
    args = self.args
    fftlength = float(args.secpfft)
    overlap = fftlength * args.overlap
    self.log(2, "Calculating spectrogram secpfft: %s, overlap: %s" %
             (fftlength, overlap))

    stride = self.get_stride()

    if stride:
        # averaged spectrogram, one column per stride
        specgram = self.timeseries[0].spectrogram(
            stride, fftlength=fftlength, overlap=overlap,
            window=args.window)
        nfft = stride * (stride // (fftlength - overlap))
        self.log(3, 'Spectrogram calc, stride: %s, fftlength: %s, '
                 'overlap: %sf, #fft: %d' % (stride, fftlength,
                                             overlap, nfft))
    else:
        # high-resolution spectrogram
        specgram = self.timeseries[0].spectrogram2(
            fftlength=fftlength, overlap=overlap, window=args.window)
        nfft = specgram.shape[0]
        self.log(3, 'HR-Spectrogram calc, fftlength: %s, overlap: %s, '
                 '#fft: %d' % (fftlength, overlap, nfft))

    # ASD = sqrt(PSD)
    return specgram ** (1 / 2.)
Calculate the spectrogram to be plotted
22,581
def get_ylabel(self):
    """Text for y-axis label

    Includes the (LaTeX-rendered) data unit when all series share one.
    """
    if len(self.units) != 1:
        # mixed (or no) units: plain label
        return 'ASD'
    unit = self.units[0].to_string('latex').strip('$')
    return r'ASD $\left({0}\right)$'.format(unit)
Text for y - axis label
22,582
def read(cls, source, *args, **kwargs):
    """Read data from a source into a `gwpy.timeseries` object.

    Parameters
    ----------
    source : `str`, `list`
        the file path, or cache of file paths, to read

    *args, **kwargs
        other arguments for the underlying reader; ``start`` and ``end``
        bound the data read, ``pad`` gives a fill value for gaps, and
        ``gap`` selects the gap-handling mode (defaults to ``'raise'``,
        or ``'pad'`` when a ``pad`` value is given)

    Returns
    -------
    data : an instance of ``cls`` joined from all input files
    """
    # if reading a cache, prepare (sieve/sort) it first
    if io_cache.is_cache(source):
        from .cache import preformat_cache
        source = preformat_cache(source, *args[1:],
                                 start=kwargs.get('start'),
                                 end=kwargs.get('end'))

    # build the joiner that combines per-file results, honouring the
    # gap/pad options
    pad = kwargs.pop('pad', None)
    gap = kwargs.pop('gap', 'raise' if pad is None else 'pad')
    joiner = _join_factory(cls, gap, pad)

    # read each file and join the results
    return io_read_multi(joiner, cls, source, *args, **kwargs)
Read data from a source into a gwpy . timeseries object .
22,583
def _join_factory(cls, gap, pad):
    """Build a joiner for the given cls, and the given padding options

    Returns a callable that combines a sequence of per-file results into
    a single ``cls`` instance: dict-like classes are appended entry by
    entry, otherwise the parts are joined as a list of series.
    """
    if issubclass(cls, dict):
        def _join(data):
            out = cls()
            data = list(data)
            # pop and delete as we go, presumably to release each part
            # as early as possible
            while data:
                tsd = data.pop(0)
                out.append(tsd, gap=gap, pad=pad)
                del tsd
            return out
    else:
        from .. import TimeSeriesBaseList

        def _join(arrays):
            list_ = TimeSeriesBaseList(*arrays)
            return list_.join(pad=pad, gap=gap)
    return _join
Build a joiner for the given cls and the given padding options
22,584
def table_from_file(source, ifo=None, columns=None, selection=None,
                    loudest=False, extended_metadata=True):
    """Read a Table from a PyCBC live HDF5 file

    Parameters
    ----------
    source : `h5py.File`, `h5py.Group`
        the open HDF5 file, or the interferometer group within it

    ifo : `str`, optional
        the prefix of the relevant interferometer; required when the
        file contains groups for multiple interferometers

    columns : `list` of `str`, optional
        the columns to return; defaults to all valid columns found

    selection : `str`, `list`, optional
        row filters to apply

    loudest : `bool`, optional
        if `True`, keep only the rows indexed by the 'loudest' dataset

    extended_metadata : `bool`, optional
        if `True` (default), include extended metadata (e.g. the PSD)
        in the output table's meta

    Returns
    -------
    table : `Table`
        the filtered table of the requested columns
    """
    # locate the interferometer group inside a File
    if isinstance(source, h5py.File):
        source, ifo = _find_table_group(source, ifo=ifo)

    # default to all valid columns
    if columns is None:
        columns = list(_get_columns(source))
    readcols = set(columns)

    # parse the selection, reading also any columns it filters on
    selection = parse_column_filters(selection or [])
    if selection:
        readcols.update(list(zip(*selection))[0])

    # assemble the metadata dict
    meta = {'ifo': ifo}
    meta.update(source.attrs)
    if extended_metadata:
        meta.update(_get_extended_metadata(source))

    if loudest:
        loudidx = source['loudest'][:]

    # read each column, computing derived columns via GET_COLUMN where
    # the dataset does not exist directly
    data = []
    for name in readcols:
        try:
            arr = source[name][:]
        except KeyError:
            if name in GET_COLUMN:
                arr = GET_COLUMN[name](source)
            else:
                raise
        if loudest:
            arr = arr[loudidx]
        data.append(Table.Column(arr, name=name))

    # filter the rows, then restrict to the requested columns (in order)
    return filter_table(Table(data, meta=meta), selection)[columns]
Read a Table from a PyCBC live HDF5 file
22,585
def _find_table_group(h5file, ifo=None):
    """Find the right `h5py.Group` within the given `h5py.File`

    When ``ifo`` is not given, it is inferred as the single
    non-excluded top-level key of the file.

    Returns
    -------
    (group, ifo) : 2-tuple

    Raises
    ------
    ValueError
        if ``ifo`` is `None` and more than one candidate group exists
    KeyError
        if the ``ifo`` group is not found in the file
    """
    exclude = ('background',)
    if ifo is None:
        try:
            # exactly one candidate key expected
            ifo, = [key for key in h5file if key not in exclude]
        except ValueError as exc:
            # re-raise with a more helpful message
            exc.args = ("PyCBC live HDF5 file contains dataset groups "
                        "for multiple interferometers, please specify "
                        "the prefix of the relevant interferometer via "
                        "the `ifo` keyword argument, e.g: `ifo=G1`",)
            raise
    try:
        return h5file[ifo], ifo
    except KeyError as exc:
        exc.args = ("No group for ifo %r in PyCBC live HDF5 file" % ifo,)
        raise
Find the right h5py . Group within the given h5py . File
22,586
def _get_columns(h5group):
    """Find valid column names from a PyCBC HDF5 Group

    Returns
    -------
    columns : `set` of `str`
        the dataset names usable as table columns, excluding metadata
        datasets
    """
    found = set()
    # sorted() guarantees 'x' is seen before 'x_template'
    for key in sorted(h5group):
        item = h5group[key]
        # only datasets can be columns
        if not isinstance(item, h5py.Dataset):
            continue
        if key == 'template_boundaries':
            continue
        # skip '<name>_template' duplicates of an existing column
        if key.endswith('_template') and key[:-9] in found:
            continue
        found.add(key)
    return found - META_COLUMNS
Find valid column names from a PyCBC HDF5 Group
22,587
def _get_extended_metadata(h5group):
    """Extract the extended metadata for a PyCBC table in HDF5

    Packs the non-column datasets named in ``META_COLUMNS`` into a
    `dict`, converting the PSD (when present) into a `FrequencySeries`.

    Returns
    -------
    meta : `dict`
        the metadata dict
    """
    meta = dict()

    # the PSD becomes a FrequencySeries starting at 0 Hz with the
    # stored delta_f
    try:
        psd = h5group['psd']
    except KeyError:
        pass
    else:
        from gwpy.frequencyseries import FrequencySeries
        meta['psd'] = FrequencySeries(
            psd[:], f0=0, df=psd.attrs['delta_f'], name='pycbc_live')

    # everything else is copied through as a plain array
    for key in META_COLUMNS - {'psd'}:
        try:
            value = h5group[key][:]
        except KeyError:
            pass
        else:
            meta[key] = value

    return meta
Extract the extended metadata for a PyCBC table in HDF5
22,588
def filter_empty_files(files, ifo=None):
    """Remove empty PyCBC-HDF5 files from a list

    Parameters
    ----------
    files : `list`
        the paths/objects to filter

    ifo : `str`, optional
        the interferometer prefix, passed to `empty_hdf5_file`

    Returns
    -------
    the non-empty entries, in the same container type as ``files``
    """
    keep = [path for path in files if not empty_hdf5_file(path, ifo=ifo)]
    return type(files)(keep)
Remove empty PyCBC - HDF5 files from a list
22,589
def empty_hdf5_file(h5f, ifo=None):
    """Determine whether a PyCBC-HDF5 file is empty

    A file is "empty" when it has no top-level keys, or (when ``ifo``
    is given) when the ``ifo`` group is absent or contains only a
    'psd' dataset.

    Returns
    -------
    empty : `bool`
    """
    # operate on the parent File of whatever object we were handed
    h5f = h5f.file
    if not list(h5f):
        return True
    if ifo is None:
        return False
    return ifo not in h5f or list(h5f[ifo]) == ['psd']
Determine whether PyCBC - HDF5 file is empty
22,590
def identify_pycbc_live(origin, filepath, fileobj, *args, **kwargs):
    """Identify a PyCBC Live file as an HDF5 with the correct name

    Returns
    -------
    `True` if the target is an HDF5 file whose basename matches the
    PyCBC Live filename pattern, otherwise `False`
    """
    is_hdf5 = identify_hdf5(origin, filepath, fileobj, *args, **kwargs)
    return bool(is_hdf5 and filepath is not None
                and PYCBC_FILENAME.match(basename(filepath)))
Identify a PyCBC Live file as an HDF5 with the correct name
22,591
def get_new_snr(h5group, q=6., n=2.):
    """Calculate the 'new SNR' column for this PyCBC HDF5 table group

    Entries whose reduced chi-squared exceeds unity are rescaled via
    ``_new_snr_scale``; all others keep their raw SNR.

    Returns
    -------
    newsnr : `numpy.ndarray`
    """
    snr = h5group['snr'][:].copy()
    rchisq = h5group['chisq'][:]
    # rescale only where reduced chi-squared > 1
    above = rchisq > 1.
    snr[above] *= _new_snr_scale(rchisq[above], q=q, n=n)
    return snr
Calculate the new SNR column for this PyCBC HDF5 table group
22,592
def get_mchirp(h5group):
    """Calculate the chirp-mass column for this PyCBC HDF5 table group

    Returns
    -------
    mchirp : `numpy.ndarray`
        ``(m1 * m2) ** 0.6 / (m1 + m2) ** 0.2``
    """
    m1 = h5group['mass1'][:]
    m2 = h5group['mass2'][:]
    product = m1 * m2
    total = m1 + m2
    return product ** 0.6 / total ** 0.2
Calculate the chirp mass column for this PyCBC HDF5 table group
22,593
def format_nd_slice(item, ndim):
    """Preformat a getitem argument as an N-tuple

    Parameters
    ----------
    item : object, `tuple`
        the slicing argument

    ndim : `int`
        the number of dimensions to format for

    Returns
    -------
    a tuple of exactly ``ndim`` entries, truncated or padded with
    `None` as required
    """
    if isinstance(item, tuple):
        parts = item
    else:
        parts = (item,)
    # pad with None (or truncate) to exactly ndim entries
    pad = (None,) * (ndim - len(parts))
    return parts[:ndim] + pad
Preformat a getitem argument as an N - tuple
22,594
def slice_axis_attributes(old, oldaxis, new, newaxis, slice_):
    """Set axis metadata for ``new`` by slicing an axis of ``old``

    Parameters
    ----------
    old : array-like
        the original object that was sliced

    oldaxis : `str`
        the name ('x', 'y', ...) of the sliced axis on ``old``

    new : array-like
        the new, sliced object to receive the metadata

    newaxis : `str`
        the target axis name on ``new``

    slice_ : `slice`, `numpy.ndarray`
        the slice (or index array) that was applied

    Returns
    -------
    new : the same ``new`` object, updated in place
    """
    slice_ = as_slice(slice_)

    # attribute-name templates for the axis metadata
    index = '{}index'.format
    origin = '{}0'.format
    delta = 'd{}'.format

    if hasattr(old, '_{}index'.format(oldaxis)):
        # old has an explicit index array: slice it directly
        setattr(new, index(newaxis),
                getattr(old, index(oldaxis))[slice_])
    elif isinstance(slice_, slice) or not numpy.sum(slice_):
        # a regular slice (or an index array selecting nothing):
        # derive the new origin and step from start/step
        if isinstance(slice_, slice):
            offset = slice_.start or 0
            step = slice_.step or 1
        else:
            offset = 0
            step = 1
        dx = getattr(old, delta(oldaxis))
        x0 = getattr(old, origin(oldaxis))
        setattr(new, origin(newaxis), x0 + offset * dx)
        setattr(new, delta(newaxis), dx * step)
    else:
        # an index array selecting something: slice old's index
        # (presumably generated on demand -- confirm against Array API)
        setattr(new, index(newaxis),
                getattr(old, index(oldaxis))[slice_])
    return new
Set axis metadata for new by slicing an axis of old
22,595
def null_slice(slice_):
    """Return `True` if a slice will have no effect, otherwise `False`

    Parameters
    ----------
    slice_ : `slice`, `numpy.ndarray`, other
        the slice specification to test

    Returns
    -------
    null : `bool`
        `True` when applying ``slice_`` would select everything
    """
    try:
        slice_ = as_slice(slice_)
    except TypeError:  # not slice-like
        return False
    # a boolean/index array selecting everything is a null slice
    if isinstance(slice_, numpy.ndarray) and numpy.all(slice_):
        return True
    # the trivial full slices
    if isinstance(slice_, slice) and slice_ in (
            slice(None, None, None), slice(0, None, 1)):
        return True
    # previously this fell through returning an implicit None;
    # return an explicit False to match the documented bool contract
    return False
Returns True if a slice will have no effect
22,596
def as_slice(slice_):
    """Convert an object to a slice, if possible

    Integers and `None` map to the full slice ``slice(0, None, 1)``;
    slices and index arrays pass through; lists/tuples are converted
    element-wise to a tuple.

    Raises
    ------
    TypeError
        if the object cannot be interpreted as a slice
    """
    # scalars (and None) address the whole axis
    if isinstance(slice_, (Integral, numpy.integer, type(None))):
        return slice(0, None, 1)
    # already slice-like
    if isinstance(slice_, (slice, numpy.ndarray)):
        return slice_
    # recurse over a sequence of slice-like objects
    if isinstance(slice_, (list, tuple)):
        return tuple(as_slice(item) for item in slice_)
    raise TypeError("Cannot format {!r} as slice".format(slice_))
Convert an object to a slice if possible
22,597
def query(name, use_kerberos=None, debug=False):
    """Query the Channel Information System for details on the given
    channel name

    Parameters
    ----------
    name : `str`
        the name of the channel to match

    use_kerberos : `bool`, optional
        whether to authenticate with an existing kerberos ticket

    debug : `bool`, optional
        print verbose HTTP debugging statements

    Returns
    -------
    channels : `ChannelList`
        the matching channels, sorted by name
    """
    url = '%s/?q=%s' % (CIS_API_URL, name)
    more = True
    out = ChannelList()
    # page through the results until no 'next' URL remains
    while more:
        reply = _get(url, use_kerberos=use_kerberos, debug=debug)
        try:
            out.extend(map(parse_json, reply[u'results']))
        except KeyError:
            # no 'results' key in this page
            pass
        except TypeError:
            # reply is a plain list of channel dicts: single page
            out.extend(map(parse_json, reply))
            break
        more = 'next' in reply and reply['next'] is not None
        if more:
            url = reply['next']
        else:
            break
    out.sort(key=lambda c: c.name)
    return out
Query the Channel Information System for details on the given channel name
22,598
def _get(url, use_kerberos=None, debug=False):
    """Perform a GET query against the CIS

    Parameters
    ----------
    url : `str`
        the full URL to request

    use_kerberos : `bool`, optional
        whether to authenticate with an existing kerberos ticket

    debug : `bool`, optional
        print verbose HTTP debugging statements

    Returns
    -------
    the decoded JSON response

    Raises
    ------
    ValueError
        if the HTTP request fails
    """
    from ligo.org import request
    try:
        response = request(url, debug=debug, use_kerberos=use_kerberos)
    except HTTPError:
        raise ValueError("Channel not found at URL %s "
                         "Information System. Please double check the "
                         "name and try again." % url)
    # decode bytes responses before parsing
    if isinstance(response, bytes):
        response = response.decode('utf-8')
    return json.loads(response)
Perform a GET query against the CIS
22,599
def parse_json(data):
    """Parse the input data dict into a `Channel`.

    Parameters
    ----------
    data : `dict`
        a single channel record from the CIS JSON reply

    Returns
    -------
    c : `Channel`
    """
    # pull out the fields that map onto Channel attributes
    rate = data['datarate']
    unit = data['units']
    dtype = CIS_DATA_TYPE[data['datatype']]
    model = data['source']
    url = data['displayurl']
    return Channel(data['name'], sample_rate=rate, unit=unit,
                   dtype=dtype, model=model, url=url)
Parse the input data dict into a Channel.