idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
22,300
def derive_signature(key, qs):
    """Derive the HMAC-SHA1 signature of a query string under a key.

    Parameters
    ----------
    key : str or None
        the secret key; None is treated as the empty string
    qs : str or None
        the query string to sign; None is treated as the empty string

    Returns
    -------
    str
        the hexadecimal HMAC-SHA1 digest
    """
    # normalise None inputs to empty strings before encoding
    key = key or ""
    qs = qs or ""
    digest = hmac.new(key.encode(), qs.encode(), hashlib.sha1)
    return digest.hexdigest()
Derives the signature from the supplied query string using the key .
22,301
def sign(key, qs):
    """Sign the query string using the key.

    Returns the query string with an appended, URL-encoded ``sig``
    parameter containing the HMAC-SHA1 signature.
    """
    signature = derive_signature(key, qs)
    encoded = urlencode([("sig", signature)])
    return "%s&%s" % (qs, encoded)
Signs the query string using the key .
22,302
def verify_signature(key, qs):
    """Verify that the ``sig`` parameter in the query string is correct.

    Re-derives the signature over the query string with any existing
    ``sig`` parameter removed, and compares it against the ``sig``
    value present in the query string.
    """
    # strip any existing sig parameter before re-deriving
    unsigned = re.sub(r'&?sig=[^&]*', '', qs)
    expected = derive_signature(key, unsigned)
    provided = urlparse.parse_qs(qs).get("sig", [None])[0]
    return provided == expected
Verifies that the signature in the query string is correct .
22,303
def canonical_name(name):
    """Find the canonical name for the given window in scipy.signal.

    Parameters
    ----------
    name : str
        case-insensitive window name

    Returns
    -------
    str
        the ``__name__`` of the equivalent `scipy.signal` window function

    Raises
    ------
    ValueError
        if no equivalent window function exists in `scipy.signal`
    """
    lname = name.lower()
    if lname == 'planck':  # provided locally, not by scipy
        return 'planck'
    try:
        return scipy_windows._win_equiv[lname].__name__
    except KeyError:
        # not in the alias map, fall through to the error below
        pass
    except AttributeError:
        # alias map unavailable: try a direct attribute lookup
        try:
            return getattr(scipy_windows, lname).__name__
        except AttributeError:
            pass
    raise ValueError(
        'no window function in scipy.signal equivalent to %r' % name,
    )
Find the canonical name for the given window in scipy . signal
22,304
def recommended_overlap(name, nfft=None):
    """Returns the recommended fractional overlap for the given window.

    Parameters
    ----------
    name : str
        window name (any case), resolved via `canonical_name`
    nfft : int, optional
        FFT length; if given, the overlap is returned as a (rounded-up)
        number of samples instead of a fraction

    Returns
    -------
    float or int
        the fractional overlap, or the number of overlap samples when
        ``nfft`` is given

    Raises
    ------
    ValueError
        if the window name cannot be resolved, or no recommended
        overlap is defined for it
    """
    # NOTE: canonical_name handles KeyError internally and only ever
    # raises ValueError, so the original try/except KeyError wrapper
    # here was dead code and has been removed
    name = canonical_name(name)
    try:
        rov = ROV[name]
    except KeyError:
        raise ValueError("no recommended overlap for %r window" % name)
    if nfft:
        return int(ceil(nfft * rov))
    return rov
Returns the recommended fractional overlap for the given window
22,305
def planck(N, nleft=0, nright=0):
    """Return a Planck-taper window.

    Parameters
    ----------
    N : int
        total number of samples in the window
    nleft : int, optional
        number of samples to taper on the left edge
    nright : int, optional
        number of samples to taper on the right edge

    Returns
    -------
    numpy.ndarray
        array of N window values in [0, 1]
    """
    w = numpy.ones(N)
    if nleft:
        w[0] *= 0
        # logistic taper over the first nleft samples
        k = numpy.arange(1, nleft, dtype=float)
        zleft = nleft * (1. / k + 1. / (k - nleft))
        w[1:nleft] *= expit(-zleft)
    if nright:
        w[N - 1] *= 0
        # mirrored logistic taper over the last nright samples
        k = numpy.arange(1, nright, dtype=float)
        zright = -nright * (1. / (k - nright) + 1. / k)
        w[N - nright:N - 1] *= expit(-zright)
    return w
Return a Planck taper window .
22,306
def bool_env(key, default=False):
    """Parse an environment variable as a boolean switch.

    Parameters
    ----------
    key : str
        the name of the environment variable
    default : bool, optional
        value returned when the variable is not set

    Returns
    -------
    bool
        True if the (lower-cased) value is one of the TRUE tokens
    """
    value = os.environ.get(key)
    if value is None:
        return default
    return value.lower() in TRUE
Parse an environment variable as a boolean switch
22,307
def call(cmd, stdout=PIPE, stderr=PIPE, on_error='raise', **kwargs):
    """Call out to the shell using `subprocess.Popen`.

    Parameters
    ----------
    cmd : str or list
        the command to run; a list/tuple runs without a shell, a
        string runs through the shell (unless overridden via kwargs)
    stdout, stderr : optional
        stream handles passed to `Popen`
    on_error : {'raise', 'warn', 'ignore'}, optional
        what to do when the command exits non-zero

    Returns
    -------
    tuple of str
        the decoded (stdout, stderr) of the process
    """
    if isinstance(cmd, (list, tuple)):
        kwargs.setdefault('shell', False)
        cmdstr = ' '.join(cmd)
    else:
        kwargs.setdefault('shell', True)
        cmdstr = str(cmd)
    proc = Popen(cmd, stdout=stdout, stderr=stderr, **kwargs)
    out, err = proc.communicate()
    if proc.returncode:
        error = CalledProcessError(proc.returncode, cmdstr)
        if on_error == 'warn':
            warnings.warn(str(error))
        elif on_error != 'ignore':
            raise error
    return out.decode('utf-8'), err.decode('utf-8')
Call out to the shell using subprocess . Popen
22,308
def read_with_columns(func):
    """Decorate a Table read method to use the ``columns`` keyword.

    The wrapped reader pops ``columns`` from the keywords and, when
    given, slices the resulting table down to those columns.
    """
    def wrapper(*args, **kwargs):
        columns = kwargs.pop("columns", None)
        table = func(*args, **kwargs)
        # no column selection requested: return the table unchanged
        return table if columns is None else table[columns]
    return _safe_wraps(wrapper, func)
Decorate a Table read method to use the columns keyword
22,309
def read_with_selection(func):
    """Decorate a Table read method to apply the ``selection`` keyword.

    The wrapped reader pops ``selection`` from the keywords and, when
    non-empty, filters the resulting table with `filter_table`.
    """
    def wrapper(*args, **kwargs):
        selection = kwargs.pop('selection', None) or []
        table = func(*args, **kwargs)
        if not selection:
            return table
        return filter_table(table, selection)
    return _safe_wraps(wrapper, func)
Decorate a Table read method to apply selection keyword
22,310
def decorate_registered_reader(
        name,
        data_class=EventTable,
        columns=True,
        selection=True,
):
    """Wrap an existing registered reader to use GWpy's input decorators.

    Re-registers (with ``force=True``) the reader for ``name`` on
    ``data_class``, wrapped so that ``columns`` and ``selection``
    keywords are honoured.

    NOTE(review): the ``columns`` and ``selection`` flag parameters are
    accepted but never used — both decorators are always applied;
    confirm whether conditional wrapping was intended.
    """
    reader = registry.get_reader(name, data_class)
    wrapped = (read_with_columns(read_with_selection(reader)))
    return registry.register_reader(name, data_class, wrapped, force=True)
Wrap an existing registered reader to use GWpy's input decorators.
22,311
def table_from_root(source, treename=None, columns=None, **kwargs):
    """Read a Table from a ROOT tree.

    Parameters
    ----------
    source : str or file-like
        path of the ROOT file (a file-like object's ``.name`` is used)
    treename : str, optional
        tree to read; required only when the file holds multiple trees
    columns : list of str, optional
        branches to read
    **kwargs
        ``selection`` filters, plus keywords for ``root_numpy.root2array``

    Raises
    ------
    ValueError
        if no tree, or multiple trees, are found and ``treename`` is
        not given
    """
    import root_numpy
    try:
        selection = kwargs.pop('selection')
    except KeyError:
        filters = None
    else:
        # split the selection into filters root_numpy can apply
        # natively (rootfilters) and those applied afterwards (filters)
        rootfilters = []
        filters = []
        for col, op_, value in parse_column_filters(selection):
            try:
                # reverse-lookup the operator symbol for this callable
                opstr = [key for key in OPERATORS
                         if OPERATORS[key] is op_][0]
            except (IndexError, KeyError):
                filters.append((col, op_, value))
            else:
                rootfilters.append('{0} {1} {2!r}'.format(col, opstr, value))
        kwargs['selection'] = ' && '.join(rootfilters)
    if not isinstance(source, string_types):
        source = source.name
    if treename is None:
        trees = root_numpy.list_trees(source)
        if len(trees) == 1:
            treename = trees[0]
        elif not trees:
            raise ValueError("No trees found in %s" % source)
        else:
            raise ValueError("Multiple trees found in %s, please select on "
                             "via the `treename` keyword argument, e.g. "
                             "`treename='events'`. Available trees are: %s."
                             % (source, ', '.join(map(repr, trees))))
    t = Table(root_numpy.root2array(source, treename, branches=columns,
                                    **kwargs))
    # apply any filters root_numpy could not handle itself
    if filters:
        return filter_table(t, *filters)
    return t
Read a Table from a ROOT tree
22,312
def table_to_root(table, filename, **kwargs):
    """Write a Table to a ROOT file.

    Parameters
    ----------
    table : `Table`
        the table to write
    filename : str
        target ROOT file path
    **kwargs
        keywords for ``root_numpy.array2root``
    """
    import root_numpy
    root_numpy.array2root(table.as_array(), filename, **kwargs)
Write a Table to a ROOT file
22,313
def _gps_scale_factory(unit):
    """Construct a GPSScale subclass fixed to the given time unit.

    The returned class carries a scale ``name`` derived from the unit
    (e.g. ``'seconds'``) and always initialises with that unit.
    """
    class FixedGPSScale(GPSScale):
        # prefer the long unit name when one exists, e.g. 'second' -> 'seconds'
        name = str('{0}s'.format(unit.long_names[0] if unit.long_names
                                 else unit.names[0]))

        def __init__(self, axis, epoch=None):
            super(FixedGPSScale, self).__init__(axis, epoch=epoch, unit=unit)
    return FixedGPSScale
Construct a GPSScale for this unit
22,314
def set_epoch(self, epoch):
    """Set the GPS epoch for this scale.

    Parameters
    ----------
    epoch : None, number, `Decimal`, or GPS-convertible
        the epoch; non-numeric values are converted via ``to_gps``
    """
    if epoch is None:
        value = None
    elif isinstance(epoch, (Number, Decimal)):
        # numeric epochs are used directly
        value = float(epoch)
    else:
        # anything else (datetime, str, ...) goes through to_gps
        value = float(to_gps(epoch))
    self._epoch = value
Set the GPS epoch
22,315
def set_unit(self, unit):
    """Set the GPS step scale (unit).

    Accepts None, a named time unit, a number (interpreted as a
    multiple of seconds), or a parseable unit string (retried with a
    trailing ``'s'`` stripped, e.g. ``'secs'`` -> ``'sec'``).

    Raises
    ------
    ValueError
        if the unit does not decompose to pure seconds, or does not
        match any of the predefined TIME_UNITS
    """
    # a named time unit (or None) can be stored directly
    if unit is None or (isinstance(unit, units.NamedUnit)
                        and unit.physical_type == 'time'):
        self._unit = unit
        return
    # a plain number is interpreted as that many seconds
    if isinstance(unit, Number):
        unit = units.Unit(unit * units.second)
    try:
        unit = units.Unit(unit)
    except ValueError as exc:
        try:
            # retry with a trailing plural 's' removed
            unit = units.Unit(str(unit).rstrip('s'))
        except ValueError:
            raise exc
    # the unit must decompose to pure seconds
    dec = unit.decompose()
    if dec.bases != [units.second]:
        raise ValueError("Cannot set GPS unit to %s" % unit)
    # normalise onto one of the predefined time units by scale
    for other in TIME_UNITS:
        if other.decompose().scale == dec.scale:
            self._unit = other
            return
    raise ValueError("Unrecognised unit: %s" % unit)
Set the GPS step scale
22,316
def get_unit_name(self):
    """Return the pluralised name of this scale's unit, or None.

    Uses the longest registered name of the unit (e.g. ``'second'``)
    and appends an ``'s'``.
    """
    unit = self.unit
    if not unit:
        return None
    # keep sorted()[-1] (stable sort) rather than max() so ties on
    # length resolve the same way as before
    longest = sorted(unit.names, key=len)[-1]
    return '%ss' % longest
Returns the name of the unit for this GPS scale
22,317
def transform_non_affine(self, values):
    """Transform an array of GPS times.

    Uses a fast vectorised float transform where precision allows,
    otherwise transforms element-by-element at Decimal precision to
    avoid float rounding of large GPS values.
    """
    scale = self.scale or 1
    epoch = self.epoch or 0
    values = numpy.asarray(values)
    # fast path: no epoch/scale adjustment needed, or this transform
    # is part of a composite (has parents)
    if self._parents or (epoch == 0 and scale == 1):
        return self._transform(values, float(epoch), float(scale))
    # slow path: per-element Decimal transform, reshaped back
    flat = values.flatten()

    def _trans(x):
        return self._transform_decimal(x, epoch, scale)
    return numpy.asarray(list(map(_trans, flat))).reshape(values.shape)
Transform an array of GPS times .
22,318
def deprecated_function(func, warning=DEPRECATED_FUNCTION_WARNING):
    """Add a DeprecationWarning to a function.

    Parameters
    ----------
    func : callable
        the function to deprecate
    warning : str, optional
        the warning message template, formatted with ``func``

    Returns
    -------
    callable
        a wrapped copy of ``func`` that emits a `DeprecationWarning`
        (at the caller's stack level) before delegating
    """
    @wraps(func)
    def wrapped_func(*args, **kwargs):
        # fix: use the caller-supplied template; the original ignored
        # the `warning` argument and always used the module default
        warnings.warn(
            warning.format(func),
            category=DeprecationWarning,
            stacklevel=2,
        )
        return func(*args, **kwargs)
    return wrapped_func
Adds a DeprecationWarning to a function
22,319
def return_as(returntype):
    """Decorator to cast the return of a function as the given type.

    Parameters
    ----------
    returntype : type
        the type to cast the wrapped function's return value to

    Raises
    ------
    TypeError, ValueError
        re-raised from the cast, with a message naming the function
        and target type
    """
    def decorator(func):
        def wrapped(*args, **kwargs):
            result = func(*args, **kwargs)
            try:
                return returntype(result)
            except (TypeError, ValueError) as exc:
                exc.args = (
                    'failed to cast return from {0} as {1}: {2}'.format(
                        func.__name__, returntype.__name__, str(exc)),
                )
                raise
        try:
            return wraps(func)(wrapped)
        except AttributeError:
            # fix: the original used '==' (a no-op comparison) instead
            # of '=', so the docstring was never copied over
            wrapped.__doc__ = func.__doc__
        return wrapped
    return decorator
Decorator to cast return of function as the given type
22,320
def format_citations(zid, url='https://zenodo.org/', hits=10,
                     tag_prefix='v'):
    """Query and format a citations page from Zenodo entries.

    Parameters
    ----------
    zid : int or str
        the Zenodo concept record ID
    url : str, optional
        base URL of the Zenodo service
    hits : int, optional
        maximum number of versions to list
    tag_prefix : str, optional
        prefix stripped from version tags (e.g. 'v1.0' -> '1.0')

    Returns
    -------
    str
        reStructuredText listing each version with its DOI badge
    """
    # all-versions record query, newest version first
    url = ('{url}/api/records/?'
           'page=1&'
           'size={hits}&'
           'q=conceptrecid:"{id}"&'
           'sort=-version&'
           'all_versions=True'.format(id=zid, url=url, hits=hits))
    metadata = requests.get(url).json()
    lines = []
    for i, hit in enumerate(metadata['hits']['hits']):
        # strip the tag prefix from the version string
        version = hit['metadata']['version'][len(tag_prefix):]
        # RST section header for this version
        lines.append('-' * len(version))
        lines.append(version)
        lines.append('-' * len(version))
        lines.append('')
        lines.append('.. image:: {badge}\n'
                     ' :target: {doi}'.format(**hit['links']))
        # blank line between entries (not after the last one)
        if i < hits - 1:
            lines.append('')
    return '\n'.join(lines)
Query and format a citations page from Zenodo entries
22,321
def read(source, channels, start=None, end=None, scaled=None, type=None,
         series_class=TimeSeries):
    """Read a dict of series from one or more GWF files.

    Parameters
    ----------
    source : str, file, or list
        GWF file path(s), normalised via `file_list`
    channels : list
        channels to read
    start, end : GPS times, optional
        span to read
    scaled : bool, optional
        whether to apply ADC scaling
    type : str or dict, optional
        frame data type(s) per channel
    series_class : type, optional
        series class to build (default `TimeSeries`)
    """
    source = file_list(source)
    # map channel -> frame data type string (if given)
    ctype = channel_dict_kwarg(type, channels, (str,))
    out = series_class.DictClass()
    for i, file_ in enumerate(source):
        if i == 1:
            # from the second file on, appending in-place requires
            # arrays that own their data (are resizable)
            for name in out:
                out[name] = numpy.require(out[name], requirements=['O'])
        out.append(read_gwf(file_, channels, start=start, end=end,
                            ctype=ctype, scaled=scaled,
                            series_class=series_class),
                   copy=False)
    return out
Read a dict of series from one or more GWF files
22,322
def read_gwf(filename, channels, start=None, end=None, scaled=None,
             ctype=None, series_class=TimeSeries):
    """Read a dict of series data from a single GWF file.

    Iterates over the frames in the file, reading each requested
    channel from every frame that intersects ``[start, end)``, and
    appending per-frame chunks into one series per channel.

    Raises
    ------
    ValueError
        if any requested channel could not be read at all
    """
    if not start:
        start = 0
    if not end:
        end = 0
    span = Segment(start, end)
    stream = io_gwf.open_gwf(filename, 'r')
    nframes = stream.GetNumberOfFrames()
    out = series_class.DictClass()
    i = 0
    while True:
        this = i
        i += 1
        try:
            frame = stream.ReadFrameNSubset(this, 0)
        except IndexError:
            # ran off the end of the file: done
            if this >= nframes:
                break
            raise
        # skip frames entirely outside the requested span
        if not _need_frame(frame, start, end):
            continue
        epoch = LIGOTimeGPS(*frame.GetGTime())
        for channel in channels:
            _scaled = _dynamic_scaled(scaled, channel)
            try:
                new = _read_channel(stream, this, str(channel),
                                    ctype.get(channel, None), epoch,
                                    start, end, scaled=_scaled,
                                    series_class=series_class)
            except _Skip:
                # channel not present / not needed in this frame
                continue
            try:
                out[channel].append(new)
            except KeyError:
                # first chunk: store an owned (resizable) copy
                out[channel] = numpy.require(new, requirements=['O'])
        # stop early once every read channel covers the full span
        # NOTE(review): all() over an empty dict is True, so an early
        # break is possible before anything is read — confirm intent
        if all(span in out[channel].span for channel in out):
            break
    for channel in channels:
        if channel not in out:
            msg = "Failed to read {0!r} from {1!r}".format(
                str(channel), filename)
            if start or end:
                msg += ' for {0}'.format(span)
            raise ValueError(msg)
    return out
Read a dict of series data from a single GWF file
22,323
def _read_channel(stream, num, name, ctype, epoch, start, end,
                  scaled=True, series_class=TimeSeries):
    """Read a channel from a specific frame in a stream.

    Locates the FrData structure for ``name`` in frame ``num`` and
    converts it into a series cropped to ``[start, end)``.
    """
    data = _get_frdata(stream, num, name, ctype=ctype)
    return read_frdata(data, epoch, start, end, scaled=scaled,
                       series_class=series_class)
Read a channel from a specific frame in a stream
22,324
def _get_frdata(stream, num, name, ctype=None):
    """Return the FrData structure for a channel, trying each type.

    Parameters
    ----------
    stream : frame stream
        the open GWF stream
    num : int
        frame number to read
    name : str
        channel name
    ctype : str, optional
        frame data type ('adc', 'proc', or 'sim'); if not given, all
        three are tried in order

    Raises
    ------
    ValueError
        if the channel is not found under any data type
    """
    candidates = (ctype,) if ctype else ('adc', 'proc', 'sim')
    for kind in candidates:
        reader = getattr(stream, 'ReadFr{0}Data'.format(kind.title()))
        try:
            return reader(num, name)
        except IndexError as exc:
            # "no channel of this type" means: try the next type;
            # any other IndexError is a real failure
            if FRERR_NO_CHANNEL_OF_TYPE.match(str(exc)):
                continue
            raise
    raise ValueError("no Fr{{Adc,Proc,Sim}}Data structures with the "
                     "name {0}".format(name))
Brute-force-ish method to return the FrData structure for a channel.
22,325
def read_frdata(frdata, epoch, start, end, scaled=True,
                series_class=TimeSeries):
    """Read a series from an FrData structure.

    Concatenates all FrVects in the structure into one series, cropped
    to ``[start, end)``, optionally applying ADC slope/bias scaling.

    Raises
    ------
    _Skip
        if the data lie entirely outside the requested span
    """
    datastart = epoch + frdata.GetTimeOffset()
    try:
        trange = frdata.GetTRange()
    except AttributeError:
        # structure type without a time range
        trange = 0.
    # skip structures entirely outside of the requested span
    if (end and datastart >= end) or (trange and datastart + trange < start):
        raise _Skip()
    try:
        slope = frdata.GetSlope()
        bias = frdata.GetBias()
    except AttributeError:
        # no ADC-style scaling available on this structure
        slope = None
        bias = None
        null_scaling = True
    else:
        null_scaling = slope == 1. and bias == 0.
    out = None
    for j in range(frdata.data.size()):
        try:
            new = read_frvect(frdata.data[j], datastart, start, end,
                              name=frdata.GetName(),
                              series_class=series_class)
        except _Skip:
            continue
        if scaled and not null_scaling:
            new *= slope
            new += bias
        # NOTE(review): the unit is forced to 'count' whenever a slope
        # exists, even after scaling was applied above — confirm this
        # should not be an elif of the scaling branch
        if slope is not None:
            new.override_unit('count')
        if out is None:
            out = new
        else:
            out.append(new)
    return out
Read a series from an FrData structure
22,326
def read_frvect(vect, epoch, start, end, name=None,
                series_class=TimeSeries):
    """Read an array from an FrVect structure.

    Crops the vector data to ``[start, end)`` and wraps it in a
    series, back-populating the channel's sample rate, unit, and dtype.

    Raises
    ------
    _Skip
        if the vector's name does not match, or the data start after
        the requested span
    """
    # only read the vector matching the requested channel name
    if vect.GetName() and name and vect.GetName() != name:
        raise _Skip()
    arr = vect.GetDataArray()
    nsamp = arr.size
    # x-axis (time) metadata of the first dimension
    dim = vect.GetDim(0)
    dx = dim.dx
    x0 = dim.startX
    dimstart = epoch + x0
    dimend = dimstart + nsamp * dx
    # number of leading samples to drop
    nxstart = int(max(0., float(start - dimstart)) / dx)
    if nxstart >= nsamp:
        raise _Skip()
    # number of samples to keep from the end (None = keep all)
    if end:
        nxend = int(nsamp - ceil(max(0., float(dimend - end)) / dx))
    else:
        nxend = None
    if nxstart or nxend:
        arr = arr[nxstart:nxend]
    unit = vect.GetUnitY() or None
    series = series_class(arr, t0=dimstart + nxstart * dx, dt=dx, name=name,
                          channel=name, unit=unit, copy=False)
    # back-populate channel metadata from the data just read
    series.channel.sample_rate = series.sample_rate.value
    series.channel.unit = unit
    series.channel.dtype = series.dtype
    return series
Read an array from an FrVect structure
22,327
def write(tsdict, outfile, start=None, end=None, name='gwpy', run=0,
          compression=257, compression_level=6):
    """Write data to a GWF file using the frameCPP API.

    All series in ``tsdict`` are written into a single frame, so they
    must share a common start and end time (unless given explicitly).

    Raises
    ------
    RuntimeError
        if the series have inconsistent start or end times
    """
    # determine the common start time
    if not start:
        starts = {LIGOTimeGPS(tsdict[key].x0.value) for key in tsdict}
        if len(starts) != 1:
            raise RuntimeError("Cannot write multiple TimeSeries to a single "
                               "frame with different start times, "
                               "please write into different frames")
        start = list(starts)[0]
    # determine the common end time
    if not end:
        ends = {tsdict[key].span[1] for key in tsdict}
        if len(ends) != 1:
            raise RuntimeError("Cannot write multiple TimeSeries to a single "
                               "frame with different end times, "
                               "please write into different frames")
        end = list(ends)[0]
    duration = end - start
    start = LIGOTimeGPS(start)
    # collect the interferometers known to this frameCPP build
    ifos = {ts.channel.ifo for ts in tsdict.values() if
            ts.channel and ts.channel.ifo and
            hasattr(frameCPP, 'DETECTOR_LOCATION_{0}'.format(ts.channel.ifo))}
    frame = io_gwf.create_frame(time=start, duration=duration, name=name,
                                run=run, ifos=ifos)
    # append each series with its native channel type (default 'proc')
    for i, key in enumerate(tsdict):
        try:
            ctype = tsdict[key].channel._ctype or 'proc'
        except AttributeError:
            ctype = 'proc'
        append_to_frame(frame, tsdict[key].crop(start, end),
                        type=ctype, channelid=i)
    io_gwf.write_frames(outfile, [frame], compression=compression,
                        compression_level=compression_level)
Write data to a GWF file using the frameCPP API
22,328
def append_to_frame(frame, timeseries, type='proc', channelid=0):
    """Append data from a TimeSeries to a frameCPP FrameH.

    Parameters
    ----------
    frame : frameCPP FrameH
        the frame to append to
    timeseries : `TimeSeries`
        the data to append
    type : {'adc', 'proc', 'sim'}, optional
        the frame data structure type to create
    channelid : int, optional
        ADC channel ID (used for 'adc' only)

    Raises
    ------
    RuntimeError
        if ``type`` is not one of the supported types
    """
    if timeseries.channel:
        channel = str(timeseries.channel)
    else:
        channel = str(timeseries.name)
    # time offset of this series relative to the frame epoch
    offset = float(LIGOTimeGPS(timeseries.t0.value) -
                   LIGOTimeGPS(*frame.GetGTime()))
    # create the appropriate data container and pick its append method
    if type.lower() == 'adc':
        frdata = frameCPP.FrAdcData(channel, 0, channelid, 16,
                                    timeseries.sample_rate.value,)
        frdata.SetTimeOffset(offset)
        append = frame.AppendFrAdcData
    elif type.lower() == 'proc':
        frdata = frameCPP.FrProcData(
            channel, str(timeseries.name),
            frameCPP.FrProcData.TIME_SERIES,
            frameCPP.FrProcData.UNKNOWN_SUB_TYPE,
            offset, abs(timeseries.span), 0., 0., 0., 0.,)
        append = frame.AppendFrProcData
    elif type.lower() == 'sim':
        frdata = frameCPP.FrSimData(str(timeseries.channel),
                                    str(timeseries.name),
                                    timeseries.sample_rate.value,
                                    offset, 0., 0.,)
        append = frame.AppendFrSimData
    else:
        # NOTE(review): message has a quoting typo ("'adc,") — left
        # byte-identical here; fix separately if desired
        raise RuntimeError("Invalid channel type {!r}, please select one of "
                           "'adc, 'proc', or 'sim'".format(type))
    frdata.AppendData(create_frvect(timeseries))
    append(frdata)
Append data from a TimeSeries to a ~frameCPP . FrameH
22,329
def create_frvect(timeseries):
    """Create a frameCPP FrVect from a TimeSeries.

    Builds a single-dimension vector carrying the series values,
    sample step, and unit.
    """
    dims = frameCPP.Dimension(timeseries.size, timeseries.dx.value,
                              str(timeseries.dx.unit), 0)
    vect = frameCPP.FrVect(timeseries.name or '',
                           FRVECT_TYPE_FROM_NUMPY[timeseries.dtype.type],
                           1, dims, str(timeseries.unit))
    # copy the data in as a C-contiguous array
    vect.GetDataArray()[:] = numpy.require(timeseries.value,
                                           requirements=['C'])
    return vect
Create a ~frameCPP . FrVect from a TimeSeries
22,330
def _bool_segments ( array , start = 0 , delta = 1 , minlen = 1 ) : array = iter ( array ) i = 0 while True : try : val = next ( array ) except StopIteration : return if val : n = 1 try : while next ( array ) : n += 1 except StopIteration : return finally : if n >= minlen : yield ( start + i * delta , start + ( i + n ) * delta ) i += n i += 1
Yield segments of consecutive True values in a boolean array
22,331
def to_dqflag(self, name=None, minlen=1, dtype=None, round=False,
              label=None, description=None):
    """Convert this series into a `~gwpy.segments.DataQualityFlag`.

    Runs of True samples become 'active' segments; the full span of
    the series is the 'known' segment.

    Parameters
    ----------
    name, label, description : str, optional
        flag metadata (name/label default to this series' name)
    minlen : int, optional
        minimum number of consecutive samples per active segment
    dtype : type or `numpy.dtype`, optional
        type used for segment boundaries (default: the t0 dtype)
    round : bool, optional
        if True, round the output flag to integer boundaries
    """
    from ..segments import DataQualityFlag
    if dtype is None:
        dtype = self.t0.dtype
    # use the callable scalar type, not the numpy.dtype object
    if isinstance(dtype, numpy.dtype):
        dtype = dtype.type
    start = dtype(self.t0.value)
    dt = dtype(self.dt.value)
    active = _bool_segments(self.value, start, dt, minlen=int(minlen))
    known = [tuple(map(dtype, self.span))]
    out = DataQualityFlag(name=name or self.name, active=active,
                          known=known, label=label or self.name,
                          description=description)
    if round:
        return out.round()
    return out
Convert this series into a ~gwpy . segments . DataQualityFlag .
22,332
def bits(self):
    """list of Bits for this StateVector.

    If no bit mapping has been set, a default is derived: generic
    per-digit labels for integer data, otherwise the bits of the
    source channel (when available), else None.

    NOTE(review): assigning ``self.bits`` here presumes a property
    setter exists that stores to ``self._bits`` — confirm.
    """
    try:
        return self._bits
    except AttributeError:
        if self.dtype.name.startswith(('uint', 'int')):
            # integer data: one generic label per binary digit
            nbits = self.itemsize * 8
            self.bits = Bits(['Bit %d' % b for b in range(nbits)],
                             channel=self.channel, epoch=self.epoch)
            return self.bits
        elif hasattr(self.channel, 'bits'):
            # otherwise inherit the bit mapping of the source channel
            self.bits = self.channel.bits
            return self.bits
        return None
list of Bits for this StateVector
22,333
def boolean(self):
    """A mapping of this StateVector to a 2-D array containing all
    binary bits as booleans, for each time point.

    The result is cached on ``self._boolean`` after first computation.
    """
    try:
        return self._boolean
    except AttributeError:
        nbits = len(self.bits)
        boolean = numpy.zeros((self.size, nbits), dtype=bool)
        for i, sample in enumerate(self.value):
            # unpack each binary digit of this sample
            boolean[i, :] = [int(sample) >> j & 1 for j in range(nbits)]
        self._boolean = Array2D(boolean, name=self.name, x0=self.x0,
                                dx=self.dx, y0=0, dy=1)
        return self.boolean
A mapping of this StateVector to a 2 - D array containing all binary bits as booleans for each time point .
22,334
def get_bit_series(self, bits=None):
    """Get the StateTimeSeries for each bit of this StateVector.

    Parameters
    ----------
    bits : list, optional
        bits to extract; defaults to all named (non-empty) bits

    Raises
    ------
    IndexError, ValueError
        if a requested bit is not found in this StateVector
    """
    if bits is None:
        # default to all named bits
        bits = [b for b in self.bits if b not in {None, ''}]
    # resolve each requested bit to its index
    bindex = []
    for bit in bits:
        try:
            bindex.append((self.bits.index(bit), bit))
        except (IndexError, ValueError) as exc:
            exc.args = ('Bit %r not found in StateVector' % bit,)
            raise
    self._bitseries = StateTimeSeriesDict()
    for i, bit in bindex:
        # extract bit i of every sample as a boolean series
        self._bitseries[bit] = StateTimeSeries(
            self.value >> i & 1, name=bit, epoch=self.x0.value,
            channel=self.channel, sample_rate=self.sample_rate)
    return self._bitseries
Get the StateTimeSeries for each bit of this StateVector .
22,335
def read(cls, source, *args, **kwargs):
    """Read data into a StateVector.

    Delegates to the parent class's unified-I/O ``read``.
    """
    return super(StateVector, cls).read(source, *args, **kwargs)
Read data into a StateVector
22,336
def to_dqflags(self, bits=None, minlen=1, dtype=float, round=False):
    """Convert this StateVector into a `~gwpy.segments.DataQualityDict`.

    Each bit becomes one flag, whose active segments are the runs of
    samples in which that bit is high.
    """
    from ..segments import DataQualityDict
    out = DataQualityDict()
    bitseries = self.get_bit_series(bits=bits)
    for bit, sts in bitseries.items():
        out[bit] = sts.to_dqflag(
            name=bit, minlen=minlen, round=round, dtype=dtype,
            description=self.bits.description[bit])
    return out
Convert this StateVector into a ~gwpy . segments . DataQualityDict
22,337
def fetch(cls, channel, start, end, bits=None, host=None, port=None,
          verbose=False, connection=None, type=Nds2ChannelType.any()):
    """Fetch data from NDS into a StateVector.

    NOTE(review): the ``type`` argument is accepted but never
    forwarded to ``DictClass.fetch`` — confirm whether it should be.
    """
    new = cls.DictClass.fetch(
        [channel], start, end, host=host, port=port,
        verbose=verbose, connection=connection)[channel]
    if bits:
        new.bits = bits
    return new
Fetch data from NDS into a StateVector .
22,338
def plot(self, format='segments', bits=None, **kwargs):
    """Plot the data for this StateVector.

    Parameters
    ----------
    format : {'segments', 'timeseries'}, optional
        render as per-bit segment bars, or as a raw time series
    bits : list, optional
        bits to include in 'segments' mode

    Raises
    ------
    ValueError
        if ``format`` is not recognised
    """
    if format == 'timeseries':
        return super(StateVector, self).plot(**kwargs)
    if format == 'segments':
        from ..plot import Plot
        kwargs.setdefault('xscale', 'auto-gps')
        return Plot(*self.to_dqflags(bits=bits).values(),
                    projection='segments', **kwargs)
    raise ValueError("'format' argument must be one of: 'timeseries' or "
                     "'segments'")
Plot the data for this StateVector
22,339
def resample(self, rate):
    """Resample this StateVector to a new rate.

    Only integer downsampling is supported; in each output sample a
    bit is high only if it was high in every input sample it covers.

    Raises
    ------
    NotImplementedError
        for upsampling (or an equal rate)
    ValueError
        when the rates are not integer multiples of one another
    """
    rate1 = self.sample_rate.value
    if isinstance(rate, units.Quantity):
        rate2 = rate.value
    else:
        rate2 = float(rate)
    # upsampling (including the equal-rate case) is not supported
    if (rate2 / rate1).is_integer():
        raise NotImplementedError("StateVector upsampling has not "
                                  "been implemented yet, sorry.")
    elif (rate1 / rate2).is_integer():
        factor = int(rate1 / rate2)
        # one row per output sample, `factor` input samples per row
        newsize = int(self.size / factor)
        old = self.value.reshape((newsize, self.size // newsize))
        # number of bits to consider
        if self.bits:
            nbits = len(self.bits)
        else:
            max_ = self.value.max()
            nbits = int(ceil(log(max_, 2))) if max_ else 1
        bits = range(nbits)
        # reduce each row: bit stays set only if set in ALL samples
        itr = numpy.nditer(
            [old, None],
            flags=['external_loop', 'reduce_ok'],
            op_axes=[None, [0, -1]],
            op_flags=[['readonly'], ['readwrite', 'allocate']])
        dtype = self.dtype
        type_ = self.dtype.type
        for x, y in itr:
            y[...] = numpy.sum(
                [type_((x >> bit & 1).all() * (2 ** bit)) for bit in bits],
                dtype=self.dtype)
        new = StateVector(itr.operands[1], dtype=dtype)
        new.__metadata_finalize__(self)
        new._unit = self.unit
        new.sample_rate = rate2
        return new
    elif rate1 < rate2:
        raise ValueError("New sample rate must be multiple of input "
                         "series rate if upsampling a StateVector")
    else:
        raise ValueError("New sample rate must be divisor of input "
                         "series rate if downsampling a StateVector")
Resample this StateVector to a new rate
22,340
def read(cls, source, format=None, coalesce=False, **kwargs):
    """Read segments from file into a SegmentList.

    Parameters
    ----------
    source : str, file, or list
        source(s) to read (multi-source reads are flattened)
    format : str, optional
        file format name for the I/O registry
    coalesce : bool, optional
        if True, coalesce the combined list before returning
    """
    def combiner(listofseglists):
        # flatten a list of SegmentLists into a single SegmentList
        out = cls(seg for seglist in listofseglists for seg in seglist)
        if coalesce:
            return out.coalesce()
        return out
    return io_read_multi(combiner, cls, source, format=format, **kwargs)
Read segments from file into a SegmentList
22,341
def write(self, target, *args, **kwargs):
    """Write this SegmentList to a file via the unified I/O registry."""
    return io_registry.write(self, target, *args, **kwargs)
Write this SegmentList to a file
22,342
def table_from_cwb(source, *args, **kwargs):
    """Read an EventTable from a Coherent WaveBurst ROOT file.

    This is a thin wrapper that reads the 'waveburst' tree via the
    'root' format reader.
    """
    return EventTable.read(source, 'waveburst', *args, format='root',
                           **kwargs)
Read an EventTable from a Coherent WaveBurst ROOT file
22,343
def get_backend_mod(name=None):
    """Return the imported module for the given backend name.

    Parameters
    ----------
    name : str, optional
        backend name, either ``'module://<modname>'`` or a plain
        matplotlib backend name; defaults to the current backend

    Returns
    -------
    module
        the imported backend module
    """
    if name is None:
        name = get_backend()
    prefix = "module://"
    if name.startswith(prefix):
        # explicit module path
        modname = name[len(prefix):]
    else:
        # standard matplotlib backend module
        modname = "matplotlib.backends.backend_{}".format(name.lower())
    return importlib.import_module(modname)
Returns the imported module for the given backend name
22,344
def _group_axes_data(inputs, separate=None, flat=False):
    """Determine the number of axes from the input args to this Plot.

    Groups the inputs into one list per Axes: explicit collections
    start a new group; scalars either start a group (``separate``) or
    join the previous one.

    Parameters
    ----------
    inputs : sequence
        the data arguments
    separate : bool, optional
        force one group per input; auto-detected when None
    flat : bool, optional
        return a flat list instead of groups
    """
    # auto-detect separation: nested collections or mixed types imply
    # one Axes per input
    if separate is None and inputs:
        if any(isinstance(x, iterable_types + (dict,)) for x in inputs):
            separate = True
        elif not all(type(x) is type(inputs[0]) for x in inputs):
            separate = True
    out = []
    for x in inputs:
        if isinstance(x, dict):
            # unwrap dicts to their values
            x = list(x.values())
        # an iterable of (non-scalar) objects starts a new group as-is
        if (isinstance(x, (KeysView, ValuesView)) or
                isinstance(x, (list, tuple)) and (
                    not x or not numpy.isscalar(x[0]))):
            out.append(x)
        # a single dataset starts a new group
        elif separate or not out:
            out.append([x])
        # or joins the most recent group
        else:
            out[-1].append(x)
    if flat:
        return [s for group in out for s in group]
    return out
Determine the number of axes from the input args to this Plot
22,345
def _init_axes(self, data, method='plot', xscale=None, sharex=False,
               sharey=False, geometry=None, separate=None, **kwargs):
    """Populate this figure with data, creating Axes as necessary.

    Parameters
    ----------
    data : sequence
        datasets, grouped into Axes via `_group_axes_data`
    method : str, optional
        Axes method used to render each group
    xscale : str, optional
        x-axis scale for all Axes (auto-detected per group when None)
    sharex, sharey : bool or str, optional
        axis-sharing mode; booleans map to 'all'/'none'
    geometry : tuple, optional
        (nrows, ncols) grid; defaults to one row per group
    separate : bool, optional
        forwarded to `_group_axes_data`

    Returns
    -------
    list
        this figure's Axes
    """
    # normalise boolean sharing flags to matplotlib mode strings
    if isinstance(sharex, bool):
        sharex = "all" if sharex else "none"
    if isinstance(sharey, bool):
        sharey = "all" if sharey else "none"
    # extract keywords destined for Axes creation
    axes_kw = {key: kwargs.pop(key) for key in utils.AXES_PARAMS
               if key in kwargs}
    # an explicit one-slot-per-dataset geometry forces separation
    if geometry is not None and geometry[0] * geometry[1] == len(data):
        separate = True
    axes_groups = _group_axes_data(data, separate=separate)
    if geometry is None:
        geometry = (len(axes_groups), 1)
    nrows, ncols = geometry
    if axes_groups and nrows * ncols != len(axes_groups):
        raise ValueError("cannot group data into {0} axes with a "
                         "{1}x{2} grid".format(len(axes_groups), nrows,
                                               ncols))
    gs = GridSpec(nrows, ncols)
    axarr = numpy.empty((nrows, ncols), dtype=object)
    # remember whether the user supplied explicit axis labels
    defxlabel = 'xlabel' not in axes_kw
    defylabel = 'ylabel' not in axes_kw
    flatdata = [s for group in axes_groups for s in group]
    # default axis labels from any unit common to all datasets
    for axis in ('x', 'y'):
        unit = _common_axis_unit(flatdata, axis=axis)
        if unit:
            axes_kw.setdefault('{}label'.format(axis),
                               unit.to_string('latex_inline_dimensional'))
    # create each Axes and plot its group
    for group, (row, col) in zip_longest(
            axes_groups, itertools.product(range(nrows), range(ncols)),
            fillvalue=[]):
        shared_with = {"none": None, "all": axarr[0, 0],
                       "row": axarr[row, 0], "col": axarr[0, col]}
        axes_kw["sharex"] = shared_with[sharex]
        axes_kw["sharey"] = shared_with[sharey]
        axes_kw['xscale'] = xscale if xscale else _parse_xscale(group)
        ax = axarr[row, col] = self.add_subplot(gs[row, col], **axes_kw)
        plot_func = getattr(ax, method)
        if method in ('imshow', 'pcolormesh'):
            # image-like methods take one dataset at a time
            for obj in group:
                plot_func(obj, **kwargs)
        elif group:
            plot_func(*group, **kwargs)
        # hide interior labels on shared axes, keep them on the edge
        for axis, share, pos, n, def_ in (
                (ax.xaxis, sharex, row, nrows, defxlabel),
                (ax.yaxis, sharey, col, ncols, defylabel),
        ):
            if share == 'all' and pos < n - 1:
                axis.set_label_text('')
            else:
                axis.isDefault_label = def_
    return self.axes
Populate this figure with data creating Axes as necessary
22,346
def refresh(self):
    """Refresh the current figure, redrawing any colorbars first."""
    for cbar in self.colorbars:
        cbar.draw_all()
    self.canvas.draw()
Refresh the current figure
22,347
def close(self):
    """Close the plot and release its memory."""
    from matplotlib.pyplot import close
    # NOTE(review): resetting each Axes to linear scales before
    # clearing appears to work around slow/leaky teardown of custom
    # (e.g. GPS) scales — confirm this is still required
    for ax in self.axes[::-1]:
        ax.set_xscale('linear')
        ax.set_yscale('linear')
        ax.cla()
    close(self)
Close the plot and release its memory .
22,348
def get_axes(self, projection=None):
    """Find all Axes, optionally matching the given projection.

    Parameters
    ----------
    projection : str, optional
        case-insensitive projection name to filter on; all Axes are
        returned when not given
    """
    if projection is None:
        return self.axes
    target = projection.lower()
    return [axes for axes in self.axes if axes.name == target]
Find all Axes optionally matching the given projection
22,349
def colorbar(self, mappable=None, cax=None, ax=None, fraction=0.,
             label=None, emit=True, **kwargs):
    """Add a colorbar to the current Plot.

    Parameters
    ----------
    mappable : optional
        the mapped artist to describe (auto-detected when None)
    cax, ax : optional
        the colorbar Axes / the Axes to steal space from
    fraction : float, optional
        fraction of the Axes to use for the colorbar
    label : str, optional
        colorbar label
    emit : bool, optional
        if True, propagate the norm/cmap to all other mapped artists
        on the same Axes
    """
    mappable, kwargs = gcbar.process_colorbar_kwargs(
        self, mappable, ax, cax=cax, fraction=fraction, **kwargs)
    cbar = super(Plot, self).colorbar(mappable, **kwargs)
    self.colorbars.append(cbar)
    if label:
        cbar.set_label(label)
    if emit:
        ax = kwargs.pop('ax')
        norm = mappable.norm
        cmap = mappable.get_cmap()
        # keep all artists on these Axes consistent with the colorbar
        for map_ in ax.collections + ax.images:
            map_.set_norm(norm)
            map_.set_cmap(cmap)
    return cbar
Add a colorbar to the current Plot
22,350
def add_colorbar(self, *args, **kwargs):
    """DEPRECATED: use `Plot.colorbar` instead.

    Emits a `DeprecationWarning` and delegates to ``self.colorbar``.
    """
    # fix: corrected the warning grammar ("this warnings will" ->
    # "this warning will")
    warnings.warn(
        "{0}.add_colorbar was renamed {0}.colorbar, this warning will "
        "result in an error in the future".format(type(self).__name__),
        DeprecationWarning)
    return self.colorbar(*args, **kwargs)
DEPRECATED use Plot . colorbar instead
22,351
def add_segments_bar(self, segments, ax=None, height=0.14, pad=0.1,
                     sharex=True, location='bottom', **plotargs):
    """Add a segment bar Plot indicating state information.

    Parameters
    ----------
    segments : segments-like
        the state data to plot
    ax : Axes, optional
        the Axes to attach the bar to (default: current Axes)
    height, pad : float, optional
        size of, and padding around, the new bar Axes
    sharex : bool or Axes, optional
        share the x-axis with ``ax`` (True) or with the given Axes
    location : {'bottom', 'top'}, optional
        where to place the bar relative to ``ax``
    **plotargs
        keywords for plotting the segments

    Returns
    -------
    Axes
        the new segments Axes

    Raises
    ------
    ValueError
        if ``location`` is not 'top' or 'bottom'
    """
    if not ax:
        ax = self.gca()
    axes_kw = {
        'pad': pad,
        'add_to_figure': True,
        'sharex': ax if sharex is True else sharex or None,
        'axes_class': get_projection_class('segments'),
    }
    # copy the x-limits/epoch when sharing with a fixed-range Axes
    if axes_kw['sharex'] is ax and not ax.get_autoscalex_on():
        axes_kw['xlim'] = ax.get_xlim()
        try:
            axes_kw['epoch'] = ax.get_epoch()
        except AttributeError:
            pass
    # reuse an existing axes divider, or make one
    if ax.get_axes_locator():
        divider = ax.get_axes_locator()._axes_divider
    else:
        from mpl_toolkits.axes_grid1 import make_axes_locatable
        divider = make_axes_locatable(ax)
    if location not in {'top', 'bottom'}:
        # fix: corrected typo in the error message ("positoned")
        raise ValueError("Segments can only be positioned at 'top' or "
                         "'bottom'.")
    segax = divider.append_axes(location, height, **axes_kw)
    # move the shared x-label down onto the new bar
    if axes_kw['sharex'] is ax and location == 'bottom':
        segax.set_xlabel(ax.get_xlabel())
        segax.xaxis.isDefault_label = ax.xaxis.isDefault_label
        ax.set_xlabel("")
        setp(ax.get_xticklabels(), visible=False)
    segax.plot(segments, **plotargs)
    segax.grid(b=False, which='both', axis='y')
    segax.autoscale(axis='y', tight=True)
    return segax
Add a segment bar Plot indicating state information .
22,352
def get_hacr_channels(db=None, gps=None, connection=None, **conectkwargs):
    """Return the names of all channels present in the given HACR database.

    Parameters
    ----------
    db : str, optional
        database name (default: the database covering ``gps``)
    gps : GPS time, optional
        time used to pick the default database (default: now)
    connection : optional
        an open database connection to reuse
    **conectkwargs
        keywords for `connect`

    Returns
    -------
    list of str
        the channel names
    """
    if connection is None:
        if gps is None:
            gps = from_gps('now')
        if db is None:
            db = get_database_names(gps, gps)[0]
        connection = connect(db=db, **conectkwargs)
    # fix: pass the connection through; the original called query()
    # without it, so the connection established above was never used
    out = query("select channel from job where monitorName = 'chacr'",
                connection=connection)
    return [r[0] for r in out]
Return the names of all channels present in the given HACR database
22,353
def get_hacr_triggers(channel, start, end, columns=HACR_COLUMNS, pid=None,
                      monitor='chacr', selection=None, **connectkwargs):
    """Fetch a table of HACR triggers in the given interval.

    Parameters
    ----------
    channel : str
        the channel to query
    start, end : GPS-convertible
        the interval to fetch
    columns : list of str, optional
        trigger columns to select
    pid : int, optional
        restrict to a single process ID
    monitor : str, optional
        monitor name to match in the job table
    selection : optional
        extra column filters appended to the SQL WHERE clause
    **connectkwargs
        keywords for `connect`
    """
    if columns is None:
        columns = HACR_COLUMNS
    columns = list(columns)
    span = Segment(*map(to_gps, (start, end)))
    # format the extra selection; [6:] strips a leading keyword from
    # format_db_selection's output before re-prefixing with 'and'
    # NOTE(review): confirm this also behaves when selection is None
    selectionstr = 'and %s' % format_db_selection(selection, engine=None)[6:]
    databases = get_database_names(start, end)
    rows = []
    for db in databases:
        conn = connect(db, **connectkwargs)
        cursor = conn.cursor()
        # find process IDs (and their spans) for this channel/monitor
        pids = query("select process_id, gps_start, gps_stop "
                     "from job where monitorName = %r and channel = %r"
                     % (monitor, str(channel)), connection=conn)
        for p, s, e in pids:
            if pid is not None and int(p) != int(pid):
                continue
            tspan = Segment(float(s), float(e))
            if not tspan.intersects(span):
                continue
            q = ('select %s from mhacr where process_id = %d and '
                 'gps_start > %s and gps_start < %d %s order by gps_start asc'
                 % (', '.join(columns), int(p), span[0], span[1],
                    selectionstr))
            n = cursor.execute(q)
            if n == 0:
                continue
            rows.extend(cursor.fetchall())
    return EventTable(rows=rows, names=columns)
Fetch a table of HACR triggers in the given interval
22,354
def connect(db, host=HACR_DATABASE_SERVER, user=HACR_DATABASE_USER,
            passwd=HACR_DATABASE_PASSWD):
    """Connect to the given SQL database.

    Parameters
    ----------
    db : str
        database name
    host, user, passwd : str, optional
        connection credentials (default: the HACR module constants)

    Raises
    ------
    ImportError
        if pymysql is not installed
    """
    try:
        import pymysql
    except ImportError as e:
        # re-raise with a more helpful message
        e.args = ('pymysql is required to fetch HACR triggers',)
        raise
    return pymysql.connect(host=host, user=user, passwd=passwd, db=db)
Connect to the given SQL database
22,355
def query(querystr, connection=None, **connectkwargs):
    """Execute a query against the given SQL database.

    Parameters
    ----------
    querystr : str
        the SQL to execute
    connection : optional
        an open connection; a new one is made via `connect` when None
    **connectkwargs
        keywords for `connect` when a new connection is needed

    Returns
    -------
    list
        all rows returned by the query
    """
    if connection is None:
        connection = connect(**connectkwargs)
    cursor = connection.cursor()
    cursor.execute(querystr)
    return cursor.fetchall()
Execute a query of the given SQL database
22,356
def add_filter(self, filter_, frequencies=None, dB=True, analog=False,
               sample_rate=None, **kwargs):
    """Add a linear time-invariant filter to this BodePlot.

    Parameters
    ----------
    filter_
        the filter, in any form accepted by ``parse_filter``.
    frequencies : array-like, optional
        frequencies (Hz) at which to evaluate the response; for digital
        filters these are converted to radians/sample.
    dB : `bool`, optional
        if `True` (default) plot magnitude in decibels, otherwise convert
        back to a linear scale.
    analog : `bool`, optional
        whether ``filter_`` is an analogue (continuous-time) filter.
    sample_rate : `float`, optional
        sampling rate (Hz); required when ``analog=False``.
    **kwargs
        passed to the magnitude/phase ``plot`` calls.

    Returns
    -------
    mline, pline
        the magnitude and phase `Line2D` artists.
    """
    if not analog:
        if not sample_rate:
            raise ValueError("Must give sample_rate frequency to display "
                             "digital (analog=False) filter")
        sample_rate = Quantity(sample_rate, 'Hz').value
        # radians/sample per Hz, used to rescale user frequencies
        dt = 2 * pi / sample_rate
        if not isinstance(frequencies, (type(None), int)):
            # copy before scaling so the caller's array is untouched
            frequencies = numpy.atleast_1d(frequencies).copy()
            frequencies *= dt
    _, fcomp = parse_filter(filter_, analog=False)
    if analog:
        lti = signal.lti(*fcomp)
    else:
        lti = signal.dlti(*fcomp, dt=dt)
    w, mag, phase = lti.bode(w=frequencies)
    if not dB:
        # bode() returns dB; convert back to a linear magnitude
        mag = 10 ** (mag / 10.)
    mline = self.maxes.plot(w, mag, **kwargs)[0]
    pline = self.paxes.plot(w, phase, **kwargs)[0]
    return mline, pline
Add a linear time - invariant filter to this BodePlot
22,357
def add_frequencyseries(self, spectrum, dB=True, power=False, **kwargs):
    """Plot the magnitude and phase of a complex-valued FrequencySeries.

    Parameters
    ----------
    spectrum : `FrequencySeries`
        the (complex) series to plot.
    dB : `bool`, optional
        if `True` (default), plot magnitude in decibels.
    power : `bool`, optional
        if `True`, treat the input as a power quantity (skips the
        amplitude->power doubling of the dB value).
    **kwargs
        passed to the ``plot`` calls.

    Returns
    -------
    mline, pline
        the magnitude and phase `Line2D` artists.
    """
    kwargs.setdefault('label', spectrum.name)
    magnitude = numpy.absolute(spectrum.value)
    if dB:
        magnitude = to_db(magnitude)
        if not power:
            # amplitude dB -> power dB
            magnitude *= 2.
    phase = numpy.angle(spectrum.value, deg=True)
    freqs = spectrum.frequencies.value
    mline = self.maxes.plot(freqs, magnitude, **kwargs)[0]
    pline = self.paxes.plot(freqs, phase, **kwargs)[0]
    return mline, pline
Plot the magnitude and phase of a complex - valued FrequencySeries
22,358
def read_omega_scan_config(source):
    """Parse an Omega-scan configuration file into a `ChannelList`.

    Parameters
    ----------
    source : `str` or file-like
        path of, or open handle to, an Omega-scan configuration file.

    Returns
    -------
    channels : `ChannelList`
        one `Channel` per ``{...}`` stanza in the file.

    Raises
    ------
    RuntimeError
        if a line cannot be recognised as a comment, section header, or
        channel stanza opener.
    """
    out = ChannelList()
    append = out.append
    # only close the stream if we opened it ourselves
    if isinstance(source, FILE_LIKE):
        close = False
    else:
        source = open(source, 'r')
        close = True
    try:
        section = None
        while True:
            try:
                line = next(source)
            except StopIteration:
                break
            if line == '' or line == '\n' or line.startswith('#'):
                continue
            elif line.startswith('['):
                # '[name]\n' -> 'name'
                # NOTE(review): assumes the line ends with ']\n'
                section = line[1:-2]
            elif line.startswith('{'):
                # hand the stream to the channel parser, which consumes
                # lines up to the closing '}'
                append(parse_omega_channel(source, section))
            else:
                raise RuntimeError("Failed to parse Omega config line:\n%s"
                                   % line)
    finally:
        if close:
            source.close()
    return out
Parse an Omega - scan configuration file into a ChannelList
22,359
def parse_omega_channel(fobj, section=None):
    """Parse a `Channel` from an Omega-scan configuration file.

    Reads ``key: value`` lines from ``fobj`` until the closing ``}`` and
    builds a `Channel` from the collected parameters.

    Parameters
    ----------
    fobj : file-like
        open config stream, positioned just after the opening ``{``.
    section : `str`, optional
        the ``[section]`` this channel belongs to.

    Returns
    -------
    channel : `Channel`
        with ``group`` and ``params`` attributes attached.
    """
    params = OrderedDict()
    while True:
        # a truncated stanza (EOF before '}') raises StopIteration
        line = next(fobj)
        if line == '}\n':
            break
        key, value = line.split(':', 1)
        params[key.strip()] = omega_param(value)
    channel = Channel(params.get('channelName'),
                      sample_rate=params.get('sampleFrequency'),
                      frametype=params.get('frameType'),
                      frequency_range=params.get('searchFrequencyRange'))
    channel.group = section
    channel.params = params
    return channel
Parse a Channel from an Omega - scan configuration file
22,360
def omega_param(val):
    """Parse a value from an Omega-scan configuration file.

    Parameters
    ----------
    val : `str`
        the raw text of the value.

    Returns
    -------
    value : `str`, `tuple` of `float`, or `float`
        - quoted text -> `str` (quotes stripped)
        - ``[a b ...]`` -> `tuple` of `float`
        - anything else -> `float`
    """
    # .strip() removes whitespace from both ends; the original's extra
    # .rstrip() was redundant
    val = val.strip()
    if val.startswith(('"', "'")):
        return str(val[1:-1])
    if val.startswith('['):
        return tuple(map(float, val[1:-1].split()))
    return float(val)
Parse a value from an Omega - scan configuration file
22,361
def write_omega_scan_config(channellist, fobj, header=True):
    """Write a `ChannelList` to an Omega-pipeline scan configuration file.

    Parameters
    ----------
    channellist : `ChannelList`
        the channels to write; channels are expected to be grouped by
        their ``group`` attribute, a new ``[section]`` header is emitted
        whenever the group changes.
    fobj : `str` or file-like
        output path or open handle.
    header : `bool`, optional
        if `True` (default), write a comment header first.
    """
    # only close the stream if we opened it ourselves
    if isinstance(fobj, FILE_LIKE):
        close = False
    else:
        fobj = open(fobj, 'w')
        close = True
    try:
        if header:
            print('# Q Scan configuration file', file=fobj)
            print('# Generated with GWpy from a ChannelList', file=fobj)
        group = None
        for channel in channellist:
            if channel.group != group:
                group = channel.group
                print('\n[%s]' % group, file=fobj)
            # blank line before each channel stanza
            print("", file=fobj)
            print_omega_channel(channel, file=fobj)
    finally:
        if close:
            fobj.close()
Write a ChannelList to an Omega - pipeline scan configuration file
22,362
def print_omega_channel(channel, file=sys.stdout):
    """Print a `Channel` in Omega-pipeline scan format.

    Emits a ``{ key: value ... }`` stanza built from ``channel.params``
    (if present) plus defaults derived from the channel's attributes.

    Parameters
    ----------
    channel : `Channel`
        the channel to serialise.
    file : file-like, optional
        output stream, defaults to `sys.stdout`.

    Raises
    ------
    KeyError
        if ``channelName`` or ``frameType`` cannot be determined.
    """
    print('{', file=file)
    try:
        params = channel.params.copy()
    except AttributeError:
        # channel has no stored params; build from attributes only
        params = OrderedDict()
    params.setdefault('channelName', str(channel))
    # 'important' flag is stored as 'alwaysPlotFlag' (0/1) in Omega format
    params.setdefault('alwaysPlotFlag', int(params.pop('important', False)))
    if channel.frametype:
        params.setdefault('frameType', channel.frametype)
    if channel.sample_rate is not None:
        params.setdefault('sampleFrequency',
                          channel.sample_rate.to('Hz').value)
    if channel.frequency_range is not None:
        low, high = channel.frequency_range.to('Hz').value
        params.setdefault('searchFrequencyRange', (low, high))
    # merge separate qlow/qhigh params into a single searchQRange
    if 'qlow' in params or 'qhigh' in params:
        qlow = params.pop('qlow', 'sqrt(11)')
        qhigh = params.pop('qhigh', 64)
        params.setdefault('searchQRange', (qlow, qhigh))
    # these keys are mandatory in the Omega format
    for key in ['channelName', 'frameType']:
        if key not in params:
            raise KeyError("No %r defined for %s" % (key, str(channel)))
    for key, value in params.items():
        key = '%s:' % str(key)
        if isinstance(value, tuple):
            value = '[%s]' % ' '.join(map(str, value))
        elif isinstance(value, float) and value.is_integer():
            value = int(value)
        elif isinstance(value, str):
            value = repr(value)
        print(' {0: <30} {1}'.format(key, value), file=file)
    print('}', file=file)
Print a Channel in Omega - pipeline scan format
22,363
def _get_nds2_name ( channel ) : if hasattr ( channel , 'ndsname' ) : return channel . ndsname if hasattr ( channel , 'channel_type' ) : return '%s,%s' % ( channel . name , channel . channel_type_to_string ( channel . channel_type ) ) return str ( channel )
Returns the NDS2 - formatted name for a channel
22,364
def parse_nds_env(env='NDSSERVER'):
    """Parse the ``NDSSERVER`` environment variable into a list of hosts.

    The variable is a comma-separated list of ``host[:port]`` entries;
    duplicates are removed while preserving order.

    Parameters
    ----------
    env : `str`, optional
        name of the environment variable to parse.

    Returns
    -------
    hosts : `list` of `(str, int or None)`
        ``(host, port)`` pairs; ``port`` is `None` when not given.
    """
    hosts = []
    for entry in os.getenv(env).split(','):
        host, sep, port = entry.rpartition(':')
        if sep:
            port = int(port)
        else:
            host, port = entry, None
        pair = (host, port)
        if pair not in hosts:
            hosts.append(pair)
    return hosts
Parse the NDSSERVER environment variable into a list of hosts
22,365
def connect(host, port=None):
    """Open an `nds2.connection` to a given host and port.

    Parameters
    ----------
    host : `str`
        the server hostname.
    port : `int`, optional
        the server port; defaults to 8088 for hosts matching the NDS1
        hostname pattern, otherwise the nds2 default.

    Returns
    -------
    connection : `nds2.connection`
    """
    import nds2
    if port is None and NDS1_HOSTNAME.match(host):
        # NDS1 servers listen on 8088 by convention
        port = 8088
    args = (host,) if port is None else (host, port)
    return nds2.connection(*args)
Open an nds2 . connection to a given host and port
22,366
def auth_connect(host, port=None):
    """Open an `nds2.connection`, handling simple authentication errors.

    If the first attempt fails with a SASL authentication error, a
    warning is emitted, Kerberos credentials are (re)acquired via
    ``kinit``, and the connection is retried once.
    """
    try:
        return connect(host, port)
    except RuntimeError as exc:
        # anything other than an auth failure is fatal
        if 'Request SASL authentication' not in str(exc):
            raise
        warnings.warn('Error authenticating against {0}:{1}'.format(
            host, port), NDSWarning)
        kinit()
        return connect(host, port)
Open an nds2 . connection handling simple authentication errors
22,367
def open_connection(func):
    """Decorate ``func`` to create an `nds2.connection` if required.

    If the wrapped function is called without a ``connection`` keyword,
    one is opened from the ``host`` (and optional ``port``) keywords;
    a `TypeError` is raised when neither is available.
    """
    @wraps(func)
    def wrapper(*args, **kwargs):
        if kwargs.get('connection', None) is None:
            try:
                host = kwargs.pop('host')
            except KeyError:
                raise TypeError("one of `connection` or `host` is required "
                                "to query NDS2 server")
            port = kwargs.pop('port', None)
            kwargs['connection'] = auth_connect(host, port)
        return func(*args, **kwargs)
    return wrapper
Decorate a function to create a nds2 . connection if required
22,368
def parse_nds2_enums(func):
    """Decorate ``func`` to translate ``type``/``dtype`` keywords into
    NDS2 enum integer values.

    `None` becomes the 'any' mask; non-integer values are looked up via
    the matching enum's ``find``; integers pass through unchanged.
    """
    @wraps(func)
    def wrapper(*args, **kwargs):
        for key, enum_ in (('type', Nds2ChannelType),
                           ('dtype', Nds2DataType)):
            value = kwargs.get(key, None)
            if value is None:
                kwargs[key] = enum_.any()
            elif not isinstance(value, int):
                kwargs[key] = enum_.find(value).value
        return func(*args, **kwargs)
    return wrapper
Decorate a function to translate a type string into an integer
22,369
def reset_epoch(func):
    """Decorate ``func`` to restore the connection's epoch on exit.

    The current epoch of the ``connection`` keyword (if any) is recorded
    before calling ``func`` and restored afterwards, even on error.
    """
    @wraps(func)
    def wrapper(*args, **kwargs):
        connection = kwargs.get('connection', None)
        saved = connection.current_epoch() if connection else None
        try:
            return func(*args, **kwargs)
        finally:
            if saved is not None:
                connection.set_epoch(saved.gps_start, saved.gps_stop)
    return wrapper
Wrap a function to reset the epoch when finished
22,370
def find_channels(channels, connection=None, host=None, port=None,
                  sample_rate=None, type=Nds2ChannelType.any(),
                  dtype=Nds2DataType.any(), unique=False, epoch='ALL'):
    """Query an NDS2 server for channel information.

    Parameters
    ----------
    channels : `list`
        channel names (or objects) to look up.
    connection : `nds2.connection`, optional
        an open connection; presumably supplied by an ``open_connection``
        decorator when only ``host``/``port`` are given — this body
        dereferences ``connection`` unconditionally.
    sample_rate : `float` or `tuple`, optional
        a single rate (matched exactly) or a ``(min, max)`` range.
    type, dtype : optional
        NDS2 channel-type / data-type masks.
    unique : `bool`, optional
        if `True`, require exactly one match per name.
    epoch : `str` or `tuple`, optional
        the NDS2 epoch to search
        (NOTE(review): default is 'ALL' but the fallback below uses
        'All' — confirm which spelling the server expects).

    Returns
    -------
    channels : `list` of `nds2.channel`
    """
    if not isinstance(epoch, tuple):
        epoch = (epoch or 'All',)
    connection.set_epoch(*epoch)
    # a scalar sample_rate means an exact-match (rate, rate) range
    if isinstance(sample_rate, (int, float)):
        sample_rate = (sample_rate, sample_rate)
    elif sample_rate is None:
        sample_rate = tuple()
    out = []
    for name in _get_nds2_names(channels):
        out.extend(_find_channel(connection, name, type, dtype, sample_rate,
                                 unique=unique))
    return out
Query an NDS2 server for channel information
22,371
def _find_channel(connection, name, ctype, dtype, sample_rate, unique=False):
    """Find matches for a single channel name on an NDS2 connection.

    Parameters
    ----------
    connection : `nds2.connection`
        the open connection to query.
    name : `str`
        the channel name (possibly with a trailing ``,ctype`` suffix).
    ctype, dtype : `int`
        NDS2 type masks.
    sample_rate : `tuple`
        empty, or a ``(min, max)`` range.
    unique : `bool`, optional
        require exactly one match, raising `ValueError` otherwise.

    Returns
    -------
    channels : `list` of `nds2.channel`
    """
    name, ctype = _strip_ctype(name, ctype, connection.get_protocol())
    matches = connection.find_channels(name, ctype, dtype, *sample_rate)
    if not unique:
        return matches
    # a raw/online pair is considered a unique match: prefer the
    # non-online entry
    if len(matches) == 2:
        matches = [c for c in matches
                   if c.channel_type != Nds2ChannelType.ONLINE.value]
    if len(matches) != 1:
        raise ValueError("unique NDS2 channel match not found for %r" % name)
    return matches
Internal method to find a single channel
22,372
def _strip_ctype(name, ctype, protocol=2):
    """Strip the channel-type suffix from a name for the given NDS server.

    ``'X1:CHAN,m-trend'`` becomes ``('X1:CHAN', <MTREND value>)``; for
    NDS1 (protocol 1) trend channels the suffix is kept in the name
    because the server expects it there.

    Returns
    -------
    (name, ctype) : `(str, int)`
    """
    if ',' in name:
        name, suffix = name.rsplit(',', 1)
        ctype = Nds2ChannelType.find(suffix).value
        # NDS1 wants the trend suffix left on the name
        if protocol == 1 and ctype in (Nds2ChannelType.STREND.value,
                                       Nds2ChannelType.MTREND.value):
            name += ',{0}'.format(suffix)
    return name, ctype
Strip the ctype from a channel name for the given nds server version
22,373
def get_availability(channels, start, end, connection=None, host=None,
                     port=None):
    """Query an NDS2 server for data availability.

    Parameters
    ----------
    channels : `list`
        the channels to query for.
    start, end : `int`
        GPS interval of interest.
    connection : `nds2.connection`, optional
        an open connection; this body dereferences it unconditionally,
        so presumably an ``open_connection`` decorator fills it in from
        ``host``/``port`` — TODO confirm.

    Returns
    -------
    segdict : `SegmentListDict`
        mapping of input channel -> `SegmentList` of available data.
    """
    from ..segments import (Segment, SegmentList, SegmentListDict)
    connection.set_epoch(start, end)
    # resolve each input to its canonical NDS2 name first
    names = list(map(
        _get_nds2_name,
        find_channels(channels, epoch=(start, end), connection=connection,
                      unique=True),
    ))
    result = connection.get_availability(names)
    out = SegmentListDict()
    # NOTE(review): the loop variable deliberately(?) shadows `result`
    for name, result in zip(channels, result):
        out[name] = SegmentList([Segment(s.gps_start, s.gps_stop)
                                 for s in result.simple_list()])
    return out
Query an NDS2 server for data availability
22,374
def minute_trend_times(start, end):
    """Expand a ``[start, end)`` interval outward to minute boundaries.

    ``start`` is floored and ``end`` is ceiled to the nearest multiple
    of 60, for use when querying minute-trend data.

    Returns
    -------
    (start, end) : `(int, int)`
    """
    istart = int(start)
    iend = int(end)
    if start % 60:
        istart = istart // 60 * 60
    if end % 60:
        iend = iend // 60 * 60 + 60
    return istart, iend
Expand a [ start end ) interval for use in querying for minute trends
22,375
def find ( cls , name ) : try : return cls . _member_map_ [ name ] except KeyError : for ctype in cls . _member_map_ . values ( ) : if ctype . name == name : return ctype raise ValueError ( '%s is not a valid %s' % ( name , cls . __name__ ) )
Returns the NDS2 channel type corresponding to the given name
22,376
def find(cls, dtype):
    """Return the NDS2 data-type member corresponding to ``dtype``.

    Accepts a member-map key, anything castable to a `numpy.dtype`
    (matched against each member's ``numpy_dtype``), or a raw member
    value (matched by identity).

    Raises
    ------
    ValueError
        if no member matches.
    """
    try:
        return cls._member_map_[dtype]
    except KeyError:
        try:
            # try to interpret the input as a numpy dtype
            dtype = numpy.dtype(dtype).type
        except TypeError:
            # not a dtype: match by raw member value (identity)
            for ndstype in cls._member_map_.values():
                if ndstype.value is dtype:
                    return ndstype
        else:
            # match by the member's declared numpy dtype
            for ndstype in cls._member_map_.values():
                if ndstype.value and ndstype.numpy_dtype is dtype:
                    return ndstype
        raise ValueError('%s is not a valid %s' % (dtype, cls.__name__))
Returns the NDS2 type corresponding to the given python type
22,377
def reconnect(connection):
    """Open a new datafind connection based on an existing connection.

    `FflConnection` objects are rebuilt from their FFL directory;
    HTTP(S) connections are rebuilt from host/port, carrying the SSL
    context over for non-port-80 (HTTPS) servers.
    """
    if isinstance(connection, FflConnection):
        return type(connection)(connection.ffldir)
    if connection.port == 80:
        kwargs = {}
    else:
        # preserve the SSL context for secure connections
        kwargs = {'context': connection._context}
    return connection.__class__(connection.host, port=connection.port,
                                **kwargs)
Open a new datafind connection based on an existing connection
22,378
def _type_priority(ifo, ftype, trend=None):
    """Prioritise the given GWF type based on its name or trend status.

    Returns a ``(priority, len(ftype))`` tuple for use as a sort key
    (lower sorts first).  ``ifo`` is currently unused.

    Parameters
    ----------
    ifo : `str`
        observatory prefix (unused here).
    ftype : `str`
        the frame-type name to rank.
    trend : `str`, optional
        'm-trend' or 's-trend' to give matching trend types top priority.
    """
    # a requested trend type that matches its regex wins outright
    for trendname, trend_regex in [('m-trend', MINUTE_TREND_TYPE),
                                   ('s-trend', SECOND_TREND_TYPE)]:
        if trend == trendname and trend_regex.match(ftype):
            return 0, len(ftype)
    # otherwise rank by the first matching pattern
    for reg, prio in {
            HIGH_PRIORITY_TYPE: 1,
            re.compile(r'[A-Z]\d_C'): 6,
            LOW_PRIORITY_TYPE: 10,
            MINUTE_TREND_TYPE: 10,
            SECOND_TREND_TYPE: 10,
    }.items():
        if reg.search(ftype):
            return prio, len(ftype)
    # no pattern matched: middling priority
    return 5, len(ftype)
Prioritise the given GWF type based on its name or trend status .
22,379
def on_tape(*files):
    """Determine whether any of the given files are on tape.

    A file is considered to be on tape when ``st_blocks`` is zero
    (allocated but with no resident data blocks).

    Returns
    -------
    `bool`
        `True` if any file looks tape-resident; `False` otherwise, or
        on platforms without ``st_blocks`` (e.g. Windows).
    """
    for path in files:
        try:
            nblocks = os.stat(path).st_blocks
        except AttributeError:
            # platform doesn't expose st_blocks: assume nothing on tape
            return False
        if nblocks == 0:
            return True
    return False
Determine whether any of the given files are on tape
22,380
def with_connection(func):
    """Decorate ``func`` to open a datafind connection if required.

    A missing ``connection`` keyword is filled from ``host``/``port``;
    an `HTTPException` (stale connection) triggers a single reconnect
    and retry.
    """
    @wraps(func)
    def wrapper(*args, **kwargs):
        if kwargs.get('connection') is None:
            kwargs['connection'] = _choose_connection(
                host=kwargs.get('host'), port=kwargs.get('port'))
        try:
            return func(*args, **kwargs)
        except HTTPException:
            # the connection may have gone stale: rebuild and retry once
            kwargs['connection'] = reconnect(kwargs['connection'])
            return func(*args, **kwargs)
    return wrapper
Decorate a function to open a new datafind connection if required
22,381
def find_best_frametype(channel, start, end, frametype_match=None,
                        allow_tape=True, connection=None, host=None,
                        port=None):
    """Intelligently select the best frametype from which to read a channel.

    First tries for a frametype with full coverage of ``[start, end)``;
    if none exists, falls back to the best partially-covering type.

    Parameters
    ----------
    channel : `str` or `list`
        the channel(s) of interest.
    start, end : `int`
        GPS interval.
    frametype_match : `str`, optional
        regular expression to restrict candidate types.
    allow_tape : `bool`, optional
        include types whose files live on tape.

    Returns
    -------
    frametype : `str` or `dict`
        a single type, or a mapping channel -> type when multiple
        channels were given.

    Raises
    ------
    ValueError
        if no valid frametype can be found at all.
    """
    try:
        # first pass: require gap-free coverage
        return find_frametype(channel, gpstime=(start, end),
                              frametype_match=frametype_match,
                              allow_tape=allow_tape, on_gaps='error',
                              connection=connection, host=host, port=port)
    except RuntimeError:
        # gaps found: take the highest-priority partial match instead
        ftout = find_frametype(channel, gpstime=(start, end),
                               frametype_match=frametype_match,
                               return_all=True, allow_tape=allow_tape,
                               on_gaps='ignore', connection=connection,
                               host=host, port=port)
        try:
            if isinstance(ftout, dict):
                return {key: ftout[key][0] for key in ftout}
            return ftout[0]
        except IndexError:
            raise ValueError(
                "Cannot find any valid frametypes for channel(s)")
Intelligently select the best frametype from which to read this channel
22,382
def find_types(observatory, match=None, trend=None, connection=None,
               **connection_kw):
    """Find the available data types for a given observatory.

    Results are sorted by `_type_priority` so preferred types come first.

    Parameters
    ----------
    observatory : `str`
        observatory prefix, e.g. ``'L'``.
    match : `str`, optional
        regular expression to restrict the type names.
    trend : `str`, optional
        trend preference passed to `_type_priority`.
    connection : optional
        an open datafind connection.

    Returns
    -------
    types : `list` of `str`
    """
    found = connection.find_types(observatory, match=match)
    return sorted(found,
                  key=lambda ftype: _type_priority(observatory, ftype,
                                                   trend=trend))
Find the available data types for a given observatory .
22,383
def find_urls(observatory, frametype, start, end, on_gaps='error',
              connection=None, **connection_kw):
    """Find the URLs of files of a given data type in a GPS interval.

    Parameters
    ----------
    observatory : `str`
        observatory prefix.
    frametype : `str`
        the data type to search for.
    start, end : `int`
        GPS interval.
    on_gaps : `str`, optional
        gap-handling policy forwarded to the connection.
    connection : optional
        an open datafind connection.

    Returns
    -------
    urls : `list` of `str`
    """
    return connection.find_urls(observatory, frametype, start, end,
                                on_gaps=on_gaps)
Find the URLs of files of a given data type in a GPS interval .
22,384
def ffl_path(self, site, frametype):
    """Return the path of the FFL file for the given site and frametype.

    If the ``(site, frametype)`` pair is not yet known, the FFL
    directory is re-scanned once before looking it up again (a missing
    pair after the rescan raises `KeyError`).
    """
    key = (site, frametype)
    if key not in self.paths:
        # refresh the path cache from disk
        self._find_paths()
    return self.paths[key]
Returns the path of the FFL file for the given site and frametype
22,385
def find_types(self, site=None, match=r'^(?!lastfile|spectro|\.).*'):
    """Return the list of known data types.

    Parameters
    ----------
    site : `str`, optional
        restrict results to a single site; `None` means all sites.
    match : `str` or `None`, optional
        regular expression filter; the default excludes bookkeeping
        entries (``lastfile``, ``spectro``, dot-prefixed names).

    Returns
    -------
    types : `list` of `str`
    """
    self._find_paths()
    tags = [tag for (site_, tag) in self.paths if site in (None, site_)]
    if match is None:
        return tags
    regex = re.compile(match)
    return list(filter(regex.search, tags))
Return the list of known data types .
22,386
def find_urls(self, site, frametype, gpsstart, gpsend, match=None,
              on_gaps='warn'):
    """Find all files of the given type in the ``[start, end)`` GPS interval.

    Parameters
    ----------
    site : `str`
        observatory prefix.
    frametype : `str`
        the data type to search for.
    gpsstart, gpsend : `int`
        GPS interval.
    match : `str`, optional
        regular expression to filter the returned paths.
    on_gaps : `str`, optional
        what to do if the found files don't cover the full interval:
        'ignore' (return anyway), 'warn' (default, warn and return), or
        anything else (raise `RuntimeError`).
    """
    span = Segment(gpsstart, gpsend)
    # entries from the FFL cache that overlap the requested span
    cache = [e for e in self._read_ffl_cache(site, frametype) if
             e.observatory == site and e.description == frametype and
             e.segment.intersects(span)]
    urls = [e.path for e in cache]
    # portions of the request not covered by any cache entry
    missing = SegmentList([span]) - cache_segments(cache)
    if match:
        match = re.compile(match)
        urls = list(filter(match.search, urls))
    if on_gaps == 'ignore' or not missing:
        return urls
    msg = 'Missing segments: \n{0}'.format('\n'.join(map(str, missing)))
    if on_gaps == 'warn':
        warnings.warn(msg)
        return urls
    raise RuntimeError(msg)
Find all files of the given type in the [ start end ) GPS interval .
22,387
def read_series(source, name, match=None):
    """Read a `Series` from LIGO_LW-XML.

    Parameters
    ----------
    source : `str` or file-like
        the LIGO_LW XML document to read.
    name : `str`
        the ``Name`` of the ``LIGO_LW`` element containing the Array.
    match : `dict`, optional
        mapping of ``Param`` name -> value to disambiguate between
        multiple matching elements.

    Returns
    -------
    series : `Series`

    Raises
    ------
    ValueError
        if zero or multiple matching ``LIGO_LW`` elements are found, or
        the Array has more than 2 dimensions.
    """
    from ligo.lw.ligolw import (LIGO_LW, Time, Array, Dim)
    from ligo.lw.param import get_param

    xmldoc = read_ligolw(source, contenthandler=series_contenthandler())

    if match is None:
        match = dict()

    def _is_match(elem):
        # an element matches if its Name and all requested Params agree
        try:
            if elem.Name != name:
                return False
        except AttributeError:  # Name not set on this element
            return False
        for key, value in match.items():
            try:
                if get_param(elem, key).pcdata != value:
                    return False
            except ValueError:  # no Param with this name
                return False
        return True

    # BUG FIX: materialise the matches; the original kept the `filter`
    # object, which is always truthy, so the "no LIGO_LW elements found"
    # message below was unreachable
    matches = list(filter(_is_match,
                          xmldoc.getElementsByTagName(LIGO_LW.tagName)))
    try:
        elem, = matches
    except ValueError as exc:
        if not matches:
            exc.args = ("no LIGO_LW elements found matching request",)
        else:
            exc.args = ('multiple LIGO_LW elements found matching request, '
                        'please consider using `match=` to select the '
                        'correct element',)
        raise

    # parse the Array and its Dim metadata
    array, = elem.getElementsByTagName(Array.tagName)
    dims = array.getElementsByTagName(Dim.tagName)
    xdim = dims[0]
    x0 = xdim.Start
    dx = xdim.Scale
    xunit = xdim.Unit
    try:
        ndim = dims[1].n
    except IndexError:
        pass
    else:
        if ndim > 2:
            raise ValueError("Cannot parse LIGO_LW Array with {} "
                             "dimensions".format(ndim))

    # build the Series keywords
    array_kw = {
        'name': array.Name,
        'unit': array.Unit,
        'xunit': xunit,
    }
    try:
        array_kw['epoch'] = to_gps(
            elem.getElementsByTagName(Time.tagName)[0].pcdata)
    except IndexError:
        pass
    for key in ('channel',):
        try:
            array_kw[key] = get_param(elem, key)
        except ValueError:
            pass

    # a 2-row array carries its own x-index; a 1-row array uses x0/dx
    try:
        xindex, value = array.array
    except ValueError:
        return Series(array.array[0], x0=x0, dx=dx, **array_kw)
    return Series(value, xindex=xindex, **array_kw)
Read a Series from LIGO_LW - XML
22,388
def make_plot(self):
    """Generate the coherence plot from all time series.

    Groups the loaded time series by time span, computes the coherence
    of each against the reference channel, and plots each spectrum.

    Returns
    -------
    plot : `Plot`
        the populated figure.
    """
    args = self.args
    fftlength = float(args.secpfft)
    overlap = args.overlap
    self.log(2, "Calculating spectrum secpfft: %s, overlap: %s" %
             (fftlength, overlap))
    # overlap is given as a fraction of the FFT length
    if overlap is not None:
        overlap *= fftlength
    self.log(3, 'Reference channel: ' + self.ref_chan)
    # group series by their GPS span: {span: {channel name: series}}
    groups = OrderedDict()
    for series in self.timeseries:
        seg = series.span
        try:
            groups[seg][series.channel.name] = series
        except KeyError:
            groups[seg] = OrderedDict()
            groups[seg][series.channel.name] = series
    plot = Plot(figsize=self.figsize, dpi=self.dpi)
    ax = plot.gca()
    self.spectra = []
    for seg in groups:
        # pull out the reference series for this span
        refts = groups[seg].pop(self.ref_chan)
        for name in groups[seg]:
            series = groups[seg][name]
            coh = series.coherence(refts, fftlength=fftlength,
                                   overlap=overlap, window=args.window)
            label = name
            # disambiguate repeated channels across multiple spans
            if len(self.start_list) > 1:
                label += ', {0}'.format(series.epoch.gps)
            if self.usetex:
                label = label_to_latex(label)
            ax.plot(coh, label=label)
            self.spectra.append(coh)
    # a log x-axis cannot start at 0: use the lowest resolvable frequency
    if args.xscale == 'log' and not args.xmin:
        args.xmin = 1 / fftlength
    return plot
Generate the coherence plot from all time series
22,389
def set_legend(self):
    """Create a legend for this product, titled with the reference.

    Returns
    -------
    legend
        the legend artist, or `None` if the parent created none.
    """
    legend = super(Coherence, self).set_legend()
    if legend is not None:
        legend.set_title('Coherence with:')
    return legend
Create a legend for this product
22,390
def parse_unit(name, parse_strict='warn', format='gwpy'):
    """Attempt to intelligently parse a `str` as a `~astropy.units.Unit`.

    Parameters
    ----------
    name : `str` or `~astropy.units.UnitBase` or `None`
        the unit to parse; units and `None` pass straight through.
    parse_strict : `str`, optional
        'raise' to error on unrecognised units, 'warn' (default) to warn
        once, 'silent' to say nothing.
    format : `str`, optional
        the unit format used for the lenient re-parse.

    Returns
    -------
    unit : `~astropy.units.UnitBase` or `None`
    """
    if name is None or isinstance(name, units.UnitBase):
        return name
    # known problem strings are mapped to units directly
    try:
        return UNRECOGNIZED_UNITS[name]
    except KeyError:
        try:
            return units.Unit(name, parse_strict='raise')
        except ValueError as exc:
            # only fall back for genuine parse failures (unless strict)
            if (parse_strict == 'raise' or
                    'did not parse as unit' not in str(exc)):
                raise
            # configure whether the lenient format emits a warning
            GWpyFormat.warn = parse_strict != 'silent'
            return units.Unit(name, parse_strict='silent', format=format)
        finally:
            # always restore warning behaviour for subsequent calls
            GWpyFormat.warn = True
Attempt to intelligently parse a str as a ~astropy . units . Unit
22,391
def _row_from_frevent(frevent, columns, selection):
    """Generate a table row from an FrEvent.

    Parameters
    ----------
    frevent : `frameCPP.FrEvent`
        the event to convert.
    columns : `list` of `str`
        the fields to extract, in order.
    selection : iterable of `(column, operator, threshold)`
        filters; the row is dropped unless every filter passes.

    Returns
    -------
    row : `list` or `None`
        the extracted values, or `None` if the event fails a filter.
    """
    params = dict(frevent.GetParam())
    params.update({
        'time': float(LIGOTimeGPS(*frevent.GetGTime())),
        'amplitude': frevent.GetAmplitude(),
        'probability': frevent.GetProbability(),
        'timeBefore': frevent.GetTimeBefore(),
        'timeAfter': frevent.GetTimeAfter(),
        'comment': frevent.GetComment(),
    })
    # apply the row filters
    for column, op_, threshold in selection:
        if not op_(params[column], threshold):
            return None
    return [params[column] for column in columns]
Generate a table row from an FrEvent
22,392
def table_to_gwf(table, filename, name, **kwargs):
    """Create a new `~frameCPP.FrameH` and fill it with event data.

    Each row of ``table`` becomes an ``FrEvent`` in the frame, which is
    then written to ``filename``.

    Parameters
    ----------
    table : `Table`
        the events to write; recognised columns ('time', 'comment',
        'inputs', 'timeBefore', 'timeAfter', 'eventStatus', 'amplitude',
        'probability', 'statistics') map to FrEvent fields, everything
        else is stored as FrEvent parameters.
    filename : `str`
        the output GWF path.
    name : `str`
        the name of the frame (and of each event).
    **kwargs
        'compression' and 'compression_level' are forwarded to the
        writer; the rest go to ``create_frame``.
    """
    from LDAStools.frameCPP import (FrEvent, GPSTime)
    # split the writer-specific keywords out of kwargs
    write_kw = {key: kwargs.pop(key)
                for key in ('compression', 'compression_level')
                if key in kwargs}
    # create frame
    frame = io_gwf.create_frame(name=name, **kwargs)
    # append row by row
    names = table.dtype.names
    for row in table:
        rowd = dict((n, row[n]) for n in names)
        gps = LIGOTimeGPS(rowd.pop('time', 0))
        frame.AppendFrEvent(FrEvent(
            str(name),
            str(rowd.pop('comment', '')),
            str(rowd.pop('inputs', '')),
            GPSTime(gps.gpsSeconds, gps.gpsNanoSeconds),
            float(rowd.pop('timeBefore', 0)),
            float(rowd.pop('timeAfter', 0)),
            int(rowd.pop('eventStatus', 0)),
            float(rowd.pop('amplitude', 0)),
            float(rowd.pop('probability', -1)),
            str(rowd.pop('statistics', '')),
            # remaining columns become FrEvent parameters
            list(rowd.items()),
        ))
    # write frame to file
    io_gwf.write_frames(filename, [frame], **write_kw)
Create a new ~frameCPP . FrameH and fill it with data
22,393
def read(cls, source, *args, **kwargs):
    """Read data into a `FrequencySeries`.

    All positional and keyword arguments are forwarded to the unified
    I/O registry, which dispatches on the source/format.
    """
    return io_registry.read(cls, source, *args, **kwargs)
Read data into a FrequencySeries
22,394
def ifft(self):
    """Compute the 1-D discrete inverse Fourier transform of this series.

    Returns
    -------
    out : `TimeSeries`
        the time-domain series, with ``dx = 1 / (df * nout)`` and the
        same epoch, channel and unit as this series.
    """
    from ..timeseries import TimeSeries
    # number of output time samples for a one-sided spectrum
    nout = (self.size - 1) * 2
    # Undo normalisation from TimeSeries.fft
    # The DC component does not have the factor of two applied
    # so we account for it here
    # (NOTE(review): assumes the forward transform divided by nout and
    # doubled the non-DC bins — confirm against TimeSeries.fft)
    dift = npfft.irfft(self.value * nout) / 2
    new = TimeSeries(dift, epoch=self.epoch, channel=self.channel,
                     unit=self.unit, dx=1/self.dx/nout)
    return new
Compute the one - dimensional discrete inverse Fourier transform of this FrequencySeries .
22,395
def interpolate(self, df):
    """Interpolate this `FrequencySeries` to a new frequency resolution.

    Parameters
    ----------
    df : `float`
        the target frequency spacing (Hz).

    Returns
    -------
    out : `FrequencySeries`
        a new series linearly interpolated onto the new frequency grid,
        spanning the same band as this one.
    """
    f0 = self.f0.decompose().value
    # number of samples needed to span the same band at the new df
    N = (self.size - 1) * (self.df.decompose().value / df) + 1
    fsamples = numpy.arange(0, numpy.rint(N), dtype=self.dtype) * df + f0
    out = type(self)(numpy.interp(fsamples, self.frequencies.value,
                                  self.value))
    # copy metadata from this series, then fix the frequency metadata
    out.__array_finalize__(self)
    out.f0 = f0
    out.df = df
    return out
Interpolate this FrequencySeries to a new resolution .
22,396
def from_lal(cls, lalfs, copy=True):
    """Generate a new `FrequencySeries` from a LAL FrequencySeries.

    Parameters
    ----------
    lalfs
        the LAL frequency series (any numeric type).
    copy : `bool`, optional
        if `True` (default), copy the data array.

    Returns
    -------
    series : `FrequencySeries`
    """
    from ..utils.lal import from_lal_unit
    try:
        unit = from_lal_unit(lalfs.sampleUnits)
    except TypeError:
        # units that cannot be translated are dropped
        unit = None
    data = lalfs.data.data
    channel = Channel(lalfs.name, unit=unit, dtype=data.dtype)
    return cls(data, channel=channel, f0=lalfs.f0, df=lalfs.deltaF,
               epoch=float(lalfs.epoch), dtype=data.dtype, copy=copy)
Generate a new FrequencySeries from a LAL FrequencySeries of any type
22,397
def from_pycbc(cls, fs, copy=True):
    """Convert a `pycbc.types.frequencyseries.FrequencySeries` into a
    `FrequencySeries`.

    Parameters
    ----------
    fs
        the pycbc frequency series to convert.
    copy : `bool`, optional
        if `True` (default), copy the data array.

    Returns
    -------
    series : `FrequencySeries`
    """
    metadata = {'f0': 0, 'df': fs.delta_f, 'epoch': fs.epoch, 'copy': copy}
    return cls(fs.data, **metadata)
Convert a pycbc . types . frequencyseries . FrequencySeries into a FrequencySeries
22,398
def to_pycbc(self, copy=True):
    """Convert this series into a
    `~pycbc.types.frequencyseries.FrequencySeries`.

    Parameters
    ----------
    copy : `bool`, optional
        if `True` (default), copy the data array.

    Returns
    -------
    fs : `pycbc.types.frequencyseries.FrequencySeries`
    """
    from pycbc import types
    # pycbc expects the epoch as a GPS float (or None)
    epoch = None if self.epoch is None else self.epoch.gps
    return types.FrequencySeries(self.value,
                                 delta_f=self.df.to('Hz').value,
                                 epoch=epoch, copy=copy)
Convert this FrequencySeries into a ~pycbc . types . frequencyseries . FrequencySeries
22,399
def _fetch_losc_data_file(url, *args, **kwargs):
    """Fetch a single LOSC/GWOSC file and return it as a `Series`.

    Parameters
    ----------
    url : `str`
        the remote file URL (.hdf5, .txt, or .gwf, possibly gzipped).
    *args
        extra positional arguments for ``cls.read`` (e.g. channel name
        for GWF files; auto-detected if omitted).
    **kwargs
        'cls' (series class, default `TimeSeries`), 'cache', 'verbose'
        are consumed here; the rest go to ``cls.read``.

    Returns
    -------
    series : instance of ``cls``

    Raises
    ------
    Exception
        whatever ``cls.read`` raises, re-labelled with the URL.
    """
    cls = kwargs.pop('cls', TimeSeries)
    cache = kwargs.pop('cache', None)
    verbose = kwargs.pop('verbose', False)
    # match file format from extension (ignoring a .gz suffix)
    if url.endswith('.gz'):
        ext = os.path.splitext(url[:-3])[-1]
    else:
        ext = os.path.splitext(url)[-1]
    if ext == '.hdf5':
        kwargs.setdefault('format', 'hdf5.losc')
    elif ext == '.txt':
        kwargs.setdefault('format', 'ascii.losc')
    elif ext == '.gwf':
        kwargs.setdefault('format', 'gwf')
    with _download_file(url, cache, verbose=verbose) as rem:
        # GWF files need a channel name; detect one if not given
        if ext == ".gwf" and (not args or args[0] is None):
            args = (_gwf_channel(rem, cls, kwargs.get("verbose")),)
        if verbose:
            print('Reading data...', end=' ')
        try:
            series = cls.read(rem, *args, **kwargs)
        except Exception as exc:
            if verbose:
                print('')
            # re-label the error with the URL for easier debugging
            exc.args = ("Failed to read LOSC data from %r: %s"
                        % (url, str(exc)),)
            raise
        else:
            # LOSC state vectors store their bit definitions in the
            # unit string as 'index:name' pairs; unpack them
            if ext == '.gwf' and isinstance(series, StateVector):
                try:
                    bits = {}
                    for bit in str(series.unit).split():
                        a, b = bit.split(':', 1)
                        bits[int(a)] = b
                    series.bits = bits
                    series.override_unit('')
                except (TypeError, ValueError):
                    # unit isn't a bit-definition string: leave as-is
                    pass
            if verbose:
                print('[Done]')
            return series
Internal function for fetching a single LOSC file and returning a Series