| bugged (string, 4 to 228k chars) | fixed (string, 0 to 96.3M chars) | __index_level_0__ (int64, 0 to 481k) |
|---|---|---|
def populate_SkyLocTable(skyloctable,coinc,adt60,adt90,adt60dD60,adt90dD90,\ pt,grid_fname,skymap_fname=None): """ populate a row in a skyloctable """ row = skyloctable.RowType() row.end_time = coinc.time row.set_ifos(coinc.ifo_list) rhosquared = 0.0 for ifo in coinc.ifo_list: rhosquared += coinc.snr[ifo]*coinc.snr[if... | def populate_SkyLocTable(skyloctable,coinc,adt60,adt90,arank60,arank90,\ pt,grid_fname,skymap_fname=None): """ populate a row in a skyloctable """ row = skyloctable.RowType() row.end_time = coinc.time row.set_ifos(coinc.ifo_list) rhosquared = 0.0 for ifo in coinc.ifo_list: rhosquared += coinc.snr[ifo]*coinc.snr[ifo] r... | 479,700 |
def populate_SkyLocTable(skyloctable,coinc,adt60,adt90,adt60dD60,adt90dD90,\ pt,grid_fname,skymap_fname=None): """ populate a row in a skyloctable """ row = skyloctable.RowType() row.end_time = coinc.time row.set_ifos(coinc.ifo_list) rhosquared = 0.0 for ifo in coinc.ifo_list: rhosquared += coinc.snr[ifo]*coinc.snr[if... | def populate_SkyLocTable(skyloctable,coinc,adt60,adt90,adt60dD60,adt90dD90,\ pt,grid_fname,skymap_fname=None): """ populate a row in a skyloctable """ row = skyloctable.RowType() row.end_time = coinc.time row.set_ifos(coinc.ifo_list) rhosquared = 0.0 for ifo in coinc.ifo_list: rhosquared += coinc.snr[ifo]*coinc.snr[if... | 479,701 |
def __init__(self, *args): pipeline.LigolwAddNode.__init__(self, *args) self.input_cache = [] self.output_cache = [] self.cache_dir = os.path.join(os.getcwd(), self.job().cache_dir) | def __init__(self, job, remove_input, *args): pipeline.LigolwAddNode.__init__(self, job, *args) self.input_cache = [] self.output_cache = [] self.cache_dir = os.path.join(os.getcwd(), self.job().cache_dir) | 479,702 |
def add_preserve_cache(self, cache): for c in cache: self.add_var_arg("--remove-input-except %s" % c.path()) | def add_preserve_cache(self, cache): for c in cache: self.add_var_arg("--remove-input-except %s" % c.path()) | 479,703 |
def make_lladd_fragment(dag, parents, tag, segment = None, input_cache = None, preserve_cache = None, extra_input_cache = None): node = LigolwAddNode(lladdjob) # link to parents for parent in parents: node.add_parent(parent) # build input cache if input_cache is None: # default is to use all output files from parents... | def make_lladd_fragment(dag, parents, tag, segment = None, input_cache = None, remove_input = False, preserve_cache = None, extra_input_cache = None): node = LigolwAddNode(lladdjob, remove_input = remove_input) # link to parents for parent in parents: node.add_parent(parent) # build input cache if input_cache is None... | 479,704 |
def make_multibinj_fragment(dag, seg, tag): flow = float(powerjob.get_opts()["low-freq-cutoff"]) fhigh = flow + float(powerjob.get_opts()["bandwidth"]) nodes = make_binj_fragment(dag, seg, tag, 0.0, flow, fhigh) return make_lladd_fragment(dag, nodes, tag) | def make_multibinj_fragment(dag, seg, tag): flow = float(powerjob.get_opts()["low-freq-cutoff"]) fhigh = flow + float(powerjob.get_opts()["bandwidth"]) nodes = make_binj_fragment(dag, seg, tag, 0.0, flow, fhigh) return make_lladd_fragment(dag, nodes, tag, remove_input = True) | 479,705 |
def get_coincs_from_coire(self,files): """ uses CoincInspiralUtils to get data from old-style (coire'd) coincs """ coincTrigs = CoincInspiralUtils.coincInspiralTable() inspTrigs = SnglInspiralUtils.ReadSnglInspiralFromFiles(files, \ mangle_event_id = True,verbose=None) #note that it's hardcoded to use snr as the statis... | def get_coincs_from_coire(self,files): """ uses CoincInspiralUtils to get data from old-style (coire'd) coincs """ coincTrigs = CoincInspiralUtils.coincInspiralTable() inspTrigs = SnglInspiralUtils.ReadSnglInspiralFromFiles(files, \ mangle_event_id = True,verbose=None) #note that it's hardcoded to use snr as the statis... | 479,706 |
def gridsky(resolution): """ grid the sky up into roughly square regions resolution is the length of a side the points get placed at the center of the squares and to first order each square has an area of resolution^2 """ latitude = 0.0 longitude = pi ds = pi*sqrt(2.0)*resolution/180.0 points = [(latitude-0.5*pi, longi... | def gridsky(resolution): """ grid the sky up into roughly square regions resolution is the length of a side the points get placed at the center of the squares and to first order each square has an area of resolution^2 """ latitude = 0.0 longitude = pi ds = pi*resolution/180.0 points = [(latitude-0.5*pi, longitude)] whi... | 479,707 |
def map_grids(coarsegrid,finegrid,coarseres=4.0): """ takes the two grids (lists of lat/lon tuples) and returns a dictionary where the points in the coarse grid are the keys and lists of tuples of points in the fine grid are the values """ fgtemp = finegrid coarsedict = {} ds = coarseres*pi/180 for cpt in coarsegrid: f... | def map_grids(coarsegrid,finegrid,coarseres=4.0): """ takes the two grids (lists of lat/lon tuples) and returns a dictionary where the points in the coarse grid are the keys and lists of tuples of points in the fine grid are the values """ fgtemp = finegrid[:] coarsedict = {} ds = coarseres*pi/180 for cpt in coarsegrid... | 479,708 |
def map_grids(coarsegrid,finegrid,coarseres=4.0): """ takes the two grids (lists of lat/lon tuples) and returns a dictionary where the points in the coarse grid are the keys and lists of tuples of points in the fine grid are the values """ fgtemp = finegrid coarsedict = {} ds = coarseres*pi/180 for cpt in coarsegrid: f... | def map_grids(coarsegrid,finegrid,coarseres=4.0): """ takes the two grids (lists of lat/lon tuples) and returns a dictionary where the points in the coarse grid are the keys and lists of tuples of points in the fine grid are the values """ fgtemp = finegrid coarsedict = {} ds = coarseres*pi/180.0 for cpt in coarsegrid:... | 479,709 |
def map_grids(coarsegrid,finegrid,coarseres=4.0): """ takes the two grids (lists of lat/lon tuples) and returns a dictionary where the points in the coarse grid are the keys and lists of tuples of points in the fine grid are the values """ fgtemp = finegrid coarsedict = {} ds = coarseres*pi/180 for cpt in coarsegrid: f... | def map_grids(coarsegrid,finegrid,coarseres=4.0): """ takes the two grids (lists of lat/lon tuples) and returns a dictionary where the points in the coarse grid are the keys and lists of tuples of points in the fine grid are the values """ fgtemp = finegrid coarsedict = {} ds = coarseres*pi/180 for cpt in coarsegrid: f... | 479,710 |
def IMRpeakAmp(m1,m2,spin1z,spin2z,d): """ IMRpeakAmp finds the peak amplitude of the waveform for a given source parameters and the source distance. usage: IMRpeakAmp(m1,m2,spin1z,spin2z,distance) e.g. spawaveApp.IMRpeakAmp(30,40,0.45,0.5,100) """ chi = spawaveform.computechi(m1, m2, spin1z, spin2z) imrfFinal = spa... | def IMRpeakAmp(m1,m2,spin1z,spin2z,d): """ IMRpeakAmp finds the peak amplitude of the waveform for a given source parameters and the source distance. usage: IMRpeakAmp(m1,m2,spin1z,spin2z,distance) e.g. spawaveApp.IMRpeakAmp(30,40,0.45,0.5,100) """ chi = spawaveform.computechi(m1, m2, spin1z, spin2z) imrfFinal = spa... | 479,711 |
def add_noninjections(self, param_func, database, *args): # iterate over burst<-->burst coincs cursor = database.connection.cursor() for coinc_event_id, time_slide_id in database.connection.cursor().execute(""" | def add_noninjections(self, param_func, database, *args): # iterate over burst<-->burst coincs cursor = database.connection.cursor() for coinc_event_id, time_slide_id in database.connection.cursor().execute(""" | 479,712 |
def get_username(): """ Try to retrieve the username from a variety of sources. First the environment variable LOGNAME is tried, if that is not set the environment variable USERNAME is tried, if that is not set the password database is consulted (only on Unix systems, if the import of the pwd module succedes), finally... | def get_username(): """ Try to retrieve the username from a variety of sources. First the environment variable LOGNAME is tried, if that is not set the environment variable USERNAME is tried, if that is not set the password database is consulted (only on Unix systems, if the import of the pwd module succedes), finally... | 479,713 |
def __readCache__(self,cacheListing=list()): """ Simple mehtod to read in a cache or list of cache files and return a list of files or an empty list if nothing found """ #Open the cache entry and search for those entrys fileListing=list() for entry in cacheListing: #Cache files listed themselves comment out following l... | def __readCache__(self,cacheListing=list()): """ Simple mehtod to read in a cache or list of cache files and return a list of files or an empty list if nothing found. It uses the pathing information from the files passed via cacheListing to aid in our filesystem search. """ #Open the cache entry and search for those en... | 479,714 |
def __readCache__(self,cacheListing=list()): """ Simple mehtod to read in a cache or list of cache files and return a list of files or an empty list if nothing found """ #Open the cache entry and search for those entrys fileListing=list() for entry in cacheListing: #Cache files listed themselves comment out following l... | def __readCache__(self,cacheListing=list()): """ Simple mehtod to read in a cache or list of cache files and return a list of files or an empty list if nothing found """ #Open the cache entry and search for those entrys finalList=list() for entry in cacheListing: #Cache files listed themselves comment out following lin... | 479,715 |
def __readCache__(self,cacheListing=list()): """ Simple mehtod to read in a cache or list of cache files and return a list of files or an empty list if nothing found """ #Open the cache entry and search for those entrys fileListing=list() for entry in cacheListing: #Cache files listed themselves comment out following l... | def __readCache__(self,cacheListing=list()): """ Simple mehtod to read in a cache or list of cache files and return a list of files or an empty list if nothing found """ #Open the cache entry and search for those entrys fileListing=list() for entry in cacheListing: #Cache files listed themselves comment out following l... | 479,716 |
def get_analyzeQscan_SEIS(self): """ This seeks out the html and png files associated with SEIS result of an analyzeQscan job. """ cacheList=list() cacheFiles=list() for sngl in self.coinc.sngls: timeString=str(float(sngl.time)).replace(".","_") myCacheMask="*/%s-analyseQscan_%s_%s*_seis_rds*.cache"%(sngl.ifo,sngl.ifo,... | def get_analyzeQscan_SEIS(self): """ This seeks out the html and png files associated with SEIS result of an analyzeQscan job. """ cacheList=list() cacheFiles=list() for sngl in self.coinc.sngls: timeString=str(float(sngl.time)).replace(".","_") myCacheMask="*%s*/%s-analyseQscan_%s_%s*_seis_rds*.cache"%\ (self.coinc.ty... | 479,717 |
def get_analyzeQscan_RDS(self): """ """ #analyseQscan.py_FG_RDS_full_data/H1-analyseQscan_H1_931176926_116_rds-unspecified-gpstime.cache cacheList=list() cacheFiles=list() for sngl in self.coinc.sngls: timeString=str(float(sngl.time)).replace(".","_") myCacheMask="*/%s-analyseQscan_%s_%s_rds*.cache"%(sngl.ifo,sngl.ifo,... | def get_analyzeQscan_RDS(self): """ """ #analyseQscan.py_FG_RDS_full_data/H1-analyseQscan_H1_931176926_116_rds-unspecified-gpstime.cache cacheList=list() cacheFiles=list() for sngl in self.coinc.sngls: timeString=str(float(sngl.time)).replace(".","_") myCacheMask="*%s*/%s-analyseQscan_%s_%s_rds*.cache"%\ (self.coint.ty... | 479,718 |
def get_analyzeQscan_HT(self): """ """ #analyseQscan.py_FG_HT_full_data/H1-analyseQscan_H1_931176926_116_ht-unspecified-gpstime.cache cacheList=list() cacheFiles=list() for sngl in self.coinc.sngls: timeString=str(float(sngl.time)).replace(".","_") myCacheMask="*/%s-analyseQscan_%s_%s*_ht*.cache"%(sngl.ifo,sngl.ifo,tim... | def get_analyzeQscan_HT(self): """ """ #analyseQscan.py_FG_HT_full_data/H1-analyseQscan_H1_931176926_116_ht-unspecified-gpstime.cache cacheList=list() cacheFiles=list() for sngl in self.coinc.sngls: timeString=str(float(sngl.time)).replace(".","_") myCacheMask="*%s*/%s-analyseQscan_%s_%s*_ht*.cache"\ %(self.coinc.type,... | 479,719 |
def insertAnalyzeQscanTable(self, images=None, thumbs=None, indexes=None, ranksOmega=None, imagesAQ=None, thumbsAQ=None, indexesAQ=None, ranksAQ=None): """ Insert a multiple IFO table with 5 cols with the AQ underneath this depends on the numer of IFO keys in indexes dictionary. The option channelRanks is not required ... | def insertAnalyzeQscanTable(self, images=None, thumbs=None, indexes=None, ranksOmega=None, imagesAQ=None, thumbsAQ=None, indexesAQ=None, ranksAQ=None): """ Insert a multiple IFO table with 5 cols with the AQ underneath this depends on the numer of IFO keys in indexes dictionary. The option channelRanks is not required ... | 479,720 |
def resetPicklePointer(self,filename=None): """ If you called the class definition with the wrong pickle path. You can reset it with this method. """ if filename==None: os.stdout.write("Path information to background pickle unchanged.\n") elif filename.__contains__("~"): self.__backgroundPickle__=os.path.expanduser(fil... | def resetPicklePointer(self,filename=None): """ If you called the class definition with the wrong pickle path. You can reset it with this method. """ if filename==None: sys.stdout.write("Path information to background pickle unchanged.\n") elif filename.__contains__("~"): self.__backgroundPickle__=os.path.expanduser(fi... | 479,721 |
def fetchInformationDualWindow(self,triggerTime=None,frontWindow=300,\ backWindow=150,ifoList='DEFAULT'): """ This method is responsible for queries to the data server. The results of the query become an internal list that can be converted into an HTML table. The arguments allow you to query with trigger time of inte... | def fetchInformationDualWindow(self,triggerTime=None,frontWindow=300,\ backWindow=150,ifoList='DEFAULT'): """ This method is responsible for queries to the data server. The results of the query become an internal list that can be converted into an HTML table. The arguments allow you to query with trigger time of inte... | 479,722 |
def createDQbackground(self,ifoEpochList=list(),pickleLocale=None): """ Two inputs a list of tuples (ifo,epochname) for each instrument. Also a place to save the potential pickle to for quick access later. """ if type(ifoEpochList) != type(list()): raise Exception, \ "Invalid input argument ifoEpochList,%s type(%s)"... | def createDQbackground(self,ifoEpochList=list(),pickleLocale=None): """ Two inputs a list of tuples (ifo,epochname) for each instrument. Also a place to save the potential pickle to for quick access later. """ if type(ifoEpochList) != type(list()): raise Exception, \ "Invalid input argument ifoEpochList,%s type(%s)"... | 479,723 |
def createDQbackground(self,ifoEpochList=list(),pickleLocale=None): """ Two inputs a list of tuples (ifo,epochname) for each instrument. Also a place to save the potential pickle to for quick access later. """ if type(ifoEpochList) != type(list()): raise Exception, \ "Invalid input argument ifoEpochList,%s type(%s)"... | def createDQbackground(self,ifoEpochList=list(),pickleLocale=None): """ Two inputs a list of tuples (ifo,epochname) for each instrument. Also a place to save the potential pickle to for quick access later. """ if type(ifoEpochList) != type(list()): raise Exception, \ "Invalid input argument ifoEpochList,%s type(%s)"... | 479,724 |
def append_process(xmldoc, comment = None, force = None, ds_sq_threshold = None, save_small_coincs = None, vetoes_name = None, verbose = None): process = llwapp.append_process(xmldoc, program = process_program_name, version = __version__, cvs_repository = u"lscsoft", cvs_entry_time = __date__, comment = comment) param... | def append_process(xmldoc, comment = None, force = None, ds_sq_threshold = None, save_small_coincs = None, vetoes_name = None, coinc_end_time_segment = None, verbose = None): process = llwapp.append_process(xmldoc, program = process_program_name, version = __version__, cvs_repository = u"lscsoft", cvs_entry_time = __da... | 479,725 |
def append_coinc(self, process_id, time_slide_id, coinc_def_id, events): # # populate the coinc_event and coinc_event_map tables # | def append_coinc(self, process_id, node, coinc_def_id, events): # # populate the coinc_event and coinc_event_map tables # | 479,726 |
def append_coinc(self, process_id, time_slide_id, coinc_def_id, events): # # populate the coinc_event and coinc_event_map tables # | def time_slide_id = node.time_slide_id append_coinc(self, time_slide_id = node.time_slide_id process_id, time_slide_id = node.time_slide_id time_slide_id, time_slide_id = node.time_slide_id coinc_def_id, time_slide_id = node.time_slide_id events): time_slide_id = node.time_slide_id # time_slide_id = node.time_sli... | 479,727 |
def append_coinc(self, process_id, time_slide_id, coinc_def_id, events): # # populate the coinc_event and coinc_event_map tables # | def append_coinc(self, process_id, time_slide_id, coinc_def_id, events): # # populate the coinc_event and coinc_event_map tables # | 479,728 |
def append_coinc(self, process_id, time_slide_id, coinc_def_id, events): # # populate the coinc_event and coinc_event_map tables # | def append_coinc(self, process_id, time_slide_id, coinc_def_id, events): # # populate the coinc_event and coinc_event_map tables # | 479,729 |
def ligolw_rinca( xmldoc, process_id, EventListType, CoincTables, coinc_definer_row, event_comparefunc, thresholds, ntuple_comparefunc = lambda events, offset_vector: False, small_coincs = False, veto_segments = None, verbose = False | def ligolw_rinca( xmldoc, process_id, EventListType, CoincTables, coinc_definer_row, event_comparefunc, thresholds, ntuple_comparefunc = lambda events, offset_vector: False, small_coincs = False, veto_segments = None, verbose = False | 479,730 |
def ligolw_rinca( xmldoc, process_id, EventListType, CoincTables, coinc_definer_row, event_comparefunc, thresholds, ntuple_comparefunc = lambda events, offset_vector: False, small_coincs = False, veto_segments = None, verbose = False | def ligolw_rinca( xmldoc, process_id, EventListType, CoincTables, coinc_definer_row, event_comparefunc, thresholds, ntuple_comparefunc = lambda events, offset_vector: False, small_coincs = False, veto_segments = None, verbose = False | 479,731 |
def write_abstract_dag(self): """ Write all the nodes in the workflow to the DAX file. """ if not self.__dax_file_path: # this workflow is not dax-compatible, so don't write a dax return try: dagfile = open( self.__dax_file_path, 'w' ) except: raise CondorDAGError, "Cannot open file " + self.__dag_file_path | def write_abstract_dag(self): """ Write all the nodes in the workflow to the DAX file. """ if not self.__dax_file_path: # this workflow is not dax-compatible, so don't write a dax return try: dagfile = open( self.__dax_file_path, 'w' ) except: raise CondorDAGError, "Cannot open file " + self.__dag_file_path | 479,732 |
def write_abstract_dag(self): """ Write all the nodes in the workflow to the DAX file. """ if not self.__dax_file_path: # this workflow is not dax-compatible, so don't write a dax return try: dagfile = open( self.__dax_file_path, 'w' ) except: raise CondorDAGError, "Cannot open file " + self.__dag_file_path | def write_abstract_dag(self): """ Write all the nodes in the workflow to the DAX file. """ if not self.__dax_file_path: # this workflow is not dax-compatible, so don't write a dax return try: dagfile = open( self.__dax_file_path, 'w' ) except: raise CondorDAGError, "Cannot open file " + self.__dag_file_path | 479,733 |
def write_abstract_dag(self): """ Write all the nodes in the workflow to the DAX file. """ if not self.__dax_file_path: # this workflow is not dax-compatible, so don't write a dax return try: dagfile = open( self.__dax_file_path, 'w' ) except: raise CondorDAGError, "Cannot open file " + self.__dag_file_path | def write_abstract_dag(self): """ Write all the nodes in the workflow to the DAX file. """ if not self.__dax_file_path: # this workflow is not dax-compatible, so don't write a dax return try: dagfile = open( self.__dax_file_path, 'w' ) except: raise CondorDAGError, "Cannot open file " + self.__dag_file_path | 479,734 |
def write_abstract_dag(self): """ Write all the nodes in the workflow to the DAX file. """ if not self.__dax_file_path: # this workflow is not dax-compatible, so don't write a dax return try: dagfile = open( self.__dax_file_path, 'w' ) except: raise CondorDAGError, "Cannot open file " + self.__dag_file_path | def write_abstract_dag(self): """ Write all the nodes in the workflow to the DAX file. """ if not self.__dax_file_path: # this workflow is not dax-compatible, so don't write a dax return try: dagfile = open( self.__dax_file_path, 'w' ) except: raise CondorDAGError, "Cannot open file " + self.__dag_file_path | 479,735 |
def write_abstract_dag(self): """ Write all the nodes in the workflow to the DAX file. """ if not self.__dax_file_path: # this workflow is not dax-compatible, so don't write a dax return try: dagfile = open( self.__dax_file_path, 'w' ) except: raise CondorDAGError, "Cannot open file " + self.__dag_file_path | def write_abstract_dag(self): """ Write all the nodes in the workflow to the DAX file. """ if not self.__dax_file_path: # this workflow is not dax-compatible, so don't write a dax return try: dagfile = open( self.__dax_file_path, 'w' ) except: raise CondorDAGError, "Cannot open file " + self.__dag_file_path | 479,736 |
def update_ids(connection, verbose = False): """ For internal use only. """ table_elems = dbtables.get_xml(connection).getElementsByTagName(ligolw.Table.tagName) for i, tbl in enumerate(table_elems): if verbose: print >>sys.stderr, "updating IDs: %d%%\r" % (100.0 * i / len(table_elems)), tbl.applyKeyMapping() if verbos... | def update_ids(connection, xmldoc=None, verbose = False): """ For internal use only. """ table_elems = dbtables.get_xml(connection).getElementsByTagName(ligolw.Table.tagName) for i, tbl in enumerate(table_elems): if verbose: print >>sys.stderr, "updating IDs: %d%%\r" % (100.0 * i / len(table_elems)), tbl.applyKeyMappin... | 479,737 |
def update_ids(connection, verbose = False): """ For internal use only. """ table_elems = dbtables.get_xml(connection).getElementsByTagName(ligolw.Table.tagName) for i, tbl in enumerate(table_elems): if verbose: print >>sys.stderr, "updating IDs: %d%%\r" % (100.0 * i / len(table_elems)), tbl.applyKeyMapping() if verbos... | def update_ids(connection, verbose = False): """ For internal use only. """ if xmldoc: table_elems = xmldoc.getElementsByTagName(ligolw.Table.tagName) else: table_elems = dbtables.get_xml(connection).getElementsByTagName(ligolw.Table.tagName) for i, tbl in enumerate(table_elems): if verbose: print >>sys.stderr, "updati... | 479,738 |
def insert_from_url(connection, url, preserve_ids = False, verbose = False): """ Parse and insert the LIGO Light Weight document at the URL into the database the at the given connection. """ # # load document. this process inserts the document's contents into # the database. the document is unlinked to delete databas... | def insert_from_url(connection, url, preserve_ids = False, verbose = False): """ Parse and insert the LIGO Light Weight document at the URL into the database the at the given connection. """ # # load document. this process inserts the document's contents into # the database. the document is unlinked to delete databas... | 479,739 |
def insert_from_url(connection, url, preserve_ids = False, verbose = False): """ Parse and insert the LIGO Light Weight document at the URL into the database the at the given connection. """ # # load document. this process inserts the document's contents into # the database. the document is unlinked to delete databas... | def insert_from_url(connection, url, preserve_ids = False, verbose = False): """ Parse and insert the LIGO Light Weight document at the URL into the database the at the given connection. """ # # load document. this process inserts the document's contents into # the database. the document is unlinked to delete databas... | 479,740 |
def insert_from_xmldoc(connection, xmldoc, preserve_ids = False, verbose = False): """ Insert the tables from an in-ram XML document into the database at the given connection. """ # # iterate over tables in the XML tree, reconstructing each inside # the database # for tbl in xmldoc.getElementsByTagName(ligolw.Table.ta... | defconnection.commit() insert_from_xmldoc(connection,connection.commit() xmldoc,connection.commit() preserve_idsconnection.commit() =connection.commit() False,connection.commit() verboseconnection.commit() =connection.commit() False):connection.commit() """connection.commit() Insertconnection.commit() theconnection.com... | 479,741 |
def insert_from_xmldoc(connection, xmldoc, preserve_ids = False, verbose = False): """ Insert the tables from an in-ram XML document into the database at the given connection. """ # # iterate over tables in the XML tree, reconstructing each inside # the database # for tbl in xmldoc.getElementsByTagName(ligolw.Table.ta... | def insert_from_xmldoc(connection, xmldoc, preserve_ids = False, verbose = False): """ Insert the tables from an in-ram XML document into the database at the given connection. """ # # iterate over tables in the XML tree, reconstructing each inside # the database # for tbl in xmldoc.getElementsByTagName(ligolw.Table.ta... | 479,742 |
def directional_horizon(ifos, RA, dec, gps_time, horizons=None): """ Return a dictionary of sensitivity numbers for each detector, based on a known sky location and an optional input dictionary of inspiral horizon distances for a reference source of the user's choice. If the horizons dictionary is specified, the return... | def directional_horizon(ifos, RA, dec, gps_time, horizons=None): """ Return a dictionary of sensitivity numbers for each detector, based on a known sky location and an optional input dictionary of inspiral horizon distances for a reference source of the user's choice. If the horizons dictionary is specified, the return... | 479,743 |
def detector_thresholds(horizons,min_threshold,max_threshold=7.5): """ Return a set of detector thresholds adjusted for a particular set of inspiral horizon distances (calculated with directional_horizon). The min_threshold specified the minimum threshold which will be set for all detectors less sensitive than the best... | def detector_thresholds(horizons,min_threshold,max_threshold=7.5): """ Return a set of detector thresholds adjusted for a particular set of inspiral horizon distances (calculated with directional_horizon). The min_threshold specified the minimum threshold which will be set for all detectors less sensitive than the best... | 479,744 |
def fetchInformation(self,triggerTime=None,window=300): """ Wrapper for fetchInformationDualWindow that mimics original behavior """ return self.fetchInformationDualWindow(triggerTime,window,window,ifoList='DEFAULT') | deffetchInformation(self,triggerTime=None,window=300):"""WrapperforfetchInformationDualWindowthatmimicsoriginalbehavior"""returnself.fetchInformationDualWindow(triggerTime,window,window,ifoList='DEFAULT') | 479,745 |
def createDQbackground(self,ifoEpochList=list(),pickleLocale=None): """ Two inputs a list of tuples (ifo,epochname) for each instrument. Also a place to save the potential pickle to for quick access later. """ if type(ifoEpochList) != type(list()): raise Exception, \ "Invalid input argument ifoEpochList,%s type(%s)"\ %... | def createDQbackground(self,ifoEpochList=list(),pickleLocale=None): """ Two inputs a list of tuples (ifo,epochname) for each instrument. Also a place to save the potential pickle to for quick access later. """ if type(ifoEpochList) != type(list()): raise Exception, \ "Invalid input argument ifoEpochList,%s type(%s)"... | 479,746 |
def createDQbackground(self,ifoEpochList=list(),pickleLocale=None): """ Two inputs a list of tuples (ifo,epochname) for each instrument. Also a place to save the potential pickle to for quick access later. """ if type(ifoEpochList) != type(list()): raise Exception, \ "Invalid input argument ifoEpochList,%s type(%s)"\ %... | def createDQbackground(self,ifoEpochList=list(),pickleLocale=None): """ Two inputs a list of tuples (ifo,epochname) for each instrument. Also a place to save the potential pickle to for quick access later. """ if type(ifoEpochList) != type(list()): raise Exception, \ "Invalid input argument ifoEpochList,%s type(%s)"\ %... | 479,747 |
def createDQbackground(self,ifoEpochList=list(),pickleLocale=None): """ Two inputs a list of tuples (ifo,epochname) for each instrument. Also a place to save the potential pickle to for quick access later. """ if type(ifoEpochList) != type(list()): raise Exception, \ "Invalid input argument ifoEpochList,%s type(%s)"\ %... | def createDQbackground(self,ifoEpochList=list(),pickleLocale=None): """ Two inputs a list of tuples (ifo,epochname) for each instrument. Also a place to save the potential pickle to for quick access later. """ if type(ifoEpochList) != type(list()): raise Exception, \ "Invalid input argument ifoEpochList,%s type(%s)"\ %... | 479,748 |
def estimateDQbackground(self): """ This method looks at the self.resultlist inside the instance. Using this and 1000 generated time stamp it tabulates a ranking of flag prevelance, binomial probability 'p' """ if len(self.resultList) < 1: self.__backgroundResults__=list() self.__backgroundTimesDict__=dict() self.__bac... | def estimateDQbackground(self): """ This method looks at the self.resultlist inside the instance. Using this and 1000 generated time stamp it tabulates a ranking of flag prevelance, binomial probability 'p' """ if len(self.resultList) < 1: self.__backgroundResults__=list() self.__backgroundTimesDict__=dict() self.__bac... | 479,749 |
def estimateDQbackground(self): """ This method looks at the self.resultlist inside the instance. Using this and 1000 generated time stamp it tabulates a ranking of flag prevelance, binomial probability 'p' """ if len(self.resultList) < 1: self.__backgroundResults__=list() self.__backgroundTimesDict__=dict() self.__bac... | def estimateDQbackground(self): """ This method looks at the self.resultlist inside the instance. Using this and 1000 generated time stamp it tabulates a ranking of flag prevelance, binomial probability 'p' """ if len(self.resultList) < 1: self.__backgroundResults__=list() self.__backgroundTimesDict__=dict() self.__bac... | 479,750 |
def estimateDQbackground(self): """ This method looks at the self.resultlist inside the instance. Using this and 1000 generated time stamp it tabulates a ranking of flag prevelance, binomial probability 'p' """ if len(self.resultList) < 1: self.__backgroundResults__=list() self.__backgroundTimesDict__=dict() self.__bac... | def estimateDQbackground(self): """ This method looks at the self.resultlist inside the instance. Using this and 1000 generated time stamp it tabulates a ranking of flag prevelance, binomial probability 'p' """ if len(self.resultList) < 1: self.__backgroundResults__=list() self.__backgroundTimesDict__=dict() self.__bac... | 479,751 |
def estimateDQbackground(self): """ This method looks at the self.resultlist inside the instance. Using this and 1000 generated time stamp it tabulates a ranking of flag prevelance, binomial probability 'p' """ if len(self.resultList) < 1: self.__backgroundResults__=list() self.__backgroundTimesDict__=dict() self.__bac... | def estimateDQbackground(self): """ This method looks at the self.resultlist inside the instance. Using this and 1000 generated time stamp it tabulates a ranking of flag prevelance, binomial probability 'p' """ if len(self.resultList) < 1: self.__backgroundResults__=list() self.__backgroundTimesDict__=dict() self.__bac... | 479,752 |
def generateHTMLTable(self,tableType="BOTH"): """ Return a HTML table already formatted using the module MARKUP to keep the HTML tags complient. This method does nothing but return the result of the last call to self.fetchInformation() The flag names associated with LIGO will have links to the channel wiki in them als... | def generateHTMLTable(self,tableType="BOTH"): """ Return a HTML table already formatted using the module MARKUP to keep the HTML tags complient. This method does nothing but return the result of the last call to self.fetchInformation() The flag names associated with LIGO will have links to the channel wiki in them als... | 479,753 |
def generateHTMLTable(self,tableType="BOTH"): """ Return a HTML table already formatted using the module MARKUP to keep the HTML tags complient. This method does nothing but return the result of the last call to self.fetchInformation() The flag names associated with LIGO will have links to the channel wiki in them als... | def generateHTMLTable(self,tableType="BOTH"): """ Return a HTML table already formatted using the module MARKUP to keep the HTML tags complient. This method does nothing but return the result of the last call to self.fetchInformation() The flag names associated with LIGO will have links to the channel wiki in them als... | 479,754 |
def generateMOINMOINTable(self,tableType="BOTH"): """ Return a MOINMOIN table. """ ligo=["L1","H1","H2","V1"] channelWiki="https://ldas-jobs.ligo.caltech.edu/cgi-bin/chanwiki?%s" if self.triggerTime==int(-1): return "" myColor="grey" tableString="" titleString="" emptyRowString="" rowString="" for i,col in enumerate(se... | def generateMOINMOINTable(self,tableType="BOTH"): """ Return a MOINMOIN table. """ ligo=["L1","H1","H2","V1"] channelWiki="https://ldas-jobs.ligo.caltech.edu/cgi-bin/chanwiki?%s" if self.triggerTime==int(-1): return "" myColor="grey" tableString="" titleString="" emptyRowString="" rowString="" for i,col in enumerate(se... | 479,755 |
def run(self): # remove the automatically generated user env scripts for script in ["pylal-user-env.sh", "pylal-user-env.csh"]: log.info("removing " + script ) try: os.unlink(os.path.join("etc", script)) except: pass | def run(self): # remove the automatically generated user env scripts for script in ["pylal-user-env.sh", "pylal-user-env.csh"]: log.info("removing " + script ) try: os.unlink(os.path.join("etc", script)) except: pass | 479,756 |
def setup(target, check_same_thread=True): connection = sqlite3.connect(target, check_same_thread=check_same_thread) dbtables.DBTable_set_connection(connection) for tbl in dbtables.get_xml(connection).getElementsByTagName(ligolw.Table.tagName): tbl.sync_next_id() return connection | def setup(target, check_same_thread=True): connection = sqlite3.connect(target, check_same_thread=check_same_thread) dbtables.DBTable_set_connection(connection) dbtables.idmap_sync(connection) return connection | 479,757 |
def update_ids(xmldoc, connection, verbose = False): """ For internal use only. """ table_elems = xmldoc.getElementsByTagName(ligolw.Table.tagName) for i, tbl in enumerate(table_elems): if verbose: print >>sys.stderr, "updating IDs: %d%%\r" % (100 * i / len(table_elems)), tbl.applyKeyMapping() if verbose: print >>sys.s... | def update_ids(connection, verbose = False): """ For internal use only. """ table_elems = xmldoc.getElementsByTagName(ligolw.Table.tagName) for i, tbl in enumerate(table_elems): if verbose: print >>sys.stderr, "updating IDs: %d%%\r" % (100 * i / len(table_elems)), tbl.applyKeyMapping() if verbose: print >>sys.stderr, ... | 479,758 |
def update_ids(xmldoc, connection, verbose = False): """ For internal use only. """ table_elems = xmldoc.getElementsByTagName(ligolw.Table.tagName) for i, tbl in enumerate(table_elems): if verbose: print >>sys.stderr, "updating IDs: %d%%\r" % (100 * i / len(table_elems)), tbl.applyKeyMapping() if verbose: print >>sys.s... | def update_ids(xmldoc, connection, verbose = False): """ For internal use only. """ table_elems = dbtables.get_xml(connection).getElementsByTagName(ligolw.Table.tagName) for i, tbl in enumerate(table_elems): if verbose: print >>sys.stderr, "updating IDs: %d%%\r" % (100 * i / len(table_elems)), tbl.applyKeyMapping() if ... | 479,759 |
def update_ids(xmldoc, connection, verbose = False): """ For internal use only. """ table_elems = xmldoc.getElementsByTagName(ligolw.Table.tagName) for i, tbl in enumerate(table_elems): if verbose: print >>sys.stderr, "updating IDs: %d%%\r" % (100 * i / len(table_elems)), tbl.applyKeyMapping() if verbose: print >>sys.s... | def update_ids(xmldoc, connection, verbose = False): """ For internal use only. """ table_elems = xmldoc.getElementsByTagName(ligolw.Table.tagName) for i, tbl in enumerate(table_elems): if verbose: print >>sys.stderr, "updating IDs: %d%%\r" % (100.0 * i / len(table_elems)), tbl.applyKeyMapping() if verbose: print >>sys... | 479,760 |
def insert_from_urls(connection, urls, preserve_ids = False, verbose = False): """ Iterate over a sequence of URLs and parse and insert each one into the database the dbtables.DBTable class is currently connected to. """ orig_DBTable_append = dbtables.DBTable.append if not preserve_ids: # enable ID remapping dbtables.i... | def insert_from_urls(connection, urls, preserve_ids = False, verbose = False): """ Iterate over a sequence of URLs, calling insert_from_url() on each, then build the indexes indicated by the metadata in lsctables.py. """ orig_DBTable_append = dbtables.DBTable.append if not preserve_ids: # enable ID remapping dbtabl... | 479,761 |
def insert_from_urls(connection, urls, preserve_ids = False, verbose = False): """ Iterate over a sequence of URLs and parse and insert each one into the database the dbtables.DBTable class is currently connected to. """ orig_DBTable_append = dbtables.DBTable.append if not preserve_ids: # enable ID remapping dbtables.i... | definsert_from_urls(connection,urls,preserve_ids=False,verbose=False):"""IterateoverasequenceofURLsandparseandinserteachoneintothedatabasethedbtables.DBTableclassiscurrentlyconnectedto."""orig_DBTable_append=dbtables.DBTable.appendifnotpreserve_ids:#enableIDremappingdbtables.idmap_create(connection)dbtables.DBTable.app... | 479,762 |
def insert_from_urls(connection, urls, preserve_ids = False, verbose = False): """ Iterate over a sequence of URLs and parse and insert each one into the database the dbtables.DBTable class is currently connected to. """ orig_DBTable_append = dbtables.DBTable.append if not preserve_ids: # enable ID remapping dbtables.i... | definsert_from_urls(connection,urls,preserve_ids=False,verbose=False):"""IterateoverasequenceofURLsandparseandinserteachoneintothedatabasethedbtables.DBTableclassiscurrentlyconnectedto."""orig_DBTable_append=dbtables.DBTable.appendifnotpreserve_ids:#enableIDremappingdbtables.idmap_create(connection)dbtables.DBTable.app... | 479,763 |
def insert_from_urls(connection, urls, preserve_ids = False, verbose = False): """ Iterate over a sequence of URLs and parse and insert each one into the database the dbtables.DBTable class is currently connected to. """ orig_DBTable_append = dbtables.DBTable.append if not preserve_ids: # enable ID remapping dbtables.i... | def insert_from_urls(connection, urls, preserve_ids = False, verbose = False): """ Iterate over a sequence of URLs and parse and insert each one into the database the dbtables.DBTable class is currently connected to. """ orig_DBTable_append = dbtables.DBTable.append if not preserve_ids: # enable ID remapping dbtables.i... | 479,764 |
def insert_from_urls(connection, urls, preserve_ids = False, verbose = False): """ Iterate over a sequence of URLs and parse and insert each one into the database the dbtables.DBTable class is currently connected to. """ orig_DBTable_append = dbtables.DBTable.append if not preserve_ids: # enable ID remapping dbtables.i... | def insert_from_urls(connection, urls, preserve_ids = False, verbose = False): """ Iterate over a sequence of URLs and parse and insert each one into the database the dbtables.DBTable class is currently connected to. """ orig_DBTable_append = dbtables.DBTable.append if not preserve_ids: # enable ID remapping dbtables.i... | 479,765 |
def insert_from_urls(connection, urls, preserve_ids = False, verbose = False): """ Iterate over a sequence of URLs and parse and insert each one into the database the dbtables.DBTable class is currently connected to. """ orig_DBTable_append = dbtables.DBTable.append if not preserve_ids: # enable ID remapping dbtables.i... | def insert_from_urls(connection, urls, preserve_ids = False, verbose = False): """ Iterate over a sequence of URLs and parse and insert each one into the database the dbtables.DBTable class is currently connected to. """ orig_DBTable_append = dbtables.DBTable.append if not preserve_ids: # enable ID remapping dbtables.i... | 479,766 |
def cbcBayesSkyRes(outdir,data,oneDMenu,twoDGreedyMenu,GreedyRes,confidence_levels,twoDplots,injfile=None,eventnum=None,skyres=None): if eventnum is not None and injfile is None: print "You specified an event number but no injection file. Ignoring!" if data is None: print 'You must specify an input data file' exit(1)... | def cbcBayesSkyRes(outdir,data,oneDMenu,twoDGreedyMenu,GreedyRes,confidence_levels,twoDplots,injfile=None,eventnum=None,skyres=None): if eventnum is not None and injfile is None: print "You specified an event number but no injection file. Ignoring!" if data is None: print 'You must specify an input data file' exit(1)... | 479,767 |
def cbcBayesSkyRes(outdir,data,oneDMenu,twoDGreedyMenu,GreedyRes,confidence_levels,twoDplots,injfile=None,eventnum=None,skyres=None): if eventnum is not None and injfile is None: print "You specified an event number but no injection file. Ignoring!" if data is None: print 'You must specify an input data file' exit(1)... | def cbcBayesSkyRes(outdir,data,oneDMenu,twoDGreedyMenu,GreedyRes,confidence_levels,twoDplots,injfile=None,eventnum=None,skyres=None): if eventnum is not None and injfile is None: print "You specified an event number but no injection file. Ignoring!" if data is None: print 'You must specify an input data file' exit(1)... | 479,768 |
def cbcBayesSkyRes(outdir,data,oneDMenu,twoDGreedyMenu,GreedyRes,confidence_levels,twoDplots,injfile=None,eventnum=None,skyres=None): if eventnum is not None and injfile is None: print "You specified an event number but no injection file. Ignoring!" if data is None: print 'You must specify an input data file' exit(1)... | def cbcBayesSkyRes(outdir,data,oneDMenu,twoDGreedyMenu,GreedyRes,confidence_levels,twoDplots,injfile=None,eventnum=None,skyres=None): if eventnum is not None and injfile is None: print "You specified an event number but no injection file. Ignoring!" if data is None: print 'You must specify an input data file' exit(1)... | 479,769 |
def cbcBayesSkyRes(outdir,data,oneDMenu,twoDGreedyMenu,GreedyRes,confidence_levels,twoDplots,injfile=None,eventnum=None,skyres=None): if eventnum is not None and injfile is None: print "You specified an event number but no injection file. Ignoring!" if data is None: print 'You must specify an input data file' exit(1)... | def cbcBayesSkyRes(outdir,data,oneDMenu,twoDGreedyMenu,GreedyRes,confidence_levels,twoDplots,injfile=None,eventnum=None,skyres=None): if eventnum is not None and injfile is None: print "You specified an event number but no injection file. Ignoring!" if data is None: print 'You must specify an input data file' exit(1)... | 479,770 |
def cbcBayesSkyRes(outdir,data,oneDMenu,twoDGreedyMenu,GreedyRes,confidence_levels,twoDplots,injfile=None,eventnum=None,skyres=None): if eventnum is not None and injfile is None: print "You specified an event number but no injection file. Ignoring!" if data is None: print 'You must specify an input data file' exit(1)... | def cbcBayesSkyRes(outdir,data,oneDMenu,twoDGreedyMenu,GreedyRes,confidence_levels,twoDplots,injfile=None,eventnum=None,skyres=None): if eventnum is not None and injfile is None: print "You specified an event number but no injection file. Ignoring!" if data is None: print 'You must specify an input data file' exit(1)... | 479,771 |
def cbcBayesSkyRes(outdir,data,oneDMenu,twoDGreedyMenu,GreedyRes,confidence_levels,twoDplots,injfile=None,eventnum=None,skyres=None): if eventnum is not None and injfile is None: print "You specified an event number but no injection file. Ignoring!" if data is None: print 'You must specify an input data file' exit(1)... | def cbcBayesSkyRes(outdir,data,oneDMenu,twoDGreedyMenu,GreedyRes,confidence_levels,twoDplots,injfile=None,eventnum=None,skyres=None): if eventnum is not None and injfile is None: print "You specified an event number but no injection file. Ignoring!" if data is None: print 'You must specify an input data file' exit(1)... | 479,772 |
def write_abstract_dag(self): """ Write all the nodes in the workflow to the DAX file. """ if not self.__dax_file_path: # this workflow is not dax-compatible, so don't write a dax return try: dagfile = open( self.__dax_file_path, 'w' ) except: raise CondorDAGError, "Cannot open file " + self.__dag_file_path | def write_abstract_dag(self): """ Write all the nodes in the workflow to the DAX file. """ if not self.__dax_file_path: # this workflow is not dax-compatible, so don't write a dax return try: dagfile = open( self.__dax_file_path, 'w' ) except: raise CondorDAGError, "Cannot open file " + self.__dag_file_path | 479,773 |
def populate_SkyLocInjTable(skylocinjtable,coinc,dt_area,rank_area, \ dtrss_inj,dDrss_inj): """ given an instance of skypoints populate and return skylocinjtable """ row = skylocinjtable.RowType() row.end_time = coinc.time rhosquared = 0.0 for ifo in coinc.ifo_list: rhosquared += coinc.snr[ifo]*coinc.snr[ifo] row.comb... | def populate_SkyLocInjTable(skylocinjtable,coinc,dt_area,rank_area, \ dtrss_inj,dDrss_inj): """ record injection data in a skylocinjtable """ row = skylocinjtable.RowType() row.end_time = coinc.time rhosquared = 0.0 for ifo in coinc.ifo_list: rhosquared += coinc.snr[ifo]*coinc.snr[ifo] row.comb_snr = sqrt(rhosquared) ... | 479,774 |
def skyhist_cart(skycarts,samples): """ Histogram the list of samples into bins defined by Cartesian vectors in skycarts """ dot=numpy.dot N=len(skycarts) print 'operating on %d sky points'%(N) bins=zeros(N) for sample in samples: sampcart=pol2cart(sample[RAdim],sample[decdim]) maxdx=max(xrange(0,N),key=lambda i:dot(sa... | def skyhist_cart(skycarts,samples): """ Histogram the list of samples into bins defined by Cartesian vectors in skycarts """ dot=numpy.dot N=len(skycarts) print 'operating on %d sky points'%(N) bins=zeros(N) for sample in samples: sampcart=pol2cart(sample[RAdim],sample[decdim]) maxdot=0 for i in range(0,N): thisdot=dot... | 479,775 |
def __init__(self,dag,job,cp,opts): pipeline.CondorDAGNode.__init__(self,job) #Specify pipe location self.add_var_opt('followup-directory',cp.get("makeCheckListWiki", "location").strip()) #Specify pipe ini file self.add_var_opt('ini-file',cp.get("makeCheckListWiki", "ini-file").strip()) if not opts.disable_dag_categori... | def __init__(self,dag,job,cp,opts): pipeline.CondorDAGNode.__init__(self,job) #Specify pipe location self.add_var_opt('followup-directory',cp.get("makeCheckListWiki", "location").strip()) #Specify pipe ini file self.add_var_opt('ini-file',cp.get("makeCheckListWiki", "ini-file").strip()) if not opts.disable_dag_categori... | 479,776 |
def setup_distrib_script(self,tag_base): distrib_script = open('distribRemoteScan_'+tag_base+'.sh','w') distrib_script.write("""#!/bin/bash | def setup_distrib_script(self,tag_base): distrib_script = open('distribRemoteScan_'+tag_base+'.sh','w') distrib_script.write("""#!/bin/bash | 479,777 |
def setup_distrib_script(self,tag_base): distrib_script = open('distribRemoteScan_'+tag_base+'.sh','w') distrib_script.write("""#!/bin/bash | def setup_distrib_script(self,tag_base): distrib_script = open('distribRemoteScan_'+tag_base+'.sh','w') distrib_script.write("""#!/bin/bash | 479,778 |
def __init__(self,job,coinc,cp,opts,dag,ifo,ifonames,p_nodes): pipeline.CondorDAGNode.__init__(self,job) | def __init__(self,job,coinc,cp,opts,dag,ifo,ifonames,p_nodes): pipeline.CondorDAGNode.__init__(self,job) | 479,779 |
def __init__(self, configfile=None): cp = ConfigParser.ConfigParser() self.cp = cp self.time_now = "_".join([str(i) for i in time_method.gmtime()[0:6]]) self.ini_file=self.time_now + ".ini" home_base = home_dirs() # CONDOR SECTION NEEDED BY THINGS IN INSPIRAL.PY cp.add_section("condor") cp.set("condor","datafind",self... | def __init__(self, configfile=None): cp = ConfigParser.ConfigParser() self.cp = cp self.time_now = "_".join([str(i) for i in time_method.gmtime()[0:6]]) self.ini_file=self.time_now + ".ini" home_base = home_dirs() # CONDOR SECTION NEEDED BY THINGS IN INSPIRAL.PY cp.add_section("condor") cp.set("condor","datafind",self... | 479,780 |
def __patchFrameTypeDef__(frametype=None,ifo=None,gpstime=None): """ Temporary patch function, to adjust specfied frame type used in searching the filesystem for files to display in followup. """ if frametype == None: raise Exception, "input to __patchFrameTypeDef__ included a \ frametype argument specified as None\n" ... | def __patchFrameTypeDef__(frametype=None,ifo=None,gpstime=None): """ Temporary patch function, to adjust specfied frame type used in searching the filesystem for files to display in followup. """ if frametype == None: raise Exception, "input to __patchFrameTypeDef__ included a \ frametype argument specified as None\n" ... | 479,781 |
def getinjpar(inj,parnum): if parnum==0: return inj.mchirp if parnum==1: return inj.eta if parnum==2: return inj.get_end() if parnum==3: return inj.phi0 if parnum==4: return inj.distance if parnum==5: return inj.longitude if parnum==6: return inj.latitude if parnum==7: return inj.polarization if parnum==8: return inj.i... | def getinjpar(inj,parnum): if parnum==0: return inj.mchirp if parnum==1: return inj.eta if parnum==2: return inj.get_end() if parnum==3: return inj.phi0 if parnum==4: return inj.distance if parnum==5: return inj.longitude if parnum==6: return inj.latitude if parnum==7: return inj.polarization if parnum==8: return inj.i... | 479,782 |
def script_dict(): script = {} tog = create_toggle() script[tog] = 'javascript' script['http://ajax.googleapis.com/ajax/libs/jquery/1.2.6/jquery.min.js'] = 'javascript' return (script, [tog]) | def script_dict(fname): script = {} tog = create_toggle() script[tog] = 'javascript' script['http://ajax.googleapis.com/ajax/libs/jquery/1.2.6/jquery.min.js'] = 'javascript' return (script, [tog]) | 479,783 |
def script_dict(): script = {} tog = create_toggle() script[tog] = 'javascript' script['http://ajax.googleapis.com/ajax/libs/jquery/1.2.6/jquery.min.js'] = 'javascript' return (script, [tog]) | def script_dict(): script = {} tog = os.path.split(create_toggle(fname))[1] script[tog] = 'javascript' script['http://ajax.googleapis.com/ajax/libs/jquery/1.2.6/jquery.min.js'] = 'javascript' return (script, [tog]) | 479,784 |
def copy_ihope_style(stylefile="cbcwebpage.css", base_dir="."): # FIXME this is a stupid way to find the path... changes to build scripts, set env var? path = which('ligo_data_find') if path: path = os.path.split(path)[0] else: print >>sys.stderr, "COULD NOT FIND STYLE FILES %s IN %s, ABORTING" % (stylefile, path) rai... | def copy_ihope_style(stylefile="cbcwebpage.css", base_dir="."): # FIXME this is a stupid way to find the path... changes to build scripts, set env var? path = which('ligo_data_find') if path: path = os.path.split(path)[0] else: print >>sys.stderr, "COULD NOT FIND STYLE FILES %s IN %s, ABORTING" % (stylefile, path) rai... | 479,785 |
def __init__(self, title="cbc web page", path='./', css=None, script=None, pagenum=1, verbose=False): """ """ if not css: css = copy_ihope_style() scdict = script_dict() if not script: script = scdict[0] self.front = "" scriptfiles = scdict[1] self.verbose = verbose self._style = css self._title = title self._script = ... | def __init__(self, title="cbc web page", path='./', css=None, script=None, pagenum=1, verbose=False): """ """ if not css: css = copy_ihope_style(base_dir=path) scdict = script_dict(fname='%s/%s' % (path,"toggle.js")) if not script: script = scdict[0] self.front = "" scriptfiles = scdict[1] self.verbose = verbose self._... | 479,786 |
def add_subpage(self, tag, title, link_text=None): """ """ | def add_subpage(self, tag, title, link_text=None): """ """ | 479,787 |
def get_signal_vetoes(trigger,bankq=0,bankn=0,autoq=0,auton=0,chiq=0,chin=0,sigmaVals = None,fResp = None): sbvs = {} q = bankq nhigh = bankn q2 = autoq nhigh2 = auton if trigger.chisq == 0: sbvs['BestNR1'] = 0 else: if trigger.chisq < 60: sbvs['BestNR1'] = trigger.snr else: sbvs['BestNR1'] = trigger.snr/((1 + (trigge... | def get_signal_vetoes(trigger,bankq=0,bankn=0,autoq=0,auton=0,chiq=0,chin=0,sigmaVals = None,fResp = None): sbvs = {} q = bankq nhigh = bankn q2 = autoq nhigh2 = auton if trigger.chisq == 0: sbvs['BestNR1'] = 0 else: if trigger.chisq < 60: sbvs['BestNR1'] = trigger.snr else: sbvs['BestNR1'] = trigger.snr/((1 + (trigge... | 479,788 |
def plotSkyMap(skypos,skyres,sky_injpoint): from pylal import skylocutils from mpl_toolkits.basemap import Basemap skypoints=array(skylocutils.gridsky(float(skyres))) skycarts=map(lambda s: pol2cart(s[1],s[0]),skypoints) skyinjectionconfidence=None shist=bayespputils.skyhist_cart(array(skycarts),skypos) #shist=skyhi... | def plotSkyMap(skypos,skyres,sky_injpoint): from pylal import skylocutils from mpl_toolkits.basemap import Basemap skypoints=array(skylocutils.gridsky(float(skyres))) skycarts=map(lambda s: pol2cart(s[1],s[0]),skypoints) skyinjectionconfidence=None shist=bayespputils.skyhist_cart(array(skycarts),skypos) #shist=skyhi... | 479,789 |
def get_ilwdchar_class(tbl_name, col_name): """ Searches the cache of pre-defined ilwdchar subclasses for a class whose table_name and column_name attributes match those provided. If a matching subclass is found it is returned; otherwise a new class is defined, added to the cache, and returned. Example: >>> process_... | def get_ilwdchar_class(tbl_name, col_name): """ Searches the cache of pre-defined ilwdchar subclasses for a class whose table_name and column_name attributes match those provided. If a matching subclass is found it is returned; otherwise a new class is defined, added to the cache, and returned. Example: >>> process_... | 479,790 |
def get_ilwdchar_class(tbl_name, col_name): """ Searches the cache of pre-defined ilwdchar subclasses for a class whose table_name and column_name attributes match those provided. If a matching subclass is found it is returned; otherwise a new class is defined, added to the cache, and returned. Example: >>> process_... | def get_ilwdchar_class(tbl_name, col_name): """ Searches the cache of pre-defined ilwdchar subclasses for a class whose table_name and column_name attributes match those provided. If a matching subclass is found it is returned; otherwise a new class is defined, added to the cache, and returned. Example: >>> process_... | 479,791 |
def __init__(self, configfile=None): cp = ConfigParser.ConfigParser() self.cp = cp self.time_now = "_".join([str(i) for i in time_method.gmtime()[0:6]]) self.ini_file=self.time_now + ".ini" home_base = home_dirs() # CONDOR SECTION NEEDED BY THINGS IN INSPIRAL.PY cp.add_section("condor") cp.set("condor","datafind",self... | def __init__(self, configfile=None): cp = ConfigParser.ConfigParser() self.cp = cp self.time_now = "_".join([str(i) for i in time_method.gmtime()[0:6]]) self.ini_file=self.time_now + ".ini" home_base = home_dirs() # CONDOR SECTION NEEDED BY THINGS IN INSPIRAL.PY cp.add_section("condor") cp.set("condor","datafind",self... | 479,792 |
def protract(self, x): """ Move both the start and the end of the segment a distance x away from the other. """ return self.__class__(self[0] - x, self[1] + x) | def protract(self, x): """ Return a new segment whose bounds are given by subtracting x from the segment's lower bound and adding x to the segment's upper bound. """ return self.__class__(self[0] - x, self[1] + x) | 479,793 |
def contract(self, x): """ Move both the start and the end of the segment a distance x towards the the other. """ return self.__class__(self[0] + x, self[1] - x) | def contract(self, x): """ Return a new segment whose bounds are given by adding x to the segment's lower bound and subtracting x from the segment's upper bound. """ return self.__class__(self[0] + x, self[1] - x) | 479,794 |
def shift(self, x): """ Return a new segment by adding x to the upper and lower bounds of this segment. """ return tuple.__new__(self.__class__, (self[0] + x, self[1] + x)) | def shift(self, x): """ Return a new segment whose bounds are given by adding x to the segment's upper and lower bounds. """ return tuple.__new__(self.__class__, (self[0] + x, self[1] + x)) | 479,795 |
def protract(self, x): """ For each segment in the list, move both the start and the end a distance x away from the other. Coalesce the result. Segmentlist is modified in place. """ for i in xrange(len(self)): self[i] = self[i].protract(x) return self.coalesce() | def protract(self, x): """ Execute the .protract() method on each segment in the list and coalesce the result. Segmentlist is modified in place. """ for i in xrange(len(self)): self[i] = self[i].protract(x) return self.coalesce() | 479,796 |
def contract(self, x): """ For each segment in the list, move both the start and the end a distance x towards the other. Coalesce the result. Segmentlist is modified in place. """ for i in xrange(len(self)): self[i] = self[i].contract(x) return self.coalesce() | def contract(self, x): """ Execute the .contract() method on each segment in the list and coalesce the result. Segmentlist is modified in place. """ for i in xrange(len(self)): self[i] = self[i].contract(x) return self.coalesce() | 479,797 |
def shift(self, x): """ Shift the segmentlist by adding x to the upper and lower bounds of all segments. The algorithm is O(n) and does not require the list to be coalesced. Segmentlist is modified in place. """ for i in xrange(len(self)): self[i] = self[i].shift(x) return self | def shift(self, x): """ Execute the .shift() method on each segment in the list. The algorithm is O(n) and does not require the list to be coalesced nor does it coalesce the list. Segmentlist is modified in place. """ for i in xrange(len(self)): self[i] = self[i].shift(x) return self | 479,798 |
def popitem(*args): raise NotImplementedError | def popitem(*args): raise NotImplementedError | 479,799 |
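
Each row above pairs a code snippet containing a defect (`bugged`) with its corrected counterpart (`fixed`), plus a pandas-style row index (`__index_level_0__`) carried over from export. Below is a minimal sketch of iterating over rows with this schema, assuming the data is published as a Hugging Face dataset; the path `user/bugged-fixed-code` is a hypothetical placeholder, not the real identifier.

```python
# Minimal sketch: iterate over rows with the schema shown in the table above.
# The dataset path is a hypothetical placeholder; substitute the real identifier.
from datasets import load_dataset

ds = load_dataset("user/bugged-fixed-code", split="train")  # hypothetical path

for row in ds.select(range(3)):            # peek at the first three rows
    bugged = row["bugged"]                 # snippet containing the defect
    fixed = row["fixed"]                   # the corrected counterpart
    idx = row["__index_level_0__"]         # pandas index preserved at export time
    print(idx, len(bugged), len(fixed))
```

Note that some `fixed` cells (e.g. rows 479,727 and 479,745) contain interleaved or whitespace-stripped text; these appear to be artifacts in the dataset itself, so consumers may want to filter such rows before training.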