Dataset schema (one row = three cells):
  bugged             string, lengths 4 to 228k
  fixed              string, lengths 0 to 96.3M
  __index_level_0__  int64, values 0 to 481k

Each row is printed below as three lines: the truncated "bugged" cell, the truncated "fixed" cell, and the row index.
def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.ex...
def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.ex...
479,600
def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.ex...
def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.ex...
479,601
def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.ex...
def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.ex...
479,602
def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.ex...
def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.ex...
479,603
def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.ex...
def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.ex...
479,604
def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.ex...
def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.ex...
479,605
def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.ex...
def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.ex...
479,606
def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.ex...
def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.ex...
479,607
def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.ex...
def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.ex...
479,608
def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.ex...
def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.ex...
479,609
def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.ex...
def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.ex...
479,610
def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.ex...
def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.ex...
479,611
def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.ex...
def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.ex...
479,612
def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.ex...
def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.ex...
479,613
def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.ex...
def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.ex...
479,614
def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.ex...
def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.ex...
479,615
def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.ex...
def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.ex...
479,616
def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.ex...
def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.ex...
479,617
def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.ex...
def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.ex...
479,618
def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.ex...
def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.ex...
479,619
def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.ex...
def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.ex...
479,620
def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.ex...
def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.ex...
479,621
def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.ex...
def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.ex...
479,622
def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.ex...
deffor myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueDictAQ[sngl.ifo]=list() for zFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)): for chan in wikiFileFinder.__readSummary__(zFile): if not "PEM" in chan[0] or not ...
479,623
def __init__(self,type=None,ifo=None,time=None,snr=None,chisqr=None,mass1=None,mass2=None,mchirp=None): """ """ self.type=str(type) self.ifo=str(ifo) self.time=float(time) self.snr=float(snr) self.chisqr=float(chisqr) self.mass1=float(mass1) self.mass2=float(mass2) self.mchirp=float(mchirp)
def __init__(self,type=None,ifo=None,time=None,snr=None,chisqr=None,mass1=None,mass2=None,mchirp=None): """ """ self.type=str(type) self.ifo=str(ifo) self.time=float(time) self.snr=float(snr) self.chisqr=float(chisqr) self.mass1=float(mass1) self.mass2=float(mass2) self.mchirp=float(mchirp)
479,624
def __init__(self, config_parser): """ config_parser = ConfigParser object """ pipeline.CondorDAGJob.__init__(self, get_universe(config_parser), get_executable(config_parser, "lalapps_binj")) pipeline.AnalysisJob.__init__(self, config_parser)
def __init__(self, config_parser): """ config_parser = ConfigParser object """ pipeline.CondorDAGJob.__init__(self, get_universe(config_parser), get_executable(config_parser, "lalapps_binj")) pipeline.AnalysisJob.__init__(self, config_parser)
479,625
def init_job_types(config_parser, job_types = ("datafind", "rm", "binj", "power", "lladd", "binjfind", "bucluster", "bucut", "burca", "burca2", "sqlite", "burcatailor")): """ Construct definitions of the submit files. """ global datafindjob, rmjob, binjjob, powerjob, lladdjob, binjfindjob, buclusterjob, llb2mjob, bucut...
def init_job_types(config_parser, job_types = ("datafind", "rm", "binj", "power", "lladd", "binjfind", "bucluster", "bucut", "burca", "burca2", "sqlite", "burcatailor")): """ Construct definitions of the submit files. """ global datafindjob, rmjob, binjjob, powerjob, lladdjob, binjfindjob, buclusterjob, llb2mjob, bucut...
479,626
def make_binjfind_fragment(dag, parents, tag, verbose = False): input_cache = collect_output_caches(parents) nodes = set() while input_cache: node = BinjfindNode(binjfindjob) node.add_input_cache([cache_entry for (cache_entry, parent) in input_cache[:binjfindjob.files_per_binjfind]]) for cache_entry, parent in input_ca...
def make_binjfind_fragment(dag, parents, tag, verbose = False): input_cache = collect_output_caches(parents) nodes = set() while input_cache: node = BinjfindNode(binjfindjob) node.add_input_cache([cache_entry for (cache_entry, parent) in input_cache[:binjfindjob.files_per_binjfind]]) for parent in set(parent for cache_...
479,627
def make_bucluster_fragment(dag, parents, tag, verbose = False): input_cache = collect_output_caches(parents) nodes = set() while input_cache: node = BuclusterNode(buclusterjob) node.add_input_cache([cache_entry for (cache_entry, parent) in input_cache[:buclusterjob.files_per_bucluster]]) for cache_entry, parent in inp...
def make_bucluster_fragment(dag, parents, tag, verbose = False): input_cache = collect_output_caches(parents) nodes = set() while input_cache: node = BuclusterNode(buclusterjob) node.add_input_cache([cache_entry for (cache_entry, parent) in input_cache[:buclusterjob.files_per_bucluster]]) for parent in set(parent for c...
479,628
def make_bucut_fragment(dag, parents, tag, verbose = False): input_cache = collect_output_caches(parents) nodes = set() while input_cache: node = BucutNode(bucutjob) node.add_input_cache([cache_entry for (cache_entry, parent) in input_cache[:bucutjob.files_per_bucut]]) for cache_entry, parent in input_cache[:bucutjob.f...
def make_bucut_fragment(dag, parents, tag, verbose = False): input_cache = collect_output_caches(parents) nodes = set() while input_cache: node = BucutNode(bucutjob) node.add_input_cache([cache_entry for (cache_entry, parent) in input_cache[:bucutjob.files_per_bucut]]) for parent in set(parent for cache_entry, parent i...
479,629
def make_burca_fragment(dag, parents, tag, coincidence_segments = None, verbose = False): input_cache = collect_output_caches(parents) if coincidence_segments is not None: # doesn't sense to supply this keyword argument for # more than one input file assert len(input_cache) == 1 nodes = set() while input_cache: node = ...
def make_burca_fragment(dag, parents, tag, coincidence_segments = None, verbose = False): input_cache = collect_output_caches(parents) if coincidence_segments is not None: # doesn't sense to supply this keyword argument for # more than one input file assert len(input_cache) == 1 nodes = set() while input_cache: node = ...
479,630
def __init__(self, dag, job, cp, opts, ifo, time, p_nodes=[], type=""):
def __init__(self, dag, job, cp, opts, ifo, time, p_nodes=[], type=""):
479,631
def __init__(self, dag, job, cp, opts, time, ifo, p_nodes=[], type="ht", variety="fg"):
def __init__(self, dag, job, cp, opts, time, ifo, p_nodes=[], type="ht", variety="fg"):
479,632
def __init__(self, dag, job, cp, opts, ifo, sngl=None, qscan=False, trigger_time=None, data_type="hoft", p_nodes=[]):
def __init__(self, dag, job, cp, opts, ifo, sngl=None, qscan=False, trigger_time=None, data_type="hoft", p_nodes=[]):
479,633
def setup_qscan(self, job, cp, time, ifo, data_type): # 1s is substracted to the expected startTime to make sure the window # will be large enough. This is to be sure to handle the rouding to the # next sample done by qscan. type, channel = figure_out_type(time,ifo,data_type) self.set_type(type) self.q_time = float(cp....
def setup_qscan(self, job, cp, time, ifo, data_type): # 1s is substracted to the expected startTime to make sure the window # will be large enough. This is to be sure to handle the rouding to the # next sample done by qscan. type, channel = figure_out_type(time,ifo,data_type) self.set_type(type) self.q_time = float(cp....
479,634
def cbcBayesSkyRes(outdir,data,oneDMenu,twoDGreedyMenu,GreedyRes,confidence_levels,twoDplots,injfile=None,eventnum=None,skyres=None): if eventnum is not None and injfile is None: print "You specified an event number but no injection file. Ignoring!" if data is None: print 'You must specify an input data file' exit(1)...
def cbcBayesSkyRes(outdir,data,oneDMenu,twoDGreedyMenu,GreedyRes,confidence_levels,twoDplots,injfile=None,eventnum=None,skyres=None):if eventnum is not None and injfile is None: print "You specified an event number but no injection file. Ignoring!"if data is None: print 'You must specify an input data file' exit(1) # i...
479,635
def cbcBayesSkyRes(outdir,data,oneDMenu,twoDGreedyMenu,GreedyRes,confidence_levels,twoDplots,injfile=None,eventnum=None,skyres=None): if eventnum is not None and injfile is None: print "You specified an event number but no injection file. Ignoring!" if data is None: print 'You must specify an input data file' exit(1)...
def cbcBayesSkyRes(outdir,data,oneDMenu,twoDGreedyMenu,GreedyRes,confidence_levels,twoDplots,injfile=None,eventnum=None,skyres=None): if eventnum is not None and injfile is None: print "You specified an event number but no injection file. Ignoring!" if data is None: print 'You must specify an input data file' exit(1)...
479,636
def __init__(self, options): """ Initializes this class with the options. """ self.opts = options self.fname_list = [] self.tag_list = [] self.html_footer = ""
def __init__(self, options, ifo_times = None, ifo_tag = None, user_tag = None,\ gps_start_time = None, gps_end_time = None): """ Initializes this class with the options. """ self.opts = options self.fname_list = [] self.tag_list = [] self.html_footer = ""
479,637
def __init__(self, options): """ Initializes this class with the options. """ self.opts = options self.fname_list = [] self.tag_list = [] self.html_footer = ""
def __init__(self, options): """ Initializes this class with the options. """ self.opts = options self.fname_list = [] self.tag_list = [] self.html_footer = ""
479,638
def add_plot(self, plot_fig, text): """ Add a plot to the page """ fname = set_figure_name(self.opts, text) fname_thumb = savefig_pylal(fname, fig=plot_fig) self.fname_list.append(fname) self.tag_list.append(fname)
def add_plot(self, plot_fig, text): """ Add a plot to the page """ fname = set_figure_name(self.opts, text) fname_thumb = savefig_pylal(fname, fig=plot_fig) self.fname_list.append(fname) self.tag_list.append(fname)
479,639
def write_page(self): """ create the page """ if self.opts.enable_output: html_filename = write_html_output(self.opts, sys.argv[1:],\ self.fname_list, self.tag_list,\ comment=self.html_footer or None) write_cache_output(self.opts, html_filename, self.fname_list) return html_filename
def write_page(self): """ create the page """ if self.opts.enable_output: html_filename = self.create_htmlname(infix, cbcweb) self.write_html_output(html_filename, doThumb = doThumb, cbcweb = cbcweb, \ map_list = map_list, coinc_summ_table = coinc_summ_table,\ comment=self.html_footer or None) self.write_cache_output(h...
479,640
def write(self, text): """ Write some text to the standard output AND to the page. """ print text self.html_footer+=text+'<br>'
def write(self, text): """ Write some text to the standard output AND to the page. """ print text self.html_footer+=text+'<br>' def create_htmlname(self, infix, cbcweb): """ Create the html filename """ if infix: html_filename = self.prefix + '_'+ infix +self.suffix else: html_filename = self.prefix + self.suffix ...
479,641
def greedyBin1(par_samps,par_bin,confidence_levels,par_injvalue=None): oneDGreedyCL={} oneDGreedyInj={} parpos_min=min(par_samps) parpos_max=max(par_samps) par_point=parpos_min parpos_Nbins= int(ceil((parpos_max - parpos_min)/par_bin))+1 greedyPoints=np.zeros((parpos_Nbins,2)) #2D so it can be put through same con...
def greedyBin1(par_samps,par_bin,confidence_levels,par_injvalue=None): oneDGreedyCL={} oneDGreedyInj={} parpos_min=min(par_samps) parpos_max=max(par_samps) par_point=parpos_min parpos_Nbins= int(ceil((parpos_max - parpos_min)/par_bin))+1 greedyPoints=np.zeros((parpos_Nbins,2)) #2D so it can be put through same con...
479,642
def get_coincs(self, eventlists, event_comparefunc, thresholds, verbose = False): # # has this node already been visited? if so, return the # answer we already know #
def get_coincs(self, eventlists, event_comparefunc, thresholds, verbose = False): # # has this node already been visited? if so, return the # answer we already know #
479,643
def get_likelihood_ratio(coinc_event_id, time_slide_id, row_from_cols = database.sngl_burst_table.row_from_cols, cursor = database.connection.cursor(), offset_vectors = offset_vectors, params_func = params_func, params_func_extra_args = params_func_extra_args): events = map(row_from_cols, cursor.execute("""
def get_likelihood_ratio(coinc_event_id, time_slide_id, row_from_cols = database.sngl_burst_table.row_from_cols, cursor = database.connection.cursor(), offset_vectors = offset_vectors, params_func = params_func, params_func_extra_args = params_func_extra_args): events = map(row_from_cols, cursor.execute("""
479,644
def __init__(self, dag, job, cp, opts, sngl, frame_cache, chia, tag, p_nodes=[]):
def __init__(self, dag, job, cp, opts, sngl, frame_cache, chia, tag, p_nodes=[]):
479,645
def write_abstract_dag(self): """ Write all the nodes in the workflow to the DAX file. """ if not self.__dax_file_path: # this workflow is not dax-compatible, so don't write a dax return try: dagfile = open( self.__dax_file_path, 'w' ) except: raise CondorDAGError, "Cannot open file " + self.__dag_file_path
def write_abstract_dag(self): """ Write all the nodes in the workflow to the DAX file. """ if not self.__dax_file_path: # this workflow is not dax-compatible, so don't write a dax return try: dagfile = open( self.__dax_file_path, 'w' ) except: raise CondorDAGError, "Cannot open file " + self.__dag_file_path
479,646
def get_unique_filename(name): """ use this to avoid name collisions """ counter = 1 base_name, ext = os.path.splitext(name) while os.path.isfile(name): name = base_name + '_' + str(counter) + ext counter += 1 return name
def get_unique_filename(name): """ use this to avoid name collisions """ counter = 1 base_name, ext = os.path.splitext(name) while os.path.isfile(name): name = base_name + '_' + str(counter) + ext counter += 1 return name
479,647
def detector_thresholds(min_threshold, ifos, RA, dec, gps_time, sensitivities=None): """ Return a dictionary of sensitivity thresholds for each detector, based on a minimum threshold of min_threshold in the least sensitive one, for a source at position (RA,dec) specified in radians at time gps_time. Specifying a dictio...
def detector_thresholds(ifos, RA, dec, gps_time, sensitivities=None, min_threshold=4.5, max_threshold=7.5): """ Return a dictionary of sensitivity thresholds for each detector, based on a minimum threshold of min_threshold in the least sensitive one, for a source at position (RA,dec) specified in radians at time gps_ti...
479,648
def detector_thresholds(min_threshold, ifos, RA, dec, gps_time, sensitivities=None): """ Return a dictionary of sensitivity thresholds for each detector, based on a minimum threshold of min_threshold in the least sensitive one, for a source at position (RA,dec) specified in radians at time gps_time. Specifying a dictio...
def detector_thresholds(min_threshold, ifos, RA, dec, gps_time, sensitivities=None): """ Return a dictionary of sensitivity thresholds for each detector, based on a minimum threshold of min_threshold in the least sensitive one, for a source at position (RA,dec) specified in radians at time gps_time. Specifying a dictio...
479,649
def detector_thresholds(min_threshold, ifos, RA, dec, gps_time, sensitivities=None): """ Return a dictionary of sensitivity thresholds for each detector, based on a minimum threshold of min_threshold in the least sensitive one, for a source at position (RA,dec) specified in radians at time gps_time. Specifying a dictio...
def detector_thresholds(min_threshold, ifos, RA, dec, gps_time, sensitivities=None): """ Return a dictionary of sensitivity thresholds for each detector, based on a minimum threshold of min_threshold in the least sensitive one, for a source at position (RA,dec) specified in radians at time gps_time. Specifying a dictio...
479,650
def detector_thresholds(min_threshold, ifos, RA, dec, gps_time, sensitivities=None): """ Return a dictionary of sensitivity thresholds for each detector, based on a minimum threshold of min_threshold in the least sensitive one, for a source at position (RA,dec) specified in radians at time gps_time. Specifying a dictio...
def detector_thresholds(min_threshold, ifos, RA, dec, gps_time, sensitivities=None): """ Return a dictionary of sensitivity thresholds for each detector, based on a minimum threshold of min_threshold in the least sensitive one, for a source at position (RA,dec) specified in radians at time gps_time. Specifying a dictio...
479,651
def detector_thresholds(min_threshold, ifos, RA, dec, gps_time, sensitivities=None): """ Return a dictionary of sensitivity thresholds for each detector, based on a minimum threshold of min_threshold in the least sensitive one, for a source at position (RA,dec) specified in radians at time gps_time. Specifying a dictio...
def detector_thresholds(min_threshold, ifos, RA, dec, gps_time, sensitivities=None): """ Return a dictionary of sensitivity thresholds for each detector, based on a minimum threshold of min_threshold in the least sensitive one, for a source at position (RA,dec) specified in radians at time gps_time. Specifying a dictio...
479,652
def detector_thresholds(min_threshold, ifos, RA, dec, gps_time, sensitivities=None): """ Return a dictionary of sensitivity thresholds for each detector, based on a minimum threshold of min_threshold in the least sensitive one, for a source at position (RA,dec) specified in radians at time gps_time. Specifying a dictio...
def detector_thresholds(min_threshold, ifos, RA, dec, gps_time, sensitivities=None): """ Return a dictionary of sensitivity thresholds for each detector, based on a minimum threshold of min_threshold in the least sensitive one, for a source at position (RA,dec) specified in radians at time gps_time. Specifying a dictio...
479,653
def set_event_number(self,event): """ Set the event number in the injection XML. """ if event is not None: self.__event=int(event) self.add_var_opt('eventnum',str(event))
def set_event_number(self,event): """ Set the event number in the injection XML. """ if event is not None: self.__event=int(event) self.add_var_opt('eventnum',str(event))
479,654
def get_unique_filename(name): """ use this to avoid name collisions """ counter = 1 base_name, ext = os.path.splitext(name) while os.path.isfile(base_name): base_name = base_name + '_' + str(counter) + ext counter += 1 return base_name + ext
def get_unique_filename(name): """ use this to avoid name collisions """ counter = 1 base_name, ext = os.path.splitext(name) while os.path.isfile(base_name): base_name = base_name + '_' + str(counter) + ext counter += 1 return base_name + ext
479,655
def __init__(self, offset_vector_dict, verbose = False): if verbose: print >>sys.stderr, "constructing coincidence assembly graph for %d target offset vectors ..." % len(offset_vector_dict)
def __init__(self, offset_vector_dict, verbose = False): if verbose: print >>sys.stderr, "constructing coincidence assembly graph for %d target offset vectors ..." % len(offset_vector_dict)
479,656
def write(self, fileobj): """ Write a DOT graph representation of the time slide graph to fileobj. """ vectorstring = lambda offset_vector: ",".join("%s=%g" % (instrument, offset) for instrument, offset in sorted(offset_vector.items()))
def write(self, fileobj): """ Write a DOT graph representation of the time slide graph to fileobj. """ vectorstring = lambda offset_vector: ",".join("%s=%g" % (instrument, offset) for instrument, offset in sorted(offset_vector.items()))
479,657
def write(self, fileobj): """ Write a DOT graph representation of the time slide graph to fileobj. """ vectorstring = lambda offset_vector: ",".join("%s=%g" % (instrument, offset) for instrument, offset in sorted(offset_vector.items()))
def write(self, fileobj): """ Write a DOT graph representation of the time slide graph to fileobj. """ vectorstring = lambda offset_vector: ",".join("%s=%g" % (instrument, offset) for instrument, offset in sorted(offset_vector.items()))
479,658
def __init__(self, xmldoc, b_b_def, sb_b_def, si_b_def, sb_c_e_def, sb_c_n_def, si_c_e_def, si_c_n_def, process): # # store the process row #
def __init__(self, xmldoc, b_b_def, sb_b_def, si_b_def, sb_c_e_def, sb_c_n_def, si_c_e_def, si_c_n_def, process, livetime_program): # # store the process row #
479,659
def __init__(self, xmldoc, b_b_def, sb_b_def, si_b_def, sb_c_e_def, sb_c_n_def, si_c_e_def, si_c_n_def, process): # # store the process row #
def __init__(self, xmldoc, b_b_def, sb_b_def, si_b_def, sb_c_e_def, sb_c_n_def, si_c_e_def, si_c_n_def, process): # # store the process row #
479,660
def bursts_near_peaktime(self, t): """ Return a list of the burst events whose peak times are within self.burst_peak_time_window of t. """ return self.snglbursttable[bisect.bisect_left(self.snglbursttable, t - self.burst_peak_time_window):bisect.bisect_right(self.snglbursttable, t + self.burst_peak_time_window)]
def bursts_near_peaktime(self, t): """ Return a list of the burst events whose peak times are within window seconds of t. This is not used to define any coincidences, only to provide a short list of burst events for use in more costly comparison tests. """ return self.snglbursttable[bisect.bisect_left(self.snglburstta...
479,661
def bursts_near_peaktime(self, t): """ Return a list of the burst events whose peak times are within self.burst_peak_time_window of t. """ return self.snglbursttable[bisect.bisect_left(self.snglbursttable, t - self.burst_peak_time_window):bisect.bisect_right(self.snglbursttable, t + self.burst_peak_time_window)]
def bursts_near_peaktime(self, t): """ Return a list of the burst events whose peak times are within self.burst_peak_time_window of t. """ return self.snglbursttable[bisect.bisect_left(self.snglbursttable, t - self.burst_peak_time_window):bisect.bisect_right(self.snglbursttable, t + self.burst_peak_time_window)]
479,662
def coincs_near_peaktime(self, t): """ Return a list of the (coinc_event_id, event list) tuples in which at least one burst event's peak time is within self.coinc_peak_time_window of t. """ # FIXME: this test does not consider the time slide # offsets that should be applied to the coinc, but for now # injections are d...
def coincs_near_peaktime(self, t): """ Return a list of the (coinc_event_id, event list) tuples in which at least one burst event's peak time is within window seconds of t. This is not used to define any coincidences, only to provide a short list of coinc events for use in more costly comparison tests. """ # FIXME: t...
479,663
def coincs_near_peaktime(self, t): """ Return a list of the (coinc_event_id, event list) tuples in which at least one burst event's peak time is within self.coinc_peak_time_window of t. """ # FIXME: this test does not consider the time slide # offsets that should be applied to the coinc, but for now # injections are d...
def coincs_near_peaktime(self, t): """ Return a list of the (coinc_event_id, event list) tuples in which at least one burst event's peak time is within self.coinc_peak_time_window of t. """ # FIXME: this test does not consider the time slide # offsets that should be applied to the coinc, but for now # injections are d...
479,664
def StringCuspSnglCompare(sim, burst): """ Return False if the peak time of the injection sim lies within the time interval of burst. """ return SimBurstUtils.time_at_instrument(sim, burst.ifo) not in burst.get_period()
def StringCuspSnglCompare(sim, burst): """ Return False (injection matches event) if an autocorrelation-width window centred on the injection is continuous with the time interval of the burst. """ tinj = SimBurstUtils.time_at_instrument(sim, burst.ifo) window = SimBurstUtils.stringcusp_autocorrelation_width / 2 return ...
479,665
def ExcessPowerSnglCompare(sim, burst): """ Return False if the peak time and centre frequency of sim lie within the time-frequency tile of burst. """ return StringCuspSnglCompare(sim, burst) or (sim.frequency not in burst.get_band())
def ExcessPowerSnglCompare(sim, burst): """ Return False (injection matches event) if the peak time and centre frequency of sim lie within the time-frequency tile of burst. """ return (SimBurstUtils.time_at_instrument(sim, burst.ifo) not in burst.get_period()) or (sim.frequency not in burst.get_band())
479,666
def OmegaSnglCompare(sim, burst, delta_t = 10.0): """ Return False if the peak time and centre frequency of sim lie within the time-frequency tile of burst. """ return abs(float(SimBurstUtils.time_at_instrument(sim, burst.ifo) - burst.get_peak())) > delta_t
def OmegaSnglCompare(sim, burst, delta_t = 10.0): """ Return False (injection matches event) if the time of the sim and the peak time of the burst event differ by less than or equal to delta_t seconds. """ return abs(float(SimBurstUtils.time_at_instrument(sim, burst.ifo) - burst.get_peak())) > delta_t
479,667
def StringCuspNearCoincCompare(sim, burst): """ Return False if the peak time of the sim is "near" the burst event. """ return OmegaNearCoincCompare(sim, burst)
def StringCuspNearCoincCompare(sim, burst): """ Return False (injection matches coinc) if the peak time of the sim is "near" the burst event. """ tinj = SimBurstUtils.time_at_instrument(sim, burst.ifo) window = SimBurstUtils.stringcusp_autocorrelation_width / 2 + SimBurstUtils.burst_is_near_injection_window return segm...
479,668
def ExcessPowerNearCoincCompare(sim, burst): """ Return False if the peak time of the sim is "near" the burst event. """ return not SimBurstUtils.burst_is_near_injection(sim, burst.start_time, burst.start_time_ns, burst.duration, burst.ifo)
def ExcessPowerNearCoincCompare(sim, burst): """ Return False (injection matches coinc) if the peak time of the sim is "near" the burst event. """ return not SimBurstUtils.burst_is_near_injection(sim, burst.start_time, burst.start_time_ns, burst.duration, burst.ifo)
479,669
def OmegaNearCoincCompare(sim, burst): """ Return False if the peak time of the sim is "near" the burst event. """ start_time = burst.get_peak() - burst.duration / 2.0 return not SimBurstUtils.burst_is_near_injection(sim, start_time.seconds, start_time.nanoseconds, burst.duration, burst.ifo)
def OmegaNearCoincCompare(sim, burst): """ Return False (injection matches coinc) if the peak time of the sim is "near" the burst event. """ return OmegaSnglCompare(sim, burst, delta_t = 20.0 + burst.duration / 2)
479,670
def find_sngl_burst_matches(contents, sim, comparefunc): """ Scan the burst table for triggers matching sim. """ return [burst for burst in contents.bursts_near_peaktime(sim.get_time_geocent()) if not comparefunc(sim, burst)]
def find_sngl_burst_matches(contents, sim, comparefunc, sieve_window): """ Scan the burst table for triggers matching sim. sieve_window is used in a bisection search to quickly identify burst events within that many seconds of the injection's peak time at the geocentre; it should be larger than the greatest time diffe...
479,671
def find_exact_coinc_matches(coincs, sim, comparefunc): """ Return a list of the coinc_event_ids of the burst<-->burst coincs in which all burst events match sim. """ # FIXME: this test does not consider the time slide offsets that # should be applied to the coinc, but for now injections are done # at zero lag so this...
def find_exact_coinc_matches(coincs, sim, comparefunc, seglists): """ Return a list of the coinc_event_ids of the burst<-->burst coincs in which all burst events match sim. """ # FIXME: this test does not consider the time slide offsets that # should be applied to the coinc, but for now injections are done # at zero l...
479,672
def find_exact_coinc_matches(coincs, sim, comparefunc): """ Return a list of the coinc_event_ids of the burst<-->burst coincs in which all burst events match sim. """ # FIXME: this test does not consider the time slide offsets that # should be applied to the coinc, but for now injections are done # at zero lag so this...
def find_exact_coinc_matches(coincs, sim, comparefunc): """ Return a list of the coinc_event_ids of the burst<-->burst coincs in which all burst events match sim and to which all instruments on at the time of the sim contributed events. """ # FIXME: this test does not consider the time slide offsets that # should be a...
479,673
def find_exact_coinc_matches(coincs, sim, comparefunc): """ Return a list of the coinc_event_ids of the burst<-->burst coincs in which all burst events match sim. """ # FIXME: this test does not consider the time slide offsets that # should be applied to the coinc, but for now injections are done # at zero lag so this...
def find_exact_coinc_matches(coincs, sim, comparefunc): """ Return a list of the coinc_event_ids of the burst<-->burst coincs in which all burst events match sim. """ # FIXME: this test does not consider the time slide offsets that # should be applied to the coinc, but for now injections are done # at zero lag so this...
479,674
def find_near_coinc_matches(coincs, sim, comparefunc): """ Return a list of the coinc_event_ids of the burst<-->burst coincs in which at least one burst event matches sim. """ # FIXME: this test does not consider the time slide offsets that # should be applied to the coinc, but for now injections are done # at zero la...
def find_near_coinc_matches(coincs, sim, comparefunc): """ Return a list of the coinc_event_ids of the burst<-->burst coincs in which at least one burst event matches sim. """ # FIXME: this test does not consider the time slide offsets that # should be applied to the coinc, but for now injections are done # at zero la...
479,675
si_c_n_def = si_c_n_def,
si_c_n_def = si_c_n_def,
479,676
si_c_n_def = si_c_n_def,
si_c_n_def = si_c_n_def,
479,677
si_c_n_def = si_c_n_def,
si_c_n_def = si_c_n_def,
479,678
si_c_n_def = si_c_n_def,
si_c_n_def = si_c_n_def,
479,679
si_c_n_def = si_c_n_def,
si_c_n_def = si_c_n_def,
479,680
def compute_segment_lists(seglists, offset_vectors, min_segment_length, pad): # don't modify original seglists = seglists.copy() # ignore offset vectors referencing instruments we don't have offset_vectors = [offset_vector for offset_vector in offset_vectors if set(offset_vector.keys()).issubset(set(seglists.keys()))]...
def compute_segment_lists(seglists, offset_vectors, min_segment_length, pad): # don't modify original seglists = seglists.copy() # ignore offset vectors referencing instruments we don't have offset_vectors = [offset_vector for offset_vector in offset_vectors if set(offset_vector.keys()).issubset(set(seglists.keys()))]...
479,681
def init_job_types(config_parser, job_types = ("string", "meas_likelihoodjob", "calc_likelihood")): """ Construct definitions of the submit files. """ global stringjob, meas_likelihoodjob, calc_likelihoodjob # lalapps_StringSearch if "string" in job_types: stringjob = StringJob(config_parser) # lalapps_string_meas_li...
def init_job_types(config_parser, job_types = ("string", "meas_likelihoodjob", "calc_likelihood")): """ Construct definitions of the submit files. """ global stringjob, meas_likelihoodjob, calc_likelihoodjob, runsqlitejob # lalapps_StringSearch if "string" in job_types: stringjob = StringJob(config_parser) # lalapps_...
479,682
def make_meas_likelihood_fragment(dag, parents, tag, files_per_meas_likelihood = None): if files_per_meas_likelihood is None: files_per_meas_likelihood = meas_likelihoodjob.files_per_meas_likelihood nodes = set() input_cache = power.collect_output_caches(parents) while input_cache: node = MeasLikelihoodNode(meas_likeli...
def make_meas_likelihood_fragment(dag, parents, tag, files_per_meas_likelihood = None): if files_per_meas_likelihood is None: files_per_meas_likelihood = meas_likelihoodjob.files_per_meas_likelihood nodes = set() input_cache = power.collect_output_caches(parents) while input_cache: node = MeasLikelihoodNode(meas_likeli...
479,683
def make_meas_likelihood_fragment(dag, parents, tag, files_per_meas_likelihood = None): if files_per_meas_likelihood is None: files_per_meas_likelihood = meas_likelihoodjob.files_per_meas_likelihood nodes = set() input_cache = power.collect_output_caches(parents) while input_cache: node = MeasLikelihoodNode(meas_likeli...
def make_meas_likelihood_fragment(dag, parents, tag, files_per_meas_likelihood = None): if files_per_meas_likelihood is None: files_per_meas_likelihood = meas_likelihoodjob.files_per_meas_likelihood nodes = set() input_cache = power.collect_output_caches(parents) while input_cache: node = MeasLikelihoodNode(meas_likeli...
479,684
def __init__(self, job, database, output_cache = None, output_tag = "SEARCH_VOLUME", bootstrap_iterations=10000, veto_segments_name="vetoes", use_expected_loudest_event = False): """ @database: the pipedown database containing the injection triggers @ouptut_cache: name prefix for cache file to be written out by program...
def __init__(self, job, database, output_cache = None, output_tag = "SEARCH_VOLUME", bootstrap_iterations=10000, veto_segments_name="vetoes", use_expected_loudest_event = False): """ @database: the pipedown database containing the injection triggers @ouptut_cache: name prefix for cache file to be written out by program...
479,685
def test_and_add_hipe_arg(hipeCommand, hipe_arg): if config.has_option("hipe-arguments",hipe_arg): hipeCommand += "--" + hipe_arg + " " + \ config.get("hipe-arguments",hipe_arg) return(hipeCommand)
def test_and_add_hipe_arg(hipeCommand, hipe_arg): if config.has_option("hipe-arguments",hipe_arg): hipeCommand += "--" + hipe_arg + " " + \ config.get("hipe-arguments",hipe_arg) return(hipeCommand)
479,686
def test_and_add_hipe_arg(hipeCommand, hipe_arg): if config.has_option("hipe-arguments",hipe_arg): hipeCommand += "--" + hipe_arg + " " + \ config.get("hipe-arguments",hipe_arg) return(hipeCommand)
def test_and_add_hipe_arg(hipeCommand, hipe_arg): if config.has_option("hipe-arguments",hipe_arg): hipeCommand += "--" + hipe_arg + " " + \ config.get("hipe-arguments",hipe_arg) return(hipeCommand)
479,687
def test_and_add_hipe_arg(hipeCommand, hipe_arg): if config.has_option("hipe-arguments",hipe_arg): hipeCommand += "--" + hipe_arg + " " + \ config.get("hipe-arguments",hipe_arg) return(hipeCommand)
def test_and_add_hipe_arg(hipeCommand, hipe_arg): if config.has_option("hipe-arguments",hipe_arg): hipeCommand += "--" + hipe_arg + " " + \ config.get("hipe-arguments",hipe_arg) return(hipeCommand)
479,688
def get_input_from_cache(self, cache): """ Retrieves """ self.add_var_arg(filename)
def get_input_from_cache(self, cache): """ Retrieves """ self.add_var_arg(filename)
479,689
def __init__(self, job): """ @job: a PrintLCJob """ pipeline.SqliteNode.__init__(self, job) self.__extract_to_xml = None self.__extract_to_database = None self.__exclude_coincs = None self.__include_only_coincs = None self.__sim_type = None self.__output_format = None self.__columns = None
def __init__(self, job): """ @job: a PrintLCJob """ pipeline.SqliteNode.__init__(self, job) self.__extract_to_xml = None self.__extract_to_database = None self.__exclude_coincs = None self.__include_only_coincs = None self.__sim_tag = None self.__output_format = None self.__columns = None
479,690
def __init__(self, options, cp, dir='', tag_base=''): """ """ self.__conditionalLoadDefaults__(followUpChiaJob.defaults,cp) #self.__prog__ = 'followUpChiaJob' self.__executable = string.strip(cp.get('condor','chia')) self.__universe = "standard" pipeline.CondorDAGJob.__init__(self,self.__universe,self.__executable) sel...
def __init__(self, options, cp, dir='', tag_base=''): """ """ self.__conditionalLoadDefaults__(followUpChiaJob.defaults,cp) #self.__prog__ = 'followUpChiaJob' self.__executable = string.strip(cp.get('condor','chia')) self.__universe = "standard" pipeline.CondorDAGJob.__init__(self,self.__universe,self.__executable) sel...
479,691
def __init__(self, dag, job, cp, opts, sngl, frame_cache, chia, tag, p_nodes=[]):
def __init__(self, dag, job, cp, opts, sngl, frame_cache, chia, tag, p_nodes=[]):
479,692
def __init__(self, dag, job, cp, opts, coinc, inspiral_node_dict, chia_node =None, p_nodes = []):
def __init__(self, dag, job, cp, opts, coinc, inspiral_node_dict, chia_node =None, p_nodes = []):
479,693
def __init__(self,job,coinc,cp,opts,dag,ifo,ifonames,p_nodes): pipeline.CondorDAGNode.__init__(self,job)
def __init__(self,job,coinc,cp,opts,dag,ifo,ifonames,p_nodes): pipeline.CondorDAGNode.__init__(self,job)
479,694
def get_time_slides(connection): """ Query the database for the IDs and offsets of all time slides, and return two dictionaries one containing the all-zero time slides and the other containing the not-all-zero time slides. """ zero_lag_time_slides = {} background_time_slides = {} for id, instrument, offset, is_backgrou...
def get_time_slides(connection): """ Query the database for the IDs and offsets of all time slides, and return two dictionaries one containing the all-zero time slides and the other containing the not-all-zero time slides. """ zero_lag_time_slides = {} background_time_slides = {} for id, instrument, offset, is_backgrou...
479,695
def get_time_slides(connection): """ Query the database for the IDs and offsets of all time slides, and return two dictionaries one containing the all-zero time slides and the other containing the not-all-zero time slides. """ zero_lag_time_slides = {} background_time_slides = {} for id, instrument, offset, is_backgrou...
def get_time_slides(connection): """ Query the database for the IDs and offsets of all time slides, and return two dictionaries one containing the all-zero time slides and the other containing the not-all-zero time slides. """ zero_lag_time_slides = {} background_time_slides = {} for id, instrument, offset, is_backgrou...
479,696
def get_time_slides(connection): """ Query the database for the IDs and offsets of all time slides, and return two dictionaries one containing the all-zero time slides and the other containing the not-all-zero time slides. """ zero_lag_time_slides = {} background_time_slides = {} for id, instrument, offset, is_backgrou...
def get_time_slides(connection): """ Query the database for the IDs and offsets of all time slides, and return two dictionaries one containing the all-zero time slides and the other containing the not-all-zero time slides. """ zero_lag_time_slides = {} background_time_slides = {} for id, instrument, offset, is_backgrou...
479,697
def latexnumber(s): """ Convert a string of the form "d.dddde-dd" to "d.dddd \times 10^{-dd}" """ m, e = floatpattern.match(s).groups() return r"%s \\times 10^{%d}" % (m, int(e))
def latexnumber(s): """ Convert a string of the form "d.dddde-dd" to "d.dddd \times 10^{-dd}" """ m, e = floatpattern.match(s).groups() return r"%s \times 10^{%d}" % (m, int(e))
479,698
def get_coincs_from_coire(self,files,stat='snr'): """ uses CoincInspiralUtils to get data from old-style (coire'd) coincs """ coincTrigs = CoincInspiralUtils.coincInspiralTable() inspTrigs = SnglInspiralUtils.ReadSnglInspiralFromFiles(files, \ mangle_event_id = True,verbose=None) statistic = CoincInspiralUtils.coincSta...
def get_coincs_from_coire(self,files,stat='snr'): """ uses CoincInspiralUtils to get data from old-style (coire'd) coincs """ coincTrigs = CoincInspiralUtils.coincInspiralTable() inspTrigs = SnglInspiralUtils.ReadSnglInspiralFromFiles(files, \ mangle_event_id = True,verbose=None) statistic = CoincInspiralUtils.coincSta...
479,699