rem (stringlengths 1–322k) | add (stringlengths 0–2.05M) | context (stringlengths 4–228k) | meta (stringlengths 156–215)
---|---|---|---
return comp_lenghts_pos | return comp_lengths_pos | def _prepare_header(output, in_size, basename, mtime): """Returns a prepared gzip header StringIO. The gzip header is defined in RFC 1952. """ output.write("\x1f\x8b\x08") # Gzip-deflate identification flags = FEXTRA if basename: flags |= FNAME output.write(chr(flags)) # The mtime will be undefined if it does not fit. if mtime > 0xffffffffL: mtime = 0 _write32(output, mtime) deflate_flags = "\0" if COMPRESSION_LEVEL == zlib.Z_BEST_COMPRESSION: deflate_flags = "\x02" # slowest compression algorithm output.write(deflate_flags) output.write(chr(OS_CODE_UNIX)) comp_lenghts_pos = _write_extra_fields(output, in_size) if basename: output.write(basename + '\0') # original basename return comp_lenghts_pos | 599e1e2a4e20b94572217d36e0d7135c9204a61d /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/4495/599e1e2a4e20b94572217d36e0d7135c9204a61d/compressor.py |
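The `_prepare_header` context above hand-writes the fixed gzip header fields. As a reading aid, here is a minimal Python 3 sketch of the same 10-byte RFC 1952 header built with `struct.pack`; the flag constants are spelled out locally rather than taken from the module, which is not shown:

```python
import struct
import time

FEXTRA, FNAME = 4, 8   # RFC 1952 FLG bits (the subset used above)
OS_CODE_UNIX = 3

def gzip_header(basename=None, mtime=None, extra=False):
    """Sketch of the fixed 10-byte gzip header (RFC 1952, section 2.3.1)."""
    flags = (FEXTRA if extra else 0) | (FNAME if basename else 0)
    if mtime is None:
        mtime = int(time.time())
    if mtime > 0xFFFFFFFF:  # MTIME is 32-bit; 0 means "no timestamp available"
        mtime = 0
    # magic 0x1f 0x8b, CM=8 (deflate), FLG, MTIME (little-endian u32), XFL, OS
    header = struct.pack("<BBBBIBB", 0x1F, 0x8B, 8, flags, mtime, 0, OS_CODE_UNIX)
    if basename:
        header += basename.encode("latin-1") + b"\x00"  # zero-terminated FNAME
    return header
```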
comp_lenghts_pos = output.tell() | comp_lengths_pos = output.tell() | def _write_extra_fields(output, in_size): """Writes the dictzip extra field. It will be initiated with zeros in chunk lengths. See man dictzip. """ num_chunks = in_size // CHUNK_LENGTH if in_size % CHUNK_LENGTH != 0: num_chunks += 1 field_length = 3*2 + 2 * num_chunks extra_length = 2*2 + field_length assert extra_length <= 0xffff _write16(output, extra_length) # XLEN # Dictzip extra field (Random Access) output.write("RA") _write16(output, field_length) _write16(output, 1) # version _write16(output, CHUNK_LENGTH) _write16(output, num_chunks) comp_lenghts_pos = output.tell() output.write("\0\0" * num_chunks) return comp_lenghts_pos | 599e1e2a4e20b94572217d36e0d7135c9204a61d /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/4495/599e1e2a4e20b94572217d36e0d7135c9204a61d/compressor.py |
return comp_lenghts_pos | return comp_lengths_pos | def _write_extra_fields(output, in_size): """Writes the dictzip extra field. It will be initiated with zeros in chunk lengths. See man dictzip. """ num_chunks = in_size // CHUNK_LENGTH if in_size % CHUNK_LENGTH != 0: num_chunks += 1 field_length = 3*2 + 2 * num_chunks extra_length = 2*2 + field_length assert extra_length <= 0xffff _write16(output, extra_length) # XLEN # Dictzip extra field (Random Access) output.write("RA") _write16(output, field_length) _write16(output, 1) # version _write16(output, CHUNK_LENGTH) _write16(output, num_chunks) comp_lenghts_pos = output.tell() output.write("\0\0" * num_chunks) return comp_lenghts_pos | 599e1e2a4e20b94572217d36e0d7135c9204a61d /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/4495/599e1e2a4e20b94572217d36e0d7135c9204a61d/compressor.py |
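The `_write_extra_fields` context writes the dictzip "RA" (Random Access) extra field with zero placeholders for the per-chunk compressed lengths, which get patched in later. A hedged Python 3 re-expression of the same layout; `CHUNK_LENGTH` is a placeholder value here, since the module's real constant is not shown in the row:

```python
import struct

CHUNK_LENGTH = 58315  # placeholder; the module's actual constant is not shown

def dictzip_extra_field(in_size, chunk_length=CHUNK_LENGTH):
    """Build the gzip FEXTRA payload for dictzip's 'RA' subfield."""
    num_chunks = -(-in_size // chunk_length)      # ceiling division
    field_length = 3 * 2 + 2 * num_chunks         # ver + chlen + chcnt + lengths
    extra_length = 2 * 2 + field_length           # 'RA' + LEN + payload
    assert extra_length <= 0xFFFF                 # XLEN is a 16-bit field
    out = struct.pack("<H", extra_length)         # XLEN
    out += b"RA" + struct.pack("<HHHH", field_length, 1, chunk_length, num_chunks)
    out += b"\x00\x00" * num_chunks               # chunk lengths, zeroed for now
    return out, len(out) - 2 * num_chunks         # offset of the length table
```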
output = open(filename + ".gz", "wb") | output = open(filename + SUFFIX, "wb") | def main(): args = sys.argv[1:] if len(args) == 0: print >>sys.stderr, __doc__ sys.exit(1) for filename in args: input = open(filename, "rb") inputinfo = os.fstat(input.fileno()) basename = os.path.basename(filename) output = open(filename + ".gz", "wb") compressor.compress(input, inputinfo.st_size, output, basename, int(inputinfo.st_mtime)) output.close() input.close() | 98765916ea115983617ef488d3d87392abab75f7 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/4495/98765916ea115983617ef488d3d87392abab75f7/command.py |
print >> cache_fh, ' '.join([lfn,pfn,' pool = "local"']) | print >> cache_fh, ' '.join([lfn,pfn,' pool="local"']) | def hipe_pfn_cache(cachename,globpat): """ create and return the name of a pfn cache containing files that match globpat. This is needed to manage the .input files that hipe creates. cachename = the name of the pfn cache file globpat = the pattern to search for """ cache_fh = open(cachename,"w") for file in glob.glob(globpat): lfn = os.path.basename(file) pfn = "file://" + os.path.join(os.getcwd(),file) print >> cache_fh, ' '.join([lfn,pfn,' pool = "local"']) cache_fh.close() return cachename | 75581937a87bbd80bdc541181f98075089b9a4f3 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/75581937a87bbd80bdc541181f98075089b9a4f3/inspiralutils.py |
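The row above only tightens whitespace (`pool = "local"` becomes `pool="local"`). The motive is not stated in the diff, but a plausible reading is that the consumer of the cache file splits each line on whitespace, so spaces around `=` change the token count:

```python
# Hypothetical illustration of why the whitespace matters (assumption: the
# downstream reader tokenizes each cache line with str.split()).
lfn, pfn = "job.input", "file:///tmp/job.input"
bad  = ' '.join([lfn, pfn, ' pool = "local"'])
good = ' '.join([lfn, pfn, ' pool="local"'])
print(bad.split())   # ['job.input', 'file:///tmp/job.input', 'pool', '=', '"local"']
print(good.split())  # ['job.input', 'file:///tmp/job.input', 'pool="local"']
```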
self.cache_name = os.path.join(self._CondorDAGNode__job.cache_dir, "%s.cache" % self.get_name()) | self.cache_name = os.path.join(self.cache_dir, "%s.cache" % self.get_name()) self.add_var_opt("input-cache", self.cache_name) | def set_name(self, *args): pipeline.CondorDAGNode.set_name(self, *args) self.cache_name = os.path.join(self._CondorDAGNode__job.cache_dir, "%s.cache" % self.get_name()) | 21092c700f762f841e8eba33e9f472c11ebd26af /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/21092c700f762f841e8eba33e9f472c11ebd26af/cosmicstring.py |
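The removed line reaches a private attribute of the job through Python's name mangling (`self._CondorDAGNode__job`); the replacement reads a plain `cache_dir` attribute and records the cache as an option. For readers unfamiliar with the mangling syntax, a minimal standalone demonstration:

```python
class Job:
    def __init__(self):
        self.__cache_dir = "/tmp/cache"   # stored as _Job__cache_dir

job = Job()
print(job._Job__cache_dir)   # works, but couples callers to Job's private layout
try:
    job.__cache_dir          # the unmangled name is not visible from outside
except AttributeError as err:
    print(err)
```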
for c in cache: filename = c.path() pipeline.CondorDAGNode.add_file_arg(self, filename) self.add_output_file(filename) | def add_input_cache(self, cache): if self.output_cache: raise AttributeError, "cannot change attributes after computing output cache" self.input_cache.extend(cache) for c in cache: filename = c.path() pipeline.CondorDAGNode.add_file_arg(self, filename) self.add_output_file(filename) | 21092c700f762f841e8eba33e9f472c11ebd26af /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/21092c700f762f841e8eba33e9f472c11ebd26af/cosmicstring.py |
|
cache_entry.url = "file://localhost" + os.path.abspath(filename) | def set_output(self, description): if self.output_cache: raise AttributeError, "cannot change attributes after computing output cache" cache_entry = power.make_cache_entry(self.input_cache, description, "") filename = os.path.join(self.output_dir, "%s-STRING_LIKELIHOOD_%s-%d-%d.xml.gz" % (cache_entry.observatory, cache_entry.description, int(cache_entry.segment[0]), int(abs(cache_entry.segment)))) self.add_var_opt("output", filename) cache_entry.url = "file://localhost" + os.path.abspath(filename) del self.output_cache[:] self.output_cache.append(cache_entry) return filename | 21092c700f762f841e8eba33e9f472c11ebd26af /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/21092c700f762f841e8eba33e9f472c11ebd26af/cosmicstring.py |
|
for arg in self.get_args(): if "--add-from-cache" in arg: f = file(self.cache_name, "w") for c in self.input_cache: print >>f, str(c) pipeline.CondorDAGNode.write_input_files(self, *args) break | f = file(self.cache_name, "w") for c in self.input_cache: print >>f, str(c) pipeline.CondorDAGNode.write_input_files(self, *args) | def write_input_files(self, *args): # oh. my. god. this is fscked. for arg in self.get_args(): if "--add-from-cache" in arg: f = file(self.cache_name, "w") for c in self.input_cache: print >>f, str(c) pipeline.CondorDAGNode.write_input_files(self, *args) break | 21092c700f762f841e8eba33e9f472c11ebd26af /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/21092c700f762f841e8eba33e9f472c11ebd26af/cosmicstring.py |
injectionconfidence=0 | def loadDataFile(filename): print filename infile=open(filename,'r') formatstr=infile.readline().lstrip() header=formatstr.split() llines=[] import re dec=re.compile(r'[^\d.-]+') for line in infile: sline=line.split() proceed=True if len(sline)<1: print 'Ignoring empty line in input file: %s'%(sline) proceed=False for s in sline: if dec.search(s) is not None: print 'Warning! Ignoring non-numeric data after the header: %s'%(sline) proceed=False if proceed: llines.append(array(map(float,sline))) flines=array(llines) for i in range(0,len(header)): if header[i].lower().find('log')!=-1 and header[i].lower()!='logl': print 'exponentiating %s'%(header[i]) flines[:,i]=exp(flines[:,i]) header[i]=header[i].replace('log','') if header[i].lower().find('sin')!=-1: print 'asining %s'%(header[i]) flines[:,i]=arcsin(flines[:,i]) header[i]=header[i].replace('sin','') if header[i].lower().find('cos')!=-1: print 'acosing %s'%(header[i]) flines[:,i]=arccos(flines[:,i]) header[i]=header[i].replace('cos','') header[i]=header[i].replace('(','') header[i]=header[i].replace(')','') print 'Read columns %s'%(str(header)) return header,flines | 9d26385d15c782f3f1b49b2a4badf738fd4f2223 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/9d26385d15c782f3f1b49b2a4badf738fd4f2223/cbcBayesSkyRes.py |
|
if injectionconfidence: | if injectionconfidence!=0: | def plot2Dkernel(xdat,ydat,Nx,Ny): xax=linspace(min(xdat),max(xdat),Nx) yax=linspace(min(ydat),max(ydat),Ny) x,y=numpy.meshgrid(xax,yax) samp=array([xdat,ydat]) kde=stats.kde.gaussian_kde(samp) grid_coords = numpy.append(x.reshape(-1,1),y.reshape(-1,1),axis=1) z = kde(grid_coords.T) z = z.reshape(Nx,Ny) asp=xax.ptp()/yax.ptp() | 9d26385d15c782f3f1b49b2a4badf738fd4f2223 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/9d26385d15c782f3f1b49b2a4badf738fd4f2223/cbcBayesSkyRes.py |
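For a plain float the two spellings in this row are equivalent; they diverge when the value can be `None` (and a NumPy array would make the bare truth test raise). A small check of the edge cases:

```python
for v in (0.0, 0.5, None):
    print(repr(v), "truthy:", bool(v), " != 0:", v != 0)
# 0.0  -> truthy: False   != 0: False   (the two tests agree)
# 0.5  -> truthy: True    != 0: True    (agree)
# None -> truthy: False   != 0: True    (they diverge)
```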
twoDKdePath=os.path.join(margdir,par1_name+'-'+par2_name+'_2Dkernel.png') | figname=par1_name+'-'+par2_name+'_2Dkernel.png' twoDKdePath=os.path.join(margdir,figname) | def cbcBayesPostProc(outdir,data,oneDMenu,twoDGreedyMenu,GreedyRes,confidence_levels,twoDplots,injfile=None,eventnum=None,skyres=None,bayesfactornoise=None,bayesfactorcoherent=None): """ This is a demonstration script for using the functionality/data structures contained in pylal.bayespputils . It will produce a webpage from a file containing posterior samples generated by the parameter estimation codes with 1D/2D plots and stats from the marginal posteriors for each parameter/set of parameters. """ if eventnum is not None and injfile is None: print "You specified an event number but no injection file. Ignoring!" if data is None: print 'You must specify an input data file' exit(1) # if outdir is None: print "You must specify an output directory." exit(1) if not os.path.isdir(outdir): os.makedirs(outdir) # commonOutputFileObj=open(data[0]) #Select injections using tc +/- 0.1s if it exists or eventnum from the injection file if injfile: import itertools injections = SimInspiralUtils.ReadSimInspiralFromFiles([injfile]) if(eventnum is not None): if(len(injections)<eventnum): print "Error: You asked for event %d, but %s contains only %d injections" %(eventnum,injfile,len(injections)) sys.exit(1) else: injection=injections[eventnum] else: if(len(injections)<1): print 'Warning: Cannot find injection with end time %f' %(means[2]) else: injection = itertools.ifilter(lambda a: abs(a.get_end() - means[2]) < 0.1, injections).next() ## Load Bayes factors ## # Add Bayes factor information to summary file # if bayesfactornoise is not None: bfile=open(bayesfactornoise,'r') BSN=bfile.read() bfile.close() print 'BSN: %s'%BSN if bayesfactorcoherent is not None: bfile=open(bayesfactorcoherent,'r') BCI=bfile.read() bfile.close() print 'BCI: %s'%BCI #Create an instance of the posterior class using the posterior values loaded #from the file and any injection information (if given). 
pos = bppu.Posterior(commonOutputFileObj,SimInspiralTableEntry=injection) if ('mc' in pos.names or 'mchirp' in pos.names) and \ 'eta' in pos.names and \ ('mass1' not in pos.names or 'm1' not in pos.names) and\ ('m2' not in pos.names or 'm2' not in pos.names): if 'mc' in pos.names: mchirp_name='mc' else: mchirp_name='mchirp' if injection: inj_mass1,inj_mass2=bppu.mc2ms(injection.mchirp,injection.eta) mass1_samps,mass2_samps=bppu.mc2ms(pos[mchirp_name].samples,pos['eta'].samples) mass1_pos=bppu.OneDPosterior('m1',mass1_samps,injected_value=inj_mass1) mass2_pos=bppu.OneDPosterior('m2',mass2_samps,injected_value=inj_mass2) pos.append(mass1_pos) pos.append(mass2_pos) ##Print some summary stats for the user...## #Number of samples print "Number of posterior samples: %i"%len(pos) # Means print 'Means:' print str(pos.means) #Median print 'Median:' print str(pos.medians) #maxL print 'maxL:' max_pos,max_pos_co=pos.maxL print max_pos_co #==================================================================# #Create web page #==================================================================# html=bppu.htmlPage('Posterior PDFs') #Create a section for meta-data/run information html_meta=html.add_section('Summary') html_meta.p('Produced from '+str(len(pos))+' posterior samples.') html_meta.p('Samples read from %s'%(data[0])) #Create a section for model selection results (if they exist) if bayesfactornoise is not None: html_model=html.add_section('Model selection') html_model.p('log Bayes factor ( coherent vs gaussian noise) = %s, Bayes factor=%f'%(BSN,exp(float(BSN)))) if bayesfactorcoherent is not None: html_model.p('log Bayes factor ( coherent vs incoherent OR noise ) = %s, Bayes factor=%f'%(BCI,exp(float(BCI)))) #Create a section for summary statistics html_stats=html.add_section('Summary statistics') html_stats.write(str(pos)) #==================================================================# #Generate sky map #==================================================================# #If sky resolution parameter has been specified try and create sky map... skyreses=None sky_injection_cl=None if skyres is not None and 'ra' in pos.names and 'dec' in pos.names: #Greedy bin sky samples (ra,dec) into a grid on the sky which preserves #? top_ranked_sky_pixels,sky_injection_cl,skyreses,injection_area=bppu.greedy_bin_sky(pos,skyres,confidence_levels) print "BCI for sky area:" print skyreses #Create sky map in outdir bppu.plot_sky_map(top_ranked_sky_pixels,outdir) #Create a web page section for sky localization results/plots html_sky=html.add_section('Sky Localization') if injection: if sky_injection_cl: html_sky.p('Injection found at confidence interval %f in sky location'%(sky_injection_cl)) else: html_sky.p('Injection not found in posterior bins in sky location!') html_sky.write('<img width="35%" src="skymap.png"/>') if skyres is not None: html_sky_write='<table border="1"><tr><th>Confidence region</th><th>size (sq. deg)</th></tr>' fracs=skyreses.keys() fracs.sort() skysizes=[skyreses[frac] for frac in fracs] for frac,skysize in zip(fracs,skysizes): html_sky_write+=('<tr><td>%f</td><td>%f</td></tr>'%(frac,skysize)) html_sky_write+=('</table>') html_sky.write(html_sky_write) #==================================================================# #2D posteriors #==================================================================# #Loop over parameter pairs in twoDGreedyMenu and bin the sample pairs #using a greedy algorithm . 
The ranked pixels (toppoints) are used #to plot 2D histograms and evaluate Bayesian confidence intervals. #Make a folder for the 2D kde plots margdir=os.path.join(outdir,'2Dkde') if not os.path.isdir(margdir): os.makedirs(margdir) twobinsdir=os.path.join(outdir,'2Dbins') if not os.path.isdir(twobinsdir): os.makedirs(twobinsdir) #Add a section to the webpage for a table of the confidence interval #results. html_tcig=html.add_section('2D confidence intervals (greedy binning)') #Generate the top part of the table html_tcig_write='<table width="100%" border="1"><tr><th/>' confidence_levels.sort() for cl in confidence_levels: html_tcig_write+='<th>%f</th>'%cl if injection: html_tcig_write+='<th>Injection Confidence Level</th>' html_tcig_write+='<th>Injection Confidence Interval</th>' html_tcig_write+='</tr>' #= Add a section for a table of 2D marginal PDFs (kde) html_tcmp=html.add_section('2D Marginal PDFs') html_tcmp.br() #Table matter html_tcmp_write='<table border="1" width="100%">' row_count=0 for par1_name,par2_name in twoDGreedyMenu: par1_name=par1_name.lower() par2_name=par2_name.lower() print "Binning %s-%s to determine confidence levels ..."%(par1_name,par2_name) try: pos[par1_name.lower()] except KeyError: print "No input chain for %s, skipping binning."%par1_name continue try: pos[par2_name.lower()] except KeyError: print "No input chain for %s, skipping binning."%par2_name continue #Bin sizes try: par1_bin=GreedyRes[par1_name] except KeyError: print "Bin size is not set for %s, skipping %s/%s binning."%(par1_name,par1_name,par2_name) continue try: par2_bin=GreedyRes[par2_name] except KeyError: print "Bin size is not set for %s, skipping %s/%s binning."%(par2_name,par1_name,par2_name) continue #Form greedy binning input structure greedy2Params={par1_name:par1_bin,par2_name:par2_bin} #Greedy bin the posterior samples toppoints,injection_cl,reses,injection_area=\ bppu.greedy_bin_two_param(pos,greedy2Params,confidence_levels) print "BCI %s-%s:"%(par1_name,par2_name) print reses #Generate new BCI html table row BCItableline='<tr><td>%s-%s</td>'%(par1_name,par2_name) cls=reses.keys() cls.sort() for cl in cls: BCItableline+='<td>%f</td>'%reses[cl] if injection is not None and injection_cl is not None: BCItableline+='<td>%f</td>'%injection_cl BCItableline+='<td>%f</td>'%injection_area BCItableline+='</tr>' #Append new table line to section html html_tcig_write+=BCItableline #= Plot 2D histograms of greedily binned points =# #greedy2PlotFig=bppu.plot_two_param_greedy_bins(np.array(toppoints),pos,greedy2Params) #greedy2PlotFig.savefig(os.path.join(twobinsdir,'%s-%s_greedy2.png'%(par1_name,par2_name))) #= Generate 2D kde plots =# print 'Generating %s-%s plot'%(par1_name,par2_name) par1_pos=pos[par1_name].samples par2_pos=pos[par2_name].samples if (size(np.unique(par1_pos))<2 or size(np.unique(par2_pos))<2): continue plot2DkdeParams={par1_name:50,par2_name:50} myfig=bppu.plot_two_param_kde(pos,plot2DkdeParams) twoDKdePath=os.path.join(margdir,par1_name+'-'+par2_name+'_2Dkernel.png') if row_count==0: html_tcmp_write+='<tr>' html_tcmp_write+='<td width="30%"><img width="100%" src="'+twoDKdePath+'"/></td>' row_count+=1 if row_count==3: html_tcmp_write+='</tr>' row_count=0 myfig.savefig(twoDKdePath) #Finish off the BCI table and write it into the etree html_tcig_write+='</table>' html_tcig.write(html_tcig_write) #Finish off the 2D kde plot table while row_count!=0: html_tcmp_write+='<td/>' row_count+=1 if row_count==3: row_count=0 html_tcmp_write+='</tr>' html_tcmp_write+='</table>' 
html_tcmp.write(html_tcmp_write) #Add a link to all plots html_tcmp.br() html_tcmp.a("2D/",'All 2D Marginal PDFs') html_tcmp.hr() #==================================================================# #1D posteriors #==================================================================# #Loop over each parameter and determine the contigious and greedy #confidence levels and some statistics. #Add section for 1D confidence intervals html_ogci=html.add_section('1D confidence intervals (greedy binning)') #Generate the top part of the table html_ogci_write='<table width="100%" border="1"><tr><th/>' confidence_levels.sort() for cl in confidence_levels: html_ogci_write+='<th>%f</th>'%cl if injection: html_ogci_write+='<th>Injection Confidence Level</th>' html_ogci_write+='<th>Injection Confidence Interval</th>' html_ogci_write+='</tr>' #Add section for 1D marginal PDFs and sample plots html_ompdf=html.add_section('1D marginal posterior PDFs') html_ompdf.br() #Table matter html_ompdf_write= '<table><tr><th>Histogram and Kernel Density Estimate</th><th>Samples used</th></tr>' onepdfdir=os.path.join(outdir,'1Dpdf') if not os.path.isdir(onepdfdir): os.makedirs(onepdfdir) sampsdir=os.path.join(outdir,'1Dsamps') if not os.path.isdir(sampsdir): os.makedirs(sampsdir) for par_name in oneDMenu: par_name=par_name.lower() print "Binning %s to determine confidence levels ..."%par_name try: pos[par_name.lower()] except KeyError: print "No input chain for %s, skipping binning."%par_name continue try: par_bin=GreedyRes[par_name] except KeyError: print "Bin size is not set for %s, skipping binning."%par_name continue binParams={par_name:par_bin} toppoints,injectionconfidence,reses,injection_area=bppu.greedy_bin_one_param(pos,binParams,confidence_levels) oneDContCL,oneDContInj = bppu.contigious_interval_one_param(pos,binParams,confidence_levels) #Generate new BCI html table row BCItableline='<tr><td>%s</td>'%(par_name) cls=reses.keys() cls.sort() for cl in cls: BCItableline+='<td>%f</td>'%reses[cl] if injection is not None and injectionconfidence is not None and injection_area is not None: BCItableline+='<td>%f</td>'%injectionconfidence BCItableline+='<td>%f</td>'%injection_area BCItableline+='</tr>' #Append new table line to section html html_ogci_write+=BCItableline #Generate 1D histogram/kde plots print "Generating 1D plot for %s."%par_name oneDPDFParams={par_name:50} rbins,plotFig=bppu.plot_one_param_pdf(pos,oneDPDFParams) figname=par_name+'.png' oneDplotPath=os.path.join(onepdfdir,figname) plotFig.savefig(oneDplotPath) if rbins: print "r of injected value of %s (bins) = %f"%(par_name, rbins) ##Produce plot of raw samples myfig=plt.figure(figsize=(4,3.5),dpi=80) pos_samps=pos[par_name].samples plt.plot(pos_samps,'.',figure=myfig) injpar=pos[par_name].injval if injpar: if min(pos_samps)<injpar and max(pos_samps)>injpar: plt.plot([0,len(pos_samps)],[injpar,injpar],'r-.') myfig.savefig(os.path.join(sampsdir,figname.replace('.png','_samps.png'))) html_ompdf_write+='<tr><td><img src="1Dpdf/'+figname+'"/></td><td><img src="1Dsamps/'+figname.replace('.png','_samps.png')+'"/></td></tr>' html_ompdf_write+='</table>' html_ompdf.write(html_ompdf_write) html_ogci_write+='</table>' html_ogci.write(html_ogci_write) html_ogci.hr() html_ogci.br() html_ompdf.hr() html_ompdf.br() html_footer=html.add_section('') html_footer.p('Produced using cbcBayesPostProc.py at '+strftime("%Y-%m-%d %H:%M:%S")+' .') html_footer.p(git_version.verbose_msg) #Save results page resultspage=open(os.path.join(outdir,'posplots.html'),'w') 
resultspage.write(str(html)) # Save posterior samples too... posfilename=os.path.join(outdir,'posterior_samples.dat') posfile=open(posfilename,'w') input_file=open(data[0]) posfile.write(input_file.read()) # posfilename2=os.path.join(outdir,'posterior_samples2.dat') pos.write_to_file(posfilename2) #Close files input_file.close() posfile.close() resultspage.close() | 5f8152dff458ceb1a147c6c8d816e08c11982cd1 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/5f8152dff458ceb1a147c6c8d816e08c11982cd1/cbcBayesPostProc.py |
html_tcmp_write+='<td width="30%"><img width="100%" src="'+twoDKdePath+'"/></td>' | html_tcmp_write+='<td width="30%"><img width="100%" src="2Dkde/'+twoDKdePath+'"/></td>' | def cbcBayesPostProc(outdir,data,oneDMenu,twoDGreedyMenu,GreedyRes,confidence_levels,twoDplots,injfile=None,eventnum=None,skyres=None,bayesfactornoise=None,bayesfactorcoherent=None): """ This is a demonstration script for using the functionality/data structures contained in pylal.bayespputils . It will produce a webpage from a file containing posterior samples generated by the parameter estimation codes with 1D/2D plots and stats from the marginal posteriors for each parameter/set of parameters. """ if eventnum is not None and injfile is None: print "You specified an event number but no injection file. Ignoring!" if data is None: print 'You must specify an input data file' exit(1) # if outdir is None: print "You must specify an output directory." exit(1) if not os.path.isdir(outdir): os.makedirs(outdir) # commonOutputFileObj=open(data[0]) #Select injections using tc +/- 0.1s if it exists or eventnum from the injection file if injfile: import itertools injections = SimInspiralUtils.ReadSimInspiralFromFiles([injfile]) if(eventnum is not None): if(len(injections)<eventnum): print "Error: You asked for event %d, but %s contains only %d injections" %(eventnum,injfile,len(injections)) sys.exit(1) else: injection=injections[eventnum] else: if(len(injections)<1): print 'Warning: Cannot find injection with end time %f' %(means[2]) else: injection = itertools.ifilter(lambda a: abs(a.get_end() - means[2]) < 0.1, injections).next() ## Load Bayes factors ## # Add Bayes factor information to summary file # if bayesfactornoise is not None: bfile=open(bayesfactornoise,'r') BSN=bfile.read() bfile.close() print 'BSN: %s'%BSN if bayesfactorcoherent is not None: bfile=open(bayesfactorcoherent,'r') BCI=bfile.read() bfile.close() print 'BCI: %s'%BCI #Create an instance of the posterior class using the posterior values loaded #from the file and any injection information (if given). 
pos = bppu.Posterior(commonOutputFileObj,SimInspiralTableEntry=injection) if ('mc' in pos.names or 'mchirp' in pos.names) and \ 'eta' in pos.names and \ ('mass1' not in pos.names or 'm1' not in pos.names) and\ ('m2' not in pos.names or 'm2' not in pos.names): if 'mc' in pos.names: mchirp_name='mc' else: mchirp_name='mchirp' if injection: inj_mass1,inj_mass2=bppu.mc2ms(injection.mchirp,injection.eta) mass1_samps,mass2_samps=bppu.mc2ms(pos[mchirp_name].samples,pos['eta'].samples) mass1_pos=bppu.OneDPosterior('m1',mass1_samps,injected_value=inj_mass1) mass2_pos=bppu.OneDPosterior('m2',mass2_samps,injected_value=inj_mass2) pos.append(mass1_pos) pos.append(mass2_pos) ##Print some summary stats for the user...## #Number of samples print "Number of posterior samples: %i"%len(pos) # Means print 'Means:' print str(pos.means) #Median print 'Median:' print str(pos.medians) #maxL print 'maxL:' max_pos,max_pos_co=pos.maxL print max_pos_co #==================================================================# #Create web page #==================================================================# html=bppu.htmlPage('Posterior PDFs') #Create a section for meta-data/run information html_meta=html.add_section('Summary') html_meta.p('Produced from '+str(len(pos))+' posterior samples.') html_meta.p('Samples read from %s'%(data[0])) #Create a section for model selection results (if they exist) if bayesfactornoise is not None: html_model=html.add_section('Model selection') html_model.p('log Bayes factor ( coherent vs gaussian noise) = %s, Bayes factor=%f'%(BSN,exp(float(BSN)))) if bayesfactorcoherent is not None: html_model.p('log Bayes factor ( coherent vs incoherent OR noise ) = %s, Bayes factor=%f'%(BCI,exp(float(BCI)))) #Create a section for summary statistics html_stats=html.add_section('Summary statistics') html_stats.write(str(pos)) #==================================================================# #Generate sky map #==================================================================# #If sky resolution parameter has been specified try and create sky map... skyreses=None sky_injection_cl=None if skyres is not None and 'ra' in pos.names and 'dec' in pos.names: #Greedy bin sky samples (ra,dec) into a grid on the sky which preserves #? top_ranked_sky_pixels,sky_injection_cl,skyreses,injection_area=bppu.greedy_bin_sky(pos,skyres,confidence_levels) print "BCI for sky area:" print skyreses #Create sky map in outdir bppu.plot_sky_map(top_ranked_sky_pixels,outdir) #Create a web page section for sky localization results/plots html_sky=html.add_section('Sky Localization') if injection: if sky_injection_cl: html_sky.p('Injection found at confidence interval %f in sky location'%(sky_injection_cl)) else: html_sky.p('Injection not found in posterior bins in sky location!') html_sky.write('<img width="35%" src="skymap.png"/>') if skyres is not None: html_sky_write='<table border="1"><tr><th>Confidence region</th><th>size (sq. deg)</th></tr>' fracs=skyreses.keys() fracs.sort() skysizes=[skyreses[frac] for frac in fracs] for frac,skysize in zip(fracs,skysizes): html_sky_write+=('<tr><td>%f</td><td>%f</td></tr>'%(frac,skysize)) html_sky_write+=('</table>') html_sky.write(html_sky_write) #==================================================================# #2D posteriors #==================================================================# #Loop over parameter pairs in twoDGreedyMenu and bin the sample pairs #using a greedy algorithm . 
The ranked pixels (toppoints) are used #to plot 2D histograms and evaluate Bayesian confidence intervals. #Make a folder for the 2D kde plots margdir=os.path.join(outdir,'2Dkde') if not os.path.isdir(margdir): os.makedirs(margdir) twobinsdir=os.path.join(outdir,'2Dbins') if not os.path.isdir(twobinsdir): os.makedirs(twobinsdir) #Add a section to the webpage for a table of the confidence interval #results. html_tcig=html.add_section('2D confidence intervals (greedy binning)') #Generate the top part of the table html_tcig_write='<table width="100%" border="1"><tr><th/>' confidence_levels.sort() for cl in confidence_levels: html_tcig_write+='<th>%f</th>'%cl if injection: html_tcig_write+='<th>Injection Confidence Level</th>' html_tcig_write+='<th>Injection Confidence Interval</th>' html_tcig_write+='</tr>' #= Add a section for a table of 2D marginal PDFs (kde) html_tcmp=html.add_section('2D Marginal PDFs') html_tcmp.br() #Table matter html_tcmp_write='<table border="1" width="100%">' row_count=0 for par1_name,par2_name in twoDGreedyMenu: par1_name=par1_name.lower() par2_name=par2_name.lower() print "Binning %s-%s to determine confidence levels ..."%(par1_name,par2_name) try: pos[par1_name.lower()] except KeyError: print "No input chain for %s, skipping binning."%par1_name continue try: pos[par2_name.lower()] except KeyError: print "No input chain for %s, skipping binning."%par2_name continue #Bin sizes try: par1_bin=GreedyRes[par1_name] except KeyError: print "Bin size is not set for %s, skipping %s/%s binning."%(par1_name,par1_name,par2_name) continue try: par2_bin=GreedyRes[par2_name] except KeyError: print "Bin size is not set for %s, skipping %s/%s binning."%(par2_name,par1_name,par2_name) continue #Form greedy binning input structure greedy2Params={par1_name:par1_bin,par2_name:par2_bin} #Greedy bin the posterior samples toppoints,injection_cl,reses,injection_area=\ bppu.greedy_bin_two_param(pos,greedy2Params,confidence_levels) print "BCI %s-%s:"%(par1_name,par2_name) print reses #Generate new BCI html table row BCItableline='<tr><td>%s-%s</td>'%(par1_name,par2_name) cls=reses.keys() cls.sort() for cl in cls: BCItableline+='<td>%f</td>'%reses[cl] if injection is not None and injection_cl is not None: BCItableline+='<td>%f</td>'%injection_cl BCItableline+='<td>%f</td>'%injection_area BCItableline+='</tr>' #Append new table line to section html html_tcig_write+=BCItableline #= Plot 2D histograms of greedily binned points =# #greedy2PlotFig=bppu.plot_two_param_greedy_bins(np.array(toppoints),pos,greedy2Params) #greedy2PlotFig.savefig(os.path.join(twobinsdir,'%s-%s_greedy2.png'%(par1_name,par2_name))) #= Generate 2D kde plots =# print 'Generating %s-%s plot'%(par1_name,par2_name) par1_pos=pos[par1_name].samples par2_pos=pos[par2_name].samples if (size(np.unique(par1_pos))<2 or size(np.unique(par2_pos))<2): continue plot2DkdeParams={par1_name:50,par2_name:50} myfig=bppu.plot_two_param_kde(pos,plot2DkdeParams) twoDKdePath=os.path.join(margdir,par1_name+'-'+par2_name+'_2Dkernel.png') if row_count==0: html_tcmp_write+='<tr>' html_tcmp_write+='<td width="30%"><img width="100%" src="'+twoDKdePath+'"/></td>' row_count+=1 if row_count==3: html_tcmp_write+='</tr>' row_count=0 myfig.savefig(twoDKdePath) #Finish off the BCI table and write it into the etree html_tcig_write+='</table>' html_tcig.write(html_tcig_write) #Finish off the 2D kde plot table while row_count!=0: html_tcmp_write+='<td/>' row_count+=1 if row_count==3: row_count=0 html_tcmp_write+='</tr>' html_tcmp_write+='</table>' 
html_tcmp.write(html_tcmp_write) #Add a link to all plots html_tcmp.br() html_tcmp.a("2D/",'All 2D Marginal PDFs') html_tcmp.hr() #==================================================================# #1D posteriors #==================================================================# #Loop over each parameter and determine the contigious and greedy #confidence levels and some statistics. #Add section for 1D confidence intervals html_ogci=html.add_section('1D confidence intervals (greedy binning)') #Generate the top part of the table html_ogci_write='<table width="100%" border="1"><tr><th/>' confidence_levels.sort() for cl in confidence_levels: html_ogci_write+='<th>%f</th>'%cl if injection: html_ogci_write+='<th>Injection Confidence Level</th>' html_ogci_write+='<th>Injection Confidence Interval</th>' html_ogci_write+='</tr>' #Add section for 1D marginal PDFs and sample plots html_ompdf=html.add_section('1D marginal posterior PDFs') html_ompdf.br() #Table matter html_ompdf_write= '<table><tr><th>Histogram and Kernel Density Estimate</th><th>Samples used</th></tr>' onepdfdir=os.path.join(outdir,'1Dpdf') if not os.path.isdir(onepdfdir): os.makedirs(onepdfdir) sampsdir=os.path.join(outdir,'1Dsamps') if not os.path.isdir(sampsdir): os.makedirs(sampsdir) for par_name in oneDMenu: par_name=par_name.lower() print "Binning %s to determine confidence levels ..."%par_name try: pos[par_name.lower()] except KeyError: print "No input chain for %s, skipping binning."%par_name continue try: par_bin=GreedyRes[par_name] except KeyError: print "Bin size is not set for %s, skipping binning."%par_name continue binParams={par_name:par_bin} toppoints,injectionconfidence,reses,injection_area=bppu.greedy_bin_one_param(pos,binParams,confidence_levels) oneDContCL,oneDContInj = bppu.contigious_interval_one_param(pos,binParams,confidence_levels) #Generate new BCI html table row BCItableline='<tr><td>%s</td>'%(par_name) cls=reses.keys() cls.sort() for cl in cls: BCItableline+='<td>%f</td>'%reses[cl] if injection is not None and injectionconfidence is not None and injection_area is not None: BCItableline+='<td>%f</td>'%injectionconfidence BCItableline+='<td>%f</td>'%injection_area BCItableline+='</tr>' #Append new table line to section html html_ogci_write+=BCItableline #Generate 1D histogram/kde plots print "Generating 1D plot for %s."%par_name oneDPDFParams={par_name:50} rbins,plotFig=bppu.plot_one_param_pdf(pos,oneDPDFParams) figname=par_name+'.png' oneDplotPath=os.path.join(onepdfdir,figname) plotFig.savefig(oneDplotPath) if rbins: print "r of injected value of %s (bins) = %f"%(par_name, rbins) ##Produce plot of raw samples myfig=plt.figure(figsize=(4,3.5),dpi=80) pos_samps=pos[par_name].samples plt.plot(pos_samps,'.',figure=myfig) injpar=pos[par_name].injval if injpar: if min(pos_samps)<injpar and max(pos_samps)>injpar: plt.plot([0,len(pos_samps)],[injpar,injpar],'r-.') myfig.savefig(os.path.join(sampsdir,figname.replace('.png','_samps.png'))) html_ompdf_write+='<tr><td><img src="1Dpdf/'+figname+'"/></td><td><img src="1Dsamps/'+figname.replace('.png','_samps.png')+'"/></td></tr>' html_ompdf_write+='</table>' html_ompdf.write(html_ompdf_write) html_ogci_write+='</table>' html_ogci.write(html_ogci_write) html_ogci.hr() html_ogci.br() html_ompdf.hr() html_ompdf.br() html_footer=html.add_section('') html_footer.p('Produced using cbcBayesPostProc.py at '+strftime("%Y-%m-%d %H:%M:%S")+' .') html_footer.p(git_version.verbose_msg) #Save results page resultspage=open(os.path.join(outdir,'posplots.html'),'w') 
resultspage.write(str(html)) # Save posterior samples too... posfilename=os.path.join(outdir,'posterior_samples.dat') posfile=open(posfilename,'w') input_file=open(data[0]) posfile.write(input_file.read()) # posfilename2=os.path.join(outdir,'posterior_samples2.dat') pos.write_to_file(posfilename2) #Close files input_file.close() posfile.close() resultspage.close() | 5f8152dff458ceb1a147c6c8d816e08c11982cd1 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/5f8152dff458ceb1a147c6c8d816e08c11982cd1/cbcBayesPostProc.py |
html_tcmp.a("2D/",'All 2D Marginal PDFs') | html_tcmp.a("2Dkde/",'All 2D marginal PDFs (kde)') | def cbcBayesPostProc(outdir,data,oneDMenu,twoDGreedyMenu,GreedyRes,confidence_levels,twoDplots,injfile=None,eventnum=None,skyres=None,bayesfactornoise=None,bayesfactorcoherent=None): """ This is a demonstration script for using the functionality/data structures contained in pylal.bayespputils . It will produce a webpage from a file containing posterior samples generated by the parameter estimation codes with 1D/2D plots and stats from the marginal posteriors for each parameter/set of parameters. """ if eventnum is not None and injfile is None: print "You specified an event number but no injection file. Ignoring!" if data is None: print 'You must specify an input data file' exit(1) # if outdir is None: print "You must specify an output directory." exit(1) if not os.path.isdir(outdir): os.makedirs(outdir) # commonOutputFileObj=open(data[0]) #Select injections using tc +/- 0.1s if it exists or eventnum from the injection file if injfile: import itertools injections = SimInspiralUtils.ReadSimInspiralFromFiles([injfile]) if(eventnum is not None): if(len(injections)<eventnum): print "Error: You asked for event %d, but %s contains only %d injections" %(eventnum,injfile,len(injections)) sys.exit(1) else: injection=injections[eventnum] else: if(len(injections)<1): print 'Warning: Cannot find injection with end time %f' %(means[2]) else: injection = itertools.ifilter(lambda a: abs(a.get_end() - means[2]) < 0.1, injections).next() ## Load Bayes factors ## # Add Bayes factor information to summary file # if bayesfactornoise is not None: bfile=open(bayesfactornoise,'r') BSN=bfile.read() bfile.close() print 'BSN: %s'%BSN if bayesfactorcoherent is not None: bfile=open(bayesfactorcoherent,'r') BCI=bfile.read() bfile.close() print 'BCI: %s'%BCI #Create an instance of the posterior class using the posterior values loaded #from the file and any injection information (if given). 
pos = bppu.Posterior(commonOutputFileObj,SimInspiralTableEntry=injection) if ('mc' in pos.names or 'mchirp' in pos.names) and \ 'eta' in pos.names and \ ('mass1' not in pos.names or 'm1' not in pos.names) and\ ('m2' not in pos.names or 'm2' not in pos.names): if 'mc' in pos.names: mchirp_name='mc' else: mchirp_name='mchirp' if injection: inj_mass1,inj_mass2=bppu.mc2ms(injection.mchirp,injection.eta) mass1_samps,mass2_samps=bppu.mc2ms(pos[mchirp_name].samples,pos['eta'].samples) mass1_pos=bppu.OneDPosterior('m1',mass1_samps,injected_value=inj_mass1) mass2_pos=bppu.OneDPosterior('m2',mass2_samps,injected_value=inj_mass2) pos.append(mass1_pos) pos.append(mass2_pos) ##Print some summary stats for the user...## #Number of samples print "Number of posterior samples: %i"%len(pos) # Means print 'Means:' print str(pos.means) #Median print 'Median:' print str(pos.medians) #maxL print 'maxL:' max_pos,max_pos_co=pos.maxL print max_pos_co #==================================================================# #Create web page #==================================================================# html=bppu.htmlPage('Posterior PDFs') #Create a section for meta-data/run information html_meta=html.add_section('Summary') html_meta.p('Produced from '+str(len(pos))+' posterior samples.') html_meta.p('Samples read from %s'%(data[0])) #Create a section for model selection results (if they exist) if bayesfactornoise is not None: html_model=html.add_section('Model selection') html_model.p('log Bayes factor ( coherent vs gaussian noise) = %s, Bayes factor=%f'%(BSN,exp(float(BSN)))) if bayesfactorcoherent is not None: html_model.p('log Bayes factor ( coherent vs incoherent OR noise ) = %s, Bayes factor=%f'%(BCI,exp(float(BCI)))) #Create a section for summary statistics html_stats=html.add_section('Summary statistics') html_stats.write(str(pos)) #==================================================================# #Generate sky map #==================================================================# #If sky resolution parameter has been specified try and create sky map... skyreses=None sky_injection_cl=None if skyres is not None and 'ra' in pos.names and 'dec' in pos.names: #Greedy bin sky samples (ra,dec) into a grid on the sky which preserves #? top_ranked_sky_pixels,sky_injection_cl,skyreses,injection_area=bppu.greedy_bin_sky(pos,skyres,confidence_levels) print "BCI for sky area:" print skyreses #Create sky map in outdir bppu.plot_sky_map(top_ranked_sky_pixels,outdir) #Create a web page section for sky localization results/plots html_sky=html.add_section('Sky Localization') if injection: if sky_injection_cl: html_sky.p('Injection found at confidence interval %f in sky location'%(sky_injection_cl)) else: html_sky.p('Injection not found in posterior bins in sky location!') html_sky.write('<img width="35%" src="skymap.png"/>') if skyres is not None: html_sky_write='<table border="1"><tr><th>Confidence region</th><th>size (sq. deg)</th></tr>' fracs=skyreses.keys() fracs.sort() skysizes=[skyreses[frac] for frac in fracs] for frac,skysize in zip(fracs,skysizes): html_sky_write+=('<tr><td>%f</td><td>%f</td></tr>'%(frac,skysize)) html_sky_write+=('</table>') html_sky.write(html_sky_write) #==================================================================# #2D posteriors #==================================================================# #Loop over parameter pairs in twoDGreedyMenu and bin the sample pairs #using a greedy algorithm . 
The ranked pixels (toppoints) are used #to plot 2D histograms and evaluate Bayesian confidence intervals. #Make a folder for the 2D kde plots margdir=os.path.join(outdir,'2Dkde') if not os.path.isdir(margdir): os.makedirs(margdir) twobinsdir=os.path.join(outdir,'2Dbins') if not os.path.isdir(twobinsdir): os.makedirs(twobinsdir) #Add a section to the webpage for a table of the confidence interval #results. html_tcig=html.add_section('2D confidence intervals (greedy binning)') #Generate the top part of the table html_tcig_write='<table width="100%" border="1"><tr><th/>' confidence_levels.sort() for cl in confidence_levels: html_tcig_write+='<th>%f</th>'%cl if injection: html_tcig_write+='<th>Injection Confidence Level</th>' html_tcig_write+='<th>Injection Confidence Interval</th>' html_tcig_write+='</tr>' #= Add a section for a table of 2D marginal PDFs (kde) html_tcmp=html.add_section('2D Marginal PDFs') html_tcmp.br() #Table matter html_tcmp_write='<table border="1" width="100%">' row_count=0 for par1_name,par2_name in twoDGreedyMenu: par1_name=par1_name.lower() par2_name=par2_name.lower() print "Binning %s-%s to determine confidence levels ..."%(par1_name,par2_name) try: pos[par1_name.lower()] except KeyError: print "No input chain for %s, skipping binning."%par1_name continue try: pos[par2_name.lower()] except KeyError: print "No input chain for %s, skipping binning."%par2_name continue #Bin sizes try: par1_bin=GreedyRes[par1_name] except KeyError: print "Bin size is not set for %s, skipping %s/%s binning."%(par1_name,par1_name,par2_name) continue try: par2_bin=GreedyRes[par2_name] except KeyError: print "Bin size is not set for %s, skipping %s/%s binning."%(par2_name,par1_name,par2_name) continue #Form greedy binning input structure greedy2Params={par1_name:par1_bin,par2_name:par2_bin} #Greedy bin the posterior samples toppoints,injection_cl,reses,injection_area=\ bppu.greedy_bin_two_param(pos,greedy2Params,confidence_levels) print "BCI %s-%s:"%(par1_name,par2_name) print reses #Generate new BCI html table row BCItableline='<tr><td>%s-%s</td>'%(par1_name,par2_name) cls=reses.keys() cls.sort() for cl in cls: BCItableline+='<td>%f</td>'%reses[cl] if injection is not None and injection_cl is not None: BCItableline+='<td>%f</td>'%injection_cl BCItableline+='<td>%f</td>'%injection_area BCItableline+='</tr>' #Append new table line to section html html_tcig_write+=BCItableline #= Plot 2D histograms of greedily binned points =# #greedy2PlotFig=bppu.plot_two_param_greedy_bins(np.array(toppoints),pos,greedy2Params) #greedy2PlotFig.savefig(os.path.join(twobinsdir,'%s-%s_greedy2.png'%(par1_name,par2_name))) #= Generate 2D kde plots =# print 'Generating %s-%s plot'%(par1_name,par2_name) par1_pos=pos[par1_name].samples par2_pos=pos[par2_name].samples if (size(np.unique(par1_pos))<2 or size(np.unique(par2_pos))<2): continue plot2DkdeParams={par1_name:50,par2_name:50} myfig=bppu.plot_two_param_kde(pos,plot2DkdeParams) twoDKdePath=os.path.join(margdir,par1_name+'-'+par2_name+'_2Dkernel.png') if row_count==0: html_tcmp_write+='<tr>' html_tcmp_write+='<td width="30%"><img width="100%" src="'+twoDKdePath+'"/></td>' row_count+=1 if row_count==3: html_tcmp_write+='</tr>' row_count=0 myfig.savefig(twoDKdePath) #Finish off the BCI table and write it into the etree html_tcig_write+='</table>' html_tcig.write(html_tcig_write) #Finish off the 2D kde plot table while row_count!=0: html_tcmp_write+='<td/>' row_count+=1 if row_count==3: row_count=0 html_tcmp_write+='</tr>' html_tcmp_write+='</table>' 
html_tcmp.write(html_tcmp_write) #Add a link to all plots html_tcmp.br() html_tcmp.a("2D/",'All 2D Marginal PDFs') html_tcmp.hr() #==================================================================# #1D posteriors #==================================================================# #Loop over each parameter and determine the contigious and greedy #confidence levels and some statistics. #Add section for 1D confidence intervals html_ogci=html.add_section('1D confidence intervals (greedy binning)') #Generate the top part of the table html_ogci_write='<table width="100%" border="1"><tr><th/>' confidence_levels.sort() for cl in confidence_levels: html_ogci_write+='<th>%f</th>'%cl if injection: html_ogci_write+='<th>Injection Confidence Level</th>' html_ogci_write+='<th>Injection Confidence Interval</th>' html_ogci_write+='</tr>' #Add section for 1D marginal PDFs and sample plots html_ompdf=html.add_section('1D marginal posterior PDFs') html_ompdf.br() #Table matter html_ompdf_write= '<table><tr><th>Histogram and Kernel Density Estimate</th><th>Samples used</th></tr>' onepdfdir=os.path.join(outdir,'1Dpdf') if not os.path.isdir(onepdfdir): os.makedirs(onepdfdir) sampsdir=os.path.join(outdir,'1Dsamps') if not os.path.isdir(sampsdir): os.makedirs(sampsdir) for par_name in oneDMenu: par_name=par_name.lower() print "Binning %s to determine confidence levels ..."%par_name try: pos[par_name.lower()] except KeyError: print "No input chain for %s, skipping binning."%par_name continue try: par_bin=GreedyRes[par_name] except KeyError: print "Bin size is not set for %s, skipping binning."%par_name continue binParams={par_name:par_bin} toppoints,injectionconfidence,reses,injection_area=bppu.greedy_bin_one_param(pos,binParams,confidence_levels) oneDContCL,oneDContInj = bppu.contigious_interval_one_param(pos,binParams,confidence_levels) #Generate new BCI html table row BCItableline='<tr><td>%s</td>'%(par_name) cls=reses.keys() cls.sort() for cl in cls: BCItableline+='<td>%f</td>'%reses[cl] if injection is not None and injectionconfidence is not None and injection_area is not None: BCItableline+='<td>%f</td>'%injectionconfidence BCItableline+='<td>%f</td>'%injection_area BCItableline+='</tr>' #Append new table line to section html html_ogci_write+=BCItableline #Generate 1D histogram/kde plots print "Generating 1D plot for %s."%par_name oneDPDFParams={par_name:50} rbins,plotFig=bppu.plot_one_param_pdf(pos,oneDPDFParams) figname=par_name+'.png' oneDplotPath=os.path.join(onepdfdir,figname) plotFig.savefig(oneDplotPath) if rbins: print "r of injected value of %s (bins) = %f"%(par_name, rbins) ##Produce plot of raw samples myfig=plt.figure(figsize=(4,3.5),dpi=80) pos_samps=pos[par_name].samples plt.plot(pos_samps,'.',figure=myfig) injpar=pos[par_name].injval if injpar: if min(pos_samps)<injpar and max(pos_samps)>injpar: plt.plot([0,len(pos_samps)],[injpar,injpar],'r-.') myfig.savefig(os.path.join(sampsdir,figname.replace('.png','_samps.png'))) html_ompdf_write+='<tr><td><img src="1Dpdf/'+figname+'"/></td><td><img src="1Dsamps/'+figname.replace('.png','_samps.png')+'"/></td></tr>' html_ompdf_write+='</table>' html_ompdf.write(html_ompdf_write) html_ogci_write+='</table>' html_ogci.write(html_ogci_write) html_ogci.hr() html_ogci.br() html_ompdf.hr() html_ompdf.br() html_footer=html.add_section('') html_footer.p('Produced using cbcBayesPostProc.py at '+strftime("%Y-%m-%d %H:%M:%S")+' .') html_footer.p(git_version.verbose_msg) #Save results page resultspage=open(os.path.join(outdir,'posplots.html'),'w') 
resultspage.write(str(html)) # Save posterior samples too... posfilename=os.path.join(outdir,'posterior_samples.dat') posfile=open(posfilename,'w') input_file=open(data[0]) posfile.write(input_file.read()) # posfilename2=os.path.join(outdir,'posterior_samples2.dat') pos.write_to_file(posfilename2) #Close files input_file.close() posfile.close() resultspage.close() | 5f8152dff458ceb1a147c6c8d816e08c11982cd1 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/5f8152dff458ceb1a147c6c8d816e08c11982cd1/cbcBayesPostProc.py |
if re.match('.*-[0-9]*-[0-9]*\.xml', dirname): | if re.match('.*-[0-9]*-[0-9]*\.xml$', dirname): | def get_all_files_in_range(dirname, starttime, endtime, pad=64): """Returns all files in dirname and all its subdirectories whose names indicate that they contain segments in the range starttime to endtime""" ret = [] # Maybe the user just wants one file... if os.path.isfile(dirname): if re.match('.*-[0-9]*-[0-9]*\.xml', dirname): return [dirname] else: return ret first_four_start = starttime / 100000 first_four_end = endtime / 100000 for filename in os.listdir(dirname): if re.match('.*-[0-9]{4}$', filename): dirtime = int(filename[-4:]) if dirtime >= first_four_start and dirtime <= first_four_end: ret += get_all_files_in_range(os.path.join(dirname,filename), starttime, endtime, pad=pad) elif re.match('.*-[0-9]*-[0-9]*\.xml', filename): file_time = int(filename.split('-')[-2]) if file_time >= (starttime-pad) and file_time <= (endtime+pad): ret.append(os.path.join(dirname,filename)) else: # Keep recursing, we may be looking at directories of # ifos, each of which has directories with times ret += get_all_files_in_range(os.path.join(dirname,filename), starttime, endtime, pad=pad) return ret | e0fb7da226a60de170116ca2cc2d29e96e8f00ed /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/e0fb7da226a60de170116ca2cc2d29e96e8f00ed/segmentdb_utils.py |
elif re.match('.*-[0-9]*-[0-9]*\.xml', filename): | elif re.match('.*-[0-9]*-[0-9]*\.xml$', filename): | def get_all_files_in_range(dirname, starttime, endtime, pad=64): """Returns all files in dirname and all its subdirectories whose names indicate that they contain segments in the range starttime to endtime""" ret = [] # Maybe the user just wants one file... if os.path.isfile(dirname): if re.match('.*-[0-9]*-[0-9]*\.xml', dirname): return [dirname] else: return ret first_four_start = starttime / 100000 first_four_end = endtime / 100000 for filename in os.listdir(dirname): if re.match('.*-[0-9]{4}$', filename): dirtime = int(filename[-4:]) if dirtime >= first_four_start and dirtime <= first_four_end: ret += get_all_files_in_range(os.path.join(dirname,filename), starttime, endtime, pad=pad) elif re.match('.*-[0-9]*-[0-9]*\.xml', filename): file_time = int(filename.split('-')[-2]) if file_time >= (starttime-pad) and file_time <= (endtime+pad): ret.append(os.path.join(dirname,filename)) else: # Keep recursing, we may be looking at directories of # ifos, each of which has directories with times ret += get_all_files_in_range(os.path.join(dirname,filename), starttime, endtime, pad=pad) return ret | e0fb7da226a60de170116ca2cc2d29e96e8f00ed /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/e0fb7da226a60de170116ca2cc2d29e96e8f00ed/segmentdb_utils.py |
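The two rows above append `$` to the same pattern in `get_all_files_in_range`. Because `re.match` anchors only at the start of the string, the unanchored pattern also accepts names where `.xml` occurs mid-name; the anchor restricts it to names that actually end in `.xml`:

```python
import re

loose  = re.compile(r'.*-[0-9]*-[0-9]*\.xml')
strict = re.compile(r'.*-[0-9]*-[0-9]*\.xml$')

name = 'H1-SEGMENTS-900000000-1000.xml.bak'   # hypothetical stray file
print(bool(loose.match(name)))    # True  -- matches the '...xml' prefix
print(bool(strict.match(name)))   # False -- '$' requires '.xml' at the end
```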
def write_abstract_dag(self): """ Write all the nodes in the workflow to the DAX file. """ if not self.__dax_file_path: # this workflow is not dax-compatible, so just return return try: dagfile = open( self.__dag_file_path, 'w' ) except: raise CondorDAGError, "Cannot open file " + self.__dag_file_path | af0d11174f59a33954bcf909dedc86e1d0c82d73 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/af0d11174f59a33954bcf909dedc86e1d0c82d73/pipeline.py |
dagfile = open( self.__dag_file_path, 'w' ) | dagfile = open( self.__dax_file_path, 'w' ) | def write_abstract_dag(self): """ Write all the nodes in the workflow to the DAX file. """ if not self.__dax_file_path: # this workflow is not dax-compatible, so just return return try: dagfile = open( self.__dag_file_path, 'w' ) except: raise CondorDAGError, "Cannot open file " + self.__dag_file_path | af0d11174f59a33954bcf909dedc86e1d0c82d73 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/af0d11174f59a33954bcf909dedc86e1d0c82d73/pipeline.py |
preamble = """\ <?xml version="1.0" encoding="UTF-8"?> | preamble = """<?xml version="1.0" encoding="UTF-8"?> | def write_abstract_dag(self): """ Write all the nodes in the workflow to the DAX file. """ if not self.__dax_file_path: # this workflow is not dax-compatible, so just return return try: dagfile = open( self.__dag_file_path, 'w' ) except: raise CondorDAGError, "Cannot open file " + self.__dag_file_path | af0d11174f59a33954bcf909dedc86e1d0c82d73 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/af0d11174f59a33954bcf909dedc86e1d0c82d73/pipeline.py |
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" version="3.0" count="1" index="0" """ | xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" version="3.0" count="1" index="0" """ | def write_abstract_dag(self): """ Write all the nodes in the workflow to the DAX file. """ if not self.__dax_file_path: # this workflow is not dax-compatible, so just return return try: dagfile = open( self.__dag_file_path, 'w' ) except: raise CondorDAGError, "Cannot open file " + self.__dag_file_path | af0d11174f59a33954bcf909dedc86e1d0c82d73 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/af0d11174f59a33954bcf909dedc86e1d0c82d73/pipeline.py |
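The two preamble rows above pull the XML declaration flush against the opening triple quotes. The diff does not say why, but a likely motivation is that XML parsers reject any bytes, even a newline, before the declaration:

```python
import xml.etree.ElementTree as ET

ok  = '<?xml version="1.0" encoding="UTF-8"?><adag/>'
bad = '\n<?xml version="1.0" encoding="UTF-8"?><adag/>'

ET.fromstring(ok)           # parses
try:
    ET.fromstring(bad)      # declaration preceded by a newline
except ET.ParseError as err:
    print(err)              # XML or text declaration not at start of entity
```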
if isinstance(node, LSCDataFindNode): | if self.is_dax() and isinstance(node, LSCDataFindNode): | def write_abstract_dag(self): """ Write all the nodes in the workflow to the DAX file. """ if not self.__dax_file_path: # this workflow is not dax-compatible, so just return return try: dagfile = open( self.__dag_file_path, 'w' ) except: raise CondorDAGError, "Cannot open file " + self.__dag_file_path | af0d11174f59a33954bcf909dedc86e1d0c82d73 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/af0d11174f59a33954bcf909dedc86e1d0c82d73/pipeline.py |
os.getcwd(),node.job().get_dag_directory(),subgax_name) | os.getcwd(),node.job().get_dag_directory(),subdag_name) | def write_abstract_dag(self): """ Write all the nodes in the workflow to the DAX file. """ if not self.__dax_file_path: # this workflow is not dax-compatible, so just return return try: dagfile = open( self.__dag_file_path, 'w' ) except: raise CondorDAGError, "Cannot open file " + self.__dag_file_path | af0d11174f59a33954bcf909dedc86e1d0c82d73 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/af0d11174f59a33954bcf909dedc86e1d0c82d73/pipeline.py |
print >>dagfile, """<dag id="%s" file="%s">""" % (id, subdag_name) | print >>dagfile, """<dag id="%s" file="%s">""" % (id_tag, subdag_name) | def write_abstract_dag(self): """ Write all the nodes in the workflow to the DAX file. """ if not self.__dax_file_path: # this workflow is not dax-compatible, so just return return try: dagfile = open( self.__dag_file_path, 'w' ) except: raise CondorDAGError, "Cannot open file " + self.__dag_file_path | af0d11174f59a33954bcf909dedc86e1d0c82d73 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/af0d11174f59a33954bcf909dedc86e1d0c82d73/pipeline.py |
print >>dagfile, """<dax id="%s" file="%s">""" % (id, subdax_name) | print >>dagfile, """<dax id="%s" file="%s">""" % (id_tag, subdax_name) | def write_abstract_dag(self): """ Write all the nodes in the workflow to the DAX file. """ if not self.__dax_file_path: # this workflow is not dax-compatible, so just return return try: dagfile = open( self.__dag_file_path, 'w' ) except: raise CondorDAGError, "Cannot open file " + self.__dag_file_path | af0d11174f59a33954bcf909dedc86e1d0c82d73 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/af0d11174f59a33954bcf909dedc86e1d0c82d73/pipeline.py |
outdir=opts.outpath | def histN(mat,N): Nd=size(N) histo=zeros(N) scale=array(map(lambda a,b:a/b,map(lambda a,b:(1*a)-b,map(max,mat),map(min,mat)),N)) axes=array(map(lambda a,N:linspace(min(a),max(a),N),mat,N)) bins=floor(map(lambda a,b:a/b , map(lambda a,b:a-b, mat, map(min,mat) ),scale*1.01)) hbins=reshape(map(int,bins.flat),bins.shape) for co in transpose(hbins): t=tuple(co) histo[t[::-1]]=histo[t[::-1]]+1 return (axes,histo) | 6b7383178dfc3e0bc72b88b696c5636f91f41a6d /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/6b7383178dfc3e0bc72b88b696c5636f91f41a6d/cbcBayesSkyRes.py |
|
if debug: print "maxdx : %i , maxvalue: %f sample0 : %f sample1: %f"%(maxdx,maxvalue,sample[RAdim],sample[decdim]) | def skyhist_cart(skycarts,sky_samples): """ Histogram the list of samples into bins defined by Cartesian vectors in skycarts """ dot=np.dot N=len(skycarts) print 'operating on %d sky points'%(N) bins=np.zeros(N) for RAsample,decsample in sky_samples: sampcart=pol2cart(RAsample,decsample) maxdx=-1 maxvalue=-1 for i in xrange(0,N): dx=dot(sampcart,skycarts[i]) if dx>maxvalue: maxdx=i maxvalue=dx #if debug: #print "sky co : "+str(skycarts[i])+"long : "+str(sample[RAdim])+" "+str(sample[decdim]) #print "sample co: "+str(sampcart) if debug: print "maxdx : %i , maxvalue: %f sample0 : %f sample1: %f"%(maxdx,maxvalue,sample[RAdim],sample[decdim]) bins[maxdx]+=1 return bins | 6b7383178dfc3e0bc72b88b696c5636f91f41a6d /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/6b7383178dfc3e0bc72b88b696c5636f91f41a6d/cbcBayesSkyRes.py |
|
print 'Read columns %s'%(str(header)) | print 'Read columns %s'%(str(header)) | def loadDataFile(filename): print filename infile=open(filename,'r') formatstr=infile.readline().lstrip() formatstr=formatstr.replace('#','') header=formatstr.split() llines=[] import re dec=re.compile(r'[^\d.-]+') line_count=0 for line in infile: sline=line.split() proceed=True if len(sline)<1: print 'Ignoring empty line in input file: %s'%(sline) proceed=False for s in sline: if dec.search(s) is not None: print 'Warning! Ignoring non-numeric data after the header: %s'%(sline) proceed=False if proceed: llines.append(array(map(float,sline))) flines=array(llines) for i in range(0,len(header)): if header[i].lower().find('log')!=-1 and header[i].lower()!='logl': print 'exponentiating %s'%(header[i]) flines[:,i]=exp(flines[:,i]) header[i]=header[i].replace('log','') if header[i].lower().find('sin')!=-1: print 'asining %s'%(header[i]) flines[:,i]=arcsin(flines[:,i]) header[i]=header[i].replace('sin','') if header[i].lower().find('cos')!=-1: print 'acosing %s'%(header[i]) flines[:,i]=arccos(flines[:,i]) header[i]=header[i].replace('cos','') header[i]=header[i].replace('(','') header[i]=header[i].replace(')','') print 'Read columns %s'%(str(header)) return header,flines | 6b7383178dfc3e0bc72b88b696c5636f91f41a6d /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/6b7383178dfc3e0bc72b88b696c5636f91f41a6d/cbcBayesSkyRes.py |
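The `loadDataFile` context above post-processes columns by header name: `log*` columns are exponentiated (except `logL`), `sin*`/`cos*` columns are mapped back through arcsin/arccos, and the prefix is stripped from the header. A compact sketch of the same convention (modernized to run on Python 3):

```python
import numpy as np

def undo_column_transforms(header, data):
    """Columns sampled as log/sin/cos of a parameter are mapped back,
    mirroring loadDataFile's header-driven convention ('logL' is exempt)."""
    header = list(header)
    data = np.array(data, dtype=float)
    for i, name in enumerate(header):
        low = name.lower()
        if 'log' in low and low != 'logl':
            data[:, i] = np.exp(data[:, i])
            header[i] = name.replace('log', '')
        elif 'sin' in low:
            data[:, i] = np.arcsin(data[:, i])
            header[i] = name.replace('sin', '')
        elif 'cos' in low:
            data[:, i] = np.arccos(data[:, i])
            header[i] = name.replace('cos', '')
    return header, data

h, d = undo_column_transforms(['logdist', 'cosiota'], [[0.0, 1.0], [1.0, 0.0]])
print(h, d)   # ['dist', 'iota'], with exp and arccos applied columnwise
```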
summary_fo=open('summary.ini','w') summary_file=ConfigParser() summary_file.add_section('metadata') summary_file.set('metadata','group_id','X') if opts.eventnum: summary_file.set('metadata','event_id',str(opts.eventnum)) summary_file.add_section('Confidence levels') summary_file.set('Confidence levels','confidence levels',str(confidence_levels)) paramnames, pos=loadDataFile(opts.data[0]) if "m1" not in paramnames and "m2" not in paramnames and "mchirp" in paramnames and "eta" in paramnames: (m1,m2)=mc2ms(pos[:,paramnames.index('mchirp')],pos[:,paramnames.index('eta')]) pos=np.column_stack((pos,m1,m2)) paramnames.append("m1") paramnames.append("m2") Nd=len(paramnames) print "Number of posterior samples: " + str(size(pos,0)) means = mean(pos,axis=0) meanStr=map(str,means) out=reduce(lambda a,b:a+'||'+b,meanStr) print 'Means:' print '||'+out+'||' RAdim=paramnames.index('RA') decdim=paramnames.index('dec') injection=None if(opts.injfile is not None): import itertools injections = SimInspiralUtils.ReadSimInspiralFromFiles([opts.injfile]) if(opts.eventnum is not None): if(len(injections)<opts.eventnum): print "Error: You asked for event %d, but %s contains only %d injections" %(opts.eventnum,opts.opts.injfile,len(injections)) sys.exit(1) else: injection=injections[opts.eventnum] else: if(len(injections)<1): print 'Warning: Cannot find injection with end time %f' %(means[2]) else: injection = itertools.ifilter(lambda a: abs(a.get_end() - means[2]) < 0.1, injections).next() def getinjpar(inj,parnum): | def getinjpar(paramnames,inj,parnum): | def loadDataFile(filename): print filename infile=open(filename,'r') formatstr=infile.readline().lstrip() formatstr=formatstr.replace('#','') header=formatstr.split() llines=[] import re dec=re.compile(r'[^\d.-]+') line_count=0 for line in infile: sline=line.split() proceed=True if len(sline)<1: print 'Ignoring empty line in input file: %s'%(sline) proceed=False for s in sline: if dec.search(s) is not None: print 'Warning! Ignoring non-numeric data after the header: %s'%(sline) proceed=False if proceed: llines.append(array(map(float,sline))) flines=array(llines) for i in range(0,len(header)): if header[i].lower().find('log')!=-1 and header[i].lower()!='logl': print 'exponentiating %s'%(header[i]) flines[:,i]=exp(flines[:,i]) header[i]=header[i].replace('log','') if header[i].lower().find('sin')!=-1: print 'asining %s'%(header[i]) flines[:,i]=arcsin(flines[:,i]) header[i]=header[i].replace('sin','') if header[i].lower().find('cos')!=-1: print 'acosing %s'%(header[i]) flines[:,i]=arccos(flines[:,i]) header[i]=header[i].replace('cos','') header[i]=header[i].replace('(','') header[i]=header[i].replace(')','') print 'Read columns %s'%(str(header)) return header,flines | 6b7383178dfc3e0bc72b88b696c5636f91f41a6d /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/6b7383178dfc3e0bc72b88b696c5636f91f41a6d/cbcBayesSkyRes.py |
if injection: injpoint=map(lambda a: getinjpar(injection,a),range(len(paramnames))) injvals=map(str,injpoint) out=reduce(lambda a,b:a+'||'+b,injvals) print 'Injected values:' print out summary_file.add_section('Injection values') for parnum in range(len(paramnames)): summary_file.set('Injection values',paramnames[parnum],getinjpar(injection,parnum)) | def getinjpar(inj,parnum): if paramnames[parnum]=='mchirp' or paramnames[parnum]=='mc': return inj.mchirp if paramnames[parnum]=='mass1' or paramnames[parnum]=='m1': (m1,m2)=mc2ms(inj.mchirp,inj.eta) return m1 if paramnames[parnum]=='mass2' or paramnames[parnum]=='m2': (m1,m2)=mc2ms(inj.mchirp,inj.eta) return m2 if paramnames[parnum]=='eta': return inj.eta if paramnames[parnum]=='time': return inj.get_end() if paramnames[parnum]=='phi0': return inj.phi0 if paramnames[parnum]=='dist' or paramnames[parnum]=='distance': return inj.distance if paramnames[parnum]=='RA' or paramnames[parnum]=='long': return inj.longitude if paramnames[parnum]=='dec' or paramnames[parnum]=='lat': return inj.latitude if paramnames[parnum]=='psi': return inj.polarization if paramnames[parnum]=='iota': return inj.inclination return None | 6b7383178dfc3e0bc72b88b696c5636f91f41a6d /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/6b7383178dfc3e0bc72b88b696c5636f91f41a6d/cbcBayesSkyRes.py |
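The rows above synthesize `m1`/`m2` columns from `mchirp` and `eta` via `mc2ms`. The source does not show that function's body, but assuming the standard chirp-mass relations (total mass M = mc·eta^(-3/5), component masses m1,2 = M(1 ± sqrt(1 - 4·eta))/2), a small reconstruction:

```python
import numpy as np

def mc2ms(mc, eta):
    """Component masses from chirp mass and symmetric mass ratio.
    M_total = mc * eta**(-3/5); the two roots of m1*m2 = eta*M**2
    with m1 + m2 = M give the component masses."""
    mtotal = mc * eta ** (-3.0 / 5.0)
    fac = np.sqrt(1.0 - 4.0 * eta)
    m1 = 0.5 * mtotal * (1.0 + fac)
    m2 = 0.5 * mtotal * (1.0 - fac)
    return m1, m2

m1, m2 = mc2ms(np.array([1.2188]), np.array([0.25]))  # ~1.4 + 1.4 system
print(m1, m2)
```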
|
def plot2Dbins(toppoints,cl,par1_name,par1_bin,par2_name,par2_bin,injpoint): | def plot2Dbins(toppoints,par1_bin,par2_bin,outdir,par1name=None,par2name=None,injpoint=None): | def plot2Dbins(toppoints,cl,par1_name,par1_bin,par2_name,par2_bin,injpoint): #Work out good bin size xbins=int(ceil((max(toppoints[:,0])-min(toppoints[:,0]))/par1_bin)) ybins=int(ceil((max(toppoints[:,1])-min(toppoints[:,1]))/par2_bin)) _dpi=120 xsize_in_inches=6. xsize_points = xsize_in_inches * _dpi points_per_bin_width=xsize_points/xbins ysize_points=ybins*points_per_bin_width ysize_in_inches=ysize_points/_dpi # myfig=plt.figure(1,figsize=(xsize_in_inches+2,ysize_in_inches+2),dpi=_dpi,autoscale_on=True) cnlevel=[1-tp for tp in toppoints[:,3]] # myfig.gca().scatter(toppoints[:,0],toppoints[:,1],s=int(points_per_bin_width*1.5),faceted=False,marker='s',c=cnlevel,cmap=matplotlib.cm.jet) plt.colorbar() #Determine limits based on injection point (if any) and min/max values min_xlim=min(toppoints[:,0]) max_xlim=max(toppoints[:,0]) min_ylim=min(toppoints[:,1]) max_ylim=max(toppoints[:,1]) if injpoint is not None: myfig.gca().plot([injpoint[0]],[injpoint[1]],'r*',ms=20.) if injpoint[0] < min(toppoints[:,0]): min_xlim=injpoint[0] elif injpoint[0] > max(toppoints[:,0]): max_xlim=injpoint[0] if injpoint[1] < min(toppoints[:,1]): min_ylim=injpoint[1] elif injpoint[1] > max(toppoints[:,1]): max_ylim=injpoint[1] | 6b7383178dfc3e0bc72b88b696c5636f91f41a6d /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/6b7383178dfc3e0bc72b88b696c5636f91f41a6d/cbcBayesSkyRes.py |
plt.title("%s-%s histogram (greedy binning)"%(par1_name,par2_name)) | plt.title("%s-%s histogram (greedy binning)"%(par1name,par2name)) | def plot2Dbins(toppoints,cl,par1_name,par1_bin,par2_name,par2_bin,injpoint): #Work out good bin size xbins=int(ceil((max(toppoints[:,0])-min(toppoints[:,0]))/par1_bin)) ybins=int(ceil((max(toppoints[:,1])-min(toppoints[:,1]))/par2_bin)) _dpi=120 xsize_in_inches=6. xsize_points = xsize_in_inches * _dpi points_per_bin_width=xsize_points/xbins ysize_points=ybins*points_per_bin_width ysize_in_inches=ysize_points/_dpi # myfig=plt.figure(1,figsize=(xsize_in_inches+2,ysize_in_inches+2),dpi=_dpi,autoscale_on=True) cnlevel=[1-tp for tp in toppoints[:,3]] # myfig.gca().scatter(toppoints[:,0],toppoints[:,1],s=int(points_per_bin_width*1.5),faceted=False,marker='s',c=cnlevel,cmap=matplotlib.cm.jet) plt.colorbar() #Determine limits based on injection point (if any) and min/max values min_xlim=min(toppoints[:,0]) max_xlim=max(toppoints[:,0]) min_ylim=min(toppoints[:,1]) max_ylim=max(toppoints[:,1]) if injpoint is not None: myfig.gca().plot([injpoint[0]],[injpoint[1]],'r*',ms=20.) if injpoint[0] < min(toppoints[:,0]): min_xlim=injpoint[0] elif injpoint[0] > max(toppoints[:,0]): max_xlim=injpoint[0] if injpoint[1] < min(toppoints[:,1]): min_ylim=injpoint[1] elif injpoint[1] > max(toppoints[:,1]): max_ylim=injpoint[1] | 6b7383178dfc3e0bc72b88b696c5636f91f41a6d /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/6b7383178dfc3e0bc72b88b696c5636f91f41a6d/cbcBayesSkyRes.py |
myfig.savefig(os.path.join(outdir,'2Dbins',par1_name+'-'+par2_name+'.png'),dpi=_dpi) | myfig.savefig(os.path.join(outdir,'2Dbins',par1name+'-'+par2name+'.png'),dpi=_dpi) | def plot2Dbins(toppoints,cl,par1_name,par1_bin,par2_name,par2_bin,injpoint): #Work out good bin size xbins=int(ceil((max(toppoints[:,0])-min(toppoints[:,0]))/par1_bin)) ybins=int(ceil((max(toppoints[:,1])-min(toppoints[:,1]))/par2_bin)) _dpi=120 xsize_in_inches=6. xsize_points = xsize_in_inches * _dpi points_per_bin_width=xsize_points/xbins ysize_points=ybins*points_per_bin_width ysize_in_inches=ysize_points/_dpi # myfig=plt.figure(1,figsize=(xsize_in_inches+2,ysize_in_inches+2),dpi=_dpi,autoscale_on=True) cnlevel=[1-tp for tp in toppoints[:,3]] # myfig.gca().scatter(toppoints[:,0],toppoints[:,1],s=int(points_per_bin_width*1.5),faceted=False,marker='s',c=cnlevel,cmap=matplotlib.cm.jet) plt.colorbar() #Determine limits based on injection point (if any) and min/max values min_xlim=min(toppoints[:,0]) max_xlim=max(toppoints[:,0]) min_ylim=min(toppoints[:,1]) max_ylim=max(toppoints[:,1]) if injpoint is not None: myfig.gca().plot([injpoint[0]],[injpoint[1]],'r*',ms=20.) if injpoint[0] < min(toppoints[:,0]): min_xlim=injpoint[0] elif injpoint[0] > max(toppoints[:,0]): max_xlim=injpoint[0] if injpoint[1] < min(toppoints[:,1]): min_ylim=injpoint[1] elif injpoint[1] > max(toppoints[:,1]): max_ylim=injpoint[1] | 6b7383178dfc3e0bc72b88b696c5636f91f41a6d /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/6b7383178dfc3e0bc72b88b696c5636f91f41a6d/cbcBayesSkyRes.py |
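The `plot2Dbins` context in the rows above sizes the figure from the bin counts: with a 6 inch, 120 dpi canvas, the marker size tracks the per-bin pixel width, and the y extent is chosen so bins stay square. Recomputing that arithmetic for illustrative bin counts of my choosing:

```python
# Recomputing plot2Dbins' sizing arithmetic for, say, 40 x 25 bins.
xbins, ybins = 40, 25
dpi = 120
xsize_in_inches = 6.0
xsize_points = xsize_in_inches * dpi             # 720 pixels across
points_per_bin_width = xsize_points / xbins      # 18 pixels per bin
ysize_in_inches = ybins * points_per_bin_width / dpi  # 3.75 in, bins square
print(points_per_bin_width, ysize_in_inches)
```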
def plotSkyMap(skypos,skyres,sky_injpoint): | def plotSkyMap(skypos,skyres,sky_injpoint,confidence_levels,pos,outdir): | def plot2Dbins(toppoints,cl,par1_name,par1_bin,par2_name,par2_bin,injpoint): #Work out good bin size xbins=int(ceil((max(toppoints[:,0])-min(toppoints[:,0]))/par1_bin)) ybins=int(ceil((max(toppoints[:,1])-min(toppoints[:,1]))/par2_bin)) _dpi=120 xsize_in_inches=6. xsize_points = xsize_in_inches * _dpi points_per_bin_width=xsize_points/xbins ysize_points=ybins*points_per_bin_width ysize_in_inches=ysize_points/_dpi # myfig=plt.figure(1,figsize=(xsize_in_inches+2,ysize_in_inches+2),dpi=_dpi,autoscale_on=True) cnlevel=[1-tp for tp in toppoints[:,3]] # myfig.gca().scatter(toppoints[:,0],toppoints[:,1],s=int(points_per_bin_width*1.5),faceted=False,marker='s',c=cnlevel,cmap=matplotlib.cm.jet) plt.colorbar() #Determine limits based on injection point (if any) and min/max values min_xlim=min(toppoints[:,0]) max_xlim=max(toppoints[:,0]) min_ylim=min(toppoints[:,1]) max_ylim=max(toppoints[:,1]) if injpoint is not None: myfig.gca().plot([injpoint[0]],[injpoint[1]],'r*',ms=20.) if injpoint[0] < min(toppoints[:,0]): min_xlim=injpoint[0] elif injpoint[0] > max(toppoints[:,0]): max_xlim=injpoint[0] if injpoint[1] < min(toppoints[:,1]): min_ylim=injpoint[1] elif injpoint[1] > max(toppoints[:,1]): max_ylim=injpoint[1] | 6b7383178dfc3e0bc72b88b696c5636f91f41a6d /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/6b7383178dfc3e0bc72b88b696c5636f91f41a6d/cbcBayesSkyRes.py |
(skyinjectionconfidence,toppoints,skyreses)=bayespputils.calculateConfidenceLevels(shist,skypoints,injbin,float(opts.skyres),confidence_levels,len(pos)) | (skyinjectionconfidence,toppoints,skyreses)=bayespputils.calculateConfidenceLevels(shist,skypoints,injbin,float(skyres),confidence_levels,len(pos)) | def plotSkyMap(skypos,skyres,sky_injpoint): from pylal import skylocutils from mpl_toolkits.basemap import Basemap skypoints=array(skylocutils.gridsky(float(skyres))) skycarts=map(lambda s: pol2cart(s[1],s[0]),skypoints) skyinjectionconfidence=None shist=bayespputils.skyhist_cart(array(skycarts),skypos) #shist=skyhist_cart(skycarts,list(pos)) bins=skycarts # Find the bin of the injection if available injbin=None if sky_injpoint: injhist=skyhist_cart(skycarts,array([sky_injpoint])) injbin=injhist.tolist().index(1) print 'Found injection in bin %d with co-ordinates %f,%f .'%(injbin,skypoints[injbin,0],skypoints[injbin,1]) (skyinjectionconfidence,toppoints,skyreses)=bayespputils.calculateConfidenceLevels(shist,skypoints,injbin,float(opts.skyres),confidence_levels,len(pos)) if injbin and skyinjectionconfidence: i=list(np.nonzero(np.asarray(toppoints)[:,2]==injbin))[0] min_sky_area_containing_injection=float(opts.skyres)*float(opts.skyres)*i print 'Minimum sky area containing injection point = %f square degrees'%min_sky_area_containing_injection myfig=plt.figure() plt.clf() m=Basemap(projection='moll',lon_0=180.0,lat_0=0.0) plx,ply=m(np.asarray(toppoints)[::-1,1]*57.296,np.asarray(toppoints)[::-1,0]*57.296) cnlevel=[1-tp for tp in np.asarray(toppoints)[::-1,3]] plt.scatter(plx,ply,s=5,c=cnlevel,faceted=False,cmap=matplotlib.cm.jet) m.drawmapboundary() m.drawparallels(np.arange(-90.,120.,45.),labels=[1,0,0,0],labelstyle='+/-') # draw parallels m.drawmeridians(np.arange(0.,360.,90.),labels=[0,0,0,1],labelstyle='+/-') # draw meridians plt.title("Skymap") # add a title plt.colorbar() myfig.savefig(os.path.join(outdir,'skymap.png')) plt.clf() #Save skypoints np.savetxt('ranked_sky_pixels',column_stack([np.asarray(toppoints)[:,0:1],np.asarray(toppoints)[:,1],np.asarray(toppoints)[:,3]])) return skyreses,skyinjectionconfidence | 6b7383178dfc3e0bc72b88b696c5636f91f41a6d /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/6b7383178dfc3e0bc72b88b696c5636f91f41a6d/cbcBayesSkyRes.py |
min_sky_area_containing_injection=float(opts.skyres)*float(opts.skyres)*i | min_sky_area_containing_injection=float(skyres)*float(skyres)*i | def plotSkyMap(skypos,skyres,sky_injpoint): from pylal import skylocutils from mpl_toolkits.basemap import Basemap skypoints=array(skylocutils.gridsky(float(skyres))) skycarts=map(lambda s: pol2cart(s[1],s[0]),skypoints) skyinjectionconfidence=None shist=bayespputils.skyhist_cart(array(skycarts),skypos) #shist=skyhist_cart(skycarts,list(pos)) bins=skycarts # Find the bin of the injection if available injbin=None if sky_injpoint: injhist=skyhist_cart(skycarts,array([sky_injpoint])) injbin=injhist.tolist().index(1) print 'Found injection in bin %d with co-ordinates %f,%f .'%(injbin,skypoints[injbin,0],skypoints[injbin,1]) (skyinjectionconfidence,toppoints,skyreses)=bayespputils.calculateConfidenceLevels(shist,skypoints,injbin,float(opts.skyres),confidence_levels,len(pos)) if injbin and skyinjectionconfidence: i=list(np.nonzero(np.asarray(toppoints)[:,2]==injbin))[0] min_sky_area_containing_injection=float(opts.skyres)*float(opts.skyres)*i print 'Minimum sky area containing injection point = %f square degrees'%min_sky_area_containing_injection myfig=plt.figure() plt.clf() m=Basemap(projection='moll',lon_0=180.0,lat_0=0.0) plx,ply=m(np.asarray(toppoints)[::-1,1]*57.296,np.asarray(toppoints)[::-1,0]*57.296) cnlevel=[1-tp for tp in np.asarray(toppoints)[::-1,3]] plt.scatter(plx,ply,s=5,c=cnlevel,faceted=False,cmap=matplotlib.cm.jet) m.drawmapboundary() m.drawparallels(np.arange(-90.,120.,45.),labels=[1,0,0,0],labelstyle='+/-') # draw parallels m.drawmeridians(np.arange(0.,360.,90.),labels=[0,0,0,1],labelstyle='+/-') # draw meridians plt.title("Skymap") # add a title plt.colorbar() myfig.savefig(os.path.join(outdir,'skymap.png')) plt.clf() #Save skypoints np.savetxt('ranked_sky_pixels',column_stack([np.asarray(toppoints)[:,0:1],np.asarray(toppoints)[:,1],np.asarray(toppoints)[:,3]])) return skyreses,skyinjectionconfidence | 6b7383178dfc3e0bc72b88b696c5636f91f41a6d /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/6b7383178dfc3e0bc72b88b696c5636f91f41a6d/cbcBayesSkyRes.py |
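Several rows here call `bayespputils.calculateConfidenceLevels(hist, points, injbin, binsize, confidence_levels, N)`. Its body is not shown in this dump, but the surrounding code implies greedy binning: rank bins by sample count and accumulate until each requested fraction of samples is enclosed, then report the enclosed area. A sketch of that idea (my reconstruction, not pylal's implementation):

```python
import numpy as np

def greedy_confidence_areas(hist, bin_area, confidence_levels):
    """Greedy binning: take bins in decreasing order of counts until each
    confidence level's fraction of samples is enclosed; the reported area
    is the number of bins taken times the area of one bin."""
    order = np.argsort(hist)[::-1]
    cum = np.cumsum(hist[order]) / float(hist.sum())
    areas = {}
    for cl in confidence_levels:
        nbins = int(np.searchsorted(cum, cl) + 1)  # first index reaching cl
        areas[cl] = nbins * bin_area
    return areas

hist = np.array([50, 30, 10, 5, 3, 2])
print(greedy_confidence_areas(hist, bin_area=0.25,
                              confidence_levels=[0.67, 0.9, 0.95]))
```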
skyreses=None if opts.skyres is not None: RAvec=array(pos)[:,paramnames.index('RA')] decvec=array(pos)[:,paramnames.index('dec')] skypos=column_stack([RAvec,decvec]) | def cbcBayesSkyRes(outdir,data,oneDMenu,twoDGreedyMenu,GreedyRes,confidence_levels,twoDplots,injfile=None,eventnum=None,skyres=None): if eventnum is not None and injfile is None: print "You specified an event number but no injection file. Ignoring!" if data is None: print 'You must specify an input data file' exit(1) if outdir is None: print "You must specify an output directory." exit(1) if not os.path.isdir(outdir): os.makedirs(outdir) summary_fo=open(os.path.join(outdir,'summary.ini'),'w') summary_file=ConfigParser() summary_file.add_section('metadata') summary_file.set('metadata','group_id','X') if eventnum: summary_file.set('metadata','event_id',str(eventnum)) summary_file.add_section('Confidence levels') summary_file.set('Confidence levels','confidence levels',str(confidence_levels)) paramnames, pos=loadDataFile(data[0]) if "m1" not in paramnames and "m2" not in paramnames and "mchirp" in paramnames and "eta" in paramnames: (m1,m2)=mc2ms(pos[:,paramnames.index('mchirp')],pos[:,paramnames.index('eta')]) pos=np.column_stack((pos,m1,m2)) paramnames.append("m1") paramnames.append("m2") Nd=len(paramnames) print "Number of posterior samples: " + str(size(pos,0)) means = mean(pos,axis=0) meanStr=map(str,means) out=reduce(lambda a,b:a+'||'+b,meanStr) print 'Means:' print '||'+out+'||' RAdim=paramnames.index('RA') decdim=paramnames.index('dec') injection=None if(injfile is not None): import itertools injections = SimInspiralUtils.ReadSimInspiralFromFiles([injfile]) if(eventnum is not None): if(len(injections)<eventnum): print "Error: You asked for event %d, but %s contains only %d injections" %(eventnum,injfile,len(injections)) sys.exit(1) else: injection=injections[eventnum] else: if(len(injections)<1): print 'Warning: Cannot find injection with end time %f' %(means[2]) else: injection = itertools.ifilter(lambda a: abs(a.get_end() - means[2]) < 0.1, injections).next() | def plotSkyMap(skypos,skyres,sky_injpoint): from pylal import skylocutils from mpl_toolkits.basemap import Basemap skypoints=array(skylocutils.gridsky(float(skyres))) skycarts=map(lambda s: pol2cart(s[1],s[0]),skypoints) skyinjectionconfidence=None shist=bayespputils.skyhist_cart(array(skycarts),skypos) #shist=skyhist_cart(skycarts,list(pos)) bins=skycarts # Find the bin of the injection if available injbin=None if sky_injpoint: injhist=skyhist_cart(skycarts,array([sky_injpoint])) injbin=injhist.tolist().index(1) print 'Found injection in bin %d with co-ordinates %f,%f .'%(injbin,skypoints[injbin,0],skypoints[injbin,1]) (skyinjectionconfidence,toppoints,skyreses)=bayespputils.calculateConfidenceLevels(shist,skypoints,injbin,float(opts.skyres),confidence_levels,len(pos)) if injbin and skyinjectionconfidence: i=list(np.nonzero(np.asarray(toppoints)[:,2]==injbin))[0] min_sky_area_containing_injection=float(opts.skyres)*float(opts.skyres)*i print 'Minimum sky area containing injection point = %f square degrees'%min_sky_area_containing_injection myfig=plt.figure() plt.clf() m=Basemap(projection='moll',lon_0=180.0,lat_0=0.0) plx,ply=m(np.asarray(toppoints)[::-1,1]*57.296,np.asarray(toppoints)[::-1,0]*57.296) cnlevel=[1-tp for tp in np.asarray(toppoints)[::-1,3]] plt.scatter(plx,ply,s=5,c=cnlevel,faceted=False,cmap=matplotlib.cm.jet) m.drawmapboundary() m.drawparallels(np.arange(-90.,120.,45.),labels=[1,0,0,0],labelstyle='+/-') # draw parallels m.drawmeridians(np.arange(0.,360.,90.),labels=[0,0,0,1],labelstyle='+/-') # draw meridians plt.title("Skymap") # add a title plt.colorbar() myfig.savefig(os.path.join(outdir,'skymap.png')) plt.clf() #Save skypoints np.savetxt('ranked_sky_pixels',column_stack([np.asarray(toppoints)[:,0:1],np.asarray(toppoints)[:,1],np.asarray(toppoints)[:,3]])) return skyreses,skyinjectionconfidence | 6b7383178dfc3e0bc72b88b696c5636f91f41a6d /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/6b7383178dfc3e0bc72b88b696c5636f91f41a6d/cbcBayesSkyRes.py

skyreses,skyinjectionconfidence=plotSkyMap(skypos,opts.skyres,(injpoint[RAdim],injpoint[decdim])) else: skyreses,skyinjectionconfidence=plotSkyMap(skypos,opts.skyres,None) myfig=plt.figure(1,figsize=(6,4),dpi=80) plt.clf() summary_file.add_section('2D greedy cl') ncon=len(confidence_levels) pos_array=np.array(pos) for par1_name,par2_name in twoDGreedyMenu: print "Binning %s-%s to determine confidence levels ..."%(par1_name,par2_name) try: par1_bin=GreedyRes[par1_name] except KeyError: print "Bin size is not set for %s, skipping %s/%s binning."%(par1_name,par1_name,par2_name) continue try: par2_bin=GreedyRes[par2_name] except KeyError: print "Bin size is not set for %s, skipping %s/%s binning."%(par2_name,par1_name,par2_name) continue try: par1_index=paramnames.index(par1_name) except ValueError: print "No input chain for %s, skipping %s/%s binning."%(par1_name,par1_name,par2_name) continue try: par2_index=paramnames.index(par2_name) except ValueError: print "No input chain for %s, skipping %s/%s binning."%(par2_name,par1_name,par2_name) continue par1pos=pos_array[:,par1_index] par2pos=pos_array[:,par2_index] par1pos_min=min(par1pos) par2pos_min=min(par2pos) par1pos_max=max(par1pos) par2pos_max=max(par2pos) par1pos_Nbins= int(ceil((par1pos_max - par1pos_min)/par1_bin))+1 par2pos_Nbins= int(ceil((par2pos_max - par2pos_min)/par2_bin))+1 greedyHist = np.zeros(par1pos_Nbins*par2pos_Nbins,dtype='i8') greedyPoints = np.zeros((par1pos_Nbins*par2pos_Nbins,2)) par1_point=par1pos_min par2_point=par2pos_min for i in range(par2pos_Nbins): | injpoint=map(lambda a: getinjpar(paramnames,injection,a),range(len(paramnames))) injvals=map(str,injpoint) out=reduce(lambda a,b:a+'||'+b,injvals) print 'Injected values:' print out summary_file.add_section('Injection values') for parnum in range(len(paramnames)): summary_file.set('Injection values',paramnames[parnum],getinjpar(paramnames,injection,parnum)) skyreses=None if skyres is not None: RAvec=array(pos)[:,paramnames.index('RA')] decvec=array(pos)[:,paramnames.index('dec')] skypos=column_stack([RAvec,decvec]) if injection: skyreses,skyinjectionconfidence=plotSkyMap(skypos,skyres,(injpoint[RAdim],injpoint[decdim]),confidence_levels,pos,outdir) else: skyreses,skyinjectionconfidence=plotSkyMap(skypos,skyres,None,confidence_levels,pos,outdir) myfig=plt.figure(1,figsize=(6,4),dpi=80) plt.clf() summary_file.add_section('2D greedy cl') ncon=len(confidence_levels) pos_array=np.array(pos) twoDGreedyCL={} for par1_name,par2_name in twoDGreedyMenu: print "Binning %s-%s to determine confidence levels ..."%(par1_name,par2_name) try: par1_bin=GreedyRes[par1_name] except KeyError: print "Bin size is not set for %s, skipping %s/%s binning."%(par1_name,par1_name,par2_name) continue try: par2_bin=GreedyRes[par2_name] except KeyError: print "Bin size is not set for %s, skipping %s/%s binning."%(par2_name,par1_name,par2_name) continue try: par1_index=paramnames.index(par1_name) except ValueError: print "No input chain for %s, skipping %s/%s binning."%(par1_name,par1_name,par2_name) continue try: par2_index=paramnames.index(par2_name) except ValueError: print "No input chain for %s, skipping %s/%s binning."%(par2_name,par1_name,par2_name) continue twoDGreedyCL["%s-%s"%(par1_name,par2_name)]={} par1pos=pos_array[:,par1_index] par2pos=pos_array[:,par2_index] par1pos_min=min(par1pos) par2pos_min=min(par2pos) par1pos_max=max(par1pos) par2pos_max=max(par2pos) par1pos_Nbins= int(ceil((par1pos_max - par1pos_min)/par1_bin))+1 par2pos_Nbins= int(ceil((par2pos_max - par2pos_min)/par2_bin))+1 greedyHist = np.zeros(par1pos_Nbins*par2pos_Nbins,dtype='i8') greedyPoints = np.zeros((par1pos_Nbins*par2pos_Nbins,2)) | def plotSkyMap(skypos,skyres,sky_injpoint):

for j in range(par1pos_Nbins): greedyPoints[j+par1pos_Nbins*i,0]=par1_point greedyPoints[j+par1pos_Nbins*i,1]=par2_point par1_point+=par1_bin par2_point+=par2_bin injbin=None if injection: par1_injvalue=np.array(injpoint)[par1_index] par2_injvalue=np.array(injpoint)[par2_index] if par1_injvalue is not None and par2_injvalue is not None: par1_binNumber=floor((par1_injvalue-par1pos_min)/par1_bin) par2_binNumber=floor((par2_injvalue-par2pos_min)/par2_bin) injbin=int(par1_binNumber+par2_binNumber*par1pos_Nbins) elif par1_injvalue is None and par2_injvalue is not None: print "Injection value not found for %s!"%par1_name elif par1_injvalue is not None and par2_injvalue is None: print "Injection value not found for %s!"%par2_name for par1_samp,par2_samp in zip(par1pos,par2pos): par1_binNumber=floor((par1_samp-par1pos_min)/par1_bin) par2_binNumber=floor((par2_samp-par2pos_min)/par2_bin) greedyHist[par1_binNumber+par2_binNumber*par1pos_Nbins]+=1 (injectionconfidence,toppoints,reses)=bayespputils.calculateConfidenceLevels(greedyHist,greedyPoints,injbin,float(sqrt(par1_bin*par2_bin)),confidence_levels,int(len(par1pos))) areastr='' for (frac,area) in reses: areastr+='%s,'%str(area) areastr=areastr.rstrip(',') summary_file.set('2D greedy cl',par1_name+','+par2_name,str(areastr)) if injection is not None and injectionconfidence is not None: summary_file.set('2D greedy cl',par1_name+','+par2_name+'_inj',str(injectionconfidence)) if injection is not None and par1_injvalue is not None and par2_injvalue is not None: plot2Dbins(np.array(toppoints),confidence_levels,par1_name,par1_bin,par2_name,par2_bin,[par1_injvalue,par2_injvalue]) else: plot2Dbins(np.array(toppoints),confidence_levels,par1_name,par1_bin,par2_name,par2_bin,None) summary_file.add_section('1D mean') summary_file.add_section('1D median') summary_file.add_section('1D mode') summary_file.add_section('1D contigious cl') summary_file.add_section('1D greedy cl') summary_file.add_section('1D stacc') max_i=0 max_pos=pos_array[0,-1] par_samps=pos_array[:,-1] for i in range(len(pos_array)): if par_samps[i] > max_pos: max_pos=par_samps[i] max_i=i for par_name in oneDMenu: print "Binning %s to determine confidence levels ..."%par_name try: par_index=paramnames.index(par_name) except ValueError: print "No input chain for %s, skipping binning."%par_name continue try: par_bin=GreedyRes[par_name] except KeyError: print "Bin size is not set for %s, skipping binning."%par_name continue par_samps=pos_array[:,par_index] summary_file.set('1D mode',par_name,str(par_samps[max_i])) summary_file.set("1D mean",par_name,str(np.mean(par_samps))) summary_file.set("1D median",par_name,str(np.median(par_samps))) parpos_min=min(par_samps) parpos_max=max(par_samps) par_point=parpos_min parpos_Nbins= int(ceil((parpos_max - parpos_min)/par_bin))+1 greedyPoints=np.zeros((parpos_Nbins,2)) greedyHist=np.zeros(parpos_Nbins,dtype='i8') for i in range(parpos_Nbins): greedyPoints[i,0]=par_point greedyPoints[i,1]=par_point par_point+=par_bin for par_samp in par_samps: par_binNumber=int(floor((par_samp-parpos_min)/par_bin)) | par2_point=par2pos_min for i in range(par2pos_Nbins): par1_point=par1pos_min for j in range(par1pos_Nbins): greedyPoints[j+par1pos_Nbins*i,0]=par1_point greedyPoints[j+par1pos_Nbins*i,1]=par2_point par1_point+=par1_bin par2_point+=par2_bin injbin=None if injection: par1_injvalue=np.array(injpoint)[par1_index] par2_injvalue=np.array(injpoint)[par2_index] if par1_injvalue is not None and par2_injvalue is not None: par1_binNumber=floor((par1_injvalue-par1pos_min)/par1_bin) par2_binNumber=floor((par2_injvalue-par2pos_min)/par2_bin) injbin=int(par1_binNumber+par2_binNumber*par1pos_Nbins) elif par1_injvalue is None and par2_injvalue is not None: print "Injection value not found for %s!"%par1_name elif par1_injvalue is not None and par2_injvalue is None: print "Injection value not found for %s!"%par2_name for par1_samp,par2_samp in zip(par1pos,par2pos): par1_binNumber=floor((par1_samp-par1pos_min)/par1_bin) par2_binNumber=floor((par2_samp-par2pos_min)/par2_bin) greedyHist[par1_binNumber+par2_binNumber*par1pos_Nbins]+=1 (injectionconfidence,toppoints,reses)=bayespputils.calculateConfidenceLevels(greedyHist,greedyPoints,injbin,float(sqrt(par1_bin*par2_bin)),confidence_levels,int(len(par1pos))) areastr='' for (frac,area) in reses: areastr+='%s,'%str(area) twoDGreedyCL["%s-%s"%(par1_name,par2_name)][frac]=area areastr=areastr.rstrip(',') summary_file.set('2D greedy cl',par1_name+','+par2_name,str(areastr)) if injection is not None and injectionconfidence is not None: summary_file.set('2D greedy cl',par1_name+','+par2_name+'_inj',str(injectionconfidence)) if injection is not None and par1_injvalue is not None and par2_injvalue is not None: plot2Dbins(np.array(toppoints),par1_bin,par2_bin,outdir,par1name=par1_name,par2name=par2_name,injpoint=[par1_injvalue,par2_injvalue]) else: plot2Dbins(np.array(toppoints),par1_bin,par2_bin,outdir,par1name=par1_name,par2name=par2_name) pickle_to_file(twoDGreedyCL,os.path.join(outdir,'twoGreedyCL.pickle')) for par in twoDGreedyCL.keys(): oneD_dict_to_file(twoDGreedyCL[par],os.path.join(outdir,str(par)+'_greedy2.dat')) summary_file.add_section('1D mean') summary_file.add_section('1D median') summary_file.add_section('1D mode') summary_file.add_section('1D contigious cl') summary_file.add_section('1D greedy cl') summary_file.add_section('1D stacc') max_i=0 oneDStats={} oneDGreedyCL={} oneDContCL={} max_pos=pos_array[0,-1] par_samps=pos_array[:,-1] for i in range(len(pos_array)): if par_samps[i] > max_pos: max_pos=par_samps[i] max_i=i for par_name in oneDMenu: print "Binning %s to determine confidence levels ..."%par_name | def plotSkyMap(skypos,skyres,sky_injpoint):

greedyHist[par_binNumber]+=1 except IndexError: print "IndexError: bin number: %i total bins: %i parsamp: %f bin: %f - %f"%(par_binNumber,parpos_Nbins,par_samp,greedyPoints[par_binNumber-1,0],greedyPoints[par_binNumber-1,0]+par_bin) injbin=None if injection: par_injvalue=np.array(injpoint)[par_index] if par_injvalue is not None: par_binNumber=floor((par_injvalue-parpos_min)/par_bin) injbin=par_binNumber j=0 print "Calculating contigious confidence intervals for %s..."%par_name len_par_samps=len(par_samps) while j < len(confidence_levels): confidence_level=confidence_levels[j] max_left=0 max_right=0 for i in range(len(greedyHist)): max_frac=None left=0 right=i while right<len(greedyHist): Npoints=sum(greedyHist[left:right]) frac=float(Npoints)/float(len_par_samps) if frac>confidence_level: if max_frac is None: max_frac=frac max_left=left max_right=right else: if frac>max_frac: | par_index=paramnames.index(par_name) except ValueError: print "No input chain for %s, skipping binning."%par_name continue try: par_bin=GreedyRes[par_name] except KeyError: print "Bin size is not set for %s, skipping binning."%par_name continue oneDGreedyCL[par_name]={} oneDStats[par_name]={} oneDContCL[par_name]={} par_samps=pos_array[:,par_index] summary_file.set('1D mode',par_name,str(par_samps[max_i])) summary_file.set("1D mean",par_name,str(np.mean(par_samps))) summary_file.set("1D median",par_name,str(np.median(par_samps))) oneDStats[par_name]['mode']=par_samps[max_i] oneDStats[par_name]['mean']=np.mean(par_samps) oneDStats[par_name]['median']=np.median(par_samps) parpos_min=min(par_samps) parpos_max=max(par_samps) par_point=parpos_min parpos_Nbins= int(ceil((parpos_max - parpos_min)/par_bin))+1 greedyPoints=np.zeros((parpos_Nbins,2)) greedyHist=np.zeros(parpos_Nbins,dtype='i8') for i in range(parpos_Nbins): greedyPoints[i,0]=par_point greedyPoints[i,1]=par_point par_point+=par_bin for par_samp in par_samps: par_binNumber=int(floor((par_samp-parpos_min)/par_bin)) try: greedyHist[par_binNumber]+=1 except IndexError: print "IndexError: bin number: %i total bins: %i parsamp: %f bin: %f - %f"%(par_binNumber,parpos_Nbins,par_samp,greedyPoints[par_binNumber-1,0],greedyPoints[par_binNumber-1,0]+par_bin) injbin=None if injection: par_injvalue=np.array(injpoint)[par_index] if par_injvalue is not None: par_binNumber=floor((par_injvalue-parpos_min)/par_bin) injbin=par_binNumber j=0 print "Calculating contigious confidence intervals for %s..."%par_name len_par_samps=len(par_samps) while j < len(confidence_levels): confidence_level=confidence_levels[j] max_left=0 max_right=0 for i in range(len(greedyHist)): max_frac=None left=0 right=i while right<len(greedyHist): Npoints=sum(greedyHist[left:right]) frac=float(Npoints)/float(len_par_samps) if frac>confidence_level: if max_frac is None: | def plotSkyMap(skypos,skyres,sky_injpoint):

left+=1 right+=1 if max_frac is not None: break if max_frac is None: print "Cant determine intervals at %f confidence!"%confidence_level | else: if frac>max_frac: max_frac=frac max_left=left max_right=right left+=1 right+=1 if max_frac is not None: break if max_frac is None: print "Cant determine intervals at %f confidence!"%confidence_level else: summary_file.set('1D contigious cl',par_name,'['+str(max_left*par_bin)+','+str(max_right*par_bin)+','+str((max_right-max_left)*par_bin)+']') oneDContCL[par_name]['left']=max_left*par_bin oneDContCL[par_name]['right']=max_right*par_bin oneDContCL[par_name]['width']=(max_right-max_left)*par_bin k=j while k+1<len(confidence_levels) : if confidence_levels[k+1]<max_frac: j+=1 k+=1 j+=1 (injectionconfidence,toppoints,reses)=bayespputils.calculateConfidenceLevels(greedyHist,greedyPoints,injbin,sqrt(par_bin),confidence_levels,len(par_samps)) areastr='' for (frac,area) in reses: areastr+='%s,'%str(area) oneDGreedyCL[par_name][frac]=area areastr=areastr.rstrip(',') summary_file.set('1D greedy cl',par_name,'['+str(areastr)+']') if injection is not None and injectionconfidence is not None: summary_file.set('1D greedy cl',par_name+'_inj',str(injectionconfidence)) if injection: injvalue=np.array(injpoint)[par_index] if injvalue is not None: stacc=sqrt(np.var(par_samps)+pow((np.mean(par_samps)-injvalue),2) ) summary_file.set('1D stacc',par_name,str(stacc)) oneDStats[par_name]['stacc']=stacc pickle_to_file(oneDGreedyCL,os.path.join(outdir,'oneDGreedyCL.pickle')) pickle_to_file(oneDContCL,os.path.join(outdir,'oneDContCL.pickle')) pickle_to_file(oneDStats,os.path.join(outdir,'oneDStats.pickle')) for par in oneDGreedyCL.keys(): oneD_dict_to_file(oneDGreedyCL[par],os.path.join(outdir,str(par)+'_greedy1.dat')) for par in oneDGreedyCL.keys(): oneD_dict_to_file(oneDContCL[par],os.path.join(outdir,str(par)+'_cont.dat')) for par in oneDStats.keys(): oneD_dict_to_file(oneDStats[par],os.path.join(outdir,str(par)+'_stats.dat')) for par1,par2 in twoDplots: try: i=paramnames.index(par1) except ValueError: print "No input chain for %s, skipping 2D plot of %s-%s."%(par1,par1,par2) continue try: j=paramnames.index(par2) except ValueError: print "No input chain for %s, skipping 2D plot of %s-%s."%(par2,par1,par2) continue print 'Generating %s-%s plot'%(paramnames[i],paramnames[j]) if (size(np.unique(pos[:,i]))<2 or size(np.unique(pos[:,j]))<2): continue plot2Dkernel(pos[:,i],pos[:,j],50,50) if injection and reduce (lambda a,b: a and b, map(lambda idx: getinjpar(paramnames,injection,idx)>min(pos[:,idx]) and getinjpar(paramnames,injection,idx)<max(pos[:,idx]),[i,j])) : if getinjpar(paramnames,injection,i) is not None and getinjpar(paramnames,injection,j) is not None: plt.plot([getinjpar(paramnames,injection,i)],[getinjpar(paramnames,injection,j)],'go',scalex=False,scaley=False) plt.xlabel(paramnames[i]) plt.ylabel(paramnames[j]) plt.grid() margdir=os.path.join(outdir,'2D') if not os.path.isdir(margdir+'/'): os.mkdir(margdir) myfig.savefig(os.path.join(margdir,paramnames[i]+'-'+paramnames[j]+'_2Dkernel.png')) myfig.clear() htmlfile=open(os.path.join(outdir,'posplots.html'),'w') htmlfile.write('<HTML><HEAD><TITLE>Posterior PDFs</TITLE></HEAD><BODY><h3>'+str(means[2])+' Posterior PDFs</h3>') if(skyres is not None): htmlfile.write('<table border=1><tr><td>Confidence region<td>size (sq. deg)</tr>') for (frac,skysize) in skyreses: htmlfile.write('<tr><td>%f<td>%f</tr>'%(frac,skysize)) htmlfile.write('</table>') htmlfile.write('Produced from '+str(size(pos,0))+' posterior samples.<br>') htmlfile.write('Samples read from %s<br>'%(data[0])) htmlfile.write('<h4>Mean parameter estimates</h4>') htmlfile.write('<table border=1><tr>') paramline=reduce(lambda a,b:a+'<td>'+b,paramnames) htmlfile.write('<td>'+paramline+'<td>logLmax</tr><tr>') meanline=reduce(lambda a,b:a+'<td>'+b,meanStr) htmlfile.write('<td>'+meanline+'</tr>') if injection: htmlfile.write('<tr><th colspan='+str(len(paramnames))+'>Injected values</tr>') injline=reduce(lambda a,b:a+'<td>'+b,injvals) htmlfile.write('<td>'+injline+'<td></tr>') htmlfile.write('</table>') if injection: if skyinjectionconfidence: htmlfile.write('<p>Injection found at confidence interval %f in sky location</p>'%(skyinjectionconfidence)) | def plotSkyMap(skypos,skyres,sky_injpoint):

summary_file.set('1D contigious cl',par_name,'['+str(max_left*par_bin)+','+str(max_right*par_bin)+','+str((max_right-max_left)*par_bin)+']') k=j while k+1<len(confidence_levels) : if confidence_levels[k+1]<max_frac: j+=1 k+=1 j+=1 (injectionconfidence,toppoints,reses)=bayespputils.calculateConfidenceLevels(greedyHist,greedyPoints,injbin,sqrt(par_bin),confidence_levels,len(par_samps)) areastr='' for (frac,area) in reses: areastr+='%s,'%str(area) areastr=areastr.rstrip(',') summary_file.set('1D greedy cl',par_name,'['+str(areastr)+']') if injection is not None and injectionconfidence is not None: summary_file.set('1D greedy cl',par_name+'_inj',str(injectionconfidence)) if injection: injvalue=np.array(injpoint)[par_index] if injvalue is not None: stacc=sqrt(np.var(par_samps)+pow((np.mean(par_samps)-injvalue),2) ) summary_file.set('1D stacc',par_name,str(stacc)) def plot2Dkernel(xdat,ydat,Nx,Ny): xax=np.linspace(min(xdat),max(xdat),Nx) yax=np.linspace(min(ydat),max(ydat),Ny) x,y=np.meshgrid(xax,yax) samp=array([xdat,ydat]) kde=stats.kde.gaussian_kde(samp) grid_coords = np.append(x.reshape(-1,1),y.reshape(-1,1),axis=1) | htmlfile.write('<p>Injection not found in posterior bins in sky location!</p>') htmlfile.write('<h5>2D Marginal PDFs</h5><br>') htmlfile.write('<table border=1><tr>') if skyres is not None: htmlfile.write('<td width=30%><img width=100% src="skymap.png"></td>') else: htmlfile.write('<td width=30%><img width=100% src="m1dist.png:></td>') row_switch=1 for par1,par2 in twoDplots: if row_switch==3: row_switch=0 plot_path=None if os.path.isfile(os.path.join(outdir,'2D',par1+'-'+par2+'_2Dkernel.png')): plot_path='2D/'+par1+'-'+par2+'_2Dkernel.png' elif os.path.isfile(os.path.join(outdir,'2D',par2+'-'+par1+'_2Dkernel.png')): plot_path='2D/'+par2+'-'+par1+'_2Dkernel.png' if plot_path: if row_switch==0: htmlfile.write('<tr>') htmlfile.write('<td width=30%><img width=100% src="'+plot_path+'"></td>') if row_switch==2: htmlfile.write('</tr>') row_switch+=1 if row_switch==2: htmlfile.write('<td></td></tr>') elif row_switch==1: htmlfile.write('<td></td><td></td></tr>') htmlfile.write('</table>') htmlfile.write('<br><a href="2D/">All 2D Marginal PDFs</a><hr><h5>1D marginal posterior PDFs</h5><br>') summary_file.add_section('1D ranking kde') summary_file.add_section('1D ranking bins') for param in oneDMenu: try: par_index=paramnames.index(param) i=par_index except ValueError: print "No input chain for %s, skipping 1D plot."%param continue myfig=plt.figure(figsize=(4,3.5),dpi=80) histbins=50 (n, bins, patches)=plt.hist(pos[:,i],histbins,normed='true') histbinSize=bins[1]-bins[0] np.seterr(under='ignore') scipy.seterr(under='ignore') try: gkde=stats.kde.gaussian_kde(pos[:,i]) pass except np.linalg.linalg.LinAlgError: print "Error occured generating plot for parameter %s: %s ! Trying next parameter."%(paramnames[i],'LinAlgError') continue print "Generating 1D plot for %s."%param ind=np.linspace(min(pos[:,i]),max(pos[:,i]),101) kdepdf=gkde.evaluate(ind) plt.plot(ind,kdepdf,label='density estimate') if injection and min(pos[:,i])<getinjpar(paramnames,injection,i) and max(pos[:,i])>getinjpar(paramnames,injection,i): plt.plot([getinjpar(paramnames,injection,i),getinjpar(paramnames,injection,i)],[0,max(kdepdf)],'r-.',scalex=False,scaley=False) bins_to_inj=(getinjpar(paramnames,injection,i)-bins[0])/histbinSize injbinh=int(floor(bins_to_inj)) injbin_frac=bins_to_inj-float(injbinh) rbins=(sum(n[0:injbinh-1])+injbin_frac*n[injbinh])*histbinSize print "r of injected value of %s (bins) = %f"%(param, rbins) summary_file.set('1D ranking bins',param,rbins) plt.grid() plt.xlabel(paramnames[i]) plt.ylabel('Probability Density') myfig.savefig(os.path.join(outdir,paramnames[i]+ '.png')) myfig=plt.figure(figsize=(4,3.5),dpi=80) plt.plot(pos[:,i],'.') if injection and min(pos[:,i])<getinjpar(paramnames,injection,i) and max(pos[:,i])>getinjpar(paramnames,injection,i): plt.plot([0,len(pos)],[getinjpar(paramnames,injection,i),getinjpar(paramnames,injection,i)],'r-.') myfig.savefig(os.path.join(outdir,paramnames[i]+'_samps.png')) htmlfile.write('<img src="'+paramnames[i]+'.png"><img src="'+paramnames[i]+'_samps.png"><br>') htmlfile.write('<hr><br />Produced using cbcBayesSkyRes.py at '+strftime("%Y-%m-%d %H:%M:%S")) htmlfile.write('</BODY></HTML>') htmlfile.close() posfilename=os.path.join(outdir,'posterior_samples.dat') posfile=open(posfilename,'w') for row in pos: for i in row: posfile.write('%f\t'%(i)) posfile.write('\n') posfile.close() summary_file.write(summary_fo) if __name__=='__main__': parser=OptionParser() parser.add_option("-o","--outpath", dest="outpath",help="make page and plots in DIR", metavar="DIR") parser.add_option("-d","--data",dest="data",action="append",help="datafile") parser.add_option("-i","--inj",dest="injfile",help="SimInsipral injection file",metavar="INJ.XML",default=None) parser.add_option("--skyres",dest="skyres",help="Sky resolution to use to calculate sky box size",default=None) parser.add_option("--eventnum",dest="eventnum",action="store",default=None,help="event number in SimInspiral file of this signal",type="int",metavar="NUM") (opts,args)=parser.parse_args() oneDMenu=['mtotal','m1','m2','mchirp','mc','distance','distMPC','dist','iota','eta','RA','dec','a1','a2','phi1','theta1','phi2','theta2'] twoDGreedyMenu=[['mc','eta'],['mchirp','eta'],['m1','m2'],['mtotal','eta'],['distance','iota'],['dist','iota'],['dist','m1'],['RA','dec']] greedyRes={'mc':0.025,'m1':0.1,'m2':0.1,'mtotal':0.1,'eta':0.001,'iota':0.01,'time':1e-4,'distance':1.0,'dist':1.0,'mchirp':0.025,'a1':0.02,'a2':0.02,'phi1':0.05,'phi2':0.05,'theta1':0.05,'theta2':0.05,'RA':0.005,'dec':0.005} confidenceLevels=[0.67,0.9,0.95,0.99] twoDplots=[['mc','eta'],['mchirp','eta'],['m1','m2'],['mtotal','eta'],['distance','iota'],['dist','iota'],['RA','dec'],['m1','dist'],['m2','dist']] | def plotSkyMap(skypos,skyres,sky_injpoint): | 6b7383178dfc3e0bc72b88b696c5636f91f41a6d /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/6b7383178dfc3e0bc72b88b696c5636f91f41a6d/cbcBayesSkyRes.py

z = kde(grid_coords.T) z = z.reshape(Nx,Ny) asp=xax.ptp()/yax.ptp() plt.imshow(z,extent=(xax[0],xax[-1],yax[0],yax[-1]),aspect=asp,origin='lower') plt.colorbar() for par1,par2 in twoDplots: try: i=paramnames.index(par1) except ValueError: print "No input chain for %s, skipping 2D plot of %s-%s."%(par1,par1,par2) continue try: j=paramnames.index(par2) except ValueError: print "No input chain for %s, skipping 2D plot of %s-%s."%(par2,par1,par2) continue print 'Generating %s-%s plot'%(paramnames[i],paramnames[j]) if (size(np.unique(pos[:,i]))<2 or size(np.unique(pos[:,j]))<2): continue plot2Dkernel(pos[:,i],pos[:,j],50,50) if injection and reduce (lambda a,b: a and b, map(lambda idx: getinjpar(injection,idx)>min(pos[:,idx]) and getinjpar(injection,idx)<max(pos[:,idx]),[i,j])) : if getinjpar(injection,i) is not None and getinjpar(injection,j) is not None: plt.plot([getinjpar(injection,i)],[getinjpar(injection,j)],'go',scalex=False,scaley=False) plt.xlabel(paramnames[i]) plt.ylabel(paramnames[j]) plt.grid() margdir=os.path.join(outdir,'2D') if not os.path.isdir(margdir+'/'): os.mkdir(margdir) myfig.savefig(os.path.join(margdir,paramnames[i]+'-'+paramnames[j]+'_2Dkernel.png')) myfig.clear() htmlfile=open(os.path.join(outdir,'posplots.html'),'w') htmlfile.write('<HTML><HEAD><TITLE>Posterior PDFs</TITLE></HEAD><BODY><h3>'+str(means[2])+' Posterior PDFs</h3>') if(opts.skyres is not None): htmlfile.write('<table border=1><tr><td>Confidence region<td>size (sq. deg)</tr>') for (frac,skysize) in skyreses: htmlfile.write('<tr><td>%f<td>%f</tr>'%(frac,skysize)) htmlfile.write('</table>') htmlfile.write('Produced from '+str(size(pos,0))+' posterior samples.<br>') htmlfile.write('Samples read from %s<br>'%(opts.data[0])) htmlfile.write('<h4>Mean parameter estimates</h4>') htmlfile.write('<table border=1><tr>') paramline=reduce(lambda a,b:a+'<td>'+b,paramnames) htmlfile.write('<td>'+paramline+'<td>logLmax</tr><tr>') meanline=reduce(lambda a,b:a+'<td>'+b,meanStr) htmlfile.write('<td>'+meanline+'</tr>') if injection: htmlfile.write('<tr><th colspan='+str(len(paramnames))+'>Injected values</tr>') injline=reduce(lambda a,b:a+'<td>'+b,injvals) htmlfile.write('<td>'+injline+'<td></tr>') htmlfile.write('</table>') if injection: if skyinjectionconfidence: htmlfile.write('<p>Injection found at confidence interval %f in sky location</p>'%(skyinjectionconfidence)) else: htmlfile.write('<p>Injection not found in posterior bins in sky location!</p>') htmlfile.write('<h5>2D Marginal PDFs</h5><br>') htmlfile.write('<table border=1><tr>') if opts.skyres is not None: htmlfile.write('<td width=30%><img width=100% src="skymap.png"></td>') else: htmlfile.write('<td width=30%><img width=100% src="m1dist.png:></td>') row_switch=1 for par1,par2 in twoDplots: if row_switch==3: row_switch=0 plot_path=None if os.path.isfile(os.path.join(outdir,'2D',par1+'-'+par2+'_2Dkernel.png')): plot_path='2D/'+par1+'-'+par2+'_2Dkernel.png' elif os.path.isfile(os.path.join(outdir,'2D',par2+'-'+par1+'_2Dkernel.png')): plot_path='2D/'+par2+'-'+par1+'_2Dkernel.png' if plot_path: if row_switch==0: htmlfile.write('<tr>') htmlfile.write('<td width=30%><img width=100% src="'+plot_path+'"></td>') if row_switch==2: htmlfile.write('</tr>') row_switch+=1 if row_switch==2: htmlfile.write('<td></td></tr>') elif row_switch==1: htmlfile.write('<td></td><td></td></tr>') htmlfile.write('</table>') htmlfile.write('<br><a href="2D/">All 2D Marginal PDFs</a><hr><h5>1D marginal posterior PDFs</h5><br>') summary_file.add_section('1D ranking kde') summary_file.add_section('1D ranking bins') for param in oneDMenu: try: par_index=paramnames.index(param) i=par_index except ValueError: print "No input chain for %s, skipping 1D plot."%param continue myfig=plt.figure(figsize=(4,3.5),dpi=80) histbins=50 (n, bins, patches)=plt.hist(pos[:,i],histbins,normed='true') histbinSize=bins[1]-bins[0] try: gkde=stats.kde.gaussian_kde(pos[:,i]) pass except np.linalg.linalg.LinAlgError: print "Error occured generating plot for parameter %s: %s ! Trying next parameter."%(paramnames[i],'LinAlgError') continue print "Generating 1D plot for %s."%param ind=np.linspace(min(pos[:,i]),max(pos[:,i]),101) kdepdf=gkde.evaluate(ind) plt.plot(ind,kdepdf,label='density estimate') if injection and min(pos[:,i])<getinjpar(injection,i) and max(pos[:,i])>getinjpar(injection,i): plt.plot([getinjpar(injection,i),getinjpar(injection,i)],[0,max(kdepdf)],'r-.',scalex=False,scaley=False) rkde=gkde.integrate_box_1d(min(pos[:,i]),getinjpar(injection,i)) print "r of injected value of %s (kde) = %f"%(param,rkde) bins_to_inj=(getinjpar(injection,i)-bins[0])/histbinSize injbinh=int(floor(bins_to_inj)) injbin_frac=bins_to_inj-float(injbinh) rbins=(sum(n[0:injbinh-1])+injbin_frac*n[injbinh])*histbinSize print "r of injected value of %s (bins) = %f"%(param, rbins) summary_file.set('1D ranking kde',param,rkde) summary_file.set('1D ranking bins',param,rbins) plt.grid() plt.xlabel(paramnames[i]) plt.ylabel('Probability Density') myfig.savefig(os.path.join(outdir,paramnames[i]+ '.png')) myfig=plt.figure(figsize=(4,3.5),dpi=80) plt.plot(pos[:,i],'.') if injection and min(pos[:,i])<getinjpar(injection,i) and max(pos[:,i])>getinjpar(injection,i): plt.plot([0,len(pos)],[getinjpar(injection,i),getinjpar(injection,i)],'r-.') myfig.savefig(os.path.join(outdir,paramnames[i]+'_samps.png')) htmlfile.write('<img src="'+paramnames[i]+'.png"><img src="'+paramnames[i]+'_samps.png"><br>') htmlfile.write('<hr><br />Produced using cbcBayesSkyRes.py at '+strftime("%Y-%m-%d %H:%M:%S")) htmlfile.write('</BODY></HTML>') htmlfile.close() posfilename=os.path.join(outdir,'posterior_samples.dat') posfile=open(posfilename,'w') for row in pos: for i in row: posfile.write('%f\t'%(i)) posfile.write('\n') posfile.close() summary_file.write(summary_fo) | cbcBayesSkyRes(opts.outpath,opts.data,oneDMenu,twoDGreedyMenu,greedyRes,confidenceLevels,twoDplots,injfile=opts.injfile,eventnum=opts.eventnum,skyres=opts.skyres) | def plot2Dkernel(xdat,ydat,Nx,Ny): xax=np.linspace(min(xdat),max(xdat),Nx) yax=np.linspace(min(ydat),max(ydat),Ny) x,y=np.meshgrid(xax,yax) samp=array([xdat,ydat]) kde=stats.kde.gaussian_kde(samp) grid_coords = np.append(x.reshape(-1,1),y.reshape(-1,1),axis=1) z = kde(grid_coords.T) z = z.reshape(Nx,Ny) asp=xax.ptp()/yax.ptp()
self.output_cache = lal.CacheEntry(ifo, job.name.replace("remoteScan_"+job.tag_base+".sh","wpipeline").upper(), segments.segment(float(time), float(time)), "file://localhost/"+self.output_path+"/"+str(time)) | self.output_cache = lal.CacheEntry(ifo, job.name.replace("remoteScan_"+job.dir+"_"+job.tag_base+".sh","wpipeline").upper(), segments.segment(float(time), float(time)), "file://localhost/"+self.output_path+"/"+str(time)) | def __init__(self, dag, job, cp, opts, time, ifo, p_nodes=[], type="ht", variety="fg"): | 28cb4edc339b00e96a52afd22c9d77521a7a7003 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/28cb4edc339b00e96a52afd22c9d77521a7a7003/stfu_pipe.py |
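For reference, a minimal sketch of the cache entry the corrected line builds, mirroring the glue.lal call shape shown in this row with invented values (the description string is whatever job.name.replace(...).upper() yields, e.g. "WPIPELINE"):

from glue import lal
from glue import segments

time = 961545543.0  # hypothetical trigger time
entry = lal.CacheEntry("H1", "WPIPELINE",
                       segments.segment(float(time), float(time)),
                       "file://localhost/home/user/omega/961545543")
print entry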
period = float(binjjob.get_opts()["time-step"]) / math.pi | period = float(binjjob.get_opts()["time-step"]) | def make_binj_fragment(dag, seg, tag, offset, flow = None, fhigh = None): # one injection every time-step / pi seconds period = float(binjjob.get_opts()["time-step"]) / math.pi # adjust start time to be commensurate with injection period start = seg[0] - seg[0] % period + period * offset node = BurstInjNode(binjjob) node.set_start(start) node.set_end(seg[1]) if flow is not None: node.set_name("lalapps_binj_%s_%d_%d" % (tag, int(start), int(flow))) else: node.set_name("lalapps_binj_%s_%d" % (tag, int(start))) node.set_user_tag(tag) if flow is not None: node.add_macro("macroflow", flow) if fhigh is not None: node.add_macro("macrofhigh", fhigh) node.add_macro("macroseed", int(time.time() + start)) dag.add_node(node) return set([node]) | 0c24df4b078e9713ce74d1d04dffacbb1da1bd7c /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/0c24df4b078e9713ce74d1d04dffacbb1da1bd7c/power.py |
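A minimal sketch of the cadence arithmetic this row changes, with hypothetical option values; after the fix the injection period equals the time-step option itself rather than time-step/pi:

time_step = 100.0        # hypothetical value of the time-step option
offset = 0.5             # fraction of a period to shift the first injection
seg_start = 871147814    # hypothetical segment start

period = float(time_step)                        # the corrected line
start = seg_start - seg_start % period + period * offset
print start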
for offset_vector in self.offset_vectors: | for offset_vector in cafepacker.offset_vectors: | def split_bins(cafepacker, extentlimit): """ Split bins of stored in CafePacker until each bin has an extent no longer than extentlimit. """ # # loop overall the bins in cafepacker.bins. we pop items out of # cafepacker.bins and append new ones to the end so need a while loop # checking the extent of each bin in cafepacker.bins until all bins are # done being split # idx = 0 while idx < len(cafepacker.bins): if abs(cafepacker.bins[idx].extent) <= extentlimit: # # bin doesn't need splitting so move to next # idx += 1 continue # # split this bin so pop it out of the list # bigbin = cafepacker.bins.pop(idx) # # calculate the central time of the union of all the input # files in the bin # splittime = lsctables.LIGOTimeGPS(bigbin.extent[0] + (bigbin.extent[1] - bigbin.extent[0])/2) # # split the segmentlistdict at this time # splitseglistdict = segments.segmentlistdict() for key in bigbin.size.keys(): splitseglistdict[key] = segments.segmentlist([segments.segment(-segments.infinity(),splittime)]) # # create bins for the first and second halves # bin1 = LALCacheBin() bin1.size = bigbin.size & splitseglistdict bin1.extent = bigbin.extent & splitseglistdict.values()[0][0] bin2 = LALCacheBin() bin2.size = bigbin.size & ~splitseglistdict bin2.extent = bigbin.extent & (~splitseglistdict.values()[0])[0] # # remove unused keys from the smaller bins' segmentlistdicts # newsize = segments.segmentlistdict() for key in bin1.size.keys(): if len(bin1.size[key]): newsize[key] = bin1.size[key] bin1.size = newsize newsize = segments.segmentlistdict() for key in bin2.size.keys(): if len(bin2.size[key]): newsize[key] = bin2.size[key] bin2.size = newsize # # find which of the objects in bigbin.objects intersect the two # smaller bins # for cache in bigbin.objects: thisseglistdict = cache.to_segmentlistdict() coinc1 = 0 coinc2 = 0 for offset_vector in self.offset_vectors: # # loop over offset vectors updating the smaller # bins and the object we are checking # bin1.size.offsets.update(offset_vector) bin2.size.offsets.update(offset_vector) thisseglistdict.offsets.update(offset_vector) if not coinc1 and bin1.size.is_coincident(thisseglistdict, keys = offset_vector.keys()): # # object is coicident with bin1 # coinc1 = 1 bin1.objects.append(cache) if not coinc2 and bin2.size.is_coincident(thisseglistdict, keys = offset_vector.keys()): # # object is coincident with bin2 # coinc2 = 1 bin2.objects.append(cache) # # end loop if known to be coincident with both # bins # if coinc1 and coinc2: break # # clear offsets applied to object # thisseglistdict.offsets.clear() # # clear offsets applied to bins # bin1.size.offsets.clear() bin2.size.offsets.clear() # # append smaller bins to list of bins # cafepacker.bins.append(bin1) cafepacker.bins.append(bin2) # # do not increment idx as we popped the large bin out of # cafepacker.bins # # # sort the bins in cafepacker # cafepacker.bins.sort() return cafepacker | 058fb73d9a07d6c104f62ca589cbbc9413cf8f55 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/058fb73d9a07d6c104f62ca589cbbc9413cf8f55/ligolw_cafe.py |
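The removed line referenced self inside split_bins, which is a module-level function, so it would raise a NameError at runtime; the fix names the cafepacker argument explicitly. A standalone illustration with a hypothetical stand-in class:

class Packer(object):
    def __init__(self):
        self.offset_vectors = [{"H1": 0.0, "L1": 5.0}]

def split_bins(cafepacker):
    # must use the argument, not self, outside a method
    for offset_vector in cafepacker.offset_vectors:
        print offset_vector

split_bins(Packer())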
if ' ' in self.__options[c] and '$(macro' not in self.__options[c]: self.__options[c] = ''.join([ "'", self.__options[c], "'" ]) | def write_sub_file(self): """ Write a submit file for this Condor job. """ if not self.__log_file: raise CondorSubmitError, "Log file not specified." if not self.__err_file: raise CondorSubmitError, "Error file not specified." if not self.__out_file: raise CondorSubmitError, "Output file not specified." | 3dd1a9df24c5987d3256ef99ff254e16a9c7f94d /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/3dd1a9df24c5987d3256ef99ff254e16a9c7f94d/pipeline.py |
if ' ' in self.__short_options[c] and '$(macro' not in self.__short_options[c]: self.__short_options[c] = ''.join([ "'", self.__short_options[c], "'" ]) | def write_sub_file(self): """ Write a submit file for this Condor job. """ if not self.__log_file: raise CondorSubmitError, "Log file not specified." if not self.__err_file: raise CondorSubmitError, "Error file not specified." if not self.__out_file: raise CondorSubmitError, "Output file not specified." | 3dd1a9df24c5987d3256ef99ff254e16a9c7f94d /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/3dd1a9df24c5987d3256ef99ff254e16a9c7f94d/pipeline.py |
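Both additions above apply the same quoting rule to the long and short option values: wrap a value containing spaces in single quotes unless it carries a $(macro...) substitution that Condor must expand. A standalone sketch of that rule, as a hypothetical helper:

def quote_if_spaced(value):
    # mirrors the inline checks added above
    if ' ' in value and '$(macro' not in value:
        return "'%s'" % value
    return value

print quote_if_spaced("H1:LSC-STRAIN H2:LSC-STRAIN")  # gets quoted
print quote_if_spaced("$(macrochannel)")              # left untouched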
print >>dagfile, """\ | print >>dagfile, """ | def write_abstract_dag(self): """ Write all the nodes in the workflow to the DAX file. """ if not self.__dax_file_path: # this workflow is not dax-compatible, so don't write a dax return try: dagfile = open( self.__dax_file_path, 'w' ) except: raise CondorDAGError, "Cannot open file " + self.__dag_file_path | a7bd5749d7e3884dc25892a5f5265bab9a159fab /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/a7bd5749d7e3884dc25892a5f5265bab9a159fab/pipeline.py |
template = """ <profile namespace="condor" key="universe">%s</profile>""" | template = """ <profile namespace="condor" key="universe">%s</profile>\n""" | def write_abstract_dag(self): """ Write all the nodes in the workflow to the DAX file. """ if not self.__dax_file_path: # this workflow is not dax-compatible, so don't write a dax return try: dagfile = open( self.__dax_file_path, 'w' ) except: raise CondorDAGError, "Cannot open file " + self.__dag_file_path | a7bd5749d7e3884dc25892a5f5265bab9a159fab /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/a7bd5749d7e3884dc25892a5f5265bab9a159fab/pipeline.py |
print >>dagfile, xml | print >>dagfile, xml, | def write_abstract_dag(self): """ Write all the nodes in the workflow to the DAX file. """ if not self.__dax_file_path: # this workflow is not dax-compatible, so don't write a dax return try: dagfile = open( self.__dax_file_path, 'w' ) except: raise CondorDAGError, "Cannot open file " + self.__dag_file_path | a7bd5749d7e3884dc25892a5f5265bab9a159fab /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/a7bd5749d7e3884dc25892a5f5265bab9a159fab/pipeline.py |
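The three rows above tune Python 2 print-statement details in write_abstract_dag: the opening triple quote loses its line-continuation backslash, the profile template gains an explicit \n, and a trailing comma stops print from appending a second newline. A small demonstration of the trailing-comma behaviour, assuming nothing beyond the standard library:

import sys

template = """  <profile namespace="condor" key="universe">%s</profile>\n"""
xml = template % "vanilla"
print >>sys.stdout, xml,   # trailing comma: no extra newline after the \n above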
from glue import LDRdataFindClient if isinstance( filename, LDRdataFindClient.lfnlist ): self.add_var_opt('glob-frame-data',' ') for lfn in filename: a, b, c, d = lfn.split('.')[0].split('-') t_start = int(c) t_end = int(c) + int(d) if (t_start <= (self.__data_end+int(d)+1) and t_end >= (self.__data_start-int(d)-1)): self.add_input_file(lfn) self.add_var_opt('frame-type',b) else: raise CondorDAGNodeError, "Unknown LFN cache format" | raise CondorDAGNodeError, "Unknown LFN cache format" | def set_cache(self,filename): """ Set the LAL frame cache to to use. The frame cache is passed to the job with the --frame-cache argument. @param filename: calibration file to use. """ if isinstance( filename, str ): # the name of a lal cache file created by a datafind node self.add_var_opt('frame-cache', filename) self.add_input_file(filename) else: # check we have an LFN list from glue import LDRdataFindClient if isinstance( filename, LDRdataFindClient.lfnlist ): self.add_var_opt('glob-frame-data',' ') # only add the LFNs that actually overlap with this job # FIXME this doesnt handle edge cases quite right for lfn in filename: a, b, c, d = lfn.split('.')[0].split('-') t_start = int(c) t_end = int(c) + int(d) if (t_start <= (self.__data_end+int(d)+1) and t_end >= (self.__data_start-int(d)-1)): self.add_input_file(lfn) # set the frame type based on the LFNs returned by datafind self.add_var_opt('frame-type',b) else: raise CondorDAGNodeError, "Unknown LFN cache format" | a7bd5749d7e3884dc25892a5f5265bab9a159fab /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/a7bd5749d7e3884dc25892a5f5265bab9a159fab/pipeline.py |
def parse(self): | def parse(self,type_regex=None): | def parse(self): """ Each line of the frame cache file is like the following: | a7bd5749d7e3884dc25892a5f5265bab9a159fab /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/a7bd5749d7e3884dc25892a5f5265bab9a159fab/pipeline.py |
count = 0 countIncluded = 0 | def parse(self): """ Each line of the frame cache file is like the following: | a7bd5749d7e3884dc25892a5f5265bab9a159fab /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/a7bd5749d7e3884dc25892a5f5265bab9a159fab/pipeline.py |
count += 1 | if type_filter and type_filter.search(line) is None: continue | def parse(self): """ Each line of the frame cache file is like the following: | a7bd5749d7e3884dc25892a5f5265bab9a159fab /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/a7bd5749d7e3884dc25892a5f5265bab9a159fab/pipeline.py |
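Note that the new parse signature takes an optional type_regex while the inserted guard tests type_filter, so presumably the full patch compiles the regex into type_filter earlier in the method. A self-contained sketch of the intended filtering, with made-up cache lines:

import re

def parse(lines, type_regex=None):
    # assumed glue code: compile the regex the new argument supplies
    type_filter = re.compile(type_regex) if type_regex else None
    kept = []
    for line in lines:
        if type_filter and type_filter.search(line) is None:
            continue
        kept.append(line)
    return kept

print parse(["H H1_RDS_C03_L2 944100000 64 /frames/a.gwf",
             "L L1_DMT_C00 944100000 64 /frames/b.gwf"], "RDS")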
msg = "The combination %s is not unique in the frame cache file" % str(key) | msg = "The combination %s is not unique in the frame cache file" \ % str(key) | def parse(self): """ Each line of the frame cache file is like the following: | a7bd5749d7e3884dc25892a5f5265bab9a159fab /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/a7bd5749d7e3884dc25892a5f5265bab9a159fab/pipeline.py |
countIncluded += 1 f.close() cache['gwf'] = gwfDict | f.close() cache['gwf'] = gwfDict | def parse(self): """ Each line of the frame cache file is like the following: | a7bd5749d7e3884dc25892a5f5265bab9a159fab /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/a7bd5749d7e3884dc25892a5f5265bab9a159fab/pipeline.py |
else: self.__lsync_cache = None | def __init__(self,cache_dir,log_dir,config_file,dax=0,lsync_cache_file=None): """ @param cache_dir: the directory to write the output lal cache files to. @param log_dir: the directory to write the stderr file to. @param config_file: ConfigParser object containing the path to the LSCdataFind executable in the [condor] section and a [datafind] section from which the LSCdataFind options are read. """ self.__executable = config_file.get('condor','datafind') self.__universe = 'local' CondorDAGJob.__init__(self,self.__universe,self.__executable) AnalysisJob.__init__(self,config_file) self.__cache_dir = cache_dir self.__dax = dax self.__config_file = config_file if lsync_cache_file: self.__lsync_cache = LsyncCache(lsync_cache_file) self.__lsync_cache.parse() else: self.__lsync_cache = None | a7bd5749d7e3884dc25892a5f5265bab9a159fab /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/a7bd5749d7e3884dc25892a5f5265bab9a159fab/pipeline.py |
if certFile and keyFile: h = httplib.HTTPSConnection(server, key_file = keyFile, cert_file = certFile) | if cert and key: h = httplib.HTTPSConnection(server, key_file = key, cert_file = cert) | def get_output(self): """ Return the output file, i.e. the file containing the frame cache data. or the files itself as tuple (for DAX) """ if self.__dax: # we are a dax running in grid mode so we need to resolve the # frame file metadata into LFNs so pegasus can query the RLS if self.__lfn_list is None: | a7bd5749d7e3884dc25892a5f5265bab9a159fab /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/a7bd5749d7e3884dc25892a5f5265bab9a159fab/pipeline.py |
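The fix makes the locals match the function's actual cert and key names; httplib.HTTPSConnection takes the client credentials through its key_file and cert_file keywords, as the corrected row shows. A sketch with placeholder values:

import httplib

server = "datafind.example.org"       # hypothetical host
cert = key = "/tmp/x509up_u1000"      # hypothetical grid proxy
if cert and key:
    h = httplib.HTTPSConnection(server, key_file=key, cert_file=cert)
else:
    h = httplib.HTTPSConnection(server)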
def get_coincs(self, eventlists, event_comparefunc, thresholds, verbose = False): # # has this node already been visited? if so, return the # answer we already know # | 71af8d610e523e41824de1730bcf4bdab75f12fc /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/71af8d610e523e41824de1730bcf4bdab75f12fc/snglcoinc.py |
(self.coint.type,sngl.ifo,sngl.ifo,timeString) | (self.coinc.type,sngl.ifo,sngl.ifo,timeString) | def get_analyzeQscan_RDS(self): """ """ #analyseQscan.py_FG_RDS_full_data/H1-analyseQscan_H1_931176926_116_rds-unspecified-gpstime.cache cacheList=list() cacheFiles=list() for sngl in self.coinc.sngls: timeString=str(float(sngl.time)).replace(".","_") myCacheMask="*%s*/%s-analyseQscan_%s_%s_rds*.cache"%\ (self.coint.type,sngl.ifo,sngl.ifo,timeString) #Ignore the files with seis_rds in them for x in fnmatch.filter(self.fsys,myCacheMask): if not x.__contains__('seis_rds'): cacheList.append(x) #Read the cache file or files cacheFiles=self.__readCache__(cacheList) return cacheFiles | 04ee230753c51cc2fa6261a2cf36152529f9394e /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/04ee230753c51cc2fa6261a2cf36152529f9394e/makeCheckListWiki.py |
sys.stdout.write("Found: %s\n",publication_directory) | sys.stdout.write("Found: %s\n" %(publication_directory)) | def __init__(self,type=None,ifo=None,time=None,snr=None,chisqr=None,mass1=None,mass2=None,mchirp=None): """ """ self.type=str(type) self.ifo=str(ifo) self.time=float(time) self.snr=float(snr) self.chisqr=float(chisqr) self.mass1=float(mass1) self.mass2=float(mass2) self.mchirp=float(mchirp) | 04ee230753c51cc2fa6261a2cf36152529f9394e /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/04ee230753c51cc2fa6261a2cf36152529f9394e/makeCheckListWiki.py |
sys.stdout.write("Found: %s\n",publication_url) | sys.stdout.write("Found: %s\n" %(publication_url)) | def __init__(self,type=None,ifo=None,time=None,snr=None,chisqr=None,mass1=None,mass2=None,mchirp=None): """ """ self.type=str(type) self.ifo=str(ifo) self.time=float(time) self.snr=float(snr) self.chisqr=float(chisqr) self.mass1=float(mass1) self.mass2=float(mass2) self.mchirp=float(mchirp) | 04ee230753c51cc2fa6261a2cf36152529f9394e /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/04ee230753c51cc2fa6261a2cf36152529f9394e/makeCheckListWiki.py |
msg = "%s does not have permission to update row entries" % subject msg += " created by %s (process_id %s)" % (dn, known_proc[pid][0]) raise ServerHandlerException, msg | msg = "\"%s\" does not match dn in existing row entries: " % subject msg += "%s (process_id %s)" % (dn, known_proc[pid][0]) logger.warn(msg) | uniq_def = (row[ifos_col],row[name_col],row[vers_col]) | 472da4ef86a559c3f653bcf8dc9b20d5605d9c44 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/472da4ef86a559c3f653bcf8dc9b20d5605d9c44/LDBDWServer.py |
return np.fmin(c, 2 * LAL_PI - c) | return np.where(c < LAL_PI, c, 2 * LAL_PI - c) | def _abs_diff(c): """ For some angular difference c = |a - b| in radians, find the magnitude of the difference, taking into account the wrap-around at 2*pi. """ c = abs(c) % (2 * LAL_PI) return np.fmin(c, 2 * LAL_PI - c) | eee2a7f04265d53a4933a8bcfbb49cb47bd30f44 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/eee2a7f04265d53a4933a8bcfbb49cb47bd30f44/sphericalutils.py |
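Both forms return the wrapped angular separation once c has been reduced modulo 2*pi; the move to np.where presumably keeps _abs_diff working on NumPy releases that predate np.fmin. A quick standalone check:

import numpy as np

LAL_PI = np.pi
c = np.abs(np.array([0.1, 3.5, 6.0])) % (2 * LAL_PI)
print np.where(c < LAL_PI, c, 2 * LAL_PI - c)   # [ 0.1, 2.783..., 0.283...]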
pol PDF: kappa / (2 * np.sinh(kappa)) * np.exp(kappa * np.cos(theta)) * np.sin(theta)) az PDF: uniform(0, 2*pi) | References: * http://en.wikipedia.org/wiki/Von_Mises–Fisher_distribution * http://arxiv.org/pdf/0902.0737v1 (states the Rayleigh limit) | def fisher_rvs(mu, sigma, size=1): """ Return a random (polar, azimuthal) angle drawn from the Fisher distribution. Assume that the concentration parameter (kappa) is large so that we can use a Rayleigh distribution about the north pole and rotate it to be centered at the (polar, azimuthal) coordinate mu. Assume kappa = 1 / sigma**2 pol PDF: kappa / (2 * np.sinh(kappa)) * np.exp(kappa * np.cos(theta)) * np.sin(theta)) az PDF: uniform(0, 2*pi) """ rayleigh_rv = \ np.array((np.random.rayleigh(scale=sigma, size=size), np.random.uniform(low=0, high=2*LAL_PI, size=size)))\ .reshape((2, size)).T # guarantee 2D and transpose a, b = new_z_to_euler(mu) return rotate_euler(rayleigh_rv, a, b, 0) | eee2a7f04265d53a4933a8bcfbb49cb47bd30f44 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/eee2a7f04265d53a4933a8bcfbb49cb47bd30f44/sphericalutils.py |
class InspiralAnalysisJob(pipeline.CondorDAGJob, pipeline.AnalysisJob): | class InspiralAnalysisJob(pipeline.AnalysisJob, pipeline.CondorDAGJob): | def __init__(self, args=None): self.args = args | a1ebb3ed6454d4f4a8100deaa2f286012d72028d /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/a1ebb3ed6454d4f4a8100deaa2f286012d72028d/inspiral.py |
class InspiralAnalysisNode(pipeline.CondorDAGNode, pipeline.AnalysisNode): | class InspiralAnalysisNode(pipeline.AnalysisNode, pipeline.CondorDAGNode): | def __init__(self, cp, dax = False): """ @cp: a ConfigParser object from which the options are read. """ exec_name = 'inspinjfind' sections = ['inspinjfind'] extension = 'xml' InspiralAnalysisJob.__init__(self, cp, sections, exec_name, extension, dax) self.add_condor_cmd('getenv', 'True') # overwrite standard log file names self.set_stdout_file('logs/' + exec_name + '-$(cluster)-$(process).out') self.set_stderr_file('logs/' + exec_name + '-$(cluster)-$(process).err') | a1ebb3ed6454d4f4a8100deaa2f286012d72028d /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/a1ebb3ed6454d4f4a8100deaa2f286012d72028d/inspiral.py |
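Both rows swap the base-class order, which changes Python's method resolution order so that AnalysisJob/AnalysisNode attributes win over the Condor base classes. A toy illustration with stub classes (not the real pipeline classes) showing why the leftmost base takes precedence:

class CondorDAGJob(object):
    def flavour(self):
        return "condor"

class AnalysisJob(object):
    def flavour(self):
        return "analysis"

class InspiralAnalysisJob(AnalysisJob, CondorDAGJob):
    pass

print InspiralAnalysisJob().flavour()   # "analysis": leftmost base wins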
def fisher_rvs(mu, sigma, size=None): | def fisher_rvs(mu, sigma, size=1): | def fisher_rvs(mu, sigma, size=None): """ Return a random (polar, azimuthal) angle drawn from the Fisher distribution. Assume that the concentration parameter (kappa) is large so that we can use a Rayleigh distribution about the north pole and rotate it to be centered at the (polar, azimuthal) coordinate mu. Assume kappa = 1 / sigma**2 pol PDF: kappa / (2 * np.sinh(kappa)) * np.exp(kappa * np.cos(theta)) * np.sin(theta)) az PDF: uniform(0, 2*pi) """ rayleigh_rv = \ np.array((np.random.rayleigh(scale=sigma, size=size), np.random.uniform(low=0, high=2*LAL_PI, size=size)))\ .reshape((2, size)).T # guarantee 2D and transpose a, b = new_z_to_euler(mu) return rotate_euler(rayleigh_rv, a, b, 0) | d4e5cf55a233130a9e8e244eb51db7232816cc27 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/d4e5cf55a233130a9e8e244eb51db7232816cc27/sphericalutils.py |
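With size=None, numpy's samplers return 0-d results and the later reshape((2, size)) call fails, so size=1 is the safe default. A minimal check of the array plumbing in fisher_rvs:

import numpy as np

size = 1
rv = np.array((np.random.rayleigh(scale=0.1, size=size),
               np.random.uniform(low=0.0, high=2 * np.pi, size=size)))
print rv.reshape((2, size)).T.shape   # (1, 2), as the docstring promises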
return None | raise Warning, "input to __patchFrameTypeDef__ included a \ gps time argument specified as None\n" return frametype | def __patchFrameTypeDef__(frametype=None,ifo=None,gpstime=None): """ Temporary patch function, to adjust specfied frame type used in searching the filesystem for files to display in followup. """ if frametype == None: return None if gpstime == None: return None if ifo == None: return None endOfS5=int(875232014) new=None if int(gpstime)<=endOfS5: if not frametype.lower().startswith(ifo.lower()): orig=frametype new=ifo+"_"+frametype return new | 90b92f8357c22c5ae1a2f077fa4f8667b43d588a /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/90b92f8357c22c5ae1a2f077fa4f8667b43d588a/makeCheckListWiki.py |
return None | raise Warning, "input to __patchFrameTypeDef__ included an \ ifo argument specified as None\n" return frametype | def __patchFrameTypeDef__(frametype=None,ifo=None,gpstime=None): """ Temporary patch function, to adjust specfied frame type used in searching the filesystem for files to display in followup. """ if frametype == None: return None if gpstime == None: return None if ifo == None: return None endOfS5=int(875232014) new=None if int(gpstime)<=endOfS5: if not frametype.lower().startswith(ifo.lower()): orig=frametype new=ifo+"_"+frametype return new | 90b92f8357c22c5ae1a2f077fa4f8667b43d588a /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/90b92f8357c22c5ae1a2f077fa4f8667b43d588a/makeCheckListWiki.py |
new=None | def __patchFrameTypeDef__(frametype=None,ifo=None,gpstime=None): """ Temporary patch function, to adjust specfied frame type used in searching the filesystem for files to display in followup. """ if frametype == None: return None if gpstime == None: return None if ifo == None: return None endOfS5=int(875232014) new=None if int(gpstime)<=endOfS5: if not frametype.lower().startswith(ifo.lower()): orig=frametype new=ifo+"_"+frametype return new | 90b92f8357c22c5ae1a2f077fa4f8667b43d588a /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/90b92f8357c22c5ae1a2f077fa4f8667b43d588a/makeCheckListWiki.py |
orig=frametype new=ifo+"_"+frametype return new | return ifo+"_"+frametype return frametype | def __patchFrameTypeDef__(frametype=None,ifo=None,gpstime=None): """ Temporary patch function, to adjust specfied frame type used in searching the filesystem for files to display in followup. """ if frametype == None: return None if gpstime == None: return None if ifo == None: return None endOfS5=int(875232014) new=None if int(gpstime)<=endOfS5: if not frametype.lower().startswith(ifo.lower()): orig=frametype new=ifo+"_"+frametype return new | 90b92f8357c22c5ae1a2f077fa4f8667b43d588a /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/90b92f8357c22c5ae1a2f077fa4f8667b43d588a/makeCheckListWiki.py |
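Taken together, the rows above make __patchFrameTypeDef__ warn and fall back to the given frametype instead of silently returning None, and drop the orig/new temporaries. A condensed, hypothetical standalone version of the patched behaviour (the S5 boundary is the value quoted in the context):

def patch_frame_type(frametype, ifo, gpstime, end_of_s5=875232014):
    # warn-and-return in the real patch; simplified to a plain return here
    if frametype is None or ifo is None or gpstime is None:
        return frametype
    if int(gpstime) <= end_of_s5 and not frametype.lower().startswith(ifo.lower()):
        return ifo + "_" + frametype
    return frametype

print patch_frame_type("RDS_R_L1", "H1", 871147814)   # H1_RDS_R_L1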
fileListing.append(entry) | finalList.append(entry) | def __readCache__(self,cacheListing=list()): """ Simple method to read in a cache or list of cache files and return a list of files or an empty list if nothing found. It uses the pathing information from the files passed via cacheListing to aid in our filesystem search. """ #Open the cache entry and search for those entries finalList=list() for entry in cacheListing: fileListing=list() #Cache files listed themselves comment out following line fileListing.append(entry) fileListing.extend([x.rstrip("\n") for x in file(entry)]) #PATCH START to add in the z distribution files for fname in fileListing: if ".html" in fname: zFile=fname.replace(".html",".txt") fileListing.append(zFile) #PATCH END #Pathing info pathingInfo=os.path.dirname(entry) for thisFile in fileListing: #Search filesystem for file full path finalList.extend(fnmatch.filter(self.fsys,"*%s*%s"%(pathingInfo,thisFile))) #Look for potential matching thumbnails if thisFile.endswith(".png"): finalList.extend(fnmatch.filter(self.fsys,"*%s"%thisFile.replace(".png","?thumb?png"))) return finalList | 2ecdb740cf7779b329f5e5208424d8c61de2993d /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/2ecdb740cf7779b329f5e5208424d8c61de2993d/makeCheckListWiki.py
myMask="*%s*%s-findVetos_%s_%s.wiki"%\ | myMask="*%s/*%s-findVetos_%s_%s.wiki"%\ | def get_findVetos(self): tmpList=list() #H1,H2,L1-findFlags_H1,H2,L1_831695156.714.wiki #instrument,ifos ifoString="" for i in range(0,len(self.coinc.ifos)/2):ifoString=ifoString+"%s,"%self.coinc.ifos[2*i:2*i+2] ifoString=ifoString.rstrip(",") insString="" for i in range(0,len(self.coinc.instruments)/2):insString=insString+"%s,"%self.coinc.instruments[2*i:2*i+2] insString=insString.rstrip(",") myMask="*%s*%s-findVetos_%s_%s.wiki"%\ (self.coinc.type,insString,ifoString,self.coinc.time) tmpList.extend(fnmatch.filter(self.fsys,myMask)) return tmpList | 2ecdb740cf7779b329f5e5208424d8c61de2993d /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/2ecdb740cf7779b329f5e5208424d8c61de2993d/makeCheckListWiki.py |
myMask="*%s*%s-findFlags_%s_%s.wiki"%\ | myMask="*%s/*%s-findFlags_%s_%s.wiki"%\ | def get_findFlags(self): """ """ tmpList=list() #H1,H2,L1-findFlags_H1,H2,L1_831695156.714.wiki #instrument,ifos ifoString="" for i in range(0,len(self.coinc.ifos)/2):ifoString=ifoString+"%s,"%self.coinc.ifos[2*i:2*i+2] ifoString=ifoString.rstrip(",") insString="" for i in range(0,len(self.coinc.instruments)/2):insString=insString+"%s,"%self.coinc.instruments[2*i:2*i+2] insString=insString.rstrip(",") myMask="*%s*%s-findFlags_%s_%s.wiki"%\ (self.coinc.type,insString,ifoString,self.coinc.time) tmpList.extend(fnmatch.filter(self.fsys,myMask)) return tmpList | 2ecdb740cf7779b329f5e5208424d8c61de2993d /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/2ecdb740cf7779b329f5e5208424d8c61de2993d/makeCheckListWiki.py |
(self.coint.type,sngl.ifo,sngl.ifo,timeString) | (self.coinc.type,sngl.ifo,sngl.ifo,timeString) | def get_analyzeQscan_RDS(self): """ """ #analyseQscan.py_FG_RDS_full_data/H1-analyseQscan_H1_931176926_116_rds-unspecified-gpstime.cache cacheList=list() cacheFiles=list() for sngl in self.coinc.sngls: timeString=str(float(sngl.time)).replace(".","_") myCacheMask="*%s*/%s-analyseQscan_%s_%s_rds*.cache"%\ (self.coint.type,sngl.ifo,sngl.ifo,timeString) #Ignore the files with seis_rds in them for x in fnmatch.filter(self.fsys,myCacheMask): if not x.__contains__('seis_rds'): cacheList.append(x) #Read the cache file or files cacheFiles=self.__readCache__(cacheList) return cacheFiles | 2ecdb740cf7779b329f5e5208424d8c61de2993d /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/2ecdb740cf7779b329f5e5208424d8c61de2993d/makeCheckListWiki.py |
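This coint → coinc attribute fix appears again here under a second commit hash; the surrounding method builds an fnmatch mask and then drops the seis_rds caches. A self-contained sketch of that selection with invented paths:

import fnmatch

fsys = ["analyseQscan.py_FG_RDS_full_data/H1-analyseQscan_H1_931176926_116_rds-unspecified-gpstime.cache",
        "analyseQscan.py_FG_seis_rds_full_data/H1-analyseQscan_H1_931176926_116_rds-unspecified-gpstime.cache"]
mask = "*%s*/%s-analyseQscan_%s_%s_rds*.cache" % ("FG", "H1", "H1", "931176926_116")
hits = [x for x in fnmatch.filter(fsys, mask) if 'seis_rds' not in x]
print hits   # only the non-seismic cache survives the filter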
def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # # Check to see if wiki file with name already exists # maxCount=0 while os.path.exists(wikiFilename) and maxCount < 10: sys.stdout.write("File %s already exists.\n"%\ os.path.split(wikiFilename)[1]) wikiFilename=wikiFilename+".wiki" maxCount=maxCount+1 # #Create the wikipage object etc # wikiPage=wiki(wikiFilename) # # Create top two trigger params tables # cTable=wikiPage.wikiTable(2,9) cTable.data=[ ["Trigger Type", "Rank", "FAR", "SNR", "IFOS(Coinc)", "Instruments(Active)", "Coincidence Time (s)", "Total Mass (mSol)", "Chirp Mass (mSol)" ], ["%s"%(wikiCoinc.type), "%s"%(wikiCoinc.rank), "%s"%(wikiCoinc.far), "%s"%(wikiCoinc.snr), "%s"%(wikiCoinc.ifos), "%s"%(wikiCoinc.instruments), "%s"%(wikiCoinc.time), "%s"%(wikiCoinc.mass), "%s"%(wikiCoinc.mchirp) ] ] pTable=wikiPage.wikiTable(len(wikiCoinc.sngls_in_coinc())+1,7) pTable.data[0]=[ "IFO", "GPS Time(s)", "SNR", "CHISQR", "Mass 1", "Mass 2", "Chirp Mass" ] for row,cSngl in enumerate(wikiCoinc.sngls_in_coinc()): pTable.data[row+1]=[ "%s"%(cSngl.ifo), "%s"%(cSngl.time), "%s"%(cSngl.snr), "%s"%(cSngl.chisqr), "%s"%(cSngl.mass1), "%s"%(cSngl.mass2), "%s"%(cSngl.mchirp) ] #Write the tables into the Wiki object wikiPage.putText("Coincident Trigger Event Information: %s\n"\ %(stfu_pipe.gpsTimeToReadableDate(wikiCoinc.time))) wikiPage.insertTable(cTable) wikiPage.putText("Corresponding Coincident Single IFO Trigger Information\n") wikiPage.insertTable(pTable) #Generate a table of contents to appear after candidate params table wikiPage.tableOfContents(3) #Begin including each checklist item as section with subsections wikiPage.section("Follow-up Checklist") #Put each checklist item wikiPage.subsection("Checklist Summary") wikiPage.subsubsection("Does this candidate pass this checklist?") wikiPage.subsubsection("Answer") wikiPage.subsubsection("Relevant Information and Comments") wikiPage.insertHR() # #First real checklist item wikiPage.subsection("#0 False Alarm Probability") wikiPage.subsubsection("Question") wikiPage.putText("What is the false alarm rate associated with this candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") farTable=wikiPage.wikiTable(2,1) farTable.setTableStyle("background-color: yellow; text-align center;") farTable.data[0][0]="False Alarm Rate" farTable.data[1][0]="%s"%(wikiCoinc.far) wikiPage.insertTable(farTable) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#1 Data Quality Flags") wikiPage.subsubsection("Question") wikiPage.putText("Can the data quality flags coincident with this candidate be safely disregarded?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPath=os.path.split(wikiFilename)[0] dqFileList=wikiFileFinder.get_findFlags() if len(dqFileList) != 1: sys.stdout.write("Warning: DQ flags data product import problem.\n") print "Found %i files."%len(dqFileList) for mf in dqFileList: print mf for myFile in dqFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist 
item wikiPage.subsection("#2 Veto Investigations") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate survive the veto investigations performed at its time?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") vetoFileList=wikiFileFinder.get_findVetos() if len(vetoFileList) != 1: sys.stdout.write("Warning: Veto flags data product import problem.\n") for myFile in vetoFileList:print myFile for myFile in vetoFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#3 IFO Status") wikiPage.subsubsection("Question") wikiPage.putText("Are the interferometers operating normally with a reasonable level of sensitivity around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") #Add link to Daily Stats if wikiCoinc.time <= endOfS5: statsLink=wikiPage.makeExternalLink("http://blue.ligo-wa.caltech.edu/scirun/S5/DailyStatistics/",\ "S5 Daily Stats Page") else: statsLink="This should be a link to S6 Daily Stats!\n" wikiPage.putText(statsLink) #Link figures of merit #Get link for all members of wikiCoinc wikiPage.putText("Figures of Merit\n") if wikiCoinc.time > endOfS5: fomLinks=dict() elems=0 for wikiSngl in wikiCoinc.sngls: if not(wikiSngl.ifo.upper().rstrip().lstrip() == 'V1'): fomLinks[wikiSngl.ifo]=stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo) elems=elems+len(fomLinks[wikiSngl.ifo]) else: for myLabel,myLink,myThumb in stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo): wikiPage.putText("%s\n"%(wikiPage.makeExternalLink(myLink,myLabel))) cols=4 rows=(elems/3)+1 fTable=wikiPage.wikiTable(rows,cols) fTable.data[0]=["IFO,Shift","FOM1","FOM2","FOM3"] currentIndex=0 for myIFOKey in fomLinks.keys(): for label,link,thumb in fomLinks[myIFOKey]: myRow=currentIndex/int(3)+1 myCol=currentIndex%int(3)+1 fTable.data[myRow][0]=label thumbURL=thumb fTable.data[myRow][myCol]="%s"%(wikiPage.linkedRemoteImage(thumb,link)) currentIndex=currentIndex+1 wikiPage.insertTable(fTable) else: wikiPage.putText("Can not automatically fetch S5 FOM links.") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#4 Candidate Appearance") wikiPage.subsubsection("Question") wikiPage.putText("Do the Qscan figures show what we would expect for a gravitational-wave event?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() for sngl in wikiCoinc.sngls: frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'hoft') indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*/%s/*/%s/*index.html"%(frametype,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened.png"\ %(sngl.time,channelName)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened?thumb.png"\ %(sngl.time,channelName)) # #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] if 
len(indexDict[sngl.ifo]) < 1: wikiPage.putText("GW data channel scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >= 1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >= 1 if enoughImage and enoughIndex: wikiPage.insertQscanTable(imageDict,\ thumbDict,\ indexDict) else: sys.stdout.write("Warning: Candidate appearance plot import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#5 Seismic Plots") wikiPage.subsubsection("Question") wikiPage.putText("Is the seismic activity insignificant around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() for sngl in wikiCoinc.sngls_in_coinc(): indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*index.html"%(sngl.ifo,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)) #Search for corresponding Omega summary.txt file zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if (len(zValueFiles) > 0): for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse only keeping SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. ...skipping...\n"%sngl.ifo) #Search for analyzeQscan files #/L1-analyseQscan_L1_932797512_687_seis_rds_L1_SEI-ETMX_X_z_scat-unspecified-gpstime.png timeString=str(float(sngl.time)).replace(".","_") zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*_%s_%s_*.txt"%(sngl.ifo,timeString)) indexDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*_%s_%s_*.html"%(sngl.ifo,timeString)) thumbDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime_thumb.png"\ %(sngl.ifo,timeString)) imageDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime.png"\ %(sngl.ifo,timeString)) #Process zValue ranking file if found for IFO zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) #Reparse keeping SEI channels tmpList=list() for chan in zValueDictAQ[sngl.ifo]: if "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Analyze Qscan Z ranking file not found for %s. 
...skipping...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Seismic scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: Seismic plots product import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#6 Other environmental causes") wikiPage.subsubsection("Question") wikiPage.putText("Were the environmental disturbances (other than seismic) insignificant at the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() #Select only PEM channels for sngl in wikiCoinc.sngls_in_coinc(): imageDict[sngl.ifo]=list() indexDict[sngl.ifo]=list() thumbDict[sngl.ifo]=list() for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) #Search for corresponding Omega summary.txt file zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse only keeping PEM and not SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if "PEM" in chan[0] and not "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. 
...skipping...\n"%sngl.ifo) #Select associated analyzeQscans imageDictAQ[sngl.ifo]=list() indexDictAQ[sngl.ifo]=list() thumbDictAQ[sngl.ifo]=list() timeString=str(float(sngl.time)).replace(".","_") for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)) zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) for chan in zValueDictAQ[sngl.ifo]: if "PEM" in chan[0] and not "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Analyze Qscan Z ranking file not found for %s. ...skipping...\n"%sngl.ifo) #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime_thumb.png #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime.png for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(imageDict[sngl.ifo]) < 1: wikiPage.putText("PEM scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: PEM plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#7 Auxiliary degree of freedom") wikiPage.subsubsection("Question") wikiPage.putText("Were the auxiliary channel transients coincident with the candidate insignificant?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() #Select only AUX channels for sngl in wikiCoinc.sngls: imageDict[sngl.ifo]=list() indexDict[sngl.ifo]=list() thumbDict[sngl.ifo]=list() for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in 
fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse NOT keeping PEM or SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if not "PEM" in chan[0] or not "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. ...skipping...\n"%sngl.ifo) #Select associated analyzeQscans imageDictAQ[sngl.ifo]=list() indexDictAQ[sngl.ifo]=list() thumbDictAQ[sngl.ifo]=list() timeString=str(float(sngl.time)).replace(".","_") #H1-analyseQscan_H1_931176926_116_rds-unspecified-gpstime.html for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)) #Process zValue ranking file if found for IFO zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) #Reparse NOT keeping PEM or SEI channels tmpList=list() for chan in zValueDictAQ[sngl.ifo]: if not "PEM" in chan[0] or not "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Z ranking file not found for %s. ...skipping...\n"%sngl.ifo) #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime_thumb.png #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime.png for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Other scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: AUX plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#8 Electronic Log Book") wikiPage.subsubsection("Question") 
wikiPage.putText("Were the instruments behaving normally according to the comments posted by the sci-mons or the operators in the e-log?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiLinkLHOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"H1"), "Hanford eLog") wikiLinkLLOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"L1"), "Livingston eLog") wikiPage.putText("%s\n\n%s\n\n"%(wikiLinkLHOlog,wikiLinkLLOlog)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#9 Glitch Report") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the weekly glitch report?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") if int(wikiCoinc.time) >= endOfS5: wikiLinkGlitch=wikiPage.makeExternalLink( "https://www.lsc-group.phys.uwm.edu/twiki/bin/view/DetChar/GlitchStudies", "Glitch Reports for S6" ) else: wikiLinkGlitch=wikiPage.makeExternalLink( "http://www.lsc-group.phys.uwm.edu/glitch/investigations/s5index.html#shift", "Glitch Reports for S5" ) wikiPage.putText("%s\n"%(wikiLinkGlitch)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#10 Snr versus time") wikiPage.subsubsection("Question") wikiPage.putText("Is this trigger significant in a SNR versus time plot of all triggers in its analysis chunk?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#11 Parameters of the candidate") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate have a high likelihood of being a gravitational-wave according to its parameters?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Effective Distance Ratio Test\n") effDList=wikiFileFinder.get_effDRatio() if len(effDList) != 1: sys.stdout.write("Warning: Effective Distance Test import problem.\n") for myFile in effDList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#12 Snr and Chisq") wikiPage.subsubsection("Question") wikiPage.putText("Are the SNR and CHISQ time series consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") # #Put plots SNR and Chi sqr # indexList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*.html") thumbList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_snr-*thumb.png") thumbList.extend(fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_chisq-*thumb.png")) thumbList.sort() indexList=[file2URL.convert(x) for x in indexList] thumbList=[file2URL.convert(x) for x in thumbList] #Two thumb types possible "_thumb.png" or ".thumb.png" imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] 
ifoCount=len(wikiCoinc.sngls) rowLabel={"SNR":1,"CHISQ":2} rowCount=len(rowLabel) colCount=ifoCount if len(indexList) >= 1: snrTable=wikiPage.wikiTable(rowCount+1,colCount+1) for i,sngl in enumerate(wikiCoinc.sngls): myIndex="" for indexFile in indexList: if indexFile.__contains__("_pipe_%s_FOLLOWUP_"%sngl.ifo): myIndex=indexFile if myIndex=="": snrTable.data[0][i+1]=" %s "%sngl.ifo else: snrTable.data[0][i+1]=wikiPage.makeExternalLink(myIndex,sngl.ifo) for col,sngl in enumerate(wikiCoinc.sngls): for row,label in enumerate(rowLabel.keys()): snrTable.data[row+1][0]=label for k,image in enumerate(imageList): if (image.__contains__("_%s-"%label.lower()) \ and image.__contains__("pipe_%s_FOLLOWUP"%sngl.ifo)): snrTable.data[row+1][col+1]=" %s "%(wikiPage.linkedRemoteImage(thumbList[k],thumbList[k])) wikiPage.insertTable(snrTable) else: sys.stdout.write("Warning: SNR and CHISQ plots not found.\n") wikiPage.putText("SNR and CHISQ plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#13 Template bank veto") wikiPage.subsubsection("Question") wikiPage.putText("Is the bank veto value consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#14 Coherent studies") wikiPage.subsubsection("Question") wikiPage.putText("Are the triggers found in multiple interferometers coherent with each other?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") indexList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),"*.html") if len(indexList) >= 1: myIndex=file2URL.convert(indexList[0]) wikiPage.putText(wikiPage.makeExternalLink(myIndex,\ "%s Coherence Study Results"%(wikiCoinc.ifos))) thumbList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),\ "PLOT_CHIA_%s_snr-squared*thumb.png"%(wikiCoinc.time)) imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] rowCount=len(imageList) colCount=1 cohSnrTimeTable=wikiPage.wikiTable(rowCount+1,colCount) cohSnrTimeTable.data[0][0]="%s Coherent SNR Squared Times Series"%(wikiCoinc.ifos) for i,image in enumerate(imageList): cohSnrTimeTable.data[i+1][0]=wikiPage.linkedRemoteImage(image,thumbList[i]) wikiPage.insertTable(cohSnrTimeTable) else: sys.stdout.write("Warning: Coherent plotting jobs not found.\n") wikiPage.putText("Coherent Studies plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#15 Segmentation Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in segmentation?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#16 Calibration Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in calibration that are 
consistent with systematic uncertainties?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # | 2ecdb740cf7779b329f5e5208424d8c61de2993d /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/2ecdb740cf7779b329f5e5208424d8c61de2993d/makeCheckListWiki.py |
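The list comprehensions near the end of this context map thumbnail filenames back to their full-size images with two chained replaces, because followup products use either a "_thumb.png" or a ".thumb.png" suffix. A minimal runnable sketch of that mapping; the sample filenames are invented:

def thumb_to_image(thumb_name):
    # Both thumbnail naming conventions collapse to the plain ".png" name;
    # whichever suffix is absent leaves the string untouched.
    return thumb_name.replace("_thumb.png", ".png").replace(".thumb.png", ".png")

assert thumb_to_image("PLOT_CHIA_873739311_snr-squared_thumb.png") == "PLOT_CHIA_873739311_snr-squared.png"
assert thumb_to_image("H1_spectrogram_whitened.thumb.png") == "H1_spectrogram_whitened.png"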
def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # # Check to see if wiki file with name already exists # maxCount=0 while os.path.exists(wikiFilename) and maxCount < 10: sys.stdout.write("File %s already exists.\n"%\ os.path.split(wikiFilename)[1]) wikiFilename=wikiFilename+".wiki" maxCount=maxCount+1 # #Create the wikipage object etc # wikiPage=wiki(wikiFilename) # # Create top two trigger params tables # cTable=wikiPage.wikiTable(2,9) cTable.data=[ ["Trigger Type", "Rank", "FAR", "SNR", "IFOS(Coinc)", "Instruments(Active)", "Coincidence Time (s)", "Total Mass (mSol)", "Chirp Mass (mSol)" ], ["%s"%(wikiCoinc.type), "%s"%(wikiCoinc.rank), "%s"%(wikiCoinc.far), "%s"%(wikiCoinc.snr), "%s"%(wikiCoinc.ifos), "%s"%(wikiCoinc.instruments), "%s"%(wikiCoinc.time), "%s"%(wikiCoinc.mass), "%s"%(wikiCoinc.mchirp) ] ] pTable=wikiPage.wikiTable(len(wikiCoinc.sngls_in_coinc())+1,7) pTable.data[0]=[ "IFO", "GPS Time(s)", "SNR", "CHISQR", "Mass 1", "Mass 2", "Chirp Mass" ] for row,cSngl in enumerate(wikiCoinc.sngls_in_coinc()): pTable.data[row+1]=[ "%s"%(cSngl.ifo), "%s"%(cSngl.time), "%s"%(cSngl.snr), "%s"%(cSngl.chisqr), "%s"%(cSngl.mass1), "%s"%(cSngl.mass2), "%s"%(cSngl.mchirp) ] #Write the tables into the Wiki object wikiPage.putText("Coincident Trigger Event Information: %s\n"\ %(stfu_pipe.gpsTimeToReadableDate(wikiCoinc.time))) wikiPage.insertTable(cTable) wikiPage.putText("Corresponding Coincident Single IFO Trigger Information\n") wikiPage.insertTable(pTable) #Generate a table of contents to appear after candidate params table wikiPage.tableOfContents(3) #Begin including each checklist item as section with subsections wikiPage.section("Follow-up Checklist") #Put each checklist item wikiPage.subsection("Checklist Summary") wikiPage.subsubsection("Does this candidate pass this checklist?") wikiPage.subsubsection("Answer") wikiPage.subsubsection("Relevant Information and Comments") wikiPage.insertHR() # #First real checklist item wikiPage.subsection("#0 False Alarm Probability") wikiPage.subsubsection("Question") wikiPage.putText("What is the false alarm rate associated with this candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") farTable=wikiPage.wikiTable(2,1) farTable.setTableStyle("background-color: yellow; text-align center;") farTable.data[0][0]="False Alarm Rate" farTable.data[1][0]="%s"%(wikiCoinc.far) wikiPage.insertTable(farTable) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#1 Data Quality Flags") wikiPage.subsubsection("Question") wikiPage.putText("Can the data quality flags coincident with this candidate be safely disregarded?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPath=os.path.split(wikiFilename)[0] dqFileList=wikiFileFinder.get_findFlags() if len(dqFileList) != 1: sys.stdout.write("Warning: DQ flags data product import problem.\n") print "Found %i files."%len(dqFileList) for mf in dqFileList: print mf for myFile in dqFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist 
item wikiPage.subsection("#2 Veto Investigations") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate survive the veto investigations performed at its time?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") vetoFileList=wikiFileFinder.get_findVetos() if len(vetoFileList) != 1: sys.stdout.write("Warning: Veto flags data product import problem.\n") for myFile in vetoFileList:print myFile for myFile in vetoFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#3 IFO Status") wikiPage.subsubsection("Question") wikiPage.putText("Are the interferometers operating normally with a reasonable level of sensitivity around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") #Add link to Daily Stats if wikiCoinc.time <= endOfS5: statsLink=wikiPage.makeExternalLink("http://blue.ligo-wa.caltech.edu/scirun/S5/DailyStatistics/",\ "S5 Daily Stats Page") else: statsLink="This should be a link to S6 Daily Stats!\n" wikiPage.putText(statsLink) #Link figures of merit #Get link for all members of wikiCoinc wikiPage.putText("Figures of Merit\n") if wikiCoinc.time > endOfS5: fomLinks=dict() elems=0 for wikiSngl in wikiCoinc.sngls: if not(wikiSngl.ifo.upper().rstrip().lstrip() == 'V1'): fomLinks[wikiSngl.ifo]=stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo) elems=elems+len(fomLinks[wikiSngl.ifo]) else: for myLabel,myLink,myThumb in stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo): wikiPage.putText("%s\n"%(wikiPage.makeExternalLink(myLink,myLabel))) cols=4 rows=(elems/3)+1 fTable=wikiPage.wikiTable(rows,cols) fTable.data[0]=["IFO,Shift","FOM1","FOM2","FOM3"] currentIndex=0 for myIFOKey in fomLinks.keys(): for label,link,thumb in fomLinks[myIFOKey]: myRow=currentIndex/int(3)+1 myCol=currentIndex%int(3)+1 fTable.data[myRow][0]=label thumbURL=thumb fTable.data[myRow][myCol]="%s"%(wikiPage.linkedRemoteImage(thumb,link)) currentIndex=currentIndex+1 wikiPage.insertTable(fTable) else: wikiPage.putText("Can not automatically fetch S5 FOM links.") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#4 Candidate Appearance") wikiPage.subsubsection("Question") wikiPage.putText("Do the Qscan figures show what we would expect for a gravitational-wave event?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() for sngl in wikiCoinc.sngls: frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'hoft') indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*/%s/*/%s/*index.html"%(frametype,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened.png"\ %(sngl.time,channelName)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened?thumb.png"\ %(sngl.time,channelName)) # #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] if 
len(indexDict[sngl.ifo]) < 1: wikiPage.putText("GW data channel scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >= 1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >= 1 if enoughImage and enoughIndex: wikiPage.insertQscanTable(imageDict,\ thumbDict,\ indexDict) else: sys.stdout.write("Warning: Candidate appearance plot import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#5 Seismic Plots") wikiPage.subsubsection("Question") wikiPage.putText("Is the seismic activity insignificant around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() for sngl in wikiCoinc.sngls_in_coinc(): indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*index.html"%(sngl.ifo,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)) #Search for corresponding Omega summary.txt file zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if (len(zValueFiles) > 0): for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse only keeping SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. ...skipping...\n"%sngl.ifo) #Search for analyzeQscan files #/L1-analyseQscan_L1_932797512_687_seis_rds_L1_SEI-ETMX_X_z_scat-unspecified-gpstime.png timeString=str(float(sngl.time)).replace(".","_") zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*_%s_%s_*.txt"%(sngl.ifo,timeString)) indexDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*_%s_%s_*.html"%(sngl.ifo,timeString)) thumbDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime_thumb.png"\ %(sngl.ifo,timeString)) imageDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime.png"\ %(sngl.ifo,timeString)) #Process zValue ranking file if found for IFO zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) #Reparse keeping SEI channels tmpList=list() for chan in zValueDictAQ[sngl.ifo]: if "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Analyze Qscan Z ranking file not found for %s. 
...skipping...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Seismic scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: Seismic plots product import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#6 Other environmental causes") wikiPage.subsubsection("Question") wikiPage.putText("Were the environmental disturbances (other than seismic) insignificant at the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() #Select only PEM channels for sngl in wikiCoinc.sngls_in_coinc(): imageDict[sngl.ifo]=list() indexDict[sngl.ifo]=list() thumbDict[sngl.ifo]=list() for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) #Search for corresponding Omega summary.txt file zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse only keeping PEM and not SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if "PEM" in chan[0] and not "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. 
...skipping...\n"%sngl.ifo) #Select associated analyzeQscans imageDictAQ[sngl.ifo]=list() indexDictAQ[sngl.ifo]=list() thumbDictAQ[sngl.ifo]=list() timeString=str(float(sngl.time)).replace(".","_") for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)) zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) for chan in zValueDictAQ[sngl.ifo]: if "PEM" in chan[0] and not "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Analyze Qscan Z ranking file not found for %s. ...skipping...\n"%sngl.ifo) #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime_thumb.png #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime.png for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(imageDict[sngl.ifo]) < 1: wikiPage.putText("PEM scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: PEM plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#7 Auxiliary degree of freedom") wikiPage.subsubsection("Question") wikiPage.putText("Were the auxiliary channel transients coincident with the candidate insignificant?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() #Select only AUX channels for sngl in wikiCoinc.sngls: imageDict[sngl.ifo]=list() indexDict[sngl.ifo]=list() thumbDict[sngl.ifo]=list() for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in 
fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse NOT keeping PEM or SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if not "PEM" in chan[0] or not "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. ...skipping...\n"%sngl.ifo) #Select associated analyzeQscans imageDictAQ[sngl.ifo]=list() indexDictAQ[sngl.ifo]=list() thumbDictAQ[sngl.ifo]=list() timeString=str(float(sngl.time)).replace(".","_") #H1-analyseQscan_H1_931176926_116_rds-unspecified-gpstime.html for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)) #Process zValue ranking file if found for IFO zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) #Reparse NOT keeping PEM or SEI channels tmpList=list() for chan in zValueDictAQ[sngl.ifo]: if not "PEM" in chan[0] or not "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Z ranking file not found for %s. ...skipping...\n"%sngl.ifo) #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime_thumb.png #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime.png for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Other scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: AUX plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#8 Electronic Log Book") wikiPage.subsubsection("Question") 
wikiPage.putText("Were the instruments behaving normally according to the comments posted by the sci-mons or the operators in the e-log?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiLinkLHOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"H1"), "Hanford eLog") wikiLinkLLOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"L1"), "Livingston eLog") wikiPage.putText("%s\n\n%s\n\n"%(wikiLinkLHOlog,wikiLinkLLOlog)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#9 Glitch Report") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the weekly glitch report?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") if int(wikiCoinc.time) >= endOfS5: wikiLinkGlitch=wikiPage.makeExternalLink( "https://www.lsc-group.phys.uwm.edu/twiki/bin/view/DetChar/GlitchStudies", "Glitch Reports for S6" ) else: wikiLinkGlitch=wikiPage.makeExternalLink( "http://www.lsc-group.phys.uwm.edu/glitch/investigations/s5index.html#shift", "Glitch Reports for S5" ) wikiPage.putText("%s\n"%(wikiLinkGlitch)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#10 Snr versus time") wikiPage.subsubsection("Question") wikiPage.putText("Is this trigger significant in a SNR versus time plot of all triggers in its analysis chunk?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#11 Parameters of the candidate") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate have a high likelihood of being a gravitational-wave according to its parameters?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Effective Distance Ratio Test\n") effDList=wikiFileFinder.get_effDRatio() if len(effDList) != 1: sys.stdout.write("Warning: Effective Distance Test import problem.\n") for myFile in effDList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#12 Snr and Chisq") wikiPage.subsubsection("Question") wikiPage.putText("Are the SNR and CHISQ time series consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") # #Put plots SNR and Chi sqr # indexList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*.html") thumbList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_snr-*thumb.png") thumbList.extend(fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_chisq-*thumb.png")) thumbList.sort() indexList=[file2URL.convert(x) for x in indexList] thumbList=[file2URL.convert(x) for x in thumbList] #Two thumb types possible "_thumb.png" or ".thumb.png" imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] 
ifoCount=len(wikiCoinc.sngls) rowLabel={"SNR":1,"CHISQ":2} rowCount=len(rowLabel) colCount=ifoCount if len(indexList) >= 1: snrTable=wikiPage.wikiTable(rowCount+1,colCount+1) for i,sngl in enumerate(wikiCoinc.sngls): myIndex="" for indexFile in indexList: if indexFile.__contains__("_pipe_%s_FOLLOWUP_"%sngl.ifo): myIndex=indexFile if myIndex=="": snrTable.data[0][i+1]=" %s "%sngl.ifo else: snrTable.data[0][i+1]=wikiPage.makeExternalLink(myIndex,sngl.ifo) for col,sngl in enumerate(wikiCoinc.sngls): for row,label in enumerate(rowLabel.keys()): snrTable.data[row+1][0]=label for k,image in enumerate(imageList): if (image.__contains__("_%s-"%label.lower()) \ and image.__contains__("pipe_%s_FOLLOWUP"%sngl.ifo)): snrTable.data[row+1][col+1]=" %s "%(wikiPage.linkedRemoteImage(thumbList[k],thumbList[k])) wikiPage.insertTable(snrTable) else: sys.stdout.write("Warning: SNR and CHISQ plots not found.\n") wikiPage.putText("SNR and CHISQ plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#13 Template bank veto") wikiPage.subsubsection("Question") wikiPage.putText("Is the bank veto value consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#14 Coherent studies") wikiPage.subsubsection("Question") wikiPage.putText("Are the triggers found in multiple interferometers coherent with each other?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") indexList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),"*.html") if len(indexList) >= 1: myIndex=file2URL.convert(indexList[0]) wikiPage.putText(wikiPage.makeExternalLink(myIndex,\ "%s Coherence Study Results"%(wikiCoinc.ifos))) thumbList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),\ "PLOT_CHIA_%s_snr-squared*thumb.png"%(wikiCoinc.time)) imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] rowCount=len(imageList) colCount=1 cohSnrTimeTable=wikiPage.wikiTable(rowCount+1,colCount) cohSnrTimeTable.data[0][0]="%s Coherent SNR Squared Times Series"%(wikiCoinc.ifos) for i,image in enumerate(imageList): cohSnrTimeTable.data[i+1][0]=wikiPage.linkedRemoteImage(image,thumbList[i]) wikiPage.insertTable(cohSnrTimeTable) else: sys.stdout.write("Warning: Coherent plotting jobs not found.\n") wikiPage.putText("Coherent Studies plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#15 Segmentation Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in segmentation?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#16 Calibration Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in calibration that are 
consistent with systematic uncertainties?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # | 2ecdb740cf7779b329f5e5208424d8c61de2993d /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/2ecdb740cf7779b329f5e5208424d8c61de2993d/makeCheckListWiki.py |
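Nearly every data product in this context is selected from a flat file list with fnmatch.filter and a shell-style glob keyed on IFO and GPS time; note that fnmatch's "*" also matches across "/" separators, unlike pathname globbing, which is what lets a single pattern reach through the directory tree. A runnable sketch with hypothetical paths:

import fnmatch

files = [
    "/results/L1_RDS_R_L1/931176926.500/index.html",
    "/results/L1_RDS_R_L1/931176926.500/L0_SEI-LVEA_STS2X_512.00_spectrogram_whitened.png",
    "/results/H1_RDS_R_L1/931176926.438/index.html",
]
ifo, gpsTime = "L1", "931176926.500"
indexFiles = fnmatch.filter(files, "*/%s_RDS_*/%s/*index.html" % (ifo, gpsTime))
# indexFiles == ["/results/L1_RDS_R_L1/931176926.500/index.html"]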
while os.path.exists(wikiFilename) and maxCount < 10: | while os.path.exists(wikiFilename) and maxCount < 15: | def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # # Check to see if wiki file with name already exists # maxCount=0 while os.path.exists(wikiFilename) and maxCount < 10: sys.stdout.write("File %s already exists.\n"%\ os.path.split(wikiFilename)[1]) wikiFilename=wikiFilename+".wiki" maxCount=maxCount+1 # #Create the wikipage object etc # wikiPage=wiki(wikiFilename) # # Create top two trigger params tables # cTable=wikiPage.wikiTable(2,9) cTable.data=[ ["Trigger Type", "Rank", "FAR", "SNR", "IFOS(Coinc)", "Instruments(Active)", "Coincidence Time (s)", "Total Mass (mSol)", "Chirp Mass (mSol)" ], ["%s"%(wikiCoinc.type), "%s"%(wikiCoinc.rank), "%s"%(wikiCoinc.far), "%s"%(wikiCoinc.snr), "%s"%(wikiCoinc.ifos), "%s"%(wikiCoinc.instruments), "%s"%(wikiCoinc.time), "%s"%(wikiCoinc.mass), "%s"%(wikiCoinc.mchirp) ] ] pTable=wikiPage.wikiTable(len(wikiCoinc.sngls_in_coinc())+1,7) pTable.data[0]=[ "IFO", "GPS Time(s)", "SNR", "CHISQR", "Mass 1", "Mass 2", "Chirp Mass" ] for row,cSngl in enumerate(wikiCoinc.sngls_in_coinc()): pTable.data[row+1]=[ "%s"%(cSngl.ifo), "%s"%(cSngl.time), "%s"%(cSngl.snr), "%s"%(cSngl.chisqr), "%s"%(cSngl.mass1), "%s"%(cSngl.mass2), "%s"%(cSngl.mchirp) ] #Write the tables into the Wiki object wikiPage.putText("Coincident Trigger Event Information: %s\n"\ %(stfu_pipe.gpsTimeToReadableDate(wikiCoinc.time))) wikiPage.insertTable(cTable) wikiPage.putText("Corresponding Coincident Single IFO Trigger Information\n") wikiPage.insertTable(pTable) #Generate a table of contents to appear after candidate params table wikiPage.tableOfContents(3) #Begin including each checklist item as section with subsections wikiPage.section("Follow-up Checklist") #Put each checklist item wikiPage.subsection("Checklist Summary") wikiPage.subsubsection("Does this candidate pass this checklist?") wikiPage.subsubsection("Answer") wikiPage.subsubsection("Relevant Information and Comments") wikiPage.insertHR() # #First real checklist item wikiPage.subsection("#0 False Alarm Probability") wikiPage.subsubsection("Question") wikiPage.putText("What is the false alarm rate associated with this candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") farTable=wikiPage.wikiTable(2,1) farTable.setTableStyle("background-color: yellow; text-align center;") farTable.data[0][0]="False Alarm Rate" farTable.data[1][0]="%s"%(wikiCoinc.far) wikiPage.insertTable(farTable) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#1 Data Quality Flags") wikiPage.subsubsection("Question") wikiPage.putText("Can the data quality flags coincident with this candidate be safely disregarded?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPath=os.path.split(wikiFilename)[0] dqFileList=wikiFileFinder.get_findFlags() if len(dqFileList) != 1: sys.stdout.write("Warning: DQ flags data product import problem.\n") print "Found %i files."%len(dqFileList) for mf in dqFileList: print mf for myFile in dqFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator 
Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#2 Veto Investigations") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate survive the veto investigations performed at its time?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") vetoFileList=wikiFileFinder.get_findVetos() if len(vetoFileList) != 1: sys.stdout.write("Warning: Veto flags data product import problem.\n") for myFile in vetoFileList:print myFile for myFile in vetoFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#3 IFO Status") wikiPage.subsubsection("Question") wikiPage.putText("Are the interferometers operating normally with a reasonable level of sensitivity around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") #Add link to Daily Stats if wikiCoinc.time <= endOfS5: statsLink=wikiPage.makeExternalLink("http://blue.ligo-wa.caltech.edu/scirun/S5/DailyStatistics/",\ "S5 Daily Stats Page") else: statsLink="This should be a link to S6 Daily Stats!\n" wikiPage.putText(statsLink) #Link figures of merit #Get link for all members of wikiCoinc wikiPage.putText("Figures of Merit\n") if wikiCoinc.time > endOfS5: fomLinks=dict() elems=0 for wikiSngl in wikiCoinc.sngls: if not(wikiSngl.ifo.upper().rstrip().lstrip() == 'V1'): fomLinks[wikiSngl.ifo]=stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo) elems=elems+len(fomLinks[wikiSngl.ifo]) else: for myLabel,myLink,myThumb in stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo): wikiPage.putText("%s\n"%(wikiPage.makeExternalLink(myLink,myLabel))) cols=4 rows=(elems/3)+1 fTable=wikiPage.wikiTable(rows,cols) fTable.data[0]=["IFO,Shift","FOM1","FOM2","FOM3"] currentIndex=0 for myIFOKey in fomLinks.keys(): for label,link,thumb in fomLinks[myIFOKey]: myRow=currentIndex/int(3)+1 myCol=currentIndex%int(3)+1 fTable.data[myRow][0]=label thumbURL=thumb fTable.data[myRow][myCol]="%s"%(wikiPage.linkedRemoteImage(thumb,link)) currentIndex=currentIndex+1 wikiPage.insertTable(fTable) else: wikiPage.putText("Can not automatically fetch S5 FOM links.") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#4 Candidate Appearance") wikiPage.subsubsection("Question") wikiPage.putText("Do the Qscan figures show what we would expect for a gravitational-wave event?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() for sngl in wikiCoinc.sngls: frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'hoft') indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*/%s/*/%s/*index.html"%(frametype,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened.png"\ %(sngl.time,channelName)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened?thumb.png"\ %(sngl.time,channelName)) # #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x 
in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("GW data channel scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >= 1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >= 1 if enoughImage and enoughIndex: wikiPage.insertQscanTable(imageDict,\ thumbDict,\ indexDict) else: sys.stdout.write("Warning: Candidate appearance plot import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#5 Seismic Plots") wikiPage.subsubsection("Question") wikiPage.putText("Is the seismic activity insignificant around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() for sngl in wikiCoinc.sngls_in_coinc(): indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*index.html"%(sngl.ifo,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)) #Search for corresponding Omega summary.txt file zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if (len(zValueFiles) > 0): for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse only keeping SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. ...skipping...\n"%sngl.ifo) #Search for analyzeQscan files #/L1-analyseQscan_L1_932797512_687_seis_rds_L1_SEI-ETMX_X_z_scat-unspecified-gpstime.png timeString=str(float(sngl.time)).replace(".","_") zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*_%s_%s_*.txt"%(sngl.ifo,timeString)) indexDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*_%s_%s_*.html"%(sngl.ifo,timeString)) thumbDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime_thumb.png"\ %(sngl.ifo,timeString)) imageDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime.png"\ %(sngl.ifo,timeString)) #Process zValue ranking file if found for IFO zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) #Reparse keeping SEI channels tmpList=list() for chan in zValueDictAQ[sngl.ifo]: if "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Analyze Qscan Z ranking file not found for %s. 
...skipping...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Seismic scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: Seismic plots product import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#6 Other environmental causes") wikiPage.subsubsection("Question") wikiPage.putText("Were the environmental disturbances (other than seismic) insignificant at the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() #Select only PEM channels for sngl in wikiCoinc.sngls_in_coinc(): imageDict[sngl.ifo]=list() indexDict[sngl.ifo]=list() thumbDict[sngl.ifo]=list() for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) #Search for corresponding Omega summary.txt file zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse only keeping PEM and not SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if "PEM" in chan[0] and not "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. 
...skipping...\n"%sngl.ifo) #Select associated analyzeQscans imageDictAQ[sngl.ifo]=list() indexDictAQ[sngl.ifo]=list() thumbDictAQ[sngl.ifo]=list() timeString=str(float(sngl.time)).replace(".","_") for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)) zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) for chan in zValueDictAQ[sngl.ifo]: if "PEM" in chan[0] and not "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Analyze Qscan Z ranking file not found for %s. ...skipping...\n"%sngl.ifo) #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime_thumb.png #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime.png for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(imageDict[sngl.ifo]) < 1: wikiPage.putText("PEM scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: PEM plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#7 Auxiliary degree of freedom") wikiPage.subsubsection("Question") wikiPage.putText("Were the auxiliary channel transients coincident with the candidate insignificant?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() #Select only AUX channels for sngl in wikiCoinc.sngls: imageDict[sngl.ifo]=list() indexDict[sngl.ifo]=list() thumbDict[sngl.ifo]=list() for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in 
fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse NOT keeping PEM or SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if not "PEM" in chan[0] or not "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. ...skipping...\n"%sngl.ifo) #Select associated analyzeQscans imageDictAQ[sngl.ifo]=list() indexDictAQ[sngl.ifo]=list() thumbDictAQ[sngl.ifo]=list() timeString=str(float(sngl.time)).replace(".","_") #H1-analyseQscan_H1_931176926_116_rds-unspecified-gpstime.html for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)) #Process zValue ranking file if found for IFO zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) #Reparse NOT keeping PEM or SEI channels tmpList=list() for chan in zValueDictAQ[sngl.ifo]: if not "PEM" in chan[0] or not "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Z ranking file not found for %s. ...skipping...\n"%sngl.ifo) #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime_thumb.png #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime.png for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Other scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: AUX plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#8 Electronic Log Book") wikiPage.subsubsection("Question") 
wikiPage.putText("Were the instruments behaving normally according to the comments posted by the sci-mons or the operators in the e-log?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiLinkLHOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"H1"), "Hanford eLog") wikiLinkLLOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"L1"), "Livingston eLog") wikiPage.putText("%s\n\n%s\n\n"%(wikiLinkLHOlog,wikiLinkLLOlog)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#9 Glitch Report") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the weekly glitch report?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") if int(wikiCoinc.time) >= endOfS5: wikiLinkGlitch=wikiPage.makeExternalLink( "https://www.lsc-group.phys.uwm.edu/twiki/bin/view/DetChar/GlitchStudies", "Glitch Reports for S6" ) else: wikiLinkGlitch=wikiPage.makeExternalLink( "http://www.lsc-group.phys.uwm.edu/glitch/investigations/s5index.html#shift", "Glitch Reports for S5" ) wikiPage.putText("%s\n"%(wikiLinkGlitch)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#10 Snr versus time") wikiPage.subsubsection("Question") wikiPage.putText("Is this trigger significant in a SNR versus time plot of all triggers in its analysis chunk?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#11 Parameters of the candidate") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate have a high likelihood of being a gravitational-wave according to its parameters?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Effective Distance Ratio Test\n") effDList=wikiFileFinder.get_effDRatio() if len(effDList) != 1: sys.stdout.write("Warning: Effective Distance Test import problem.\n") for myFile in effDList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#12 Snr and Chisq") wikiPage.subsubsection("Question") wikiPage.putText("Are the SNR and CHISQ time series consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") # #Put plots SNR and Chi sqr # indexList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*.html") thumbList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_snr-*thumb.png") thumbList.extend(fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_chisq-*thumb.png")) thumbList.sort() indexList=[file2URL.convert(x) for x in indexList] thumbList=[file2URL.convert(x) for x in thumbList] #Two thumb types possible "_thumb.png" or ".thumb.png" imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] 
ifoCount=len(wikiCoinc.sngls) rowLabel={"SNR":1,"CHISQ":2} rowCount=len(rowLabel) colCount=ifoCount if len(indexList) >= 1: snrTable=wikiPage.wikiTable(rowCount+1,colCount+1) for i,sngl in enumerate(wikiCoinc.sngls): myIndex="" for indexFile in indexList: if indexFile.__contains__("_pipe_%s_FOLLOWUP_"%sngl.ifo): myIndex=indexFile if myIndex=="": snrTable.data[0][i+1]=" %s "%sngl.ifo else: snrTable.data[0][i+1]=wikiPage.makeExternalLink(myIndex,sngl.ifo) for col,sngl in enumerate(wikiCoinc.sngls): for row,label in enumerate(rowLabel.keys()): snrTable.data[row+1][0]=label for k,image in enumerate(imageList): if (image.__contains__("_%s-"%label.lower()) \ and image.__contains__("pipe_%s_FOLLOWUP"%sngl.ifo)): snrTable.data[row+1][col+1]=" %s "%(wikiPage.linkedRemoteImage(thumbList[k],thumbList[k])) wikiPage.insertTable(snrTable) else: sys.stdout.write("Warning: SNR and CHISQ plots not found.\n") wikiPage.putText("SNR and CHISQ plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#13 Template bank veto") wikiPage.subsubsection("Question") wikiPage.putText("Is the bank veto value consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#14 Coherent studies") wikiPage.subsubsection("Question") wikiPage.putText("Are the triggers found in multiple interferometers coherent with each other?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") indexList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),"*.html") if len(indexList) >= 1: myIndex=file2URL.convert(indexList[0]) wikiPage.putText(wikiPage.makeExternalLink(myIndex,\ "%s Coherence Study Results"%(wikiCoinc.ifos))) thumbList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),\ "PLOT_CHIA_%s_snr-squared*thumb.png"%(wikiCoinc.time)) imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] rowCount=len(imageList) colCount=1 cohSnrTimeTable=wikiPage.wikiTable(rowCount+1,colCount) cohSnrTimeTable.data[0][0]="%s Coherent SNR Squared Times Series"%(wikiCoinc.ifos) for i,image in enumerate(imageList): cohSnrTimeTable.data[i+1][0]=wikiPage.linkedRemoteImage(image,thumbList[i]) wikiPage.insertTable(cohSnrTimeTable) else: sys.stdout.write("Warning: Coherent plotting jobs not found.\n") wikiPage.putText("Coherent Studies plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#15 Segmentation Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in segmentation?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#16 Calibration Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in calibration that are 
consistent with systematic uncertainties?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # | 2ecdb740cf7779b329f5e5208424d8c61de2993d /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/2ecdb740cf7779b329f5e5208424d8c61de2993d/makeCheckListWiki.py |
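An aside on the checklist context above: the SNR/CHISQ table is filled by enumerating rowLabel.keys() even though rowLabel={"SNR":1,"CHISQ":2} already stores an explicit row index per label, and dict iteration order in Python 2 is an implementation detail, so the two rows are not guaranteed to come out SNR-first. A minimal order-stable loop, assuming only the rowLabel layout shown in the record (the print is purely illustrative):

rowLabel = {"SNR": 1, "CHISQ": 2}
# Sort by the stored index instead of trusting dict iteration order (Python 2).
for label, row in sorted(rowLabel.items(), key=lambda pair: pair[1]):
    print "table row %d is labelled %s" % (row, label)   # row 1 -> SNR, row 2 -> CHISQ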
sys.stdout.write("Found: %s\n",publication_directory) | sys.stdout.write("Found: %s\n"%publication_directory) | def __init__(self,type=None,ifo=None,time=None,snr=None,chisqr=None,mass1=None,mass2=None,mchirp=None): """ """ self.type=str(type) self.ifo=str(ifo) self.time=float(time) self.snr=float(snr) self.chisqr=float(chisqr) self.mass1=float(mass1) self.mass2=float(mass2) self.mchirp=float(mchirp) | 2ecdb740cf7779b329f5e5208424d8c61de2993d /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/2ecdb740cf7779b329f5e5208424d8c61de2993d/makeCheckListWiki.py |
sys.stdout.write("Found: %s\n",publication_url) | sys.stdout.write("Found: %s\n"%publication_url) | def __init__(self,type=None,ifo=None,time=None,snr=None,chisqr=None,mass1=None,mass2=None,mchirp=None): """ """ self.type=str(type) self.ifo=str(ifo) self.time=float(time) self.snr=float(snr) self.chisqr=float(chisqr) self.mass1=float(mass1) self.mass2=float(mass2) self.mchirp=float(mchirp) | 2ecdb740cf7779b329f5e5208424d8c61de2993d /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/2ecdb740cf7779b329f5e5208424d8c61de2993d/makeCheckListWiki.py |
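The two records above apply the same class of fix: file.write() in Python 2 takes exactly one string argument, so passing the format string and the value as two positional arguments raises a TypeError instead of printing, and the corrected form interpolates with % before the call. A minimal reproduction (Python 2; the publication_directory value is a stand-in for the demo):

import sys

publication_directory = "/tmp/followup_results"   # stand-in value, not from the records
try:
    sys.stdout.write("Found: %s\n", publication_directory)   # buggy: write() takes one argument
except TypeError, err:
    sys.stderr.write("broken call: %s\n" % err)
sys.stdout.write("Found: %s\n" % publication_directory)      # fixed: interpolate first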
for sngl in coincEvent.sngl_inspiral.itervalues(): myArgString=myArgString+"%s,"%sngl.ifo | if hasattr(coincEvent, "sngl_inspiral"): for sngl in coincEvent.sngl_inspiral.itervalues(): myArgString=myArgString+"%s,"%sngl.ifo elif hasattr(coincEvent, "ifos_list"): for ifo in coincEvent.ifos_list: myArgString=myArgString+"%s,"%ifo | def __init__(self, dag, job, cp, opts, coincEvent=None): """ """ self.__conditionalLoadDefaults__(findFlagsNode.defaults,cp) pipeline.CondorDAGNode.__init__(self,job) self.add_var_opt("trigger-time",coincEvent.time) #Output filename oFilename="%s-findFlags_%s_%s.wiki"%(coincEvent.instruments, coincEvent.ifos, coincEvent.time) self.add_var_opt("output-file",job.outputPath+'/DataProducts/'+oFilename) self.add_var_opt("segment-url",cp.get('findFlags','segment-url')) self.add_var_opt("output-format",cp.get('findFlags','output-format')) self.add_var_opt("window",cp.get('findFlags','window')) #IFO arg string myArgString="" for sngl in coincEvent.sngl_inspiral.itervalues(): myArgString=myArgString+"%s,"%sngl.ifo myArgString=myArgString.rstrip(",") self.add_var_opt("ifo-list",myArgString) if not opts.disable_dag_categories: self.set_category(job.name.lower()) | 130ebf309860df4889826c32bceaee7ddd882e7f /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/130ebf309860df4889826c32bceaee7ddd882e7f/stfu_pipe.py |
for sngl in coincEvent.sngl_inspiral.itervalues(): myArgString=myArgString+"%s,"%sngl.ifo | if hasattr(coincEvent, "sngl_inspiral"): for sngl in coincEvent.sngl_inspiral.itervalues(): myArgString=myArgString+"%s,"%sngl.ifo elif hasattr(coincEvent, "ifos_list"): for ifo in coincEvent.ifos_list: myArgString=myArgString+"%s,"%ifo | def __init__(self, dag, job, cp, opts, coincEvent=None): """ """ self.__conditionalLoadDefaults__(findVetosNode.defaults,cp) pipeline.CondorDAGNode.__init__(self,job) self.add_var_opt("trigger-time",coincEvent.time) #Output filename oFilename="%s-findVetos_%s_%s.wiki"%(coincEvent.instruments, coincEvent.ifos, coincEvent.time) self.add_var_opt("output-file",job.outputPath+'/DataProducts/'+oFilename) self.add_var_opt("segment-url",cp.get('findFlags','segment-url')) self.add_var_opt("output-format",cp.get('findFlags','output-format')) self.add_var_opt("window",cp.get('findFlags','window')) #IFO arg string myArgString="" for sngl in coincEvent.sngl_inspiral.itervalues(): myArgString=myArgString+"%s,"%sngl.ifo myArgString=myArgString.rstrip(",") self.add_var_opt("ifo-list",myArgString) if not opts.disable_dag_categories: self.set_category(job.name.lower()) if not opts.no_findVetoes: dag.add_node(self) self.validate() else: self.invalidate() | 130ebf309860df4889826c32bceaee7ddd882e7f /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/130ebf309860df4889826c32bceaee7ddd882e7f/stfu_pipe.py |
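Both node constructors (findFlagsNode and findVetosNode) get the same guard: instead of assuming every coincEvent carries a sngl_inspiral mapping, the fixed code duck-types with hasattr and falls back to a plain ifos_list. A sketch of the dispatch under those two event shapes (Python 2; the stub classes are hypothetical, only the attribute names come from the records):

class Sngl(object):
    def __init__(self, ifo):
        self.ifo = ifo

class CoincWithSngls(object):
    sngl_inspiral = {"H1": Sngl("H1"), "L1": Sngl("L1")}   # hypothetical stand-in rows

class CoincWithIfosList(object):
    ifos_list = ["H1", "L1", "V1"]

def ifo_arg_string(coincEvent):
    myArgString = ""
    if hasattr(coincEvent, "sngl_inspiral"):
        for sngl in coincEvent.sngl_inspiral.itervalues():
            myArgString = myArgString + "%s," % sngl.ifo
    elif hasattr(coincEvent, "ifos_list"):
        for ifo in coincEvent.ifos_list:
            myArgString = myArgString + "%s," % ifo
    return myArgString.rstrip(",")

print ifo_arg_string(CoincWithSngls())     # e.g. H1,L1 (order follows dict iteration)
print ifo_arg_string(CoincWithIfosList())  # H1,L1,V1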
cellString=cellString+" %s "%self.linkedRemoteImage(thumbs[ifo][myOmegaIndex], | cellString=cellString+" %s "%self.linkedRemoteImage(thumbs[ifo][myOmegaIndexT], | def insertAnalyzeQscanTable(self, images=None, thumbs=None, indexes=None, imagesAQ=None, thumbsAQ=None, indexesAQ=None, channelRanks=None): """ Insert a multiple IFO table with 5 cols with the AQ underneath this depends on the numer of IFO keys in indexes dictionary. The option channelRanks is not required to change the plot order! Channel ranks is dict similar in shape to other args. Cells are shaded light grey if they are top N channels and that the trigger is greater in value that 0.5. Assuming the channelRanks dict is not empty. """ #channelRanks={'ifo':[[chan,Zvalue,rank]...[chan,Zvalue,rank]],'ifo2':[[ ]]} #Review the keys for Qscans and analyzeQscans. if not images.keys()==thumbs.keys()==indexes.keys(): sys.stderr.write("Error: Keys for Qscan tables creations inconsistent!\n") if not imagesAQ.keys()==thumbsAQ.keys()==indexesAQ.keys(): sys.stderr.write("Error: Keys for Qscan tables creations inconsistent!\n") | 427c4b33ce7a947c2072db62020e1f9a2d395239 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/427c4b33ce7a947c2072db62020e1f9a2d395239/makeCheckListWiki.py |
cellString=cellString+" %s "%self.linkedRemoteImage(thumbsAQ[ifo][myAQIndex], | cellString=cellString+" %s "%self.linkedRemoteImage(thumbsAQ[ifo][myAQIndexT], | def insertAnalyzeQscanTable(self, images=None, thumbs=None, indexes=None, imagesAQ=None, thumbsAQ=None, indexesAQ=None, channelRanks=None): """ Insert a multiple IFO table with 5 cols with the AQ underneath this depends on the numer of IFO keys in indexes dictionary. The option channelRanks is not required to change the plot order! Channel ranks is dict similar in shape to other args. Cells are shaded light grey if they are top N channels and that the trigger is greater in value that 0.5. Assuming the channelRanks dict is not empty. """ #channelRanks={'ifo':[[chan,Zvalue,rank]...[chan,Zvalue,rank]],'ifo2':[[ ]]} #Review the keys for Qscans and analyzeQscans. if not images.keys()==thumbs.keys()==indexes.keys(): sys.stderr.write("Error: Keys for Qscan tables creations inconsistent!\n") if not imagesAQ.keys()==thumbsAQ.keys()==indexesAQ.keys(): sys.stderr.write("Error: Keys for Qscan tables creations inconsistent!\n") | 427c4b33ce7a947c2072db62020e1f9a2d395239 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/427c4b33ce7a947c2072db62020e1f9a2d395239/makeCheckListWiki.py |
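The two fixes above are wrong-variable bugs inside insertAnalyzeQscanTable: the cell is built from thumbs[ifo], but the buggy code indexed it with myOmegaIndex/myAQIndex, which by the T-suffix naming appear to have been matched against the full-size image lists rather than the thumbnail lists. When the two lists are searched independently they need not line up, so reusing the image index can pick the wrong thumbnail or run off the end. A small illustration of why each list needs its own index (plain Python; the file names are invented):

def matching_indices(images, thumbs, channel):
    # Search the two lists independently, as the analyzeQscan code does.
    myIndex = myIndexT = None
    for i, name in enumerate(images):
        if channel in name:
            myIndex = i
    for i, name in enumerate(thumbs):
        if channel in name:
            myIndexT = i
    return myIndex, myIndexT

images = ["H1_SEI_X.png", "H1_PEM_MIC.png"]
thumbs = ["H1_PEM_MIC_thumb.png"]              # thumbnail list is shorter here
print matching_indices(images, thumbs, "PEM")  # (1, 0): the two indices differ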
if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): | if not "PEM" in myFile.upper() and not "SEI" in myFile.upper(): | def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.exists(wikiFilename) and maxCount < 15: sys.stdout.write("File %s already exists.\n"%\ os.path.split(wikiFilename)[1]) wikiFilename=wikiFilename+".wiki" maxCount=maxCount+1 sys.stdout.write("Available via browser for wiki upload at %s\n"\ %(file2URL.convert(wikiFilename))) # #Create the wikipage object etc # wikiPage=wiki(wikiFilename) # # Create top two trigger params tables # cTable=wikiPage.wikiTable(2,9) cTable.data=[ ["Trigger Type", "Rank", "FAR", "SNR", "IFOS(Coinc)", "Instruments(Active)", "Coincidence Time (s)", "Total Mass (mSol)", "Chirp Mass (mSol)" ], ["%s"%(wikiCoinc.type), "%s"%(wikiCoinc.rank), "%s"%(wikiCoinc.far), "%s"%(wikiCoinc.snr), "%s"%(wikiCoinc.ifos), "%s"%(wikiCoinc.instruments), "%s"%(wikiCoinc.time), "%s"%(wikiCoinc.mass), "%s"%(wikiCoinc.mchirp) ] ] pTable=wikiPage.wikiTable(len(wikiCoinc.sngls_in_coinc())+1,7) pTable.data[0]=[ "IFO", "GPS Time(s)", "SNR", "CHISQR", "Mass 1", "Mass 2", "Chirp Mass" ] for row,cSngl in enumerate(wikiCoinc.sngls_in_coinc()): pTable.data[row+1]=[ "%s"%(cSngl.ifo), "%s"%(cSngl.time), "%s"%(cSngl.snr), "%s"%(cSngl.chisqr), "%s"%(cSngl.mass1), "%s"%(cSngl.mass2), "%s"%(cSngl.mchirp) ] #Write the tables into the Wiki object wikiPage.putText("Coincident Trigger Event Information: %s\n"\ %(stfu_pipe.gpsTimeToReadableDate(wikiCoinc.time))) wikiPage.insertTable(cTable) wikiPage.putText("Corresponding Coincident Single IFO Trigger Information\n") wikiPage.insertTable(pTable) #Generate a table of contents to appear after candidate params table wikiPage.tableOfContents(3) #Begin including each checklist item as section with subsections wikiPage.section("Follow-up Checklist") #Put each checklist item wikiPage.subsection("Checklist Summary") wikiPage.subsubsection("Does this candidate pass this checklist?") wikiPage.subsubsection("Answer") wikiPage.subsubsection("Relevant Information and Comments") wikiPage.insertHR() # #First real checklist item wikiPage.subsection("#0 False Alarm Probability") wikiPage.subsubsection("Question") wikiPage.putText("What is the false alarm rate associated with this candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") farTable=wikiPage.wikiTable(2,1) farTable.setTableStyle("background-color: yellow; text-align center;") farTable.data[0][0]="False Alarm Rate" farTable.data[1][0]="%s"%(wikiCoinc.far) wikiPage.insertTable(farTable) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#1 Data Quality Flags") wikiPage.subsubsection("Question") wikiPage.putText("Can the data quality flags coincident with this candidate be safely disregarded?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPath=os.path.split(wikiFilename)[0] dqFileList=wikiFileFinder.get_findFlags() if len(dqFileList) != 1: sys.stdout.write("Warning: DQ flags data product import problem.\n") print "Found %i files."%len(dqFileList) for mf in dqFileList: print 
mf for myFile in dqFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#2 Veto Investigations") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate survive the veto investigations performed at its time?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") vetoFileList=wikiFileFinder.get_findVetos() if len(vetoFileList) != 1: sys.stdout.write("Warning: Veto flags data product import problem.\n") for myFile in vetoFileList:print myFile for myFile in vetoFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#3 IFO Status") wikiPage.subsubsection("Question") wikiPage.putText("Are the interferometers operating normally with a reasonable level of sensitivity around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") #Add link to Daily Stats if wikiCoinc.time <= endOfS5: statsLink=wikiPage.makeExternalLink("http://blue.ligo-wa.caltech.edu/scirun/S5/DailyStatistics/",\ "S5 Daily Stats Page") else: statsLink="This should be a link to S6 Daily Stats!\n" wikiPage.putText(statsLink) #Link figures of merit #Get link for all members of wikiCoinc wikiPage.putText("Figures of Merit\n") wikiPage.putText("UTC Time of trigger :%s"%(stfu_pipe.gpsTimeToReadableDate(wikiCoinc.time))) if wikiCoinc.time > endOfS5: fomLinks=dict() elems=0 for wikiSngl in wikiCoinc.sngls: if not(wikiSngl.ifo.upper().rstrip().lstrip() == 'V1'): fomLinks[wikiSngl.ifo]=stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo) elems=elems+len(fomLinks[wikiSngl.ifo]) else: for myLabel,myLink,myThumb in stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo): wikiPage.putText("%s\n"%(wikiPage.makeExternalLink(myLink,myLabel))) cols=4 rows=(elems/3)+1 fTable=wikiPage.wikiTable(rows,cols) fTable.data[0]=["IFO,Shift","FOM1","FOM2","FOM3"] currentIndex=0 for myIFOKey in fomLinks.keys(): for label,link,thumb in fomLinks[myIFOKey]: myRow=currentIndex/int(3)+1 myCol=currentIndex%int(3)+1 fTable.data[myRow][0]=label thumbURL=thumb fTable.data[myRow][myCol]="%s"%(wikiPage.linkedRemoteImage(thumb,link)) currentIndex=currentIndex+1 wikiPage.insertTable(fTable) else: wikiPage.putText("Can not automatically fetch S5 FOM links.") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#4 Candidate Appearance") wikiPage.subsubsection("Question") wikiPage.putText("Do the Qscan figures show what we would expect for a gravitational-wave event?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() for sngl in wikiCoinc.sngls: frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'hoft') indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*/%s/*/%s/*index.html"%(frametype,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened.png"\ %(sngl.time,channelName)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ 
"*%s*_%s_16.00_spectrogram_whitened?thumb.png"\ %(sngl.time,channelName)) # #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("GW data channel scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >= 1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >= 1 if enoughImage and enoughIndex: wikiPage.insertQscanTable(imageDict,\ thumbDict,\ indexDict) else: sys.stdout.write("Warning: Candidate appearance plot import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#5 Seismic Plots") wikiPage.subsubsection("Question") wikiPage.putText("Is the seismic activity insignificant around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") # imageDict,indexDict,thumbDict,zValueDict = dict(),dict(),dict(),dict() imageDictAQ,indexDictAQ,thumbDictAQ,zValueDictAQ = dict(),dict(),dict(),dict() filesOmega=wikiFileFinder.get_RDS_R_L1_SEIS() filesAnalyze=wikiFileFinder.get_analyzeQscan_SEIS() for sngl in wikiCoinc.sngls_in_coinc(): indexDict[sngl.ifo],imageDict[sngl.ifo],thumbDict[sngl.ifo],zValueDict[sngl.ifo]=list(),list(),list(),list() indexDictAQ[sngl.ifo],imageDictAQ[sngl.ifo],thumbDictAQ[sngl.ifo],zValueDictAQ[sngl.ifo]=list(),list(),list(),list() indexDict[sngl.ifo]=fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*index.html"%(sngl.ifo,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)) thumbDict[sngl.ifo]=fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)) #Search for corresponding Omega summary.txt file zValueDict[sngl.ifo]=list() for zFile in fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)): for chan in wikiFileFinder.__readSummary__(zFile): if "SEI" in chan[0]: zValueDict[sngl.ifo].append(chan) if len(zValueDict[sngl.ifo]) == 0: sys.stdout.write("Omega scan summary file not or empty for %s. ...continuing...\n"%sngl.ifo) #Search for analyzeQscan files timeString=str(float(sngl.time)).replace(".","_") indexDictAQ[sngl.ifo]=fnmatch.filter(filesAnalyze,\ "*_%s_%s_*.html"%(sngl.ifo,timeString)) imageDictAQ[sngl.ifo]=fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime.png"\ %(sngl.ifo,timeString)) thumbDictAQ[sngl.ifo]=fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime_thumb.png"\ %(sngl.ifo,timeString)) #Load of analyzeQscan z file if available zValueDictAQ[sngl.ifo]=list() for zFile in fnmatch.filter(filesAnalyze,\ "*_%s_%s_*.txt"%(sngl.ifo,timeString)): for chan in wikiFileFinder.__readSummary__(zFile): if "SEI" in chan[0]: zValueDictAQ[sngl.ifo].append(chan) if len(zValueDictAQ[sngl.ifo]) == 0: sys.stdout.write("AnalyzeQscan summary file not or empty for %s. 
...continuing...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Seismic scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: Seismic plots product import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#6 Other environmental causes") wikiPage.subsubsection("Question") wikiPage.putText("Were the environmental disturbances (other than seismic) insignificant at the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict,indexDict,thumbDict,zValueDict = dict(),dict(),dict(),dict() imageDictAQ,indexDictAQ,thumbDictAQ,zValueDictAQ = dict(),dict(),dict(),dict() #Select only PEM channels filesOmega=wikiFileFinder.get_RDS_R_L1() filesAnalyze=wikiFileFinder.get_analyzeQscan_RDS() for sngl in wikiCoinc.sngls_in_coinc(): indexDict[sngl.ifo],imageDict[sngl.ifo],thumbDict[sngl.ifo],zValueDict[sngl.ifo]=list(),list(),list(),list() indexDictAQ[sngl.ifo],imageDictAQ[sngl.ifo],thumbDictAQ[sngl.ifo],zValueDictAQ[sngl.ifo]=list(),list(),list(),list() for myFile in fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) #Search for corresponding Omega summary.txt file zValueDict[sngl.ifo]=list() for zFile in fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)): for chan in wikiFileFinder.__readSummary__(zFile): if "PEM" in chan[0] and not "SEI" in chan[0]: zValueDict[sngl.ifo].append(chan) if len(zValueDict[sngl.ifo]) == 0: sys.stdout.write("Omega scan summary file not or empty for %s. 
...continuing...\n"%sngl.ifo) #Select associated analyzeQscans timeString=str(float(sngl.time)).replace(".","_") for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueDictAQ[sngl.ifo]=list() for zFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)): for chan in wikiFileFinder.__readSummary__(zFile): if "PEM" in chan[0] and not "SEI" in chan[0]: zValueDictAQ[sngl.ifo].append(chan) if len(zValueDictAQ[sngl.ifo]) == 0: sys.stdout.write("AnalyzeQscan summary file not or empty for %s. ...continuing...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(imageDict[sngl.ifo]) < 1: wikiPage.putText("PEM scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: PEM plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#7 Auxiliary degree of freedom") wikiPage.subsubsection("Question") wikiPage.putText("Were the auxiliary channel transients coincident with the candidate insignificant?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict,indexDict,thumbDict,zValueDict = dict(),dict(),dict(),dict() imageDictAQ,indexDictAQ,thumbDictAQ,zValueDictAQ = dict(),dict(),dict(),dict() #Select only AUX channels filesOmega=wikiFileFinder.get_RDS_R_L1() filesAnalyze=wikiFileFinder.get_analyzeQscan_RDS() for sngl in wikiCoinc.sngls: indexDict[sngl.ifo],imageDict[sngl.ifo],thumbDict[sngl.ifo],zValueDict[sngl.ifo]=list(),list(),list(),list() indexDictAQ[sngl.ifo],imageDictAQ[sngl.ifo],thumbDictAQ[sngl.ifo],zValueDictAQ[sngl.ifo]=list(),list(),list(),list() for myFile in fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) zValueDict[sngl.ifo]=list() for zFile in 
fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)): for chan in wikiFileFinder.__readSummary__(zFile): if not "PEM" in chan[0] or not "SEI" in chan[0]: zValueDict[sngl.ifo].append(chan) if len(zValueDict[sngl.ifo]) == 0: sys.stdout.write("Omega scan summary file not or empty for %s. ...continuing...\n"%sngl.ifo) #Select associated analyzeQscans timeString=str(float(sngl.time)).replace(".","_") for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueDictAQ[sngl.ifo]=list() for zFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)): for chan in wikiFileFinder.__readSummary__(zFile): if not "PEM" in chan[0] or not "SEI" in chan[0]: zValueDictAQ[sngl.ifo].append(chan) if len(zValueDictAQ[sngl.ifo]) == 0: sys.stdout.write("AnalyzeQscan summary file not or empty for %s. ...continuing...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Other scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: AUX plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#8 Electronic Log Book") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the comments posted by the sci-mons or the operators in the e-log?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiLinkLHOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"H1"), "Hanford eLog") wikiLinkLLOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"L1"), "Livingston eLog") wikiPage.putText("%s\n\n%s\n\n"%(wikiLinkLHOlog,wikiLinkLLOlog)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#9 Glitch Report") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the weekly glitch report?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") 
if int(wikiCoinc.time) >= endOfS5: wikiLinkGlitch=wikiPage.makeExternalLink( "https://www.lsc-group.phys.uwm.edu/twiki/bin/view/DetChar/GlitchStudies", "Glitch Reports for S6" ) else: wikiLinkGlitch=wikiPage.makeExternalLink( "http://www.lsc-group.phys.uwm.edu/glitch/investigations/s5index.html#shift", "Glitch Reports for S5" ) wikiPage.putText("%s\n"%(wikiLinkGlitch)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#10 Snr versus time") wikiPage.subsubsection("Question") wikiPage.putText("Is this trigger significant in a SNR versus time plot of all triggers in its analysis chunk?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#11 Parameters of the candidate") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate have a high likelihood of being a gravitational-wave according to its parameters?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Effective Distance Ratio Test\n") effDList=wikiFileFinder.get_effDRatio() if len(effDList) != 1: sys.stdout.write("Warning: Effective Distance Test import problem.\n") for myFile in effDList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#12 Snr and Chisq") wikiPage.subsubsection("Question") wikiPage.putText("Are the SNR and CHISQ time series consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") # #Put plots SNR and Chi sqr # indexList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*.html") thumbList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_snr-*thumb.png") thumbList.extend(fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_chisq-*thumb.png")) thumbList.sort() indexList=[file2URL.convert(x) for x in indexList] thumbList=[file2URL.convert(x) for x in thumbList] #Two thumb types possible "_thumb.png" or ".thumb.png" imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] ifoCount=len(wikiCoinc.sngls) rowLabel={"SNR":1,"CHISQ":2} rowCount=len(rowLabel) colCount=ifoCount if len(indexList) >= 1: snrTable=wikiPage.wikiTable(rowCount+1,colCount+1) for i,sngl in enumerate(wikiCoinc.sngls): myIndex="" for indexFile in indexList: if indexFile.__contains__("_pipe_%s_FOLLOWUP_"%sngl.ifo): myIndex=indexFile if myIndex=="": snrTable.data[0][i+1]=" %s "%sngl.ifo else: snrTable.data[0][i+1]=wikiPage.makeExternalLink(myIndex,sngl.ifo) for col,sngl in enumerate(wikiCoinc.sngls): for row,label in enumerate(rowLabel.keys()): snrTable.data[row+1][0]=label for k,image in enumerate(imageList): if (image.__contains__("_%s-"%label.lower()) \ and image.__contains__("pipe_%s_FOLLOWUP"%sngl.ifo)): snrTable.data[row+1][col+1]=" %s "%(wikiPage.linkedRemoteImage(thumbList[k],thumbList[k])) wikiPage.insertTable(snrTable) else: sys.stdout.write("Warning: SNR and CHISQ plots not found.\n") wikiPage.putText("SNR and CHISQ plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit 
Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#13 Template bank veto") wikiPage.subsubsection("Question") wikiPage.putText("Is the bank veto value consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#14 Coherent studies") wikiPage.subsubsection("Question") wikiPage.putText("Are the triggers found in multiple interferometers coherent with each other?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") indexList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),"*.html") if len(indexList) >= 1: myIndex=file2URL.convert(indexList[0]) wikiPage.putText(wikiPage.makeExternalLink(myIndex,\ "%s Coherence Study Results"%(wikiCoinc.ifos))) thumbList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),\ "PLOT_CHIA_%s_snr-squared*thumb.png"%(wikiCoinc.time)) imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] rowCount=len(imageList) colCount=1 cohSnrTimeTable=wikiPage.wikiTable(rowCount+1,colCount) cohSnrTimeTable.data[0][0]="%s Coherent SNR Squared Times Series"%(wikiCoinc.ifos) for i,image in enumerate(imageList): cohSnrTimeTable.data[i+1][0]=wikiPage.linkedRemoteImage(image,thumbList[i]) wikiPage.insertTable(cohSnrTimeTable) else: sys.stdout.write("Warning: Coherent plotting jobs not found.\n") wikiPage.putText("Coherent Studies plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#15 Segmentation Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in segmentation?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#16 Calibration Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in calibration that are consistent with systematic uncertainties?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # | c64520f211dcf13695883191b82cf519c4889317 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/c64520f211dcf13695883191b82cf519c4889317/makeCheckListWiki.py |
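The fix in this record is a De Morgan correction in the auxiliary-channel filter. The intended test is "neither PEM nor SEI", i.e. not ("PEM" in s or "SEI" in s), which distributes to not "PEM" in s and not "SEI" in s; the buggy or form is false only for a name containing both substrings at once, so in practice it admitted every file and the AUX tables silently picked up the PEM and seismic products as well. A two-line check (plain Python):

name = "H1_PEM_MIC_16.00_spectrogram_whitened.png"
print not "PEM" in name.upper() or not "SEI" in name.upper()   # True:  buggy filter keeps a PEM file
print not "PEM" in name.upper() and not "SEI" in name.upper()  # False: fixed filter rejects it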
if not "PEM" in chan[0] or not "SEI" in chan[0]: | if not "PEM" in chan[0] and not "SEI" in chan[0]: | def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.exists(wikiFilename) and maxCount < 15: sys.stdout.write("File %s already exists.\n"%\ os.path.split(wikiFilename)[1]) wikiFilename=wikiFilename+".wiki" maxCount=maxCount+1 sys.stdout.write("Available via browser for wiki upload at %s\n"\ %(file2URL.convert(wikiFilename))) # #Create the wikipage object etc # wikiPage=wiki(wikiFilename) # # Create top two trigger params tables # cTable=wikiPage.wikiTable(2,9) cTable.data=[ ["Trigger Type", "Rank", "FAR", "SNR", "IFOS(Coinc)", "Instruments(Active)", "Coincidence Time (s)", "Total Mass (mSol)", "Chirp Mass (mSol)" ], ["%s"%(wikiCoinc.type), "%s"%(wikiCoinc.rank), "%s"%(wikiCoinc.far), "%s"%(wikiCoinc.snr), "%s"%(wikiCoinc.ifos), "%s"%(wikiCoinc.instruments), "%s"%(wikiCoinc.time), "%s"%(wikiCoinc.mass), "%s"%(wikiCoinc.mchirp) ] ] pTable=wikiPage.wikiTable(len(wikiCoinc.sngls_in_coinc())+1,7) pTable.data[0]=[ "IFO", "GPS Time(s)", "SNR", "CHISQR", "Mass 1", "Mass 2", "Chirp Mass" ] for row,cSngl in enumerate(wikiCoinc.sngls_in_coinc()): pTable.data[row+1]=[ "%s"%(cSngl.ifo), "%s"%(cSngl.time), "%s"%(cSngl.snr), "%s"%(cSngl.chisqr), "%s"%(cSngl.mass1), "%s"%(cSngl.mass2), "%s"%(cSngl.mchirp) ] #Write the tables into the Wiki object wikiPage.putText("Coincident Trigger Event Information: %s\n"\ %(stfu_pipe.gpsTimeToReadableDate(wikiCoinc.time))) wikiPage.insertTable(cTable) wikiPage.putText("Corresponding Coincident Single IFO Trigger Information\n") wikiPage.insertTable(pTable) #Generate a table of contents to appear after candidate params table wikiPage.tableOfContents(3) #Begin including each checklist item as section with subsections wikiPage.section("Follow-up Checklist") #Put each checklist item wikiPage.subsection("Checklist Summary") wikiPage.subsubsection("Does this candidate pass this checklist?") wikiPage.subsubsection("Answer") wikiPage.subsubsection("Relevant Information and Comments") wikiPage.insertHR() # #First real checklist item wikiPage.subsection("#0 False Alarm Probability") wikiPage.subsubsection("Question") wikiPage.putText("What is the false alarm rate associated with this candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") farTable=wikiPage.wikiTable(2,1) farTable.setTableStyle("background-color: yellow; text-align center;") farTable.data[0][0]="False Alarm Rate" farTable.data[1][0]="%s"%(wikiCoinc.far) wikiPage.insertTable(farTable) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#1 Data Quality Flags") wikiPage.subsubsection("Question") wikiPage.putText("Can the data quality flags coincident with this candidate be safely disregarded?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPath=os.path.split(wikiFilename)[0] dqFileList=wikiFileFinder.get_findFlags() if len(dqFileList) != 1: sys.stdout.write("Warning: DQ flags data product import problem.\n") print "Found %i files."%len(dqFileList) for mf in dqFileList: print mf for myFile in dqFileList: 
wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#2 Veto Investigations") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate survive the veto investigations performed at its time?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") vetoFileList=wikiFileFinder.get_findVetos() if len(vetoFileList) != 1: sys.stdout.write("Warning: Veto flags data product import problem.\n") for myFile in vetoFileList:print myFile for myFile in vetoFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#3 IFO Status") wikiPage.subsubsection("Question") wikiPage.putText("Are the interferometers operating normally with a reasonable level of sensitivity around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") #Add link to Daily Stats if wikiCoinc.time <= endOfS5: statsLink=wikiPage.makeExternalLink("http://blue.ligo-wa.caltech.edu/scirun/S5/DailyStatistics/",\ "S5 Daily Stats Page") else: statsLink="This should be a link to S6 Daily Stats!\n" wikiPage.putText(statsLink) #Link figures of merit #Get link for all members of wikiCoinc wikiPage.putText("Figures of Merit\n") wikiPage.putText("UTC Time of trigger :%s"%(stfu_pipe.gpsTimeToReadableDate(wikiCoinc.time))) if wikiCoinc.time > endOfS5: fomLinks=dict() elems=0 for wikiSngl in wikiCoinc.sngls: if not(wikiSngl.ifo.upper().rstrip().lstrip() == 'V1'): fomLinks[wikiSngl.ifo]=stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo) elems=elems+len(fomLinks[wikiSngl.ifo]) else: for myLabel,myLink,myThumb in stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo): wikiPage.putText("%s\n"%(wikiPage.makeExternalLink(myLink,myLabel))) cols=4 rows=(elems/3)+1 fTable=wikiPage.wikiTable(rows,cols) fTable.data[0]=["IFO,Shift","FOM1","FOM2","FOM3"] currentIndex=0 for myIFOKey in fomLinks.keys(): for label,link,thumb in fomLinks[myIFOKey]: myRow=currentIndex/int(3)+1 myCol=currentIndex%int(3)+1 fTable.data[myRow][0]=label thumbURL=thumb fTable.data[myRow][myCol]="%s"%(wikiPage.linkedRemoteImage(thumb,link)) currentIndex=currentIndex+1 wikiPage.insertTable(fTable) else: wikiPage.putText("Can not automatically fetch S5 FOM links.") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#4 Candidate Appearance") wikiPage.subsubsection("Question") wikiPage.putText("Do the Qscan figures show what we would expect for a gravitational-wave event?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() for sngl in wikiCoinc.sngls: frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'hoft') indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*/%s/*/%s/*index.html"%(frametype,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened.png"\ %(sngl.time,channelName)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ 
"*%s*_%s_16.00_spectrogram_whitened?thumb.png"\ %(sngl.time,channelName)) # #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("GW data channel scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >= 1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >= 1 if enoughImage and enoughIndex: wikiPage.insertQscanTable(imageDict,\ thumbDict,\ indexDict) else: sys.stdout.write("Warning: Candidate appearance plot import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#5 Seismic Plots") wikiPage.subsubsection("Question") wikiPage.putText("Is the seismic activity insignificant around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") # imageDict,indexDict,thumbDict,zValueDict = dict(),dict(),dict(),dict() imageDictAQ,indexDictAQ,thumbDictAQ,zValueDictAQ = dict(),dict(),dict(),dict() filesOmega=wikiFileFinder.get_RDS_R_L1_SEIS() filesAnalyze=wikiFileFinder.get_analyzeQscan_SEIS() for sngl in wikiCoinc.sngls_in_coinc(): indexDict[sngl.ifo],imageDict[sngl.ifo],thumbDict[sngl.ifo],zValueDict[sngl.ifo]=list(),list(),list(),list() indexDictAQ[sngl.ifo],imageDictAQ[sngl.ifo],thumbDictAQ[sngl.ifo],zValueDictAQ[sngl.ifo]=list(),list(),list(),list() indexDict[sngl.ifo]=fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*index.html"%(sngl.ifo,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)) thumbDict[sngl.ifo]=fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)) #Search for corresponding Omega summary.txt file zValueDict[sngl.ifo]=list() for zFile in fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)): for chan in wikiFileFinder.__readSummary__(zFile): if "SEI" in chan[0]: zValueDict[sngl.ifo].append(chan) if len(zValueDict[sngl.ifo]) == 0: sys.stdout.write("Omega scan summary file not or empty for %s. ...continuing...\n"%sngl.ifo) #Search for analyzeQscan files timeString=str(float(sngl.time)).replace(".","_") indexDictAQ[sngl.ifo]=fnmatch.filter(filesAnalyze,\ "*_%s_%s_*.html"%(sngl.ifo,timeString)) imageDictAQ[sngl.ifo]=fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime.png"\ %(sngl.ifo,timeString)) thumbDictAQ[sngl.ifo]=fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime_thumb.png"\ %(sngl.ifo,timeString)) #Load of analyzeQscan z file if available zValueDictAQ[sngl.ifo]=list() for zFile in fnmatch.filter(filesAnalyze,\ "*_%s_%s_*.txt"%(sngl.ifo,timeString)): for chan in wikiFileFinder.__readSummary__(zFile): if "SEI" in chan[0]: zValueDictAQ[sngl.ifo].append(chan) if len(zValueDictAQ[sngl.ifo]) == 0: sys.stdout.write("AnalyzeQscan summary file not or empty for %s. 
...continuing...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Seismic scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: Seismic plots product import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#6 Other environmental causes") wikiPage.subsubsection("Question") wikiPage.putText("Were the environmental disturbances (other than seismic) insignificant at the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict,indexDict,thumbDict,zValueDict = dict(),dict(),dict(),dict() imageDictAQ,indexDictAQ,thumbDictAQ,zValueDictAQ = dict(),dict(),dict(),dict() #Select only PEM channels filesOmega=wikiFileFinder.get_RDS_R_L1() filesAnalyze=wikiFileFinder.get_analyzeQscan_RDS() for sngl in wikiCoinc.sngls_in_coinc(): indexDict[sngl.ifo],imageDict[sngl.ifo],thumbDict[sngl.ifo],zValueDict[sngl.ifo]=list(),list(),list(),list() indexDictAQ[sngl.ifo],imageDictAQ[sngl.ifo],thumbDictAQ[sngl.ifo],zValueDictAQ[sngl.ifo]=list(),list(),list(),list() for myFile in fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) #Search for corresponding Omega summary.txt file zValueDict[sngl.ifo]=list() for zFile in fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)): for chan in wikiFileFinder.__readSummary__(zFile): if "PEM" in chan[0] and not "SEI" in chan[0]: zValueDict[sngl.ifo].append(chan) if len(zValueDict[sngl.ifo]) == 0: sys.stdout.write("Omega scan summary file not or empty for %s. 
...continuing...\n"%sngl.ifo) #Select associated analyzeQscans timeString=str(float(sngl.time)).replace(".","_") for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueDictAQ[sngl.ifo]=list() for zFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)): for chan in wikiFileFinder.__readSummary__(zFile): if "PEM" in chan[0] and not "SEI" in chan[0]: zValueDictAQ[sngl.ifo].append(chan) if len(zValueDictAQ[sngl.ifo]) == 0: sys.stdout.write("AnalyzeQscan summary file not or empty for %s. ...continuing...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(imageDict[sngl.ifo]) < 1: wikiPage.putText("PEM scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: PEM plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#7 Auxiliary degree of freedom") wikiPage.subsubsection("Question") wikiPage.putText("Were the auxiliary channel transients coincident with the candidate insignificant?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict,indexDict,thumbDict,zValueDict = dict(),dict(),dict(),dict() imageDictAQ,indexDictAQ,thumbDictAQ,zValueDictAQ = dict(),dict(),dict(),dict() #Select only AUX channels filesOmega=wikiFileFinder.get_RDS_R_L1() filesAnalyze=wikiFileFinder.get_analyzeQscan_RDS() for sngl in wikiCoinc.sngls: indexDict[sngl.ifo],imageDict[sngl.ifo],thumbDict[sngl.ifo],zValueDict[sngl.ifo]=list(),list(),list(),list() indexDictAQ[sngl.ifo],imageDictAQ[sngl.ifo],thumbDictAQ[sngl.ifo],zValueDictAQ[sngl.ifo]=list(),list(),list(),list() for myFile in fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) zValueDict[sngl.ifo]=list() for zFile in 
fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)): for chan in wikiFileFinder.__readSummary__(zFile): if not "PEM" in chan[0] or not "SEI" in chan[0]: zValueDict[sngl.ifo].append(chan) if len(zValueDict[sngl.ifo]) == 0: sys.stdout.write("Omega scan summary file not or empty for %s. ...continuing...\n"%sngl.ifo) #Select associated analyzeQscans timeString=str(float(sngl.time)).replace(".","_") for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueDictAQ[sngl.ifo]=list() for zFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)): for chan in wikiFileFinder.__readSummary__(zFile): if not "PEM" in chan[0] or not "SEI" in chan[0]: zValueDictAQ[sngl.ifo].append(chan) if len(zValueDictAQ[sngl.ifo]) == 0: sys.stdout.write("AnalyzeQscan summary file not or empty for %s. ...continuing...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Other scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: AUX plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#8 Electronic Log Book") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the comments posted by the sci-mons or the operators in the e-log?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiLinkLHOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"H1"), "Hanford eLog") wikiLinkLLOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"L1"), "Livingston eLog") wikiPage.putText("%s\n\n%s\n\n"%(wikiLinkLHOlog,wikiLinkLLOlog)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#9 Glitch Report") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the weekly glitch report?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") 
if int(wikiCoinc.time) >= endOfS5: wikiLinkGlitch=wikiPage.makeExternalLink( "https://www.lsc-group.phys.uwm.edu/twiki/bin/view/DetChar/GlitchStudies", "Glitch Reports for S6" ) else: wikiLinkGlitch=wikiPage.makeExternalLink( "http://www.lsc-group.phys.uwm.edu/glitch/investigations/s5index.html#shift", "Glitch Reports for S5" ) wikiPage.putText("%s\n"%(wikiLinkGlitch)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#10 Snr versus time") wikiPage.subsubsection("Question") wikiPage.putText("Is this trigger significant in a SNR versus time plot of all triggers in its analysis chunk?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#11 Parameters of the candidate") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate have a high likelihood of being a gravitational-wave according to its parameters?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Effective Distance Ratio Test\n") effDList=wikiFileFinder.get_effDRatio() if len(effDList) != 1: sys.stdout.write("Warning: Effective Distance Test import problem.\n") for myFile in effDList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#12 Snr and Chisq") wikiPage.subsubsection("Question") wikiPage.putText("Are the SNR and CHISQ time series consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") # #Put plots SNR and Chi sqr # indexList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*.html") thumbList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_snr-*thumb.png") thumbList.extend(fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_chisq-*thumb.png")) thumbList.sort() indexList=[file2URL.convert(x) for x in indexList] thumbList=[file2URL.convert(x) for x in thumbList] #Two thumb types possible "_thumb.png" or ".thumb.png" imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] ifoCount=len(wikiCoinc.sngls) rowLabel={"SNR":1,"CHISQ":2} rowCount=len(rowLabel) colCount=ifoCount if len(indexList) >= 1: snrTable=wikiPage.wikiTable(rowCount+1,colCount+1) for i,sngl in enumerate(wikiCoinc.sngls): myIndex="" for indexFile in indexList: if indexFile.__contains__("_pipe_%s_FOLLOWUP_"%sngl.ifo): myIndex=indexFile if myIndex=="": snrTable.data[0][i+1]=" %s "%sngl.ifo else: snrTable.data[0][i+1]=wikiPage.makeExternalLink(myIndex,sngl.ifo) for col,sngl in enumerate(wikiCoinc.sngls): for row,label in enumerate(rowLabel.keys()): snrTable.data[row+1][0]=label for k,image in enumerate(imageList): if (image.__contains__("_%s-"%label.lower()) \ and image.__contains__("pipe_%s_FOLLOWUP"%sngl.ifo)): snrTable.data[row+1][col+1]=" %s "%(wikiPage.linkedRemoteImage(thumbList[k],thumbList[k])) wikiPage.insertTable(snrTable) else: sys.stdout.write("Warning: SNR and CHISQ plots not found.\n") wikiPage.putText("SNR and CHISQ plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit 
Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#13 Template bank veto") wikiPage.subsubsection("Question") wikiPage.putText("Is the bank veto value consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#14 Coherent studies") wikiPage.subsubsection("Question") wikiPage.putText("Are the triggers found in multiple interferometers coherent with each other?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") indexList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),"*.html") if len(indexList) >= 1: myIndex=file2URL.convert(indexList[0]) wikiPage.putText(wikiPage.makeExternalLink(myIndex,\ "%s Coherence Study Results"%(wikiCoinc.ifos))) thumbList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),\ "PLOT_CHIA_%s_snr-squared*thumb.png"%(wikiCoinc.time)) imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] rowCount=len(imageList) colCount=1 cohSnrTimeTable=wikiPage.wikiTable(rowCount+1,colCount) cohSnrTimeTable.data[0][0]="%s Coherent SNR Squared Times Series"%(wikiCoinc.ifos) for i,image in enumerate(imageList): cohSnrTimeTable.data[i+1][0]=wikiPage.linkedRemoteImage(image,thumbList[i]) wikiPage.insertTable(cohSnrTimeTable) else: sys.stdout.write("Warning: Coherent plotting jobs not found.\n") wikiPage.putText("Coherent Studies plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#15 Segmentation Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in segmentation?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#16 Calibration Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in calibration that are consistent with systematic uncertainties?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # | c64520f211dcf13695883191b82cf519c4889317 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/c64520f211dcf13695883191b82cf519c4889317/makeCheckListWiki.py |
if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): | if not "PEM" in myFile.upper() and not "SEI" in myFile.upper(): | def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.exists(wikiFilename) and maxCount < 15: sys.stdout.write("File %s already exists.\n"%\ os.path.split(wikiFilename)[1]) wikiFilename=wikiFilename+".wiki" maxCount=maxCount+1 sys.stdout.write("Available via browser for wiki upload at %s\n"\ %(file2URL.convert(wikiFilename))) # #Create the wikipage object etc # wikiPage=wiki(wikiFilename) # # Create top two trigger params tables # cTable=wikiPage.wikiTable(2,9) cTable.data=[ ["Trigger Type", "Rank", "FAR", "SNR", "IFOS(Coinc)", "Instruments(Active)", "Coincidence Time (s)", "Total Mass (mSol)", "Chirp Mass (mSol)" ], ["%s"%(wikiCoinc.type), "%s"%(wikiCoinc.rank), "%s"%(wikiCoinc.far), "%s"%(wikiCoinc.snr), "%s"%(wikiCoinc.ifos), "%s"%(wikiCoinc.instruments), "%s"%(wikiCoinc.time), "%s"%(wikiCoinc.mass), "%s"%(wikiCoinc.mchirp) ] ] pTable=wikiPage.wikiTable(len(wikiCoinc.sngls_in_coinc())+1,7) pTable.data[0]=[ "IFO", "GPS Time(s)", "SNR", "CHISQR", "Mass 1", "Mass 2", "Chirp Mass" ] for row,cSngl in enumerate(wikiCoinc.sngls_in_coinc()): pTable.data[row+1]=[ "%s"%(cSngl.ifo), "%s"%(cSngl.time), "%s"%(cSngl.snr), "%s"%(cSngl.chisqr), "%s"%(cSngl.mass1), "%s"%(cSngl.mass2), "%s"%(cSngl.mchirp) ] #Write the tables into the Wiki object wikiPage.putText("Coincident Trigger Event Information: %s\n"\ %(stfu_pipe.gpsTimeToReadableDate(wikiCoinc.time))) wikiPage.insertTable(cTable) wikiPage.putText("Corresponding Coincident Single IFO Trigger Information\n") wikiPage.insertTable(pTable) #Generate a table of contents to appear after candidate params table wikiPage.tableOfContents(3) #Begin including each checklist item as section with subsections wikiPage.section("Follow-up Checklist") #Put each checklist item wikiPage.subsection("Checklist Summary") wikiPage.subsubsection("Does this candidate pass this checklist?") wikiPage.subsubsection("Answer") wikiPage.subsubsection("Relevant Information and Comments") wikiPage.insertHR() # #First real checklist item wikiPage.subsection("#0 False Alarm Probability") wikiPage.subsubsection("Question") wikiPage.putText("What is the false alarm rate associated with this candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") farTable=wikiPage.wikiTable(2,1) farTable.setTableStyle("background-color: yellow; text-align center;") farTable.data[0][0]="False Alarm Rate" farTable.data[1][0]="%s"%(wikiCoinc.far) wikiPage.insertTable(farTable) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#1 Data Quality Flags") wikiPage.subsubsection("Question") wikiPage.putText("Can the data quality flags coincident with this candidate be safely disregarded?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPath=os.path.split(wikiFilename)[0] dqFileList=wikiFileFinder.get_findFlags() if len(dqFileList) != 1: sys.stdout.write("Warning: DQ flags data product import problem.\n") print "Found %i files."%len(dqFileList) for mf in dqFileList: print 
mf for myFile in dqFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#2 Veto Investigations") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate survive the veto investigations performed at its time?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") vetoFileList=wikiFileFinder.get_findVetos() if len(vetoFileList) != 1: sys.stdout.write("Warning: Veto flags data product import problem.\n") for myFile in vetoFileList:print myFile for myFile in vetoFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#3 IFO Status") wikiPage.subsubsection("Question") wikiPage.putText("Are the interferometers operating normally with a reasonable level of sensitivity around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") #Add link to Daily Stats if wikiCoinc.time <= endOfS5: statsLink=wikiPage.makeExternalLink("http://blue.ligo-wa.caltech.edu/scirun/S5/DailyStatistics/",\ "S5 Daily Stats Page") else: statsLink="This should be a link to S6 Daily Stats!\n" wikiPage.putText(statsLink) #Link figures of merit #Get link for all members of wikiCoinc wikiPage.putText("Figures of Merit\n") wikiPage.putText("UTC Time of trigger :%s"%(stfu_pipe.gpsTimeToReadableDate(wikiCoinc.time))) if wikiCoinc.time > endOfS5: fomLinks=dict() elems=0 for wikiSngl in wikiCoinc.sngls: if not(wikiSngl.ifo.upper().rstrip().lstrip() == 'V1'): fomLinks[wikiSngl.ifo]=stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo) elems=elems+len(fomLinks[wikiSngl.ifo]) else: for myLabel,myLink,myThumb in stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo): wikiPage.putText("%s\n"%(wikiPage.makeExternalLink(myLink,myLabel))) cols=4 rows=(elems/3)+1 fTable=wikiPage.wikiTable(rows,cols) fTable.data[0]=["IFO,Shift","FOM1","FOM2","FOM3"] currentIndex=0 for myIFOKey in fomLinks.keys(): for label,link,thumb in fomLinks[myIFOKey]: myRow=currentIndex/int(3)+1 myCol=currentIndex%int(3)+1 fTable.data[myRow][0]=label thumbURL=thumb fTable.data[myRow][myCol]="%s"%(wikiPage.linkedRemoteImage(thumb,link)) currentIndex=currentIndex+1 wikiPage.insertTable(fTable) else: wikiPage.putText("Can not automatically fetch S5 FOM links.") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#4 Candidate Appearance") wikiPage.subsubsection("Question") wikiPage.putText("Do the Qscan figures show what we would expect for a gravitational-wave event?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() for sngl in wikiCoinc.sngls: frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'hoft') indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*/%s/*/%s/*index.html"%(frametype,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened.png"\ %(sngl.time,channelName)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ 
"*%s*_%s_16.00_spectrogram_whitened?thumb.png"\ %(sngl.time,channelName)) # #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("GW data channel scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >= 1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >= 1 if enoughImage and enoughIndex: wikiPage.insertQscanTable(imageDict,\ thumbDict,\ indexDict) else: sys.stdout.write("Warning: Candidate appearance plot import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#5 Seismic Plots") wikiPage.subsubsection("Question") wikiPage.putText("Is the seismic activity insignificant around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") # imageDict,indexDict,thumbDict,zValueDict = dict(),dict(),dict(),dict() imageDictAQ,indexDictAQ,thumbDictAQ,zValueDictAQ = dict(),dict(),dict(),dict() filesOmega=wikiFileFinder.get_RDS_R_L1_SEIS() filesAnalyze=wikiFileFinder.get_analyzeQscan_SEIS() for sngl in wikiCoinc.sngls_in_coinc(): indexDict[sngl.ifo],imageDict[sngl.ifo],thumbDict[sngl.ifo],zValueDict[sngl.ifo]=list(),list(),list(),list() indexDictAQ[sngl.ifo],imageDictAQ[sngl.ifo],thumbDictAQ[sngl.ifo],zValueDictAQ[sngl.ifo]=list(),list(),list(),list() indexDict[sngl.ifo]=fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*index.html"%(sngl.ifo,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)) thumbDict[sngl.ifo]=fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)) #Search for corresponding Omega summary.txt file zValueDict[sngl.ifo]=list() for zFile in fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)): for chan in wikiFileFinder.__readSummary__(zFile): if "SEI" in chan[0]: zValueDict[sngl.ifo].append(chan) if len(zValueDict[sngl.ifo]) == 0: sys.stdout.write("Omega scan summary file not or empty for %s. ...continuing...\n"%sngl.ifo) #Search for analyzeQscan files timeString=str(float(sngl.time)).replace(".","_") indexDictAQ[sngl.ifo]=fnmatch.filter(filesAnalyze,\ "*_%s_%s_*.html"%(sngl.ifo,timeString)) imageDictAQ[sngl.ifo]=fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime.png"\ %(sngl.ifo,timeString)) thumbDictAQ[sngl.ifo]=fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime_thumb.png"\ %(sngl.ifo,timeString)) #Load of analyzeQscan z file if available zValueDictAQ[sngl.ifo]=list() for zFile in fnmatch.filter(filesAnalyze,\ "*_%s_%s_*.txt"%(sngl.ifo,timeString)): for chan in wikiFileFinder.__readSummary__(zFile): if "SEI" in chan[0]: zValueDictAQ[sngl.ifo].append(chan) if len(zValueDictAQ[sngl.ifo]) == 0: sys.stdout.write("AnalyzeQscan summary file not or empty for %s. 
...continuing...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Seismic scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: Seismic plots product import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#6 Other environmental causes") wikiPage.subsubsection("Question") wikiPage.putText("Were the environmental disturbances (other than seismic) insignificant at the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict,indexDict,thumbDict,zValueDict = dict(),dict(),dict(),dict() imageDictAQ,indexDictAQ,thumbDictAQ,zValueDictAQ = dict(),dict(),dict(),dict() #Select only PEM channels filesOmega=wikiFileFinder.get_RDS_R_L1() filesAnalyze=wikiFileFinder.get_analyzeQscan_RDS() for sngl in wikiCoinc.sngls_in_coinc(): indexDict[sngl.ifo],imageDict[sngl.ifo],thumbDict[sngl.ifo],zValueDict[sngl.ifo]=list(),list(),list(),list() indexDictAQ[sngl.ifo],imageDictAQ[sngl.ifo],thumbDictAQ[sngl.ifo],zValueDictAQ[sngl.ifo]=list(),list(),list(),list() for myFile in fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) #Search for corresponding Omega summary.txt file zValueDict[sngl.ifo]=list() for zFile in fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)): for chan in wikiFileFinder.__readSummary__(zFile): if "PEM" in chan[0] and not "SEI" in chan[0]: zValueDict[sngl.ifo].append(chan) if len(zValueDict[sngl.ifo]) == 0: sys.stdout.write("Omega scan summary file not or empty for %s. 
...continuing...\n"%sngl.ifo) #Select associated analyzeQscans timeString=str(float(sngl.time)).replace(".","_") for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueDictAQ[sngl.ifo]=list() for zFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)): for chan in wikiFileFinder.__readSummary__(zFile): if "PEM" in chan[0] and not "SEI" in chan[0]: zValueDictAQ[sngl.ifo].append(chan) if len(zValueDictAQ[sngl.ifo]) == 0: sys.stdout.write("AnalyzeQscan summary file not or empty for %s. ...continuing...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(imageDict[sngl.ifo]) < 1: wikiPage.putText("PEM scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: PEM plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#7 Auxiliary degree of freedom") wikiPage.subsubsection("Question") wikiPage.putText("Were the auxiliary channel transients coincident with the candidate insignificant?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict,indexDict,thumbDict,zValueDict = dict(),dict(),dict(),dict() imageDictAQ,indexDictAQ,thumbDictAQ,zValueDictAQ = dict(),dict(),dict(),dict() #Select only AUX channels filesOmega=wikiFileFinder.get_RDS_R_L1() filesAnalyze=wikiFileFinder.get_analyzeQscan_RDS() for sngl in wikiCoinc.sngls: indexDict[sngl.ifo],imageDict[sngl.ifo],thumbDict[sngl.ifo],zValueDict[sngl.ifo]=list(),list(),list(),list() indexDictAQ[sngl.ifo],imageDictAQ[sngl.ifo],thumbDictAQ[sngl.ifo],zValueDictAQ[sngl.ifo]=list(),list(),list(),list() for myFile in fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) zValueDict[sngl.ifo]=list() for zFile in 
fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)): for chan in wikiFileFinder.__readSummary__(zFile): if not "PEM" in chan[0] or not "SEI" in chan[0]: zValueDict[sngl.ifo].append(chan) if len(zValueDict[sngl.ifo]) == 0: sys.stdout.write("Omega scan summary file not or empty for %s. ...continuing...\n"%sngl.ifo) #Select associated analyzeQscans timeString=str(float(sngl.time)).replace(".","_") for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueDictAQ[sngl.ifo]=list() for zFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)): for chan in wikiFileFinder.__readSummary__(zFile): if not "PEM" in chan[0] or not "SEI" in chan[0]: zValueDictAQ[sngl.ifo].append(chan) if len(zValueDictAQ[sngl.ifo]) == 0: sys.stdout.write("AnalyzeQscan summary file not or empty for %s. ...continuing...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Other scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: AUX plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#8 Electronic Log Book") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the comments posted by the sci-mons or the operators in the e-log?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiLinkLHOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"H1"), "Hanford eLog") wikiLinkLLOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"L1"), "Livingston eLog") wikiPage.putText("%s\n\n%s\n\n"%(wikiLinkLHOlog,wikiLinkLLOlog)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#9 Glitch Report") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the weekly glitch report?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") 
if int(wikiCoinc.time) >= endOfS5: wikiLinkGlitch=wikiPage.makeExternalLink( "https://www.lsc-group.phys.uwm.edu/twiki/bin/view/DetChar/GlitchStudies", "Glitch Reports for S6" ) else: wikiLinkGlitch=wikiPage.makeExternalLink( "http://www.lsc-group.phys.uwm.edu/glitch/investigations/s5index.html#shift", "Glitch Reports for S5" ) wikiPage.putText("%s\n"%(wikiLinkGlitch)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#10 Snr versus time") wikiPage.subsubsection("Question") wikiPage.putText("Is this trigger significant in a SNR versus time plot of all triggers in its analysis chunk?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#11 Parameters of the candidate") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate have a high likelihood of being a gravitational-wave according to its parameters?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Effective Distance Ratio Test\n") effDList=wikiFileFinder.get_effDRatio() if len(effDList) != 1: sys.stdout.write("Warning: Effective Distance Test import problem.\n") for myFile in effDList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#12 Snr and Chisq") wikiPage.subsubsection("Question") wikiPage.putText("Are the SNR and CHISQ time series consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") # #Put plots SNR and Chi sqr # indexList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*.html") thumbList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_snr-*thumb.png") thumbList.extend(fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_chisq-*thumb.png")) thumbList.sort() indexList=[file2URL.convert(x) for x in indexList] thumbList=[file2URL.convert(x) for x in thumbList] #Two thumb types possible "_thumb.png" or ".thumb.png" imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] ifoCount=len(wikiCoinc.sngls) rowLabel={"SNR":1,"CHISQ":2} rowCount=len(rowLabel) colCount=ifoCount if len(indexList) >= 1: snrTable=wikiPage.wikiTable(rowCount+1,colCount+1) for i,sngl in enumerate(wikiCoinc.sngls): myIndex="" for indexFile in indexList: if indexFile.__contains__("_pipe_%s_FOLLOWUP_"%sngl.ifo): myIndex=indexFile if myIndex=="": snrTable.data[0][i+1]=" %s "%sngl.ifo else: snrTable.data[0][i+1]=wikiPage.makeExternalLink(myIndex,sngl.ifo) for col,sngl in enumerate(wikiCoinc.sngls): for row,label in enumerate(rowLabel.keys()): snrTable.data[row+1][0]=label for k,image in enumerate(imageList): if (image.__contains__("_%s-"%label.lower()) \ and image.__contains__("pipe_%s_FOLLOWUP"%sngl.ifo)): snrTable.data[row+1][col+1]=" %s "%(wikiPage.linkedRemoteImage(thumbList[k],thumbList[k])) wikiPage.insertTable(snrTable) else: sys.stdout.write("Warning: SNR and CHISQ plots not found.\n") wikiPage.putText("SNR and CHISQ plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit 
Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#13 Template bank veto") wikiPage.subsubsection("Question") wikiPage.putText("Is the bank veto value consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#14 Coherent studies") wikiPage.subsubsection("Question") wikiPage.putText("Are the triggers found in multiple interferometers coherent with each other?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") indexList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),"*.html") if len(indexList) >= 1: myIndex=file2URL.convert(indexList[0]) wikiPage.putText(wikiPage.makeExternalLink(myIndex,\ "%s Coherence Study Results"%(wikiCoinc.ifos))) thumbList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),\ "PLOT_CHIA_%s_snr-squared*thumb.png"%(wikiCoinc.time)) imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] rowCount=len(imageList) colCount=1 cohSnrTimeTable=wikiPage.wikiTable(rowCount+1,colCount) cohSnrTimeTable.data[0][0]="%s Coherent SNR Squared Times Series"%(wikiCoinc.ifos) for i,image in enumerate(imageList): cohSnrTimeTable.data[i+1][0]=wikiPage.linkedRemoteImage(image,thumbList[i]) wikiPage.insertTable(cohSnrTimeTable) else: sys.stdout.write("Warning: Coherent plotting jobs not found.\n") wikiPage.putText("Coherent Studies plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#15 Segmentation Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in segmentation?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#16 Calibration Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in calibration that are consistent with systematic uncertainties?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # | c64520f211dcf13695883191b82cf519c4889317 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/c64520f211dcf13695883191b82cf519c4889317/makeCheckListWiki.py |
if not "PEM" in chan[0] or not "SEI" in chan[0]: | if not "PEM" in chan[0] and not "SEI" in chan[0]: | def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.exists(wikiFilename) and maxCount < 15: sys.stdout.write("File %s already exists.\n"%\ os.path.split(wikiFilename)[1]) wikiFilename=wikiFilename+".wiki" maxCount=maxCount+1 sys.stdout.write("Available via browser for wiki upload at %s\n"\ %(file2URL.convert(wikiFilename))) # #Create the wikipage object etc # wikiPage=wiki(wikiFilename) # # Create top two trigger params tables # cTable=wikiPage.wikiTable(2,9) cTable.data=[ ["Trigger Type", "Rank", "FAR", "SNR", "IFOS(Coinc)", "Instruments(Active)", "Coincidence Time (s)", "Total Mass (mSol)", "Chirp Mass (mSol)" ], ["%s"%(wikiCoinc.type), "%s"%(wikiCoinc.rank), "%s"%(wikiCoinc.far), "%s"%(wikiCoinc.snr), "%s"%(wikiCoinc.ifos), "%s"%(wikiCoinc.instruments), "%s"%(wikiCoinc.time), "%s"%(wikiCoinc.mass), "%s"%(wikiCoinc.mchirp) ] ] pTable=wikiPage.wikiTable(len(wikiCoinc.sngls_in_coinc())+1,7) pTable.data[0]=[ "IFO", "GPS Time(s)", "SNR", "CHISQR", "Mass 1", "Mass 2", "Chirp Mass" ] for row,cSngl in enumerate(wikiCoinc.sngls_in_coinc()): pTable.data[row+1]=[ "%s"%(cSngl.ifo), "%s"%(cSngl.time), "%s"%(cSngl.snr), "%s"%(cSngl.chisqr), "%s"%(cSngl.mass1), "%s"%(cSngl.mass2), "%s"%(cSngl.mchirp) ] #Write the tables into the Wiki object wikiPage.putText("Coincident Trigger Event Information: %s\n"\ %(stfu_pipe.gpsTimeToReadableDate(wikiCoinc.time))) wikiPage.insertTable(cTable) wikiPage.putText("Corresponding Coincident Single IFO Trigger Information\n") wikiPage.insertTable(pTable) #Generate a table of contents to appear after candidate params table wikiPage.tableOfContents(3) #Begin including each checklist item as section with subsections wikiPage.section("Follow-up Checklist") #Put each checklist item wikiPage.subsection("Checklist Summary") wikiPage.subsubsection("Does this candidate pass this checklist?") wikiPage.subsubsection("Answer") wikiPage.subsubsection("Relevant Information and Comments") wikiPage.insertHR() # #First real checklist item wikiPage.subsection("#0 False Alarm Probability") wikiPage.subsubsection("Question") wikiPage.putText("What is the false alarm rate associated with this candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") farTable=wikiPage.wikiTable(2,1) farTable.setTableStyle("background-color: yellow; text-align center;") farTable.data[0][0]="False Alarm Rate" farTable.data[1][0]="%s"%(wikiCoinc.far) wikiPage.insertTable(farTable) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#1 Data Quality Flags") wikiPage.subsubsection("Question") wikiPage.putText("Can the data quality flags coincident with this candidate be safely disregarded?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPath=os.path.split(wikiFilename)[0] dqFileList=wikiFileFinder.get_findFlags() if len(dqFileList) != 1: sys.stdout.write("Warning: DQ flags data product import problem.\n") print "Found %i files."%len(dqFileList) for mf in dqFileList: print mf for myFile in dqFileList: 
wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#2 Veto Investigations") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate survive the veto investigations performed at its time?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") vetoFileList=wikiFileFinder.get_findVetos() if len(vetoFileList) != 1: sys.stdout.write("Warning: Veto flags data product import problem.\n") for myFile in vetoFileList:print myFile for myFile in vetoFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#3 IFO Status") wikiPage.subsubsection("Question") wikiPage.putText("Are the interferometers operating normally with a reasonable level of sensitivity around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") #Add link to Daily Stats if wikiCoinc.time <= endOfS5: statsLink=wikiPage.makeExternalLink("http://blue.ligo-wa.caltech.edu/scirun/S5/DailyStatistics/",\ "S5 Daily Stats Page") else: statsLink="This should be a link to S6 Daily Stats!\n" wikiPage.putText(statsLink) #Link figures of merit #Get link for all members of wikiCoinc wikiPage.putText("Figures of Merit\n") wikiPage.putText("UTC Time of trigger :%s"%(stfu_pipe.gpsTimeToReadableDate(wikiCoinc.time))) if wikiCoinc.time > endOfS5: fomLinks=dict() elems=0 for wikiSngl in wikiCoinc.sngls: if not(wikiSngl.ifo.upper().rstrip().lstrip() == 'V1'): fomLinks[wikiSngl.ifo]=stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo) elems=elems+len(fomLinks[wikiSngl.ifo]) else: for myLabel,myLink,myThumb in stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo): wikiPage.putText("%s\n"%(wikiPage.makeExternalLink(myLink,myLabel))) cols=4 rows=(elems/3)+1 fTable=wikiPage.wikiTable(rows,cols) fTable.data[0]=["IFO,Shift","FOM1","FOM2","FOM3"] currentIndex=0 for myIFOKey in fomLinks.keys(): for label,link,thumb in fomLinks[myIFOKey]: myRow=currentIndex/int(3)+1 myCol=currentIndex%int(3)+1 fTable.data[myRow][0]=label thumbURL=thumb fTable.data[myRow][myCol]="%s"%(wikiPage.linkedRemoteImage(thumb,link)) currentIndex=currentIndex+1 wikiPage.insertTable(fTable) else: wikiPage.putText("Can not automatically fetch S5 FOM links.") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#4 Candidate Appearance") wikiPage.subsubsection("Question") wikiPage.putText("Do the Qscan figures show what we would expect for a gravitational-wave event?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() for sngl in wikiCoinc.sngls: frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'hoft') indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*/%s/*/%s/*index.html"%(frametype,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened.png"\ %(sngl.time,channelName)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ 
"*%s*_%s_16.00_spectrogram_whitened?thumb.png"\ %(sngl.time,channelName)) # #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("GW data channel scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >= 1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >= 1 if enoughImage and enoughIndex: wikiPage.insertQscanTable(imageDict,\ thumbDict,\ indexDict) else: sys.stdout.write("Warning: Candidate appearance plot import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#5 Seismic Plots") wikiPage.subsubsection("Question") wikiPage.putText("Is the seismic activity insignificant around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") # imageDict,indexDict,thumbDict,zValueDict = dict(),dict(),dict(),dict() imageDictAQ,indexDictAQ,thumbDictAQ,zValueDictAQ = dict(),dict(),dict(),dict() filesOmega=wikiFileFinder.get_RDS_R_L1_SEIS() filesAnalyze=wikiFileFinder.get_analyzeQscan_SEIS() for sngl in wikiCoinc.sngls_in_coinc(): indexDict[sngl.ifo],imageDict[sngl.ifo],thumbDict[sngl.ifo],zValueDict[sngl.ifo]=list(),list(),list(),list() indexDictAQ[sngl.ifo],imageDictAQ[sngl.ifo],thumbDictAQ[sngl.ifo],zValueDictAQ[sngl.ifo]=list(),list(),list(),list() indexDict[sngl.ifo]=fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*index.html"%(sngl.ifo,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)) thumbDict[sngl.ifo]=fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)) #Search for corresponding Omega summary.txt file zValueDict[sngl.ifo]=list() for zFile in fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)): for chan in wikiFileFinder.__readSummary__(zFile): if "SEI" in chan[0]: zValueDict[sngl.ifo].append(chan) if len(zValueDict[sngl.ifo]) == 0: sys.stdout.write("Omega scan summary file not or empty for %s. ...continuing...\n"%sngl.ifo) #Search for analyzeQscan files timeString=str(float(sngl.time)).replace(".","_") indexDictAQ[sngl.ifo]=fnmatch.filter(filesAnalyze,\ "*_%s_%s_*.html"%(sngl.ifo,timeString)) imageDictAQ[sngl.ifo]=fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime.png"\ %(sngl.ifo,timeString)) thumbDictAQ[sngl.ifo]=fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime_thumb.png"\ %(sngl.ifo,timeString)) #Load of analyzeQscan z file if available zValueDictAQ[sngl.ifo]=list() for zFile in fnmatch.filter(filesAnalyze,\ "*_%s_%s_*.txt"%(sngl.ifo,timeString)): for chan in wikiFileFinder.__readSummary__(zFile): if "SEI" in chan[0]: zValueDictAQ[sngl.ifo].append(chan) if len(zValueDictAQ[sngl.ifo]) == 0: sys.stdout.write("AnalyzeQscan summary file not or empty for %s. 
...continuing...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Seismic scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: Seismic plots product import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#6 Other environmental causes") wikiPage.subsubsection("Question") wikiPage.putText("Were the environmental disturbances (other than seismic) insignificant at the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict,indexDict,thumbDict,zValueDict = dict(),dict(),dict(),dict() imageDictAQ,indexDictAQ,thumbDictAQ,zValueDictAQ = dict(),dict(),dict(),dict() #Select only PEM channels filesOmega=wikiFileFinder.get_RDS_R_L1() filesAnalyze=wikiFileFinder.get_analyzeQscan_RDS() for sngl in wikiCoinc.sngls_in_coinc(): indexDict[sngl.ifo],imageDict[sngl.ifo],thumbDict[sngl.ifo],zValueDict[sngl.ifo]=list(),list(),list(),list() indexDictAQ[sngl.ifo],imageDictAQ[sngl.ifo],thumbDictAQ[sngl.ifo],zValueDictAQ[sngl.ifo]=list(),list(),list(),list() for myFile in fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) #Search for corresponding Omega summary.txt file zValueDict[sngl.ifo]=list() for zFile in fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)): for chan in wikiFileFinder.__readSummary__(zFile): if "PEM" in chan[0] and not "SEI" in chan[0]: zValueDict[sngl.ifo].append(chan) if len(zValueDict[sngl.ifo]) == 0: sys.stdout.write("Omega scan summary file not or empty for %s. 
...continuing...\n"%sngl.ifo) #Select associated analyzeQscans timeString=str(float(sngl.time)).replace(".","_") for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueDictAQ[sngl.ifo]=list() for zFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)): for chan in wikiFileFinder.__readSummary__(zFile): if "PEM" in chan[0] and not "SEI" in chan[0]: zValueDictAQ[sngl.ifo].append(chan) if len(zValueDictAQ[sngl.ifo]) == 0: sys.stdout.write("AnalyzeQscan summary file not or empty for %s. ...continuing...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(imageDict[sngl.ifo]) < 1: wikiPage.putText("PEM scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: PEM plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#7 Auxiliary degree of freedom") wikiPage.subsubsection("Question") wikiPage.putText("Were the auxiliary channel transients coincident with the candidate insignificant?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict,indexDict,thumbDict,zValueDict = dict(),dict(),dict(),dict() imageDictAQ,indexDictAQ,thumbDictAQ,zValueDictAQ = dict(),dict(),dict(),dict() #Select only AUX channels filesOmega=wikiFileFinder.get_RDS_R_L1() filesAnalyze=wikiFileFinder.get_analyzeQscan_RDS() for sngl in wikiCoinc.sngls: indexDict[sngl.ifo],imageDict[sngl.ifo],thumbDict[sngl.ifo],zValueDict[sngl.ifo]=list(),list(),list(),list() indexDictAQ[sngl.ifo],imageDictAQ[sngl.ifo],thumbDictAQ[sngl.ifo],zValueDictAQ[sngl.ifo]=list(),list(),list(),list() for myFile in fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) zValueDict[sngl.ifo]=list() for zFile in 
fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)): for chan in wikiFileFinder.__readSummary__(zFile): if not "PEM" in chan[0] or not "SEI" in chan[0]: zValueDict[sngl.ifo].append(chan) if len(zValueDict[sngl.ifo]) == 0: sys.stdout.write("Omega scan summary file not or empty for %s. ...continuing...\n"%sngl.ifo) #Select associated analyzeQscans timeString=str(float(sngl.time)).replace(".","_") for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueDictAQ[sngl.ifo]=list() for zFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)): for chan in wikiFileFinder.__readSummary__(zFile): if not "PEM" in chan[0] or not "SEI" in chan[0]: zValueDictAQ[sngl.ifo].append(chan) if len(zValueDictAQ[sngl.ifo]) == 0: sys.stdout.write("AnalyzeQscan summary file not or empty for %s. ...continuing...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Other scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: AUX plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#8 Electronic Log Book") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the comments posted by the sci-mons or the operators in the e-log?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiLinkLHOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"H1"), "Hanford eLog") wikiLinkLLOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"L1"), "Livingston eLog") wikiPage.putText("%s\n\n%s\n\n"%(wikiLinkLHOlog,wikiLinkLLOlog)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#9 Glitch Report") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the weekly glitch report?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") 
if int(wikiCoinc.time) >= endOfS5: wikiLinkGlitch=wikiPage.makeExternalLink( "https://www.lsc-group.phys.uwm.edu/twiki/bin/view/DetChar/GlitchStudies", "Glitch Reports for S6" ) else: wikiLinkGlitch=wikiPage.makeExternalLink( "http://www.lsc-group.phys.uwm.edu/glitch/investigations/s5index.html#shift", "Glitch Reports for S5" ) wikiPage.putText("%s\n"%(wikiLinkGlitch)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#10 Snr versus time") wikiPage.subsubsection("Question") wikiPage.putText("Is this trigger significant in a SNR versus time plot of all triggers in its analysis chunk?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#11 Parameters of the candidate") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate have a high likelihood of being a gravitational-wave according to its parameters?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Effective Distance Ratio Test\n") effDList=wikiFileFinder.get_effDRatio() if len(effDList) != 1: sys.stdout.write("Warning: Effective Distance Test import problem.\n") for myFile in effDList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#12 Snr and Chisq") wikiPage.subsubsection("Question") wikiPage.putText("Are the SNR and CHISQ time series consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") # #Put plots SNR and Chi sqr # indexList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*.html") thumbList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_snr-*thumb.png") thumbList.extend(fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_chisq-*thumb.png")) thumbList.sort() indexList=[file2URL.convert(x) for x in indexList] thumbList=[file2URL.convert(x) for x in thumbList] #Two thumb types possible "_thumb.png" or ".thumb.png" imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] ifoCount=len(wikiCoinc.sngls) rowLabel={"SNR":1,"CHISQ":2} rowCount=len(rowLabel) colCount=ifoCount if len(indexList) >= 1: snrTable=wikiPage.wikiTable(rowCount+1,colCount+1) for i,sngl in enumerate(wikiCoinc.sngls): myIndex="" for indexFile in indexList: if indexFile.__contains__("_pipe_%s_FOLLOWUP_"%sngl.ifo): myIndex=indexFile if myIndex=="": snrTable.data[0][i+1]=" %s "%sngl.ifo else: snrTable.data[0][i+1]=wikiPage.makeExternalLink(myIndex,sngl.ifo) for col,sngl in enumerate(wikiCoinc.sngls): for row,label in enumerate(rowLabel.keys()): snrTable.data[row+1][0]=label for k,image in enumerate(imageList): if (image.__contains__("_%s-"%label.lower()) \ and image.__contains__("pipe_%s_FOLLOWUP"%sngl.ifo)): snrTable.data[row+1][col+1]=" %s "%(wikiPage.linkedRemoteImage(thumbList[k],thumbList[k])) wikiPage.insertTable(snrTable) else: sys.stdout.write("Warning: SNR and CHISQ plots not found.\n") wikiPage.putText("SNR and CHISQ plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit 
Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#13 Template bank veto") wikiPage.subsubsection("Question") wikiPage.putText("Is the bank veto value consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#14 Coherent studies") wikiPage.subsubsection("Question") wikiPage.putText("Are the triggers found in multiple interferometers coherent with each other?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") indexList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),"*.html") if len(indexList) >= 1: myIndex=file2URL.convert(indexList[0]) wikiPage.putText(wikiPage.makeExternalLink(myIndex,\ "%s Coherence Study Results"%(wikiCoinc.ifos))) thumbList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),\ "PLOT_CHIA_%s_snr-squared*thumb.png"%(wikiCoinc.time)) imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] rowCount=len(imageList) colCount=1 cohSnrTimeTable=wikiPage.wikiTable(rowCount+1,colCount) cohSnrTimeTable.data[0][0]="%s Coherent SNR Squared Times Series"%(wikiCoinc.ifos) for i,image in enumerate(imageList): cohSnrTimeTable.data[i+1][0]=wikiPage.linkedRemoteImage(image,thumbList[i]) wikiPage.insertTable(cohSnrTimeTable) else: sys.stdout.write("Warning: Coherent plotting jobs not found.\n") wikiPage.putText("Coherent Studies plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#15 Segmentation Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in segmentation?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#16 Calibration Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in calibration that are consistent with systematic uncertainties?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # | c64520f211dcf13695883191b82cf519c4889317 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/c64520f211dcf13695883191b82cf519c4889317/makeCheckListWiki.py |
import sqlite3 | import sqlite3 | def set_temp_store_directory( connection, temp_store_directory, verbose = False ):
    """
    Sets the temp_store_directory parameter in sqlite.
    """
    try:
        import sqlite3
    except ImportError:
        # pre 2.5.x
        from pysqlite2 import dbapi2 as sqlite3
    if verbose:
        print >> sys.stderr, "setting the temp_store_directory to %s" % temp_store_directory
    connection.cursor().execute('PRAGMA temp_store_directory = "%s"' % temp_store_directory) | 98147701cf04e51d748c843d2db3236a5f201722 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/98147701cf04e51d748c843d2db3236a5f201722/dbtables.py
from pysqlite2 import dbapi2 as sqlite3 | from pysqlite2 import dbapi2 as sqlite3 | def set_temp_store_directory( connection, temp_store_directory, verbose = False ):
    """
    Sets the temp_store_directory parameter in sqlite.
    """
    try:
        import sqlite3
    except ImportError:
        # pre 2.5.x
        from pysqlite2 import dbapi2 as sqlite3
    if verbose:
        print >> sys.stderr, "setting the temp_store_directory to %s" % temp_store_directory
    connection.cursor().execute('PRAGMA temp_store_directory = "%s"' % temp_store_directory) | 98147701cf04e51d748c843d2db3236a5f201722 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/98147701cf04e51d748c843d2db3236a5f201722/dbtables.py
print >> sys.stderr, "setting the temp_store_directory to %s" % temp_store_directory | print >> sys.stderr, "setting the temp_store_directory to %s ..." % temp_store_directory | def set_temp_store_directory( connection, temp_store_directory, verbose = False ):
    """
    Sets the temp_store_directory parameter in sqlite.
    """
    try:
        import sqlite3
    except ImportError:
        # pre 2.5.x
        from pysqlite2 import dbapi2 as sqlite3
    if verbose:
        print >> sys.stderr, "setting the temp_store_directory to %s" % temp_store_directory
    connection.cursor().execute('PRAGMA temp_store_directory = "%s"' % temp_store_directory) | 98147701cf04e51d748c843d2db3236a5f201722 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/98147701cf04e51d748c843d2db3236a5f201722/dbtables.py
flines[:,i]=asin(flines[:,i]) | flines[:,i]=arcsin(flines[:,i]) | def loadDataFile(filename):
    print filename
    infile=open(filename,'r')
    formatstr=infile.readline().lstrip()
    header=formatstr.split()
    llines=[]
    import re
    dec=re.compile(r'[^\d.-]+')
    for line in infile:
        sline=line.split()
        proceed=True
        for s in sline:
            if dec.search(s) is not None:
                print 'Warning! Ignoring non-numeric data after the header: %s'%(sline)
                proceed=False
        if proceed:
            llines.append(array(map(float,sline)))
    flines=array(llines)
    for i in range(0,len(header)):
        if header[i].lower().find('log')!=-1 and header[i].lower()!='logl':
            print 'exponentiating %s'%(header[i])
            flines[:,i]=exp(flines[:,i])
            header[i]=header[i].replace('log','')
        if header[i].lower().find('sin')!=-1:
            print 'asining %s'%(header[i])
            flines[:,i]=asin(flines[:,i])
            header[i]=header[i].replace('sin','')
        if header[i].lower().find('cos')!=-1:
            print 'acosing %s'%(header[i])
            flines[:,i]=acos(flines[:,i])
            header[i]=header[i].replace('cos','')
        header[i]=header[i].replace('(','')
        header[i]=header[i].replace(')','')
    print 'Read columns %s'%(str(header))
    return header,flines | f9cf47f36c4b265a40824c9056905324afc8be9c /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/f9cf47f36c4b265a40824c9056905324afc8be9c/cbcBayesSkyRes.py
flines[:,i]=acos(flines[:,i]) | flines[:,i]=arccos(flines[:,i]) | def loadDataFile(filename):
    print filename
    infile=open(filename,'r')
    formatstr=infile.readline().lstrip()
    header=formatstr.split()
    llines=[]
    import re
    dec=re.compile(r'[^\d.-]+')
    for line in infile:
        sline=line.split()
        proceed=True
        for s in sline:
            if dec.search(s) is not None:
                print 'Warning! Ignoring non-numeric data after the header: %s'%(sline)
                proceed=False
        if proceed:
            llines.append(array(map(float,sline)))
    flines=array(llines)
    for i in range(0,len(header)):
        if header[i].lower().find('log')!=-1 and header[i].lower()!='logl':
            print 'exponentiating %s'%(header[i])
            flines[:,i]=exp(flines[:,i])
            header[i]=header[i].replace('log','')
        if header[i].lower().find('sin')!=-1:
            print 'asining %s'%(header[i])
            flines[:,i]=asin(flines[:,i])
            header[i]=header[i].replace('sin','')
        if header[i].lower().find('cos')!=-1:
            print 'acosing %s'%(header[i])
            flines[:,i]=acos(flines[:,i])
            header[i]=header[i].replace('cos','')
        header[i]=header[i].replace('(','')
        header[i]=header[i].replace(')','')
    print 'Read columns %s'%(str(header))
    return header,flines | f9cf47f36c4b265a40824c9056905324afc8be9c /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/f9cf47f36c4b265a40824c9056905324afc8be9c/cbcBayesSkyRes.py