Dataset columns (each record below is one changed line: rem, add, context, meta):

    column    type      string lengths
    rem       string    1 .. 322k      line removed by the commit
    add       string    0 .. 2.05M     line added by the commit (may be empty)
    context   string    4 .. 228k      enclosing code before the change
    meta      string    156 .. 215     commit hash and source file path
self.veto_definer = os.path.basename(definer_file)
def update_veto_lists(self, timeoffset, veto_definer = None):
1e970725389ab6428d527ad9ebb1d1f3b0c5d669 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/1e970725389ab6428d527ad9ebb1d1f3b0c5d669/pylal_exttrig_llutils.py
xmldoc,digest = ligolw_utils.load_fileobj(file)
xmldoc,digest = utils.load_fileobj(file)
def fromsegmentxml(file): """ Read a segmentlist from the file object file containing an xml segment table. """ xmldoc,digest = ligolw_utils.load_fileobj(file) seg_table = table.get_table(xmldoc,lsctables.SegmentTable.tableName) segs = segmentlist() for seg in seg_table: segs.append(segment(seg.start_time,seg.end_time)) segs = segs.coalesce() return segs
dbdcf1eb646745c0d554835728d14901c48bc55f /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/dbdcf1eb646745c0d554835728d14901c48bc55f/dqSegmentUtils.py
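This record renames the load_fileobj import alias; the surrounding fromsegmentxml context also shows the read-then-coalesce pattern it serves. For readers without the glue/lsctables stack, the coalesce step can be sketched over plain (start, end) tuples; a minimal stand-in for the segmentlist.coalesce() behaviour relied on here, not the glue implementation:

    def coalesce(segs):
        """Merge overlapping or touching (start, end) pairs, as
        segmentlist.coalesce() does for the segments read above."""
        merged = []
        for start, end in sorted(segs):
            if merged and start <= merged[-1][1]:
                # Overlaps or touches the previous segment: extend it.
                merged[-1] = (merged[-1][0], max(merged[-1][1], end))
            else:
                merged.append((start, end))
        return merged

    print(coalesce([(10, 20), (15, 30), (40, 50)]))  # [(10, 30), (40, 50)]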
def fromsegmentcsvCSV(csvfile):
def fromsegmentcsv(csvfile):
def fromsegmentcsvCSV(csvfile): """ Read a segmentlist from the file object file containing a comma separated list of segments. """ def CSVLineToSeg(line): tstart, tend = map(int, line.split(',')) return segment(tstart, tend) segs = segmentlist([CSVLineToSeg(line) for line in csvfile]) return segs.coalesce()
dbdcf1eb646745c0d554835728d14901c48bc55f /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/dbdcf1eb646745c0d554835728d14901c48bc55f/dqSegmentUtils.py
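The companion CSV reader in this record can be exercised with no LIGO dependencies at all; in this sketch io.StringIO stands in for the real file object and plain tuples stand in for glue segments (the final coalesce step is the one sketched earlier):

    import io

    def fromsegmentcsv(csvfile):
        # One 'start,end' pair per line, as CSVLineToSeg parses above;
        # int() tolerates the trailing newline on each line.
        return sorted(tuple(map(int, line.split(','))) for line in csvfile)

    demo = io.StringIO(u'100,200\n150,300\n')
    print(fromsegmentcsv(demo))  # [(100, 200), (150, 300)]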
exe = make_external_call('which ligolw_segment_query')[0] segment_cmd = ' '.join([exe,'--query-segments',\ '--database','--include-segments',flag,\ '--gps-start-time',str(start),\ '--gps-end-time',str(end)]) segxmlout,segerr = make_external_call(segment_cmd) segs = segmentlist() if not segerr: tmpfile = tempfile.TemporaryFile() tmpfile.write(segxmlout) tmpfile.seek(0) segs = fromsegmentxml(tmpfile)
start = int(start) end = int(end) database_location = os.environ['S6_SEGMENT_SERVER'] connection = segmentdb_utils.setup_database(database_location) engine = query_engine.LdbdQueryEngine(connection) spec = flag.split(':') if len(spec) < 2 or len(spec) > 3: print >>sys.stderr, "Included segements must be of the form ifo:name:version or ifo:name:*" sys.exit(1) ifo = spec[0] name = spec[1] if len(spec) is 3 and spec[2] is not '*': version = int(spec[2]) if version < 1: print >>sys.stderr, "Segment version numbers must be greater than zero" sys.exit(1)
def grab_segments(start,end,flag): """ Returns a segmentlist containing the segments during which the given flag was active in the given period. """ exe = make_external_call('which ligolw_segment_query')[0] #== construct segment query segment_cmd = ' '.join([exe,'--query-segments',\ '--database','--include-segments',flag,\ '--gps-start-time',str(start),\ '--gps-end-time',str(end)]) #== run segment query segxmlout,segerr = make_external_call(segment_cmd) segs = segmentlist() if not segerr: tmpfile = tempfile.TemporaryFile() tmpfile.write(segxmlout) tmpfile.seek(0) segs = fromsegmentxml(tmpfile) else: print >>sys.stderr, "Warning: Call to ligolw_segment_query failed with "+\ "command:" print >>sys.stderr, "\n"+segment_cmd+"\n" return segs
dbdcf1eb646745c0d554835728d14901c48bc55f /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/dbdcf1eb646745c0d554835728d14901c48bc55f/dqSegmentUtils.py
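The replacement code above trades the external ligolw_segment_query call for a direct segment-database query, and validates the flag with `len(spec) is 3` and `spec[2] is not '*'`. Those `is` tests only happen to work because CPython caches small ints and one-character strings; ordinary equality says what is meant. A sketch of just the parsing step (parse_flag is a hypothetical helper and the flag value is illustrative):

    def parse_flag(flag):
        """Split an 'ifo:name[:version]' flag; '*' or a missing third
        field means any version."""
        spec = flag.split(':')
        if len(spec) not in (2, 3):
            raise ValueError('flag must be ifo:name:version or ifo:name:*')
        ifo, name, version = spec[0], spec[1], '*'
        if len(spec) == 3 and spec[2] != '*':
            version = int(spec[2])
            if version < 1:
                raise ValueError('segment version numbers must be greater than zero')
        return ifo, name, version

    print(parse_flag('H1:DMT-SCIENCE:1'))  # ('H1', 'DMT-SCIENCE', 1)
    print(parse_flag('H1:DMT-SCIENCE'))    # ('H1', 'DMT-SCIENCE', '*')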
print >>sys.stderr, "Warning: Call to ligolw_segment_query failed with "+\ "command:" print >>sys.stderr, "\n"+segment_cmd+"\n"
version = '*' segdefs = segmentdb_utils.expand_version_number(engine,(ifo,name,version,\ start,end,0,0)) segs = segmentdb_utils.query_segments(engine, 'segment', segdefs) segs = reduce(operator.or_, segs).coalesce()
def grab_segments(start,end,flag): """ Returns a segmentlist containing the segments during which the given flag was active in the given period. """ exe = make_external_call('which ligolw_segment_query')[0] #== construct segment query segment_cmd = ' '.join([exe,'--query-segments',\ '--database','--include-segments',flag,\ '--gps-start-time',str(start),\ '--gps-end-time',str(end)]) #== run segment query segxmlout,segerr = make_external_call(segment_cmd) segs = segmentlist() if not segerr: tmpfile = tempfile.TemporaryFile() tmpfile.write(segxmlout) tmpfile.seek(0) segs = fromsegmentxml(tmpfile) else: print >>sys.stderr, "Warning: Call to ligolw_segment_query failed with "+\ "command:" print >>sys.stderr, "\n"+segment_cmd+"\n" return segs
dbdcf1eb646745c0d554835728d14901c48bc55f /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/dbdcf1eb646745c0d554835728d14901c48bc55f/dqSegmentUtils.py
template = """ <profile namespace="dagman" key="priority">%s</profile>\n"""
template = """ <profile namespace="condor" key="priority">%s</profile>\n"""
def write_abstract_dag(self): """ Write all the nodes in the workflow to the DAX file. """ if not self.__dax_file_path: # this workflow is not dax-compatible, so don't write a dax return try: dagfile = open( self.__dax_file_path, 'w' ) except: raise CondorDAGError, "Cannot open file " + self.__dag_file_path
0f1584337feda1ac8b84e91a4ba6c2f71bd58746 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/0f1584337feda1ac8b84e91a4ba6c2f71bd58746/pipeline.py
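This one-token fix moves the priority profile from the dagman namespace to the condor namespace; the diff suggests dagman was the wrong namespace for a job-priority key in this DAX template. The substitution itself is plain %-formatting:

    template = """ <profile namespace="condor" key="priority">%s</profile>\n"""
    print(template % 10)  #  <profile namespace="condor" key="priority">10</profile>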
if ca and os.path.isfile(ca[0].path()):
if ca and os.path.isfile(ca[0].path()):
def add_dq(self, page): page.add_section("DQ", "Data Quality for %s" % (self.coinctime,)) page.sections["DQ"].div("This section gives vetoes and flags that were on")
5a0624e319d26832dd690150b2471e377b4f06d6 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/5a0624e319d26832dd690150b2471e377b4f06d6/followup_page.py
coarsedict = {}
coarsedict = {}
def map_grids(coarsegrid,finegrid,coarseres=4.0): """ takes the two grids (lists of lat/lon tuples) and returns a dictionary where the points in the coarse grid are the keys and lists of tuples of points in the fine grid are the values """ fgtemp = finegrid[:] coarsedict = {} ds = coarseres*pi/180.0 for cpt in coarsegrid: flist = [] for fpt in fgtemp: if (cpt[0]-fpt[0])*(cpt[0]-fpt[0]) <= ds*ds/4.0 and \ (cpt[1]-fpt[1])*(cpt[1]-fpt[1])*sin(cpt[0])*sin(cpt[0]) <= ds*ds/4.0: flist.append(fpt) coarsedict[cpt] = flist for rpt in flist: fgtemp.remove(rpt) return coarsedict, fgtemp
0eff34c0b6040ed802a1751bd751f465d7f00664 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/0eff34c0b6040ed802a1751bd751f465d7f00664/skylocutils.py
if (cpt[0]-fpt[0])*(cpt[0]-fpt[0]) <= ds*ds/4.0 and \ (cpt[1]-fpt[1])*(cpt[1]-fpt[1])*sin(cpt[0])*sin(cpt[0]) <= ds*ds/4.0:
if (cpt[0]-fpt[0])*(cpt[0]-fpt[0]) - ds*ds/4.0 <= epsilon and \ (cpt[1]-fpt[1])*(cpt[1]-fpt[1])*sin(cpt[0])*sin(cpt[0]) \ - ds*ds/4.0 <= epsilon:
def map_grids(coarsegrid,finegrid,coarseres=4.0): """ takes the two grids (lists of lat/lon tuples) and returns a dictionary where the points in the coarse grid are the keys and lists of tuples of points in the fine grid are the values """ fgtemp = finegrid[:] coarsedict = {} ds = coarseres*pi/180.0 for cpt in coarsegrid: flist = [] for fpt in fgtemp: if (cpt[0]-fpt[0])*(cpt[0]-fpt[0]) <= ds*ds/4.0 and \ (cpt[1]-fpt[1])*(cpt[1]-fpt[1])*sin(cpt[0])*sin(cpt[0]) <= ds*ds/4.0: flist.append(fpt) coarsedict[cpt] = flist for rpt in flist: fgtemp.remove(rpt) return coarsedict, fgtemp
0eff34c0b6040ed802a1751bd751f465d7f00664 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/0eff34c0b6040ed802a1751bd751f465d7f00664/skylocutils.py
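The map_grids fix rewrites each `a <= b` bin test as `a - b <= epsilon`, so fine-grid points that land exactly on a coarse-cell boundary are not dropped to rounding error. The idea in isolation (the epsilon value here is illustrative; the module's own definition is not shown in this record):

    epsilon = 1e-9

    def leq_with_slack(lhs, rhs):
        # lhs <= rhs up to floating-point slack, the form the diff adopts.
        return lhs - rhs <= epsilon

    # 0.1 + 0.2 overshoots 0.3 by about 5.6e-17, so the plain test fails:
    print(0.1 + 0.2 <= 0.3)                # False
    print(leq_with_slack(0.1 + 0.2, 0.3))  # True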
while os.path.isfile(base_name): base_name = base_name + '_' + str(counter) + ext
while os.path.isfile(name): name = base_name + '_' + str(counter) + ext
def get_unique_filename(name): """ use this to avoid name collisions """ counter = 1 base_name, ext = os.path.splitext(name) while os.path.isfile(base_name): base_name = base_name + '_' + str(counter) + ext counter += 1 return base_name + ext
f13e4b2290215e52c05743948dd99e447825cb46 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/f13e4b2290215e52c05743948dd99e447825cb46/run_skypoints.py
return base_name + ext
return name
def get_unique_filename(name): """ use this to avoid name collisions """ counter = 1 base_name, ext = os.path.splitext(name) while os.path.isfile(base_name): base_name = base_name + '_' + str(counter) + ext counter += 1 return base_name + ext
f13e4b2290215e52c05743948dd99e447825cb46 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/f13e4b2290215e52c05743948dd99e447825cb46/run_skypoints.py
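This record and the previous one patch the same helper and only make sense together: the old get_unique_filename probed base_name (which lacks the extension, so the isfile test almost never fired) and, on a collision, appended ext twice. Applying both added lines gives the following shape; a reconstruction from the two diffs, not the file itself, with an arbitrary demo filename:

    import os

    def get_unique_filename(name):
        """Return name, or the first 'base_N.ext' that does not exist."""
        counter = 1
        base_name, ext = os.path.splitext(name)
        while os.path.isfile(name):
            name = base_name + '_' + str(counter) + ext
            counter += 1
        return name

    # Demo: once the file exists, the next call proposes a suffixed name.
    open('demo.txt', 'w').close()
    print(get_unique_filename('demo.txt'))  # demo_1.txt
    os.remove('demo.txt')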
injected_cols.extend(['injected_end_time', 'injected_end_time_ns', 'injected_end_time_utc__Px_click_for_daily_ihope_xP_'])
injected_cols.extend(['injected_decisive_distance','injected_end_time', 'injected_end_time_ns', 'injected_end_time_utc__Px_click_for_daily_ihope_xP_'])
def convert_duration( duration ): return sqlutils.convert_duration( duration, convert_durations )
161e33ab7c95414ed2db862fbd5567595c48ff8f /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/161e33ab7c95414ed2db862fbd5567595c48ff8f/printutils.py
from glue.lal import LIGOTimeGPS
def get_pyvalue(self): return generic_get_pyvalue(self)
161e33ab7c95414ed2db862fbd5567595c48ff8f /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/161e33ab7c95414ed2db862fbd5567595c48ff8f/printutils.py
AND rank(""", decisive_distance, """) <= """, str(limit), """
%s""" % (limit is not None and ''.join(['AND rank(', decisive_distance, ') <= ', str(limit)]) or ''), """
def get_decisive_distance( *args ): return sorted(args)[1]
161e33ab7c95414ed2db862fbd5567595c48ff8f /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/161e33ab7c95414ed2db862fbd5567595c48ff8f/printutils.py
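The removed line emitted the `AND rank(...) <=` cut unconditionally, which breaks the query when no limit is set; the added line splices it in only when limit is not None, via the old `cond and x or y` idiom. (Per the context, the ranked quantity comes from get_decisive_distance, the second-smallest of its arguments.) The same assembly with an explicit branch (rank_clause is a hypothetical helper; the column name is taken from the injected_cols record above):

    def rank_clause(column, limit):
        # Optional SQL fragment: empty string when there is no limit.
        if limit is None:
            return ''
        return 'AND rank(' + column + ') <= ' + str(limit)

    print(rank_clause('injected_decisive_distance', 10))
    # AND rank(injected_decisive_distance) <= 10
    print(repr(rank_clause('injected_decisive_distance', None)))  # ''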
segtest = Popen('FrCheck -i '+frame,shell=True,stdout=PIPE) if os.waitpid(segtest.pid,0)[1]==11: continue
segtest = subprocess.Popen([frcheck,"-i",frame],stdout=subprocess.PIPE) if os.waitpid(segtest.pid,0)[1]==11: print >>sys.stderr, "Warning. Segmentation fault detected with command:" print >>sys.stderr, "FrCheck -i "+frame continue
def grab_data(start,end,channel,type,\ nds=False,verbose=False,dmt=False): """ This function will return the frame data for the given channel of the given type in the given [start,end] time range and will construct a gps time vector to go with it. The nds option is not yet supported, and the dmt option will return data for dmt channels in frames not found by ligo_data_find. >>>grab_data(960000000,960000001,'H1:IFO-SV_STATE_VECTOR','H1_RDS_R_L3') ([960000000.0,960000001.0,960000002.0,960000003.0,960000004.0,960000005.0], [15.0, 14.125, 13.0, 13.0, 13.0, 13.0]) """ time = [] data = [] #== generate framecache if verbose: print >>sys.stdout, "Generating framecache..." sys.stdout.flush() if not dmt: cache = generate_cache(start,end,channel[0:1],type,return_files=True) else: cache = dmt_cache(start,end,channel[0:1],type) #== loop over frames in cache for frame in cache: #== check frame file exists if not os.path.isfile(frame): continue #== check for Segmentation fault segtest = Popen('FrCheck -i '+frame,shell=True,stdout=PIPE) if os.waitpid(segtest.pid,0)[1]==11: continue segtest.stdout.close() #== try to extract data from frame try: frame_data,data_start,_,dt,_,_ = Fr.frgetvect1d(frame,channel) if frame_data==[]: print >>sys.stderr, "No data for "+channel+" in "+frame continue #== construct time array frame_length = float(dt)*len(frame_data) frame_time = data_start+dt*numpy.arange(len(frame_data)) #== discard frame data outside of time span for i in range(len(frame_data)): if frame_time[i] < start: continue if frame_time[i] > end: continue time.append(frame_time[i]) data.append(frame_data[i]) except: print >>sys.stderr, "Failed to access frame:\n"+frame continue return time,data
d6c81a47cb2c69421bd3fd8e5948cd64f1613028 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/d6c81a47cb2c69421bd3fd8e5948cd64f1613028/dqFrameUtils.py
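Both sides of this diff spot a crashed FrCheck by comparing the raw os.waitpid status word to 11, the SIGSEGV number. When the wait goes through the subprocess API instead, death-by-signal surfaces as a negative returncode. A POSIX-only sketch, with a deliberately self-segfaulting child standing in for `FrCheck -i <frame>` so it runs anywhere:

    import signal
    import subprocess
    import sys

    # Stand-in for `FrCheck -i <frame>`: a child that dies of SIGSEGV.
    cmd = [sys.executable, '-c',
           'import os, signal; os.kill(os.getpid(), signal.SIGSEGV)']
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
    proc.communicate()

    # The waitpid status test `== 11` becomes `returncode == -11` here.
    if proc.returncode == -signal.SIGSEGV:
        print('Warning: segmentation fault detected')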
ldf_exe,sourced = GetCommandOutput('which ligo_data_find') if ldf_exe.find('ligo_data_find')==-1: print "Cannot find ligo_data_find. Please ensure, lscsoft is sourced." sys.exit()
def generate_cache(start_time,end_time,ifos,types,return_files=False): """ This function will return a cache of files as found by ligo_data_find, given start and end time, and lists of ifos and types. If the return_files option is given as 'True' the function will return a list of frames with absolute paths, otherwise it will return a frame cache (as used in wpipline, for example). Example: >>>generate_cache(961977615,962582415,R,H) ['H R 961977600 962000000 32 /archive/frames/S6/L0/LHO/H-R-9619' 'H R 962000000 962064032 32 /archive/frames/S6/L0/LHO/H-R-9620'] >>>generate_cache(961977615,962582415,R,H,return_files=True) [/archive/frames/S6/L0/LHO/H-R-9619/H-R-961977600-32.gwf, /archive/frames/S6/L0/LHO/H-R-9619/H-R-961977632-32.gwf, ... /archive/frames/S6/L0/LHO/H-R-9620/H-R-962064000-32.gwf] """ cache = [] #== if given strings, make single-element lists if isinstance(ifos,str): ifos=[ifos] if isinstance(types,str): types=[types] #== loop over each ifo for ifo in ifos: #== loop over each frame type for type in types: ldf_exe,sourced = GetCommandOutput('which ligo_data_find') if ldf_exe.find('ligo_data_find')==-1: print "Cannot find ligo_data_find. Please ensure, lscsoft is sourced." sys.exit() try: data_find_cmd = ldf_exe.replace('\n','')+\ ''' --gps-start-time '''+str(start_time)+\ ''' --gps-end-time '''+str(end_time)+\ ''' --observatory '''+ifo[0:1]+\ ''' --type '''+type+\ ''' --url-type file '''+\ ''' --frame-cache | sort''' #== run ligo_data_find and append each frame to the cache cache_out = Popen(data_find_cmd,shell=True,stdout=PIPE) for line in cache_out.stdout.readlines(): #== if line is not recognised in standard frame cache format, skip if len(line.split(' '))!=6: continue cache.append(line.replace('\n','')) cache_out.stdout.close() except: continue #== if no files: if cache==[]: print >>sys.stderr, "Warning: no frames found." #== if asked for the files, expand the cache if return_files: cache = expand_cache(cache) return cache
d6c81a47cb2c69421bd3fd8e5948cd64f1613028 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/d6c81a47cb2c69421bd3fd8e5948cd64f1613028/dqFrameUtils.py
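This record and several below locate ligo_data_find by shelling out to `which`. On Python 3.3+ the same lookup needs no child process; a sketch (ligo_data_find need not be installed where this runs, hence the fallback branch, and realpath mirrors the symlink resolution the later diffs add):

    import os
    import shutil

    ldf = shutil.which('ligo_data_find')
    if ldf is None:
        print('Cannot find ligo_data_find. Please ensure lscsoft is sourced.')
    else:
        print(os.path.realpath(ldf))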
data_find_cmd = ldf_exe.replace('\n','')+\
data_find_cmd = ldf+\
def generate_cache(start_time,end_time,ifos,types,return_files=False): """ This function will return a cache of files as found by ligo_data_find, given start and end time, and lists of ifos and types. If the return_files option is given as 'True' the function will return a list of frames with absolute paths, otherwise it will return a frame cache (as used in wpipline, for example). Example: >>>generate_cache(961977615,962582415,R,H) ['H R 961977600 962000000 32 /archive/frames/S6/L0/LHO/H-R-9619' 'H R 962000000 962064032 32 /archive/frames/S6/L0/LHO/H-R-9620'] >>>generate_cache(961977615,962582415,R,H,return_files=True) [/archive/frames/S6/L0/LHO/H-R-9619/H-R-961977600-32.gwf, /archive/frames/S6/L0/LHO/H-R-9619/H-R-961977632-32.gwf, ... /archive/frames/S6/L0/LHO/H-R-9620/H-R-962064000-32.gwf] """ cache = [] #== if given strings, make single-element lists if isinstance(ifos,str): ifos=[ifos] if isinstance(types,str): types=[types] #== loop over each ifo for ifo in ifos: #== loop over each frame type for type in types: ldf_exe,sourced = GetCommandOutput('which ligo_data_find') if ldf_exe.find('ligo_data_find')==-1: print "Cannot find ligo_data_find. Please ensure, lscsoft is sourced." sys.exit() try: data_find_cmd = ldf_exe.replace('\n','')+\ ''' --gps-start-time '''+str(start_time)+\ ''' --gps-end-time '''+str(end_time)+\ ''' --observatory '''+ifo[0:1]+\ ''' --type '''+type+\ ''' --url-type file '''+\ ''' --frame-cache | sort''' #== run ligo_data_find and append each frame to the cache cache_out = Popen(data_find_cmd,shell=True,stdout=PIPE) for line in cache_out.stdout.readlines(): #== if line is not recognised in standard frame cache format, skip if len(line.split(' '))!=6: continue cache.append(line.replace('\n','')) cache_out.stdout.close() except: continue #== if no files: if cache==[]: print >>sys.stderr, "Warning: no frames found." #== if asked for the files, expand the cache if return_files: cache = expand_cache(cache) return cache
d6c81a47cb2c69421bd3fd8e5948cd64f1613028 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/d6c81a47cb2c69421bd3fd8e5948cd64f1613028/dqFrameUtils.py
cache_out = Popen(data_find_cmd,shell=True,stdout=PIPE)
cache_out = subprocess.Popen(data_find_cmd,shell=True,\ stdout=subprocess.PIPE)
def generate_cache(start_time,end_time,ifos,types,return_files=False): """ This function will return a cache of files as found by ligo_data_find, given start and end time, and lists of ifos and types. If the return_files option is given as 'True' the function will return a list of frames with absolute paths, otherwise it will return a frame cache (as used in wpipline, for example). Example: >>>generate_cache(961977615,962582415,R,H) ['H R 961977600 962000000 32 /archive/frames/S6/L0/LHO/H-R-9619' 'H R 962000000 962064032 32 /archive/frames/S6/L0/LHO/H-R-9620'] >>>generate_cache(961977615,962582415,R,H,return_files=True) [/archive/frames/S6/L0/LHO/H-R-9619/H-R-961977600-32.gwf, /archive/frames/S6/L0/LHO/H-R-9619/H-R-961977632-32.gwf, ... /archive/frames/S6/L0/LHO/H-R-9620/H-R-962064000-32.gwf] """ cache = [] #== if given strings, make single-element lists if isinstance(ifos,str): ifos=[ifos] if isinstance(types,str): types=[types] #== loop over each ifo for ifo in ifos: #== loop over each frame type for type in types: ldf_exe,sourced = GetCommandOutput('which ligo_data_find') if ldf_exe.find('ligo_data_find')==-1: print "Cannot find ligo_data_find. Please ensure, lscsoft is sourced." sys.exit() try: data_find_cmd = ldf_exe.replace('\n','')+\ ''' --gps-start-time '''+str(start_time)+\ ''' --gps-end-time '''+str(end_time)+\ ''' --observatory '''+ifo[0:1]+\ ''' --type '''+type+\ ''' --url-type file '''+\ ''' --frame-cache | sort''' #== run ligo_data_find and append each frame to the cache cache_out = Popen(data_find_cmd,shell=True,stdout=PIPE) for line in cache_out.stdout.readlines(): #== if line is not recognised in standard frame cache format, skip if len(line.split(' '))!=6: continue cache.append(line.replace('\n','')) cache_out.stdout.close() except: continue #== if no files: if cache==[]: print >>sys.stderr, "Warning: no frames found." #== if asked for the files, expand the cache if return_files: cache = expand_cache(cache) return cache
d6c81a47cb2c69421bd3fd8e5948cd64f1613028 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/d6c81a47cb2c69421bd3fd8e5948cd64f1613028/dqFrameUtils.py
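The new call above still needs shell=True because the command string ends in `| sort`. Moving the sort (and the six-field cache-format filter that follows the call in context) into Python removes the shell entirely; a sketch, with argv assembled by the caller:

    import subprocess

    def run_and_sort(argv):
        """Run argv without a shell; return its six-field output lines,
        sorted in Python in place of `| sort`."""
        proc = subprocess.Popen(argv, stdout=subprocess.PIPE)
        out, _ = proc.communicate()
        lines = out.decode().splitlines()
        return sorted(l for l in lines if len(l.split(' ')) == 6)

    # e.g. cache = run_and_sort([ldf, '--gps-start-time', str(start_time),
    #                            '--gps-end-time', str(end_time), ...])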
ldf_exe='ligo_data_find' ldf,ldf_status = GetCommandOutput('which '+ldf_exe) if ldf_status != 0: print >>sys.stderr, \ "Error: ligo_data_find not found. Please ensure lscsoftrc is sourced" sys.exit()
p = subprocess.Popen(["which", "ligo_data_find"], stdout=subprocess.PIPE) ldf = p.communicate()[0].replace('\n','') if p.returncode != 0: raise ValueError, "ligo_data_find" p.stdout.close() ldf = os.path.realpath(ldf)
def find_types(types,search='standard'): """ This function will return a valid list of LIGO frame types given the list of type strings. The search option defines the breadth of the search, to speed up the search, the following search options are supported: 'standard','short','full'. The 'R', 'T', and 'M' (raw, raw second trends, and raw minute trends) are treated as special cases, so as not to return all types containing those letters. Example: >>>find_types('H1_RDS') ['H1_RDS_C01_LX', 'H1_RDS_C02_LX', 'H1_RDS_C03_L1', 'H1_RDS_C03_L2', 'H1_RDS_C03_L2_ET', 'H1_RDS_C03_L2_ET2', 'H1_RDS_C03_L2_ET30', 'H1_RDS_C04_LX', 'H1_RDS_R_L1', 'H1_RDS_R_L3', 'H1_RDS_R_L4'] >>>find_types(['H1_RDS','R'],search='short') ['H1_RDS_R_L1', 'H1_RDS_R_L3', 'H1_RDS_R_L4', 'R'] """ #== check for ldf ldf_exe='ligo_data_find' ldf,ldf_status = GetCommandOutput('which '+ldf_exe) if ldf_status != 0: print >>sys.stderr, \ "Error: ligo_data_find not found. Please ensure lscsoftrc is sourced" sys.exit() #== make sure types is a list if types is None: types = [] if isinstance(types,str): types = [types] #== set up search command find_cmd = ldf_exe+" -y | egrep " #== treat 'R','M' and 'T' as special cases, #== so not to grep for all types containing 'R' special_types = ['M','R','T'] special_cases=[] #== set list of ignored strings in `ligo_data_find -y` #== there are thousands of GRBXXXXXX frame types, so ignore them if search!='full': vgrep_list = ['GRB'] if search=='short': #== all of these strings are part of frame types that can be ignored for a #== short search short_ignore_list = ['CAL','BRST','Mon','SG','IMR','DuoTone','Concat',\ 'BH','WNB','Lock','_M','_S5','Multi','Noise','_C0'] vgrep_list.extend(short_ignore_list) #== add each of those ignored strings to a vgrep command find_cmd+="-v '" for vstring in vgrep_list: find_cmd+=vstring+'|' #== take off last '|' find_cmd = find_cmd[0:-1] + "'" #== if given types if types: find_cmd+=' | egrep ' for type in types: #== if type is one of the special cases, save. if type in special_types: special_cases.append(type) #== otherwise add it to the grep command else: if not find_cmd.endswith('|'): find_cmd+= "'" find_cmd+=type+"|" #== take of the extra character if find_cmd[-1]=="|": find_cmd = find_cmd[0:-1] + "'" found_types = [] #== if not searching only for special types, run the grep command if find_cmd != ldf_exe+" -y | egrep '": found_types_out = Popen(find_cmd,shell=True,stdout=PIPE) for line in found_types_out.stdout.readlines(): if line=='\n': continue found_type = line.replace('\n','') found_types.append(found_type) found_types_out.stdout.close() #== append all special cases to the list for type in special_cases: found_types.append(type) if found_types == ['']: print >>sys.stderr, "No data types found, exiting." return found_types
d6c81a47cb2c69421bd3fd8e5948cd64f1613028 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/d6c81a47cb2c69421bd3fd8e5948cd64f1613028/dqFrameUtils.py
find_cmd = ldf_exe+" -y | egrep "
find_cmd = ldf+" -y | egrep "
def find_types(types,search='standard'): """ This function will return a valid list of LIGO frame types given the list of type strings. The search option defines the breadth of the search, to speed up the search, the following search options are supported: 'standard','short','full'. The 'R', 'T', and 'M' (raw, raw second trends, and raw minute trends) are treated as special cases, so as not to return all types containing those letters. Example: >>>find_types('H1_RDS') ['H1_RDS_C01_LX', 'H1_RDS_C02_LX', 'H1_RDS_C03_L1', 'H1_RDS_C03_L2', 'H1_RDS_C03_L2_ET', 'H1_RDS_C03_L2_ET2', 'H1_RDS_C03_L2_ET30', 'H1_RDS_C04_LX', 'H1_RDS_R_L1', 'H1_RDS_R_L3', 'H1_RDS_R_L4'] >>>find_types(['H1_RDS','R'],search='short') ['H1_RDS_R_L1', 'H1_RDS_R_L3', 'H1_RDS_R_L4', 'R'] """ #== check for ldf ldf_exe='ligo_data_find' ldf,ldf_status = GetCommandOutput('which '+ldf_exe) if ldf_status != 0: print >>sys.stderr, \ "Error: ligo_data_find not found. Please ensure lscsoftrc is sourced" sys.exit() #== make sure types is a list if types is None: types = [] if isinstance(types,str): types = [types] #== set up search command find_cmd = ldf_exe+" -y | egrep " #== treat 'R','M' and 'T' as special cases, #== so not to grep for all types containing 'R' special_types = ['M','R','T'] special_cases=[] #== set list of ignored strings in `ligo_data_find -y` #== there are thousands of GRBXXXXXX frame types, so ignore them if search!='full': vgrep_list = ['GRB'] if search=='short': #== all of these strings are part of frame types that can be ignored for a #== short search short_ignore_list = ['CAL','BRST','Mon','SG','IMR','DuoTone','Concat',\ 'BH','WNB','Lock','_M','_S5','Multi','Noise','_C0'] vgrep_list.extend(short_ignore_list) #== add each of those ignored strings to a vgrep command find_cmd+="-v '" for vstring in vgrep_list: find_cmd+=vstring+'|' #== take off last '|' find_cmd = find_cmd[0:-1] + "'" #== if given types if types: find_cmd+=' | egrep ' for type in types: #== if type is one of the special cases, save. if type in special_types: special_cases.append(type) #== otherwise add it to the grep command else: if not find_cmd.endswith('|'): find_cmd+= "'" find_cmd+=type+"|" #== take of the extra character if find_cmd[-1]=="|": find_cmd = find_cmd[0:-1] + "'" found_types = [] #== if not searching only for special types, run the grep command if find_cmd != ldf_exe+" -y | egrep '": found_types_out = Popen(find_cmd,shell=True,stdout=PIPE) for line in found_types_out.stdout.readlines(): if line=='\n': continue found_type = line.replace('\n','') found_types.append(found_type) found_types_out.stdout.close() #== append all special cases to the list for type in special_cases: found_types.append(type) if found_types == ['']: print >>sys.stderr, "No data types found, exiting." return found_types
d6c81a47cb2c69421bd3fd8e5948cd64f1613028 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/d6c81a47cb2c69421bd3fd8e5948cd64f1613028/dqFrameUtils.py
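find_types builds its filter as chained `egrep 'A|B|...'` and `egrep -v` calls, with special-casing so that the bare 'R', 'T' and 'M' types do not match every name containing those letters. Doing the include/exclude filtering on the list in Python avoids the quoting gymnastics; a sketch (filter_types is hypothetical, the sample names reuse types from the docstring above, and the exact-match special cases are omitted):

    import re

    def filter_types(all_types, include=None, exclude=None):
        # Keep names matching any include pattern and no exclude pattern,
        # as the chained egrep / egrep -v pipeline does.
        inc = re.compile('|'.join(include)) if include else None
        exc = re.compile('|'.join(exclude)) if exclude else None
        keep = []
        for name in all_types:
            if exc and exc.search(name):
                continue
            if inc and not inc.search(name):
                continue
            keep.append(name)
        return keep

    types = ['H1_RDS_R_L1', 'H1_RDS_C03_L2', 'GRB090423', 'R']
    print(filter_types(types, include=['RDS'], exclude=['GRB']))
    # ['H1_RDS_R_L1', 'H1_RDS_C03_L2']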
if find_cmd != ldf_exe+" -y | egrep '": found_types_out = Popen(find_cmd,shell=True,stdout=PIPE)
if find_cmd != ldf+" -y | egrep '": found_types_out = subprocess.Popen(find_cmd,shell=True,\ stdout=subprocess.PIPE)
def find_types(types,search='standard'): """ This function will return a valid list of LIGO frame types given the list of type strings. The search option defines the breadth of the search, to speed up the search, the following search options are supported: 'standard','short','full'. The 'R', 'T', and 'M' (raw, raw second trends, and raw minute trends) are treated as special cases, so as not to return all types containing those letters. Example: >>>find_types('H1_RDS') ['H1_RDS_C01_LX', 'H1_RDS_C02_LX', 'H1_RDS_C03_L1', 'H1_RDS_C03_L2', 'H1_RDS_C03_L2_ET', 'H1_RDS_C03_L2_ET2', 'H1_RDS_C03_L2_ET30', 'H1_RDS_C04_LX', 'H1_RDS_R_L1', 'H1_RDS_R_L3', 'H1_RDS_R_L4'] >>>find_types(['H1_RDS','R'],search='short') ['H1_RDS_R_L1', 'H1_RDS_R_L3', 'H1_RDS_R_L4', 'R'] """ #== check for ldf ldf_exe='ligo_data_find' ldf,ldf_status = GetCommandOutput('which '+ldf_exe) if ldf_status != 0: print >>sys.stderr, \ "Error: ligo_data_find not found. Please ensure lscsoftrc is sourced" sys.exit() #== make sure types is a list if types is None: types = [] if isinstance(types,str): types = [types] #== set up search command find_cmd = ldf_exe+" -y | egrep " #== treat 'R','M' and 'T' as special cases, #== so not to grep for all types containing 'R' special_types = ['M','R','T'] special_cases=[] #== set list of ignored strings in `ligo_data_find -y` #== there are thousands of GRBXXXXXX frame types, so ignore them if search!='full': vgrep_list = ['GRB'] if search=='short': #== all of these strings are part of frame types that can be ignored for a #== short search short_ignore_list = ['CAL','BRST','Mon','SG','IMR','DuoTone','Concat',\ 'BH','WNB','Lock','_M','_S5','Multi','Noise','_C0'] vgrep_list.extend(short_ignore_list) #== add each of those ignored strings to a vgrep command find_cmd+="-v '" for vstring in vgrep_list: find_cmd+=vstring+'|' #== take off last '|' find_cmd = find_cmd[0:-1] + "'" #== if given types if types: find_cmd+=' | egrep ' for type in types: #== if type is one of the special cases, save. if type in special_types: special_cases.append(type) #== otherwise add it to the grep command else: if not find_cmd.endswith('|'): find_cmd+= "'" find_cmd+=type+"|" #== take of the extra character if find_cmd[-1]=="|": find_cmd = find_cmd[0:-1] + "'" found_types = [] #== if not searching only for special types, run the grep command if find_cmd != ldf_exe+" -y | egrep '": found_types_out = Popen(find_cmd,shell=True,stdout=PIPE) for line in found_types_out.stdout.readlines(): if line=='\n': continue found_type = line.replace('\n','') found_types.append(found_type) found_types_out.stdout.close() #== append all special cases to the list for type in special_cases: found_types.append(type) if found_types == ['']: print >>sys.stderr, "No data types found, exiting." return found_types
d6c81a47cb2c69421bd3fd8e5948cd64f1613028 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/d6c81a47cb2c69421bd3fd8e5948cd64f1613028/dqFrameUtils.py
ldf_exe='ligo_data_find' ldf_status = GetCommandOutput('which '+ldf_exe)[1] if ldf_status != 0: print >>sys.stderr, "Error: ligo_data_find not found. "+\ "Please ensure lscsoftrc is sourced" sys.exit()
p = subprocess.Popen(["which", "ligo_data_find"], stdout=subprocess.PIPE) ldf = p.communicate()[0].replace('\n','') if p.returncode != 0: raise ValueError, "ligo_data_find" p.stdout.close() ldf = os.path.realpath(ldf)
def find_channels(channels=None,\ types=None,\ ifos=None,\ ex_channels=None,\ ignore=[],\ match=False,\ time=None,\ unique=False,\ verbose=False): """ This function will use FrChannels to return all LIGO data channels matching the given list of 'channels' strings, whilst exluding the 'ex_channels' strings. Using find_ifos() and find_types() in the same module (if required), the search is performed over the given ifos for each given type. Use match=True to restrict the search to find channels that exactly match the given 'channels' list (i.e. not a partial match). Use time=True to search for channels in frame types defined at the given epoch. Use unique=True to return a unique list of channels, parsed using the parse_unique_channels() function, otherwise can return multiple instance of the same name string in different types. Returns a list of dqFrameUtils.Channel instances. Examples: >>>channels = find_channels(channels='DARM',types='H1_RDS_R_L1') >>>for channel in channels: print channel.name,channel.type,channel.sampling H1:LSC-DARM_CTRL H1_RDS_R_L1 16384.0 H1:LSC-DARM_ERR H1_RDS_R_L1 16384.0 H1:LSC-DARM_CTRL_EXC_DAQ H1_RDS_R_L1 16384.0 H1:LSC-DARM_GAIN H1_RDS_R_L1 16.0 >>>channels = find_channels(channels='DARM_ERR',types=['H1_RDS_R_L1','H1_RDS_R_L3']) >>>for channel in channels: print channel.name,channel.type,channel.sampling H1:LSC-DARM_ERR H1_RDS_R_L1 16384.0 H1:LSC-DARM_ERR H1_RDS_R_L3 16384.0 >>>channels = find_channels(channels='DARM_ERR',types=['H1_RDS_R_L1','H1_RDS_R_L3'],unique=True) >>>for channel in channels: print channel.name,channel.type,channel.sampling H1:LSC-DARM_ERR H1_RDS_R_L1 16384.0 """ #== check for ldf ldf_exe='ligo_data_find' ldf_status = GetCommandOutput('which '+ldf_exe)[1] if ldf_status != 0: print >>sys.stderr, "Error: ligo_data_find not found. "+\ "Please ensure lscsoftrc is sourced" sys.exit() #== cannot work with no ifos if ifos is None: ifos = find_ifos(channels,types,ifos) if types is None: types = find_types(types) #== check list status if isinstance(channels,str): channels = [channels] if isinstance(types,str): types = [types] if isinstance(ifos,str): ifos = [ifos] found_channels=[] #== loop over each ifo for ifo in ifos: #== set ligo_data_find frame search time if time is None: time = \ str(GetCommandOutput('tconvert now -2 days')[0]).replace('\n','') if verbose: print_statement = \ "Searching "+str(len(types))+" frame types for: " if channels is not None: for channel in channels: print_statement += channel+', ' print_statement += " in ifo "+ifo else: print_statement+= "all channels, in ifo "+ifo print print_statement for type in types: count=0 #== skip empty frame types or those set for ignorance if type in ignore: continue if type == '': continue if verbose: print >>sys.stdout, " Searching "+str(type)+"...", sys.stdout.flush() #== find first frame file for type frame_cmd = ldf_exe+''' --observatory '''+ifo[0:1]+\ ''' --type='''+type+\ ''' --gps-start-time '''+str(time)+\ ''' --gps-end-time '''+str(time)+\ ''' --url-type file''' frame_out = Popen(frame_cmd,shell=True,stdout=PIPE,stderr=PIPE) frame_status = 0 frame='' for line in frame_out.stdout.readlines(): if line[0:7]=='file://': frame = line break frame_out.stdout.close() frame = frame.replace('\n','') #== if frame is found: if frame_status == 0 and frame != "": info = frame.split(' ') frame = info[-1].replace('file://localhost','') #== get channels contained in frame, grepping for input channel string channel_find_cmd = "FrChannels "+frame+" | grep "+ifo #== add grep options for each included channel if channels is not None: channel_find_cmd += " | egrep '" for channel in channels: channel_find_cmd += channel+"|" channel_find_cmd = channel_find_cmd[0:-1]+"'" #== add grep options for each excluded channel if ex_channels is not None: channel_find_cmd += " | egrep -v '" for ex_channel in ex_channels: channel_find_cmd += ex_channel+"|" channel_find_cmd = channel_find_cmd[0:-1]+"'" #== grab channels try: channel_list_out = Popen(channel_find_cmd,shell=True,stdout=PIPE) for line in channel_list_out.stdout.readlines(): data = line.replace('\n','') name,sampling = data.split(' ') #== if asked for exact match, check: if match: if name not in channels: continue #== generate structure and append to list found_channel = Channel(name,type=type,sampling=sampling) found_channels.append(found_channel) count+=1 sys.stdout.flush() channel_list_out.stdout.close() except: print " Failed to find channels for type "+type+", using the"+\ " following frame\n"+frame continue #== print channel count for data type if verbose: print >>sys.stdout, count,"channels found" if verbose: print >>sys.stdout if unique: found_channels = parse_unique_channels(found_channels) return found_channels
d6c81a47cb2c69421bd3fd8e5948cd64f1613028 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/d6c81a47cb2c69421bd3fd8e5948cd64f1613028/dqFrameUtils.py
frame_cmd = ldf_exe+''' --observatory '''+ifo[0:1]+\
frame_cmd = ldf+''' --observatory '''+ifo[0:1]+\
def find_channels(channels=None,\ types=None,\ ifos=None,\ ex_channels=None,\ ignore=[],\ match=False,\ time=None,\ unique=False,\ verbose=False): """ This function will use FrChannels to return all LIGO data channels matching the given list of 'channels' strings, whilst exluding the 'ex_channels' strings. Using find_ifos() and find_types() in the same module (if required), the search is performed over the given ifos for each given type. Use match=True to restrict the search to find channels that exactly match the given 'channels' list (i.e. not a partial match). Use time=True to search for channels in frame types defined at the given epoch. Use unique=True to return a unique list of channels, parsed using the parse_unique_channels() function, otherwise can return multiple instance of the same name string in different types. Returns a list of dqFrameUtils.Channel instances. Examples: >>>channels = find_channels(channels='DARM',types='H1_RDS_R_L1') >>>for channel in channels: print channel.name,channel.type,channel.sampling H1:LSC-DARM_CTRL H1_RDS_R_L1 16384.0 H1:LSC-DARM_ERR H1_RDS_R_L1 16384.0 H1:LSC-DARM_CTRL_EXC_DAQ H1_RDS_R_L1 16384.0 H1:LSC-DARM_GAIN H1_RDS_R_L1 16.0 >>>channels = find_channels(channels='DARM_ERR',types=['H1_RDS_R_L1','H1_RDS_R_L3']) >>>for channel in channels: print channel.name,channel.type,channel.sampling H1:LSC-DARM_ERR H1_RDS_R_L1 16384.0 H1:LSC-DARM_ERR H1_RDS_R_L3 16384.0 >>>channels = find_channels(channels='DARM_ERR',types=['H1_RDS_R_L1','H1_RDS_R_L3'],unique=True) >>>for channel in channels: print channel.name,channel.type,channel.sampling H1:LSC-DARM_ERR H1_RDS_R_L1 16384.0 """ #== check for ldf ldf_exe='ligo_data_find' ldf_status = GetCommandOutput('which '+ldf_exe)[1] if ldf_status != 0: print >>sys.stderr, "Error: ligo_data_find not found. "+\ "Please ensure lscsoftrc is sourced" sys.exit() #== cannot work with no ifos if ifos is None: ifos = find_ifos(channels,types,ifos) if types is None: types = find_types(types) #== check list status if isinstance(channels,str): channels = [channels] if isinstance(types,str): types = [types] if isinstance(ifos,str): ifos = [ifos] found_channels=[] #== loop over each ifo for ifo in ifos: #== set ligo_data_find frame search time if time is None: time = \ str(GetCommandOutput('tconvert now -2 days')[0]).replace('\n','') if verbose: print_statement = \ "Searching "+str(len(types))+" frame types for: " if channels is not None: for channel in channels: print_statement += channel+', ' print_statement += " in ifo "+ifo else: print_statement+= "all channels, in ifo "+ifo print print_statement for type in types: count=0 #== skip empty frame types or those set for ignorance if type in ignore: continue if type == '': continue if verbose: print >>sys.stdout, " Searching "+str(type)+"...", sys.stdout.flush() #== find first frame file for type frame_cmd = ldf_exe+''' --observatory '''+ifo[0:1]+\ ''' --type='''+type+\ ''' --gps-start-time '''+str(time)+\ ''' --gps-end-time '''+str(time)+\ ''' --url-type file''' frame_out = Popen(frame_cmd,shell=True,stdout=PIPE,stderr=PIPE) frame_status = 0 frame='' for line in frame_out.stdout.readlines(): if line[0:7]=='file://': frame = line break frame_out.stdout.close() frame = frame.replace('\n','') #== if frame is found: if frame_status == 0 and frame != "": info = frame.split(' ') frame = info[-1].replace('file://localhost','') #== get channels contained in frame, grepping for input channel string channel_find_cmd = "FrChannels "+frame+" | grep "+ifo #== add grep options for each included channel if channels is not None: channel_find_cmd += " | egrep '" for channel in channels: channel_find_cmd += channel+"|" channel_find_cmd = channel_find_cmd[0:-1]+"'" #== add grep options for each excluded channel if ex_channels is not None: channel_find_cmd += " | egrep -v '" for ex_channel in ex_channels: channel_find_cmd += ex_channel+"|" channel_find_cmd = channel_find_cmd[0:-1]+"'" #== grab channels try: channel_list_out = Popen(channel_find_cmd,shell=True,stdout=PIPE) for line in channel_list_out.stdout.readlines(): data = line.replace('\n','') name,sampling = data.split(' ') #== if asked for exact match, check: if match: if name not in channels: continue #== generate structure and append to list found_channel = Channel(name,type=type,sampling=sampling) found_channels.append(found_channel) count+=1 sys.stdout.flush() channel_list_out.stdout.close() except: print " Failed to find channels for type "+type+", using the"+\ " following frame\n"+frame continue #== print channel count for data type if verbose: print >>sys.stdout, count,"channels found" if verbose: print >>sys.stdout if unique: found_channels = parse_unique_channels(found_channels) return found_channels
d6c81a47cb2c69421bd3fd8e5948cd64f1613028 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/d6c81a47cb2c69421bd3fd8e5948cd64f1613028/dqFrameUtils.py
frame_out = Popen(frame_cmd,shell=True,stdout=PIPE,stderr=PIPE)
frame_out = subprocess.Popen(frame_cmd,shell=True,stdout=subprocess.PIPE,\ stderr=subprocess.PIPE)
def find_channels(channels=None,\ types=None,\ ifos=None,\ ex_channels=None,\ ignore=[],\ match=False,\ time=None,\ unique=False,\ verbose=False): """ This function will use FrChannels to return all LIGO data channels matching the given list of 'channels' strings, whilst exluding the 'ex_channels' strings. Using find_ifos() and find_types() in the same module (if required), the search is performed over the given ifos for each given type. Use match=True to restrict the search to find channels that exactly match the given 'channels' list (i.e. not a partial match). Use time=True to search for channels in frame types defined at the given epoch. Use unique=True to return a unique list of channels, parsed using the parse_unique_channels() function, otherwise can return multiple instance of the same name string in different types. Returns a list of dqFrameUtils.Channel instances. Examples: >>>channels = find_channels(channels='DARM',types='H1_RDS_R_L1') >>>for channel in channels: print channel.name,channel.type,channel.sampling H1:LSC-DARM_CTRL H1_RDS_R_L1 16384.0 H1:LSC-DARM_ERR H1_RDS_R_L1 16384.0 H1:LSC-DARM_CTRL_EXC_DAQ H1_RDS_R_L1 16384.0 H1:LSC-DARM_GAIN H1_RDS_R_L1 16.0 >>>channels = find_channels(channels='DARM_ERR',types=['H1_RDS_R_L1','H1_RDS_R_L3']) >>>for channel in channels: print channel.name,channel.type,channel.sampling H1:LSC-DARM_ERR H1_RDS_R_L1 16384.0 H1:LSC-DARM_ERR H1_RDS_R_L3 16384.0 >>>channels = find_channels(channels='DARM_ERR',types=['H1_RDS_R_L1','H1_RDS_R_L3'],unique=True) >>>for channel in channels: print channel.name,channel.type,channel.sampling H1:LSC-DARM_ERR H1_RDS_R_L1 16384.0 """ #== check for ldf ldf_exe='ligo_data_find' ldf_status = GetCommandOutput('which '+ldf_exe)[1] if ldf_status != 0: print >>sys.stderr, "Error: ligo_data_find not found. "+\ "Please ensure lscsoftrc is sourced" sys.exit() #== cannot work with no ifos if ifos is None: ifos = find_ifos(channels,types,ifos) if types is None: types = find_types(types) #== check list status if isinstance(channels,str): channels = [channels] if isinstance(types,str): types = [types] if isinstance(ifos,str): ifos = [ifos] found_channels=[] #== loop over each ifo for ifo in ifos: #== set ligo_data_find frame search time if time is None: time = \ str(GetCommandOutput('tconvert now -2 days')[0]).replace('\n','') if verbose: print_statement = \ "Searching "+str(len(types))+" frame types for: " if channels is not None: for channel in channels: print_statement += channel+', ' print_statement += " in ifo "+ifo else: print_statement+= "all channels, in ifo "+ifo print print_statement for type in types: count=0 #== skip empty frame types or those set for ignorance if type in ignore: continue if type == '': continue if verbose: print >>sys.stdout, " Searching "+str(type)+"...", sys.stdout.flush() #== find first frame file for type frame_cmd = ldf_exe+''' --observatory '''+ifo[0:1]+\ ''' --type='''+type+\ ''' --gps-start-time '''+str(time)+\ ''' --gps-end-time '''+str(time)+\ ''' --url-type file''' frame_out = Popen(frame_cmd,shell=True,stdout=PIPE,stderr=PIPE) frame_status = 0 frame='' for line in frame_out.stdout.readlines(): if line[0:7]=='file://': frame = line break frame_out.stdout.close() frame = frame.replace('\n','') #== if frame is found: if frame_status == 0 and frame != "": info = frame.split(' ') frame = info[-1].replace('file://localhost','') #== get channels contained in frame, grepping for input channel string channel_find_cmd = "FrChannels "+frame+" | grep "+ifo #== add grep options for each included channel if channels is not None: channel_find_cmd += " | egrep '" for channel in channels: channel_find_cmd += channel+"|" channel_find_cmd = channel_find_cmd[0:-1]+"'" #== add grep options for each excluded channel if ex_channels is not None: channel_find_cmd += " | egrep -v '" for ex_channel in ex_channels: channel_find_cmd += ex_channel+"|" channel_find_cmd = channel_find_cmd[0:-1]+"'" #== grab channels try: channel_list_out = Popen(channel_find_cmd,shell=True,stdout=PIPE) for line in channel_list_out.stdout.readlines(): data = line.replace('\n','') name,sampling = data.split(' ') #== if asked for exact match, check: if match: if name not in channels: continue #== generate structure and append to list found_channel = Channel(name,type=type,sampling=sampling) found_channels.append(found_channel) count+=1 sys.stdout.flush() channel_list_out.stdout.close() except: print " Failed to find channels for type "+type+", using the"+\ " following frame\n"+frame continue #== print channel count for data type if verbose: print >>sys.stdout, count,"channels found" if verbose: print >>sys.stdout if unique: found_channels = parse_unique_channels(found_channels) return found_channels
d6c81a47cb2c69421bd3fd8e5948cd64f1613028 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/d6c81a47cb2c69421bd3fd8e5948cd64f1613028/dqFrameUtils.py
channel_list_out = Popen(channel_find_cmd,shell=True,stdout=PIPE)
channel_list_out = subprocess.Popen(channel_find_cmd,shell=True,\ stdout=subprocess.PIPE)
def find_channels(channels=None,\ types=None,\ ifos=None,\ ex_channels=None,\ ignore=[],\ match=False,\ time=None,\ unique=False,\ verbose=False): """ This function will use FrChannels to return all LIGO data channels matching the given list of 'channels' strings, whilst exluding the 'ex_channels' strings. Using find_ifos() and find_types() in the same module (if required), the search is performed over the given ifos for each given type. Use match=True to restrict the search to find channels that exactly match the given 'channels' list (i.e. not a partial match). Use time=True to search for channels in frame types defined at the given epoch. Use unique=True to return a unique list of channels, parsed using the parse_unique_channels() function, otherwise can return multiple instance of the same name string in different types. Returns a list of dqFrameUtils.Channel instances. Examples: >>>channels = find_channels(channels='DARM',types='H1_RDS_R_L1') >>>for channel in channels: print channel.name,channel.type,channel.sampling H1:LSC-DARM_CTRL H1_RDS_R_L1 16384.0 H1:LSC-DARM_ERR H1_RDS_R_L1 16384.0 H1:LSC-DARM_CTRL_EXC_DAQ H1_RDS_R_L1 16384.0 H1:LSC-DARM_GAIN H1_RDS_R_L1 16.0 >>>channels = find_channels(channels='DARM_ERR',types=['H1_RDS_R_L1','H1_RDS_R_L3']) >>>for channel in channels: print channel.name,channel.type,channel.sampling H1:LSC-DARM_ERR H1_RDS_R_L1 16384.0 H1:LSC-DARM_ERR H1_RDS_R_L3 16384.0 >>>channels = find_channels(channels='DARM_ERR',types=['H1_RDS_R_L1','H1_RDS_R_L3'],unique=True) >>>for channel in channels: print channel.name,channel.type,channel.sampling H1:LSC-DARM_ERR H1_RDS_R_L1 16384.0 """ #== check for ldf ldf_exe='ligo_data_find' ldf_status = GetCommandOutput('which '+ldf_exe)[1] if ldf_status != 0: print >>sys.stderr, "Error: ligo_data_find not found. "+\ "Please ensure lscsoftrc is sourced" sys.exit() #== cannot work with no ifos if ifos is None: ifos = find_ifos(channels,types,ifos) if types is None: types = find_types(types) #== check list status if isinstance(channels,str): channels = [channels] if isinstance(types,str): types = [types] if isinstance(ifos,str): ifos = [ifos] found_channels=[] #== loop over each ifo for ifo in ifos: #== set ligo_data_find frame search time if time is None: time = \ str(GetCommandOutput('tconvert now -2 days')[0]).replace('\n','') if verbose: print_statement = \ "Searching "+str(len(types))+" frame types for: " if channels is not None: for channel in channels: print_statement += channel+', ' print_statement += " in ifo "+ifo else: print_statement+= "all channels, in ifo "+ifo print print_statement for type in types: count=0 #== skip empty frame types or those set for ignorance if type in ignore: continue if type == '': continue if verbose: print >>sys.stdout, " Searching "+str(type)+"...", sys.stdout.flush() #== find first frame file for type frame_cmd = ldf_exe+''' --observatory '''+ifo[0:1]+\ ''' --type='''+type+\ ''' --gps-start-time '''+str(time)+\ ''' --gps-end-time '''+str(time)+\ ''' --url-type file''' frame_out = Popen(frame_cmd,shell=True,stdout=PIPE,stderr=PIPE) frame_status = 0 frame='' for line in frame_out.stdout.readlines(): if line[0:7]=='file://': frame = line break frame_out.stdout.close() frame = frame.replace('\n','') #== if frame is found: if frame_status == 0 and frame != "": info = frame.split(' ') frame = info[-1].replace('file://localhost','') #== get channels contained in frame, grepping for input channel string channel_find_cmd = "FrChannels "+frame+" | grep "+ifo #== add grep options for each included channel if channels is not None: channel_find_cmd += " | egrep '" for channel in channels: channel_find_cmd += channel+"|" channel_find_cmd = channel_find_cmd[0:-1]+"'" #== add grep options for each excluded channel if ex_channels is not None: channel_find_cmd += " | egrep -v '" for ex_channel in ex_channels: channel_find_cmd += ex_channel+"|" channel_find_cmd = channel_find_cmd[0:-1]+"'" #== grab channels try: channel_list_out = Popen(channel_find_cmd,shell=True,stdout=PIPE) for line in channel_list_out.stdout.readlines(): data = line.replace('\n','') name,sampling = data.split(' ') #== if asked for exact match, check: if match: if name not in channels: continue #== generate structure and append to list found_channel = Channel(name,type=type,sampling=sampling) found_channels.append(found_channel) count+=1 sys.stdout.flush() channel_list_out.stdout.close() except: print " Failed to find channels for type "+type+", using the"+\ " following frame\n"+frame continue #== print channel count for data type if verbose: print >>sys.stdout, count,"channels found" if verbose: print >>sys.stdout if unique: found_channels = parse_unique_channels(found_channels) return found_channels
d6c81a47cb2c69421bd3fd8e5948cd64f1613028 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/d6c81a47cb2c69421bd3fd8e5948cd64f1613028/dqFrameUtils.py
return map(lambda (a,b,c): detector_threshold(min_threshold,ifos,a,b,c,sensitivities), zip(RA,dec,gps_time))
return map(lambda (a,b,c): detector_thresholds(min_threshold,ifos,a,b,c,sensitivities), zip(RA,dec,gps_time))
def detector_thresholds(min_threshold, ifos, RA, dec, gps_time, sensitivities=None): """ Return a dictionary of sensitivity thresholds for each detector, based on a minimum threshold of min_threshold in the least sensitive one, for a source at position (RA,dec) specified in radians at time gps_time. Specifying a dictionary of sensitivities allows one to weight also by the relative SNR of a reference system in each detector to handle different noise curves. """ # Recurse if multiple RA, dec and GPS times are specified if type(gps_time)!=float or type(RA)!=float or type(dec)!=float: assert len(gps_time)==len(RA),len(gps_time)==len(dec) return map(lambda (a,b,c): detector_threshold(min_threshold,ifos,a,b,c,sensitivities), zip(RA,dec,gps_time)) from pylal import antenna # Sensitivies specifies relative SNRs of a reference signal (BNS) if sensitivities is None: sensitivities={} for det in ifos: sensitivies[det]=1.0 else: assert len(ifos)==len(sensitivites) # Normalise sensitivities minsens=min(sensitivities.values()) for det in ifos: sensitivities[det]/=minsens resps={} threshs={} # Make a dictionary of average responses for det in ifos: resps[det]=antenna.response(gps_time,RA,dec,0,0,'radians',det)[2] worst_resp=min(resps.values()) # Assuming that lowest threshold is in worst detector, return thresholds for det in ifos: threshs[det]=min_threshold*(resps[det]/worst_resp)*sensitivities[det] return threshs
7e46b391c8421215f6aee0f9877e5eba4bda2eef /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/7e46b391c8421215f6aee0f9877e5eba4bda2eef/grbsummary.py
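The bug here was a recursive call to a misspelled detector_threshold from inside the vectorized branch (the context carries a second latent typo of the same kind, `sensitivies[det]=1.0`). The recursion pattern itself, rewritten without the Python-2-only `lambda (a,b,c):` tuple parameter and with the antenna-response computation stubbed out, looks like this sketch:

    def detector_thresholds(min_threshold, ifos, RA, dec, gps_time):
        """Recurse over parallel sequences of (RA, dec, gps_time),
        applying the scalar case to each triple."""
        if not isinstance(gps_time, float):
            assert len(gps_time) == len(RA) == len(dec)
            return [detector_thresholds(min_threshold, ifos, a, b, c)
                    for a, b, c in zip(RA, dec, gps_time)]
        # Stub for the scalar case: the real code scales min_threshold by
        # antenna response and per-detector sensitivity.
        return dict((det, min_threshold) for det in ifos)

    print(detector_thresholds(5.5, ['H1', 'L1'], [0.1], [0.2], [1000000000.0]))
    # [{'H1': 5.5, 'L1': 5.5}]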
if len(indexList) > 1:
if len(indexList) >= 1:
def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # # Check to see if wiki file with name already exists # maxCount=0 while os.path.exists(wikiFilename) and maxCount < 10: sys.stdout.write("File %s already exists.\n"%\ os.path.split(wikiFilename)[1]) wikiFilename=wikiFilename+".wiki" maxCount=maxCount+1 # #Create the wikipage object etc # wikiPage=wiki(wikiFilename) # # Create top two trigger params tables # cTable=wikiPage.wikiTable(2,9) cTable.data=[ ["Trigger Type", "Rank", "FAR", "SNR", "IFOS(Coinc)", "Instruments(Active)", "Coincidence Time (s)", "Total Mass (mSol)", "Chirp Mass (mSol)" ], ["%s"%(wikiCoinc.type), "%s"%(wikiCoinc.rank), "%s"%(wikiCoinc.far), "%s"%(wikiCoinc.snr), "%s"%(wikiCoinc.ifos), "%s"%(wikiCoinc.instruments), "%s"%(wikiCoinc.time), "%s"%(wikiCoinc.mass), "%s"%(wikiCoinc.mchirp) ] ] pTable=wikiPage.wikiTable(len(wikiCoinc.sngls_in_coinc())+1,7) pTable.data[0]=[ "IFO", "GPS Time(s)", "SNR", "CHISQR", "Mass 1", "Mass 2", "Chirp Mass" ] for row,cSngl in enumerate(wikiCoinc.sngls_in_coinc()): pTable.data[row+1]=[ "%s"%(cSngl.ifo), "%s"%(cSngl.time), "%s"%(cSngl.snr), "%s"%(cSngl.chisqr), "%s"%(cSngl.mass1), "%s"%(cSngl.mass2), "%s"%(cSngl.mchirp) ] #Write the tables into the Wiki object wikiPage.putText("Coincident Trigger Event Information: %s\n"\ %(stfu_pipe.gpsTimeToReadableDate(wikiCoinc.time))) wikiPage.insertTable(cTable) wikiPage.putText("Corresponding Coincident Single IFO Trigger Information\n") wikiPage.insertTable(pTable) #Generate a table of contents to appear after candidate params table wikiPage.tableOfContents(3) #Begin including each checklist item as section with subsections wikiPage.section("Follow-up Checklist") #Put each checklist item wikiPage.subsection("Checklist Summary") wikiPage.subsubsection("Does this candidate pass this checklist?") wikiPage.subsubsection("Answer") wikiPage.subsubsection("Relevant Information and Comments") wikiPage.insertHR() # #First real checklist item wikiPage.subsection("#0 False Alarm Probability") wikiPage.subsubsection("Question") wikiPage.putText("What is the false alarm rate associated with this candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") farTable=wikiPage.wikiTable(2,1) farTable.setTableStyle("background-color: yellow; text-align center;") farTable.data[0][0]="False Alarm Rate" farTable.data[1][0]="%s"%(wikiCoinc.far) wikiPage.insertTable(farTable) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#1 Data Quality Flags") wikiPage.subsubsection("Question") wikiPage.putText("Can the data quality flags coincident with this candidate be safely disregarded?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPath=os.path.split(wikiFilename)[0] dqFileList=wikiFileFinder.get_findFlags() if len(dqFileList) != 1: sys.stdout.write("Warning: DQ flags data product import problem.\n") print "Found %i files."%len(dqFileList) for mf in dqFileList: print mf for myFile in dqFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#2 Veto Investigations") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate survive the veto investigations performed at its time?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") vetoFileList=wikiFileFinder.get_findVetos() if len(vetoFileList) != 1: sys.stdout.write("Warning: Veto flags data product import problem.\n") for myFile in vetoFileList:print myFile for myFile in vetoFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#3 IFO Status") wikiPage.subsubsection("Question") wikiPage.putText("Are the interferometers operating normally with a reasonable level of sensitivity around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") #Add link to Daily Stats if wikiCoinc.time <= endOfS5: statsLink=wikiPage.makeExternalLink("http://blue.ligo-wa.caltech.edu/scirun/S5/DailyStatistics/",\ "S5 Daily Stats Page") else: statsLink="This should be a link to S6 Daily Stats!\n" wikiPage.putText(statsLink) #Link figures of merit #Get link for all members of wikiCoinc wikiPage.putText("Figures of Merit\n") if wikiCoinc.time > endOfS5: fomLinks=dict() elems=0 for wikiSngl in wikiCoinc.sngls: if not(wikiSngl.ifo.upper().rstrip().lstrip() == 'V1'): fomLinks[wikiSngl.ifo]=stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo) elems=elems+len(fomLinks[wikiSngl.ifo]) else: for myLabel,myLink,myThumb in stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo): wikiPage.putText("%s\n"%(wikiPage.makeExternalLink(myLink,myLabel))) if elems%3 != 0: sys.stdout.write("Generation of FOM links seems incomplete!\n") cols=4 rows=(elems/3)+1 fTable=wikiPage.wikiTable(rows,cols) fTable.data[0]=["IFO,Shift","FOM1","FOM2","FOM3"] currentIndex=0 for myIFOKey in fomLinks.keys(): for label,link,thumb in fomLinks[myIFOKey]: myRow=currentIndex/int(3)+1 myCol=currentIndex%int(3)+1 fTable.data[myRow][0]=label thumbURL=thumb fTable.data[myRow][myCol]="%s"%(wikiPage.linkedRemoteImage(thumb,link)) currentIndex=currentIndex+1 wikiPage.insertTable(fTable) else: wikiPage.putText("Can not automatically fetch S5 FOM links.") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#4 Candidate Appearance") wikiPage.subsubsection("Question") wikiPage.putText("Do the Qscan figures show what we would expect for a gravitational-wave event?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() for sngl in wikiCoinc.sngls: frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'hoft') indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*/%s/*/%s/*index.html"%(frametype,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened.png"\ %(sngl.time,channelName)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened?thumb.png"\ %(sngl.time,channelName)) # #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("GW data channel scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >= 1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >= 1 if enoughImage and enoughIndex: wikiPage.insertQscanTable(imageDict,\ thumbDict,\ indexDict) else: sys.stdout.write("Warning: Candidate appearance plot import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#5 Seismic Plots") wikiPage.subsubsection("Question") wikiPage.putText("Is the seismic activity insignificant around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict=dict() indexDict=dict() thumbDict=dict() for sngl in wikiCoinc.sngls: indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*index.html"%(sngl.ifo,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*SEIS?_512.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*SEIS?_512.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Seismic scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertQscanTable(imageDict,\ thumbDict,\ indexDict) else: sys.stdout.write("Warning: Seismic plots product import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#6 Other environmental causes") wikiPage.subsubsection("Question") wikiPage.putText("Were the environmental disturbances (other than seismic) insignificant at the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict=dict() indexDict=dict() thumbDict=dict() #Select only PEM channels for sngl in wikiCoinc.sngls: imageDict[sngl.ifo]=list() indexDict[sngl.ifo]=list() thumbDict[sngl.ifo]=list() for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if myFile.upper().__contains__("PEM"): imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if myFile.upper().__contains__("PEM"): thumbDict[sngl.ifo].append(myFile) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in
indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] if len(imageDict[sngl.ifo]) < 1: wikiPage.putText("PEM scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertQscanTable(imageDict,\ thumbDict,\ indexDict) else: sys.stdout.write("Warning: PEM plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#7 Auxiliary degree of freedom") wikiPage.subsubsection("Question") wikiPage.putText("Were the auxiliary channel transients coincident with the candidate insignificant?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() #Select only AUX channels for sngl in wikiCoinc.sngls: imageDict[sngl.ifo]=list() indexDict[sngl.ifo]=list() thumbDict[sngl.ifo]=list() for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if not myFile.upper().__contains__("PEM"): imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if not myFile.upper().__contains__("PEM"): thumbDict[sngl.ifo].append(myFile) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Other scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertQscanTable(imageDict,\ thumbDict,\ indexDict) else: sys.stdout.write("Warning: AUX plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#8 Electronic Log Book") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the comments posted by the sci-mons or the operators in the e-log?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiLinkLHOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"H1"), "Hanford eLog") wikiLinkLLOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"L1"), "Livingston eLog") wikiPage.putText("%s\n\n%s\n\n"%(wikiLinkLHOlog,wikiLinkLLOlog)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#9 Glitch Report") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the weekly glitch report?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") 
wikiPage.putText("Plots and pipeline data go here!") if int(wikiCoinc.time) >= endOfS5: wikiLinkGlitch=wikiPage.makeExternalLink( "https://www.lsc-group.phys.uwm.edu/twiki/bin/view/DetChar/GlitchStudies", "Glitch Reports for S6" ) else: wikiLinkGlitch=wikiPage.makeExternalLink( "http://www.lsc-group.phys.uwm.edu/glitch/investigations/s5index.html#shift", "Glitch Reports for S5" ) wikiPage.putText("%s\n"%(wikiLinkGlitch)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#10 Snr versus time") wikiPage.subsubsection("Question") wikiPage.putText("Is this trigger significant in a SNR versus time plot of all triggers in its analysis chunk?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#11 Parameters of the candidate") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate have a high likelihood of being a gravitational-wave according to its parameters?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Effective Distance Ratio Test\n") effDList=wikiFileFinder.get_effDRatio() if len(effDList) != 1: sys.stdout.write("Warning: Effective Distance Test import problem.\n") for myFile in effDList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#12 Snr and Chisq") wikiPage.subsubsection("Question") wikiPage.putText("Are the SNR and CHISQ time series consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") # #Put plots SNR and Chi sqr # indexList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*.html") thumbList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_snr-*thumb.png") thumbList.extend(fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_chisq-*thumb.png")) thumbList.sort() indexList=[file2URL.convert(x) for x in indexList] thumbList=[file2URL.convert(x) for x in thumbList] #Two thumb types possible "_thumb.png" or ".thumb.png" imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] ifoCount=len(wikiCoinc.sngls) rowLabel={"SNR":1,"CHISQ":2} rowCount=len(rowLabel) colCount=ifoCount if len(indexList) >= 1: snrTable=wikiPage.wikiTable(rowCount+1,colCount+1) for i,sngl in enumerate(wikiCoinc.sngls): myIndex="" for indexFile in indexList: if indexFile.__contains__("_pipe_%s_FOLLOWUP_"%sngl.ifo): myIndex=indexFile if myIndex=="": snrTable.data[0][i+1]=" %s "%sngl.ifo else: snrTable.data[0][i+1]=wikiPage.makeExternalLink(myIndex,sngl.ifo) for col,sngl in enumerate(wikiCoinc.sngls): for row,label in enumerate(rowLabel.keys()): snrTable.data[row+1][0]=label for k,image in enumerate(imageList): if (image.__contains__("_%s-"%label.lower()) \ and image.__contains__("pipe_%s_FOLLOWUP"%sngl.ifo)): snrTable.data[row+1][col+1]=" %s "%(thumbList[k]) wikiPage.insertTable(snrTable) else: sys.stdout.write("Warning: SNR and CHISQ plots not found.\n") wikiPage.putText("SNR and CHISQ plots not found.\n") wikiPage.subsubsection("Investigator Comments") 
wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#13 Template bank veto") wikiPage.subsubsection("Question") wikiPage.putText("Is the bank veto value consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#14 Coherent studies") wikiPage.subsubsection("Question") wikiPage.putText("Are the triggers found in multiple interferometers coherent with each other?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") indexList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),"*.html") if len(indexList) > 1: myIndex=file2URL.convert(indexList[0]) wikiPage.putText(wikiPage.makeExternalLink(myIndex,\ "%s Coherence Study Results"%(wikiCoinc.ifos))) thumbList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),\ "PLOT_CHIA_%s_snr-squared*thumb.png"%(wikiCoinc.time)) imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] rowCount=len(imageList) colCount=1 cohSnrTimeTable=wikiPage.wikiTable(rowCount+1,colCount) cohSnrTimeTable.data[0][0]="%s Coherent SNR Squared Times Series"%(wikiCoinc.ifos) for i,image in enumerate(imageList): cohSnrTimeTable.data[i+1][0]=wikiPage.linkedRemoteImaage(image,thumbList[i]) wikiPage.insertTable(cohSnrTimeTable) else: sys.stdout.write("Warning: Coherent plotting jobs not found.\n") wikiPage.putText("Coherent Studies plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#15 Segmentation Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in segmentation?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#16 Calibration Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in calibration that are consistent with systematic uncertainties?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #
b3163edbe9cf1f93b8808384b7cd84df9ce5fcf0 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/b3163edbe9cf1f93b8808384b7cd84df9ce5fcf0/makeCheckListWiki.py
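The Qscan bookkeeping in the function above builds three per-IFO dictionaries (images, thumbnails, index pages) and then decides whether enough products were found with the idiom [len(imageDict[key])>0 for key in imageDict.keys()].count(True) >= 1. For dict-of-list values like these, that check reduces to any(). A minimal sketch with toy stand-ins, not real pipeline output:

    # Toy stand-ins for the per-IFO product dictionaries built above.
    imageDict = {"H1": ["H1_spectrogram_whitened.png"], "L1": []}

    # Original idiom: count how many IFOs contributed at least one file.
    enoughImage = [len(imageDict[key]) > 0 for key in imageDict.keys()].count(True) >= 1

    # Equivalent for dict-of-list values, and it short-circuits: any() is
    # true as soon as one value is a non-empty list.
    assert enoughImage == any(imageDict.values())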
cohSnrTimeTable.data[i+1][0]=wikiPage.linkedRemoteImaage(image,thumbList[i])
cohSnrTimeTable.data[i+1][0]=wikiPage.linkedRemoteImage(image,thumbList[i])
b3163edbe9cf1f93b8808384b7cd84df9ce5fcf0 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/b3163edbe9cf1f93b8808384b7cd84df9ce5fcf0/makeCheckListWiki.py
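The hunk above is a one-letter rename: linkedRemoteImaage only raises an AttributeError when the coherent-studies branch actually executes, which is why the typo survived. A cheap guard is a smoke test asserting that every method name a page builder calls really exists on the class. The WikiPage class below is a hypothetical stand-in for the real wiki object, kept only to make the check runnable:

    # Hypothetical stand-in for the real wiki page class.
    class WikiPage(object):
        def linkedRemoteImage(self, image, thumb):
            # Placeholder markup; the real method emits wiki image links.
            return "%s -> %s" % (thumb, image)

    # Fail at test time, not when the rarely-taken code path finally runs.
    for name in ["linkedRemoteImage"]:
        assert hasattr(WikiPage, name), "missing method: %s" % name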
while True:
while areasize<len(np.asarray(toppoints)[:,3]):
def greedyBin2(posterior_array,par_bins,confidence_levels,par_names=None,injection=None): if par_names: par1_name,par2_name=par_names else: par1_name="Parameter 1" par2_name="Parameter 2" par1pos=posterior_array[:,0] par2pos=posterior_array[:,1] par1_bin,par2_bin=par_bins if injection: par1_injvalue,par2_injvalue=injection twoDGreedyCL={} twoDGreedyInj={} #Create 2D bin array par1pos_min=min(par1pos) par2pos_min=min(par2pos) par1pos_max=max(par1pos) par2pos_max=max(par2pos) par1pos_Nbins= int(ceil((par1pos_max - par1pos_min)/par1_bin))+1 par2pos_Nbins= int(ceil((par2pos_max - par2pos_min)/par2_bin))+1 greedyHist = np.zeros(par1pos_Nbins*par2pos_Nbins,dtype='i8') greedyPoints = np.zeros((par1pos_Nbins*par2pos_Nbins,2)) #Fill bin values par1_point=par1pos_min par2_point=par2pos_min for i in range(par2pos_Nbins): par1_point=par1pos_min for j in range(par1pos_Nbins): greedyPoints[j+par1pos_Nbins*i,0]=par1_point greedyPoints[j+par1pos_Nbins*i,1]=par2_point par1_point+=par1_bin par2_point+=par2_bin injbin=None #if injection point given find which bin its in if injection: if par1_injvalue is not None and par2_injvalue is not None: par1_binNumber=floor((par1_injvalue-par1pos_min)/par1_bin) par2_binNumber=floor((par2_injvalue-par2pos_min)/par2_bin) injbin=int(par1_binNumber+par2_binNumber*par1pos_Nbins) elif par1_injvalue is None and par2_injvalue is not None: print "Injection value not found for %s!"%par1_name elif par1_injvalue is not None and par2_injvalue is None: print "Injection value not found for %s!"%par2_name #Bin posterior samples for par1_samp,par2_samp in zip(par1pos,par2pos): par1_binNumber=floor((par1_samp-par1pos_min)/par1_bin) par2_binNumber=floor((par2_samp-par2pos_min)/par2_bin) greedyHist[par1_binNumber+par2_binNumber*par1pos_Nbins]+=1 #Now call usual confidence level function #print greedyHist,greedyPoints,injbin,sqrt(par1_bin*par2_bin),confidence_levels,len(par1pos) (injectionconfidence,toppoints,reses)=calculateConfidenceLevels(greedyHist,greedyPoints,injbin,float(sqrt(par1_bin*par2_bin)),confidence_levels,int(len(par1pos))) #Print confidence levels to file areastr='' for (frac,area) in reses: areastr+='%s,'%str(area) twoDGreedyCL[str(frac)]=area areastr=areastr.rstrip(',') if injection is not None and injectionconfidence is not None: twoDGreedyInj['confidence']=injectionconfidence #Recover area contained within injection point interval areasize=0 while True: if injectionconfidence<np.asarray(toppoints)[areasize,3]: break areasize+=1 areasize=areasize*par1_bin*par2_bin twoDGreedyInj['area']=areasize return toppoints,injectionconfidence,twoDGreedyCL,twoDGreedyInj
17fcc5ffbe973067680602c29ae70d94df6864f7 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/17fcc5ffbe973067680602c29ae70d94df6864f7/bayespputils.py
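The replacement loop condition above is a bounds fix: with while True, the scan of the confidence column toppoints[:,3] raised an IndexError whenever injectionconfidence exceeded every entry, because the break was the only exit. Bounding the loop by the array length lets areasize saturate at the full grid instead. A toy reproduction of the fixed pattern with made-up numbers:

    import numpy as np

    # Stand-in for the cumulative confidence column toppoints[:,3].
    conf = np.array([0.1, 0.4, 0.7, 0.9])
    injectionconfidence = 0.95  # larger than every entry

    areasize = 0
    while areasize < len(conf):          # old code: while True
        if injectionconfidence < conf[areasize]:
            break
        areasize += 1

    # With while True this line was never reached: conf[areasize] raised
    # IndexError first. Now the scan simply covers the whole grid.
    assert areasize == len(conf)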
tm = date.XLALGPSToUTC(date.LIGOTimeGPS(grb.time))
tm = date.XLALGPSToUTC(LIGOTimeGPS(grb.time))
coldef = create_col(coldict['nolong'])
ba16d4d51c4acdce4300378295193616b0a6adee /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/ba16d4d51c4acdce4300378295193616b0a6adee/pylal_exttrig_llutils.py
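The change above drops the date. prefix from LIGOTimeGPS, switching from an attribute lookup on the date module to a directly imported name; that is the usual fix when a module stops re-exporting a class. A generic illustration of the failure mode with a toy module (this is not the real pylal layout):

    import types

    # Toy module that no longer re-exports the class.
    date = types.ModuleType("date")

    class LIGOTimeGPS(object):  # now imported directly instead
        def __init__(self, gps):
            self.gps = gps

    try:
        date.LIGOTimeGPS(100)   # old qualified lookup now fails
    except AttributeError:
        pass

    assert LIGOTimeGPS(100).gps == 100  # the direct name keeps working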
os.path.join("bin", "OddsPostProc.py"),
def run(self): # remove the automatically generated user env scripts for script in ["pylal-user-env.sh", "pylal-user-env.csh"]: log.info("removing " + script ) try: os.unlink(os.path.join("etc", script)) except: pass
7416e5478265e1daf9ccdb3c1282d7c388da0107 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/7416e5478265e1daf9ccdb3c1282d7c388da0107/setup.py
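The removed line drops bin/OddsPostProc.py from the list of executables the package installs. In a distutils setup.py these entries live in the scripts keyword of setup(), so deleting one list element is the whole change. A cut-down sketch of the pattern; the package name and script are placeholders, not the real pylal metadata:

    from distutils.core import setup
    import os

    setup(
        name = "examplepkg",  # placeholder name
        version = "0.0",
        # Deleting an entry here stops that executable being installed.
        scripts = [
            os.path.join("bin", "some_tool.py"),
        ],
    )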
raise NotImplemented
raise NotImplementedError
def add_content(self, data, label="_nolabel_"): """ Stub. Replace with a method that appends values or lists of values to self.data_sets and appends labels to self.data_labels. Feel free to accept complicated inputs, but try to store only the raw numbers that will enter the plot. """ raise NotImplemented
42bbef7c1d60a2ed11a378bc8ea6cb5ffffd3807 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/42bbef7c1d60a2ed11a378bc8ea6cb5ffffd3807/plotutils.py
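raise NotImplemented is a classic Python bug that this hunk (and the identical one below) fixes: NotImplemented is a singleton meant to be returned from binary operator methods, not an exception, so raising it fails with a TypeError that masks the real message about the missing override. A short demonstration:

    # Raising the NotImplemented constant is itself an error.
    try:
        raise NotImplemented
    except TypeError:
        pass  # "exceptions must derive from BaseException"

    # The intended behaviour for an abstract stub:
    try:
        raise NotImplementedError("add_content() must be overridden")
    except NotImplementedError:
        pass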
raise NotImplemented
raise NotImplementedError
def finalize(self): """ Stub. Replace with a function that creates and makes your plot pretty. Do not do I/O here. """ raise NotImplemented
42bbef7c1d60a2ed11a378bc8ea6cb5ffffd3807 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/42bbef7c1d60a2ed11a378bc8ea6cb5ffffd3807/plotutils.py
def __init__(self, tag, title="", secnum="1", pagenum="1", level=2):
def __init__(self, tag, title="", secnum="1", pagenum="1", level=2, open_by_default=False):
def __init__(self, tag, title="", secnum="1", pagenum="1", level=2): markup.page.__init__(self, mode="strict_html") self.pagenum = pagenum self.secnum = secnum self._title = title self.sections = {} self.section_ids = [] self.level = level self.tag = tag self.id = tag + self.secnum self.tables = 0 self.add('<div class="contenu"><h%d id="toggle_%s" onclick="javascript:toggle2(\'div_%s\', \'toggle_%s\');"> %s.%s %s </h%d>' % (level, self.id, secnum, self.id, pagenum, secnum, title, level) ) self.div(id="div_"+secnum , style='display:none;')
4a1883f5d99ab45340837694512c34f07e262d9d /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/4a1883f5d99ab45340837694512c34f07e262d9d/cbcwebpage.py
self.div(id="div_"+secnum , style='display:none;') def add_section(self, tag, title=""):
if open_by_default: style = 'display:block;' else: style = 'display:none;' self.div(id="div_"+secnum , style=style) def add_section(self, tag, title="", open_by_default=False):
def __init__(self, tag, title="", secnum="1", pagenum="1", level=2): markup.page.__init__(self, mode="strict_html") self.pagenum = pagenum self.secnum = secnum self._title = title self.sections = {} self.section_ids = [] self.level = level self.tag = tag self.id = tag + self.secnum self.tables = 0 self.add('<div class="contenu"><h%d id="toggle_%s" onclick="javascript:toggle2(\'div_%s\', \'toggle_%s\');"> %s.%s %s </h%d>' % (level, self.id, secnum, self.id, pagenum, secnum, title, level) ) self.div(id="div_"+secnum , style='display:none;')
4a1883f5d99ab45340837694512c34f07e262d9d /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/4a1883f5d99ab45340837694512c34f07e262d9d/cbcwebpage.py
self.sections[tag] = _section(tag, title=title, secnum=secnum, pagenum=self.pagenum, level=self.level+1)
self.sections[tag] = _section(tag, title=title, secnum=secnum, pagenum=self.pagenum, level=self.level+1, open_by_default=open_by_default)
def add_section(self, tag, title=""): secnum = "%s.%d" % (self.secnum, len(self.sections.values())+1) self.sections[tag] = _section(tag, title=title, secnum=secnum, pagenum=self.pagenum, level=self.level+1) self.section_ids.append([len(self.sections.values()), tag]) return self.sections[tag]
4a1883f5d99ab45340837694512c34f07e262d9d /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/4a1883f5d99ab45340837694512c34f07e262d9d/cbcwebpage.py
def add_section(self, tag, title="", level=2):
def add_section(self, tag, title="", level=2, open_by_default=False):
def add_section(self, tag, title="", level=2): """ """ secnum = len(self.sections.values()) + 1 self.section_ids.append([secnum, tag]) self.sections[tag] = _section(title=title, tag=tag, secnum=str(secnum), pagenum=str(self.pagenum), level=level) return self.sections[tag]
4a1883f5d99ab45340837694512c34f07e262d9d /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/4a1883f5d99ab45340837694512c34f07e262d9d/cbcwebpage.py
self.sections[tag] = _section(title=title, tag=tag, secnum=str(secnum), pagenum=str(self.pagenum), level=level)
self.sections[tag] = _section(title=title, tag=tag, secnum=str(secnum), pagenum=str(self.pagenum), level=level, open_by_default=open_by_default)
def add_section(self, tag, title="", level=2): """ """ secnum = len(self.sections.values()) + 1 self.section_ids.append([secnum, tag]) self.sections[tag] = _section(title=title, tag=tag, secnum=str(secnum), pagenum=str(self.pagenum), level=level) return self.sections[tag]
4a1883f5d99ab45340837694512c34f07e262d9d /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/4a1883f5d99ab45340837694512c34f07e262d9d/cbcwebpage.py
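Taken together, the five hunks above thread one open_by_default flag from the page-level add_section() down into _section.__init__(), where it selects the initial CSS display style of the collapsible div. A stripped-down sketch of the same plumbing; the real class builds on markup.page and emits toggle JavaScript, all of which is reduced here to a single string:

    class Section(object):
        """Toy collapsible section keeping only the display-style plumbing."""

        def __init__(self, tag, title="", open_by_default=False):
            self.sections = {}
            # The behavioural change: start open or collapsed.
            if open_by_default:
                style = "display:block;"
            else:
                style = "display:none;"
            self.html = '<div id="div_%s" style="%s">%s</div>' % (tag, style, title)

        def add_section(self, tag, title="", open_by_default=False):
            # Pass the flag straight through, as the patched add_section() does.
            self.sections[tag] = Section(tag, title, open_by_default)
            return self.sections[tag]

    page = Section("top", "Summary", open_by_default=True)
    sub = page.add_section("detail", "Details")  # collapsed unless asked
    assert "display:block;" in page.html
    assert "display:none;" in sub.html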
os.path.join("bin", "checkPerformedInjections.py")
os.path.join("bin", "pylal_cbc_select_hardware_injections")
def run(self): # remove the automatically generated user env scripts for script in ["pylal-user-env.sh", "pylal-user-env.csh"]: log.info("removing " + script ) try: os.unlink(os.path.join("etc", script)) except: pass
d4021458e10470bbc2dd3ba9c511a6154a557ccf /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/d4021458e10470bbc2dd3ba9c511a6154a557ccf/setup.py
os.path.join('bin','LSCdataFindcheck'),
def run(self): # remove the automatically generated user env scripts for script in [ 'glue-user-env.sh', 'glue-user-env.csh' ]: log.info( 'removing ' + script ) try: os.unlink(os.path.join('etc',script)) except: pass
eeaee3c5f13d6d10cc61da4e50c863008b069fcb /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/eeaee3c5f13d6d10cc61da4e50c863008b069fcb/setup.py
rate.to_moving_mean_density(binnedarray, filters.get(name, default_filter))
def finish(self, filters = {}, verbose = False): default_filter = rate.gaussian_window(21) # normalizing each array so that its sum is 1 has the # effect of making the integral of P(x) dx equal 1 after # the array is transformed to an array of densities (which # is done by dividing each bin by dx). N = len(self.zero_lag_rates) + len(self.background_rates) + len(self.injection_rates) n = 0 threads = [] for group, (name, binnedarray) in itertools.chain(zip(["zero lag"] * len(self.zero_lag_rates), self.zero_lag_rates.items()), zip(["background"] * len(self.background_rates), self.background_rates.items()), zip(["injections"] * len(self.injection_rates), self.injection_rates.items())): n += 1 if verbose: print >>sys.stderr, "\t%d / %d: %s \"%s\"" % (n, N, group, name) binnedarray.array /= numpy.sum(binnedarray.array) threads.append(threading.Thread(target = rate.to_moving_mean_density, args = (binnedarray, filters.get(name, default_filter)))) threads[-1].start() rate.to_moving_mean_density(binnedarray, filters.get(name, default_filter)) for thread in threads: thread.join() return self
378566a4728fcf70944273ef258cafeac653e076 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/378566a4728fcf70944273ef258cafeac653e076/ligolw_burca_tailor.py
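The removed line is a leftover serial call: once the smoothing moved onto worker threads, keeping the inline rate.to_moving_mean_density() call inside the launch loop would have filtered every binned array twice, once in the worker and once in the main thread. The start-then-join pattern in isolation, with a trivial stand-in for the smoothing function:

    import threading

    def smooth(data):
        # Stand-in for rate.to_moving_mean_density(); smooths in place.
        mean = sum(data) / float(len(data))
        data[:] = [mean] * len(data)

    arrays = [[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]]
    threads = []
    for a in arrays:
        t = threading.Thread(target=smooth, args=(a,))
        t.start()
        threads.append(t)

    # Join only after every worker has started; calling smooth() again
    # here, as the removed line did, would apply the filter twice.
    for t in threads:
        t.join()

    assert arrays[0] == [2.0, 2.0, 2.0]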
def load_fileobj(fileobj, gz = False, xmldoc = None, contenthandler = None):
def load_fileobj(fileobj, gz = None, xmldoc = None, contenthandler = None):
def load_fileobj(fileobj, gz = False, xmldoc = None, contenthandler = None): """ Parse the contents of the file object fileobj, and return the contents as a LIGO Light Weight document tree. The file object does not need to be seekable. The file is gzip decompressed while reading if gz is set to True. If the optional xmldoc argument is provided and not None, the parsed XML tree will be appended to that document, otherwise a new document will be created. The return value is a tuple, the first element of the tuple is the XML document and the second is a string containing the MD5 digest in hex digits of the bytestream that was parsed. Example: >>> import sys >>> xmldoc, digest = utils.load_fileobj(sys.stdin, verbose = True, gz = True) """ fileobj = MD5File(fileobj) md5obj = fileobj.md5obj if gz: fileobj = gzip.GzipFile(mode = "rb", fileobj = RewindableInputFile(fileobj)) if xmldoc is None: xmldoc = ligolw.Document() if contenthandler is None: if ContentHandler is not __orig_ContentHandler: warnings.warn("modification of glue.ligolw.utils.ContentHandler global variable for input customization is deprecated. Use contenthandler parameter of glue.ligolw.utils.load_*() functions instead", DeprecationWarning) contenthandler = ContentHandler ligolw.make_parser((contenthandler or ContentHandler)(xmldoc)).parse(fileobj) return xmldoc, md5obj.hexdigest()
a6609b0e22abdcb993241fb93f7d83d0e32abf28 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/a6609b0e22abdcb993241fb93f7d83d0e32abf28/__init__.py
does not need to be seekable. The file is gzip decompressed while reading if gz is set to True. If the optional xmldoc argument is provided and not None, the parsed XML tree will be appended to that document, otherwise a new document will be created. The return value is a tuple, the first element of the tuple is the XML document and the second is a string containing the MD5 digest in hex digits of the bytestream that was parsed.
does not need to be seekable. If the gz parameter is None (the default) then gzip compressed data will be automatically detected and decompressed, otherwise decompression can be forced on or off by setting gz to True or False respectively. If the optional xmldoc argument is provided and not None, the parsed XML tree will be appended to that document, otherwise a new document will be created. The return value is a tuple, the first element of the tuple is the XML document and the second is a string containing the MD5 digest in hex digits of the bytestream that was parsed.
def load_fileobj(fileobj, gz = False, xmldoc = None, contenthandler = None): """ Parse the contents of the file object fileobj, and return the contents as a LIGO Light Weight document tree. The file object does not need to be seekable. The file is gzip decompressed while reading if gz is set to True. If the optional xmldoc argument is provided and not None, the parsed XML tree will be appended to that document, otherwise a new document will be created. The return value is a tuple, the first element of the tuple is the XML document and the second is a string containing the MD5 digest in hex digits of the bytestream that was parsed. Example: >>> import sys >>> xmldoc, digest = utils.load_fileobj(sys.stdin, verbose = True, gz = True) """ fileobj = MD5File(fileobj) md5obj = fileobj.md5obj if gz: fileobj = gzip.GzipFile(mode = "rb", fileobj = RewindableInputFile(fileobj)) if xmldoc is None: xmldoc = ligolw.Document() if contenthandler is None: if ContentHandler is not __orig_ContentHandler: warnings.warn("modification of glue.ligolw.utils.ContentHandler global variable for input customization is deprecated. Use contenthandler parameter of glue.ligolw.utils.load_*() functions instead", DeprecationWarning) contenthandler = ContentHandler ligolw.make_parser((contenthandler or ContentHandler)(xmldoc)).parse(fileobj) return xmldoc, md5obj.hexdigest()
a6609b0e22abdcb993241fb93f7d83d0e32abf28 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/a6609b0e22abdcb993241fb93f7d83d0e32abf28/__init__.py
>>> xmldoc, digest = utils.load_fileobj(sys.stdin, verbose = True, gz = True)
>>> xmldoc, digest = utils.load_fileobj(sys.stdin)
def load_fileobj(fileobj, gz = False, xmldoc = None, contenthandler = None): """ Parse the contents of the file object fileobj, and return the contents as a LIGO Light Weight document tree. The file object does not need to be seekable. The file is gzip decompressed while reading if gz is set to True. If the optional xmldoc argument is provided and not None, the parsed XML tree will be appended to that document, otherwise a new document will be created. The return value is a tuple, the first element of the tuple is the XML document and the second is a string containing the MD5 digest in hex digits of the bytestream that was parsed. Example: >>> import sys >>> xmldoc, digest = utils.load_fileobj(sys.stdin, verbose = True, gz = True) """ fileobj = MD5File(fileobj) md5obj = fileobj.md5obj if gz: fileobj = gzip.GzipFile(mode = "rb", fileobj = RewindableInputFile(fileobj)) if xmldoc is None: xmldoc = ligolw.Document() if contenthandler is None: if ContentHandler is not __orig_ContentHandler: warnings.warn("modification of glue.ligolw.utils.ContentHandler global variable for input customization is deprecated. Use contenthandler parameter of glue.ligolw.utils.load_*() functions instead", DeprecationWarning) contenthandler = ContentHandler ligolw.make_parser((contenthandler or ContentHandler)(xmldoc)).parse(fileobj) return xmldoc, md5obj.hexdigest()
a6609b0e22abdcb993241fb93f7d83d0e32abf28 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/a6609b0e22abdcb993241fb93f7d83d0e32abf28/__init__.py
if gz: fileobj = gzip.GzipFile(mode = "rb", fileobj = RewindableInputFile(fileobj))
if gz != False: fileobj = RewindableInputFile(fileobj) magic = fileobj.read(2) fileobj.seek(0, os.SEEK_SET) if gz == True or magic == '\037\213': fileobj = gzip.GzipFile(mode = "rb", fileobj = fileobj)
def load_fileobj(fileobj, gz = False, xmldoc = None, contenthandler = None): """ Parse the contents of the file object fileobj, and return the contents as a LIGO Light Weight document tree. The file object does not need to be seekable. The file is gzip decompressed while reading if gz is set to True. If the optional xmldoc argument is provided and not None, the parsed XML tree will be appended to that document, otherwise a new document will be created. The return value is a tuple, the first element of the tuple is the XML document and the second is a string containing the MD5 digest in hex digits of the bytestream that was parsed. Example: >>> import sys >>> xmldoc, digest = utils.load_fileobj(sys.stdin, verbose = True, gz = True) """ fileobj = MD5File(fileobj) md5obj = fileobj.md5obj if gz: fileobj = gzip.GzipFile(mode = "rb", fileobj = RewindableInputFile(fileobj)) if xmldoc is None: xmldoc = ligolw.Document() if contenthandler is None: if ContentHandler is not __orig_ContentHandler: warnings.warn("modification of glue.ligolw.utils.ContentHandler global variable for input customization is deprecated. Use contenthandler parameter of glue.ligolw.utils.load_*() functions instead", DeprecationWarning) contenthandler = ContentHandler ligolw.make_parser((contenthandler or ContentHandler)(xmldoc)).parse(fileobj) return xmldoc, md5obj.hexdigest()
a6609b0e22abdcb993241fb93f7d83d0e32abf28 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/a6609b0e22abdcb993241fb93f7d83d0e32abf28/__init__.py
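The gz != False branch above is the heart of the change: with gz = None the loader sniffs the stream itself, reading back the first two bytes through a RewindableInputFile and treating the gzip magic number '\037\213' (0x1f 0x8b) as compressed input. A self-contained sketch of the same detection on a seekable buffer; the rewindable wrapper is what lets the real code do this on unseekable streams:

    import gzip
    import io

    def open_maybe_gzip(fileobj, gz=None):
        # Peek at the first two bytes and rewind, as load_fileobj() does.
        if gz is not False:
            magic = fileobj.read(2)
            fileobj.seek(0, 0)
            if gz is True or magic == b"\x1f\x8b":  # '\037\213'
                return gzip.GzipFile(mode="rb", fileobj=fileobj)
        return fileobj

    payload = b"<xml/>"
    buf = io.BytesIO()
    writer = gzip.GzipFile(mode="wb", fileobj=buf)
    writer.write(payload)
    writer.close()
    buf.seek(0)

    assert open_maybe_gzip(buf).read() == payload                  # detected
    assert open_maybe_gzip(io.BytesIO(payload)).read() == payload  # passthrough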
def load_filename(filename, verbose = False, gz = False, xmldoc = None, contenthandler = None):
def load_filename(filename, verbose = False, gz = None, xmldoc = None, contenthandler = None):
def load_filename(filename, verbose = False, gz = False, xmldoc = None, contenthandler = None): """ Parse the contents of the file identified by filename, and return the contents as a LIGO Light Weight document tree. Helpful verbosity messages are printed to stderr if verbose is True, and the file is gzip decompressed while reading if gz is set to True. If filename is None, then stdin is parsed. If the optional xmldoc argument is provided and not None, the parsed XML tree will be appended to that document, otherwise a new document will be created. Example: >>> from glue.ligolw import utils >>> xmldoc = utils.load_filename(name, verbose = True, gz = (name or "stdin").endswidth(".gz")) """ if verbose: print >>sys.stderr, "reading %s ..." % (filename and ("'%s'" % filename) or "stdin") if filename is not None: fileobj = file(filename) else: fileobj = sys.stdin xmldoc, hexdigest = load_fileobj(fileobj, gz = gz, xmldoc = xmldoc, contenthandler = contenthandler) if verbose: print >>sys.stderr, "md5sum: %s %s" % (hexdigest, filename or "") return xmldoc
a6609b0e22abdcb993241fb93f7d83d0e32abf28 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/a6609b0e22abdcb993241fb93f7d83d0e32abf28/__init__.py
verbosity messages are printed to stderr if verbose is True, and the file is gzip decompressed while reading if gz is set to True. If filename is None, then stdin is parsed. If the optional xmldoc argument is provided and not None, the parsed XML tree will be appended to that document, otherwise a new document will be created.
verbosity messages are printed to stderr if verbose is True. All other parameters are passed verbatim to load_fileobj(), see that function for more information.
def load_filename(filename, verbose = False, gz = False, xmldoc = None, contenthandler = None): """ Parse the contents of the file identified by filename, and return the contents as a LIGO Light Weight document tree. Helpful verbosity messages are printed to stderr if verbose is True, and the file is gzip decompressed while reading if gz is set to True. If filename is None, then stdin is parsed. If the optional xmldoc argument is provided and not None, the parsed XML tree will be appended to that document, otherwise a new document will be created. Example: >>> from glue.ligolw import utils >>> xmldoc = utils.load_filename(name, verbose = True, gz = (name or "stdin").endswidth(".gz")) """ if verbose: print >>sys.stderr, "reading %s ..." % (filename and ("'%s'" % filename) or "stdin") if filename is not None: fileobj = file(filename) else: fileobj = sys.stdin xmldoc, hexdigest = load_fileobj(fileobj, gz = gz, xmldoc = xmldoc, contenthandler = contenthandler) if verbose: print >>sys.stderr, "md5sum: %s %s" % (hexdigest, filename or "") return xmldoc
a6609b0e22abdcb993241fb93f7d83d0e32abf28 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/a6609b0e22abdcb993241fb93f7d83d0e32abf28/__init__.py
>>> xmldoc = utils.load_filename(name, verbose = True, gz = (name or "stdin").endswidth(".gz"))
>>> xmldoc = utils.load_filename(name, verbose = True)
def load_filename(filename, verbose = False, gz = False, xmldoc = None, contenthandler = None): """ Parse the contents of the file identified by filename, and return the contents as a LIGO Light Weight document tree. Helpful verbosity messages are printed to stderr if verbose is True, and the file is gzip decompressed while reading if gz is set to True. If filename is None, then stdin is parsed. If the optional xmldoc argument is provided and not None, the parsed XML tree will be appended to that document, otherwise a new document will be created. Example: >>> from glue.ligolw import utils >>> xmldoc = utils.load_filename(name, verbose = True, gz = (name or "stdin").endswidth(".gz")) """ if verbose: print >>sys.stderr, "reading %s ..." % (filename and ("'%s'" % filename) or "stdin") if filename is not None: fileobj = file(filename) else: fileobj = sys.stdin xmldoc, hexdigest = load_fileobj(fileobj, gz = gz, xmldoc = xmldoc, contenthandler = contenthandler) if verbose: print >>sys.stderr, "md5sum: %s %s" % (hexdigest, filename or "") return xmldoc
a6609b0e22abdcb993241fb93f7d83d0e32abf28 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/a6609b0e22abdcb993241fb93f7d83d0e32abf28/__init__.py
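The removed docstring example above misspells str.endswith as "endswidth". A corrected, minimal sketch of inferring gzip compression from the filename suffix (the helper name is illustrative, not part of the glue API):

def looks_gzipped(filename):
    # str.endswith, not "endswidth" as in the old docstring; None means stdin
    return filename is not None and filename.endswith(".gz")

# usage, mirroring the removed example:
# xmldoc = utils.load_filename(name, verbose = True, gz = looks_gzipped(name))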
fileobj = file(filename)
fileobj = open(filename, "rb")
def load_filename(filename, verbose = False, gz = False, xmldoc = None, contenthandler = None): """ Parse the contents of the file identified by filename, and return the contents as a LIGO Light Weight document tree. Helpful verbosity messages are printed to stderr if verbose is True, and the file is gzip decompressed while reading if gz is set to True. If filename is None, then stdin is parsed. If the optional xmldoc argument is provided and not None, the parsed XML tree will be appended to that document, otherwise a new document will be created. Example: >>> from glue.ligolw import utils >>> xmldoc = utils.load_filename(name, verbose = True, gz = (name or "stdin").endswidth(".gz")) """ if verbose: print >>sys.stderr, "reading %s ..." % (filename and ("'%s'" % filename) or "stdin") if filename is not None: fileobj = file(filename) else: fileobj = sys.stdin xmldoc, hexdigest = load_fileobj(fileobj, gz = gz, xmldoc = xmldoc, contenthandler = contenthandler) if verbose: print >>sys.stderr, "md5sum: %s %s" % (hexdigest, filename or "") return xmldoc
a6609b0e22abdcb993241fb93f7d83d0e32abf28 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/a6609b0e22abdcb993241fb93f7d83d0e32abf28/__init__.py
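The fix above swaps the long-deprecated file() builtin for open() in binary mode. A hedged sketch of the open-or-stdin pattern that load_filename uses (helper name illustrative):

import sys

def open_or_stdin(filename):
    # binary mode keeps gzip sniffing and md5 digests byte-exact;
    # a None filename falls back to stdin, as in load_filename above
    if filename is not None:
        return open(filename, "rb")
    return sys.stdin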
def load_url(url, verbose = False, gz = False, xmldoc = None, contenthandler = None):
def load_url(url, verbose = False, gz = None, xmldoc = None, contenthandler = None):
def load_url(url, verbose = False, gz = False, xmldoc = None, contenthandler = None): """ This function has the same behaviour as load_filename() but accepts a URL instead of a filename. Any source from which Python's urllib2 library can read data is acceptable. stdin is parsed if the URL is None. If the optional xmldoc argument is provided and is not None, the parsed XML tree will be appended to that document, otherwise a new document will be created. Example: >>> from glue.ligolw import utils >>> xmldoc = utils.load_url("file://localhost/tmp/data.xml") """ if verbose: print >>sys.stderr, "reading %s ..." % (url and ("'%s'" % url) or "stdin") if url is not None: scheme, host, path, nul, nul, nul = urlparse.urlparse(url) if scheme.lower() in ("", "file") and host.lower() in ("", "localhost"): fileobj = file(path) else: fileobj = urllib2.urlopen(url) else: fileobj = sys.stdin xmldoc, hexdigest = load_fileobj(fileobj, gz = gz, xmldoc = xmldoc, contenthandler = contenthandler) if verbose: print >>sys.stderr, "md5sum: %s %s" % (hexdigest, url or "") return xmldoc
a6609b0e22abdcb993241fb93f7d83d0e32abf28 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/a6609b0e22abdcb993241fb93f7d83d0e32abf28/__init__.py
the URL is None. If the optional xmldoc argument is provided and is not None, the parsed XML tree will be appended to that document, otherwise a new document will be created.
the URL is None.
def load_url(url, verbose = False, gz = False, xmldoc = None, contenthandler = None): """ This function has the same behaviour as load_filename() but accepts a URL instead of a filename. Any source from which Python's urllib2 library can read data is acceptable. stdin is parsed if the URL is None. If the optional xmldoc argument is provided and is not None, the parsed XML tree will be appended to that document, otherwise a new document will be created. Example: >>> from glue.ligolw import utils >>> xmldoc = utils.load_url("file://localhost/tmp/data.xml") """ if verbose: print >>sys.stderr, "reading %s ..." % (url and ("'%s'" % url) or "stdin") if url is not None: scheme, host, path, nul, nul, nul = urlparse.urlparse(url) if scheme.lower() in ("", "file") and host.lower() in ("", "localhost"): fileobj = file(path) else: fileobj = urllib2.urlopen(url) else: fileobj = sys.stdin xmldoc, hexdigest = load_fileobj(fileobj, gz = gz, xmldoc = xmldoc, contenthandler = contenthandler) if verbose: print >>sys.stderr, "md5sum: %s %s" % (hexdigest, url or "") return xmldoc
a6609b0e22abdcb993241fb93f7d83d0e32abf28 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/a6609b0e22abdcb993241fb93f7d83d0e32abf28/__init__.py
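load_url in the context above dispatches on the URL scheme: empty or file:// URLs on localhost are opened as local paths, everything else goes through urllib2. A minimal standalone Python 2 sketch of that dispatch, not the glue implementation itself:

import sys
import urlparse
import urllib2

def open_url(url):
    # None means stdin; local file URLs skip the urllib2 round trip
    if url is None:
        return sys.stdin
    scheme, host, path = urlparse.urlparse(url)[:3]
    if scheme.lower() in ("", "file") and host.lower() in ("", "localhost"):
        return open(path, "rb")
    return urllib2.urlopen(url)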
cp.add_section("condor-max-jobs","remoteScan_FG_RDS.sh_FG_RDS_full_data","30") cp.add_section("condor-max-jobs","remoteScan_FG_SEIS_RDS.sh_FG_SEIS_RDS_full_data","30")
cp.set("condor-max-jobs","remoteScan_FG_RDS.sh_FG_RDS_full_data","30") cp.set("condor-max-jobs","remoteScan_FG_SEIS_RDS.sh_FG_SEIS_RDS_full_data","30")
def __init__(self, configfile=None): cp = ConfigParser.ConfigParser() self.cp = cp self.time_now = "_".join([str(i) for i in time_method.gmtime()[0:6]]) self.ini_file=self.time_now + ".ini" home_base = home_dirs() # CONDOR SECTION NEEDED BY THINGS IN INSPIRAL.PY cp.add_section("condor") cp.set("condor","datafind",self.which("ligo_data_find")) cp.set("condor","inspiral",self.which("lalapps_inspiral")) cp.set("condor","chia", self.which("lalapps_coherent_inspiral")) cp.set("condor","universe","standard") # SECTIONS TO SHUT UP WARNINGS cp.add_section("inspiral") cp.add_section("data") # DATAFIND SECTION cp.add_section("datafind")
052ec25d80b675a95dc5fd874cc352cf2aa11f12 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/052ec25d80b675a95dc5fd874cc352cf2aa11f12/stfu_pipe.py
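The change above reflects the ConfigParser API: add_section() takes only the section name, and values must be stored with set(section, option, value). A minimal sketch using the section and option names from the row above:

import ConfigParser

cp = ConfigParser.ConfigParser()
cp.add_section("condor-max-jobs")
# add_section() cannot take an option/value pair; set() stores them
cp.set("condor-max-jobs", "remoteScan_FG_RDS.sh_FG_RDS_full_data", "30")
cp.set("condor-max-jobs", "remoteScan_FG_SEIS_RDS.sh_FG_SEIS_RDS_full_data", "30")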
def figure_out_site(ifo): siteDico = {"H1":"LHO","H2":"LHO","L1":"LLO","V1":"Virgo"} if ifo in siteDico: return siteDico[ifo] else: print >> sys.stderr, "ifo " + ifo + " is not defined in siteDico dictionary" sys.exit(1)
def figure_out_site(ifo): siteDico = {"H1":"LHO","H2":"LHO","L1":"LLO","V1":"Virgo"} if ifo in siteDico: return siteDico[ifo] else: print >> sys.stderr, "ifo " + ifo + " is not defined in siteDico dictionary" sys.exit(1)
ec40ee923d9affd6888443d0124fc88d8f5c424b /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/ec40ee923d9affd6888443d0124fc88d8f5c424b/stfu_pipe.py
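figure_out_site above is a plain dictionary dispatch. An equivalent sketch that raises instead of exiting, which may suit library code better (a variant, not the stfu_pipe implementation):

SITE_BY_IFO = {"H1": "LHO", "H2": "LHO", "L1": "LLO", "V1": "Virgo"}

def figure_out_site(ifo):
    # an unknown ifo raises KeyError naming the offending key,
    # instead of printing to stderr and calling sys.exit(1)
    return SITE_BY_IFO[ifo]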
preString = "omega/" + science_run(time).upper() + "/background/" + figure_out_site(ifo)
preString = "omega/" + science_run(time).upper() + "/background"
def __init__(self, dag, job, cp, opts, time, ifo, frame_cache, p_nodes=[], type="ht", variety="fg"): """ """ pipeline.CondorDAGNode.__init__(self,job)
ec40ee923d9affd6888443d0124fc88d8f5c424b /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/ec40ee923d9affd6888443d0124fc88d8f5c424b/stfu_pipe.py
preString = "omega/" + science_run(time).upper() + "/background/" + figure_out_site(ifo)
preString = "omega/" + science_run(time).upper() + "/background"
def __init__(self, dag, job, cp, opts, time, ifo, p_nodes=[], type="ht", variety="fg"):
ec40ee923d9affd6888443d0124fc88d8f5c424b /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/ec40ee923d9affd6888443d0124fc88d8f5c424b/stfu_pipe.py
return re.sub(r"([+-]?[.0-9]+)[Ee]?([+-]?[0-9]+)", r"\1 \\times 10^{\2}", s)
m, e = floatpattern.match(s).groups() return r"%s \\times 10^{%d}" % (m, int(e))
def latexnumber(s): """ Convert a string of the form "d.dddde-dd" to "d.dddd \times 10^{-dd}" """ return re.sub(r"([+-]?[.0-9]+)[Ee]?([+-]?[0-9]+)", r"\1 \\times 10^{\2}", s)
834cd2d5dfbacb521f80a99119fe428908027602 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/834cd2d5dfbacb521f80a99119fe428908027602/SnglBurstUtils.py
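Note the raw-string subtlety in the replacement above: r"\\times" passed through %-formatting yields two literal backslashes, whereas the same sequence in a re.sub replacement template collapses to one; writing the output directly wants r"\times". A hedged sketch with an assumed floatpattern (the row uses it without showing its definition):

import re

# assumed definition, matching strings like "1.234e-05"
floatpattern = re.compile(r"([+-]?[.0-9]+)[Ee]([+-]?[0-9]+)")

def latexnumber(s):
    # "1.234e-05" -> "1.234 \times 10^{-5}"
    m, e = floatpattern.match(s).groups()
    return r"%s \times 10^{%d}" % (m, int(e))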
signal during the operation
signal during the operation. Example: >>> try: ... put_connection_filename(filename, working_filename, verbose = True) ... except IOTrappedSignal, e: ... os.kill(os.getpid(), e.signum) ... This example re-transmits the most-recently received signal back to itself following completion of the function call, if a signal was trapped while the function ran.
def set_temp_store_directory(connection, temp_store_directory, verbose = False): """ Sets the temp_store_directory parameter in sqlite. """ if verbose: print >>sys.stderr, "setting the temp_store_directory to %s ..." % temp_store_directory, cursor = connection.cursor() cursor.execute("PRAGMA temp_store_directory = '%s'" % temp_store_directory) cursor.close() if verbose: print >>sys.stderr, "done"
7f8dd863143d760693b9186ed756a001d2756557 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/7f8dd863143d760693b9186ed756a001d2756557/dbtables.py
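set_temp_store_directory above issues a bare PRAGMA through a cursor. A standalone sketch (temp_store_directory is a real SQLite pragma, though deprecated in newer SQLite builds, where it may be ignored):

import sqlite3

connection = sqlite3.connect(":memory:")
cursor = connection.cursor()
# point SQLite's temporary tables at a scratch directory with more room
cursor.execute("PRAGMA temp_store_directory = '%s'" % "/tmp")
cursor.close()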
file(working_filename, "w")
file(working_filename, "w").close()
def newsigterm(signum, frame): global __llwapp_write_filename_got_sig __llwapp_write_filename_got_sig.append(signum)
7f8dd863143d760693b9186ed756a001d2756557 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/7f8dd863143d760693b9186ed756a001d2756557/dbtables.py
raise IOTrappedSignal(__llwapp_write_filename_got_sig.pop())
raise IOTrappedSignal(__llwapp_write_filename_got_sig.pop())
def newsigterm(signum, frame): global __llwapp_write_filename_got_sig __llwapp_write_filename_got_sig.append(signum)
7f8dd863143d760693b9186ed756a001d2756557 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/7f8dd863143d760693b9186ed756a001d2756557/dbtables.py
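The newsigterm handler above queues SIGTERM instead of dying mid-write. A sketch of installing and restoring such a handler around a critical section (names illustrative, not the dbtables implementation):

import os
import signal

def run_deferring_sigterm(critical_section):
    trapped = []
    def newsigterm(signum, frame):
        # record the signal; do not interrupt the write in progress
        trapped.append(signum)
    oldhandler = signal.signal(signal.SIGTERM, newsigterm)
    try:
        critical_section()
    finally:
        signal.signal(signal.SIGTERM, oldhandler)
    if trapped:
        # re-deliver the deferred signal to ourselves, as in the
        # IOTrappedSignal example above
        os.kill(os.getpid(), trapped.pop())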
background_livetime[on_instruments].setdfault(key, 0)
background_livetime[on_instruments].setdefault(key, 0)
def background_livetime_ring_by_slide(connection, live_time_program, seglists, veto_segments, verbose = False): background_livetime = {} instruments = frozenset(seglists.keys()) offset_vectors = db_thinca_rings.get_background_offset_vectors(connection) # first work out time slide live time for on_instruments, livetimes in db_thinca_rings.get_thinca_livetimes(db_thinca_rings.get_thinca_rings_by_available_instruments(connection, program_name = live_time_program), veto_segments, offset_vectors, verbose = verbose).items(): on_instruments = frozenset(on_instruments)#lsctables.ifos_from_instrument_set(on_instruments) for offset, lt in zip(offset_vectors,livetimes): background_livetime.setdefault(on_instruments,{}) key = frozenset(offset.items()) background_livetime[on_instruments].setdfault(key, 0) background_livetime[on_instruments][key] += lt return background_livetime
ce3500bebf02864a1a9953ea5df886bbd259572d /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/ce3500bebf02864a1a9953ea5df886bbd259572d/farutils.py
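The one-character fix above (setdfault -> setdefault) restores the usual accumulate-into-a-nested-dict idiom. A toy sketch with made-up instrument/slide keys:

background_livetime = {}
for on_instruments, key, lt in [("H1L1", "slide0", 10), ("H1L1", "slide0", 5)]:
    background_livetime.setdefault(on_instruments, {})
    # setdefault inserts the 0 only when key is new; then accumulate
    background_livetime[on_instruments].setdefault(key, 0)
    background_livetime[on_instruments][key] += lt
# background_livetime == {"H1L1": {"slide0": 15}}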
for offset_vector in self.offset_vectors:
for offset_vector in cafepacker.offset_vectors:
def split_bins(cafepacker, extentlimit): """ Split bins of stored in CafePacker until each bin has an extent no longer than extentlimit. """ # # loop overall the bins in cafepacker.bins. we pop items out of # cafepacker.bins and append new ones to the end so need a while loop # checking the extent of each bin in cafepacker.bins until all bins are # done being split # idx = 0 while idx < len(cafepacker.bins): if abs(cafepacker.bins[idx].extent) <= extentlimit: # # bin doesn't need splitting so move to next # idx += 1 continue # # split this bin so pop it out of the list # bigbin = cafepacker.bins.pop(idx) # # calculate the central time of the union of all the input # files in the bin # splittime = lsctables.LIGOTimeGPS(bigbin.extent[0] + (bigbin.extent[1] - bigbin.extent[0])/2) # # split the segmentlistdict at this time # splitseglistdict = segments.segmentlistdict() for key in bigbin.size.keys(): splitseglistdict[key] = segments.segmentlist([segments.segment(-segments.infinity(),splittime)]) # # create bins for the first and second halves # bin1 = LALCacheBin() bin1.size = bigbin.size & splitseglistdict bin1.extent = bigbin.extent & splitseglistdict.values()[0][0] bin2 = LALCacheBin() bin2.size = bigbin.size & ~splitseglistdict bin2.extent = bigbin.extent & (~splitseglistdict.values()[0])[0] # # remove unused keys from the smaller bins' segmentlistdicts # newsize = segments.segmentlistdict() for key in bin1.size.keys(): if len(bin1.size[key]): newsize[key] = bin1.size[key] bin1.size = newsize newsize = segments.segmentlistdict() for key in bin2.size.keys(): if len(bin2.size[key]): newsize[key] = bin2.size[key] bin2.size = newsize # # find which of the objects in bigbin.objects intersect the two # smaller bins # for cache in bigbin.objects: thisseglistdict = cache.to_segmentlistdict() coinc1 = 0 coinc2 = 0 for offset_vector in self.offset_vectors: # # loop over offset vectors updating the smaller # bins and the object we are checking # bin1.size.offsets.update(offset_vector) bin2.size.offsets.update(offset_vector) thisseglistdict.offsets.update(offset_vector) if not coinc1 and bin1.size.is_coincident(thisseglistdict, keys = offset_vector.keys()): # # object is coicident with bin1 # coinc1 = 1 bin1.objects.append(cache) if not coinc2 and bin2.size.is_coincident(thisseglistdict, keys = offset_vector.keys()): # # object is coincident with bin2 # coinc2 = 1 bin2.objects.append(cache) # # end loop if known to be coincident with both # bins # if coinc1 and coinc2: break # # clear offsets applied to object # thisseglistdict.offsets.clear() # # clear offsets applied to bins # bin1.size.offsets.clear() bin2.size.offsets.clear() # # append smaller bins to list of bins # cafepacker.bins.append(bin1) cafepacker.bins.append(bin2) # # do not increment idx as we popped the large bin out of # cafepacker.bins # # # sort the bins in cafepacker # cafepacker.bins.sort() return cafepacker
c783c0ac3f8680b2c401e5cffc416b579389f0a8 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/c783c0ac3f8680b2c401e5cffc416b579389f0a8/ligolw_cafe.py
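At the heart of split_bins above is splitting a bin's extent at its central time. A pure-Python sketch of just that step, independent of the glue.segments and LIGOTimeGPS types the real code uses:

def split_extent(extent):
    # halve a (start, end) pair at its midpoint, as split_bins does
    # before intersecting the bin with (-infinity, splittime)
    start, end = extent
    splittime = start + (end - start) / 2
    return (start, splittime), (splittime, end)

# split_extent((100, 200)) == ((100, 150), (150, 200))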
frametype=__patchFrameTypeDef__(frametype,sngl.ifo,sngl.time)
def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.exists(wikiFilename) and maxCount < 15: sys.stdout.write("File %s already exists.\n"%\ os.path.split(wikiFilename)[1]) wikiFilename=wikiFilename+".wiki" maxCount=maxCount+1 sys.stdout.write("Available via browser for wiki upload at %s\n"\ %(file2URL.convert(wikiFilename))) # #Create the wikipage object etc # wikiPage=wiki(wikiFilename) # # Create top two trigger params tables # cTable=wikiPage.wikiTable(2,9) cTable.data=[ ["Trigger Type", "Rank", "FAR", "SNR", "IFOS(Coinc)", "Instruments(Active)", "Coincidence Time (s)", "Total Mass (mSol)", "Chirp Mass (mSol)" ], ["%s"%(wikiCoinc.type), "%s"%(wikiCoinc.rank), "%s"%(wikiCoinc.far), "%s"%(wikiCoinc.snr), "%s"%(wikiCoinc.ifos), "%s"%(wikiCoinc.instruments), "%s"%(wikiCoinc.time), "%s"%(wikiCoinc.mass), "%s"%(wikiCoinc.mchirp) ] ] pTable=wikiPage.wikiTable(len(wikiCoinc.sngls_in_coinc())+1,7) pTable.data[0]=[ "IFO", "GPS Time(s)", "SNR", "CHISQR", "Mass 1", "Mass 2", "Chirp Mass" ] for row,cSngl in enumerate(wikiCoinc.sngls_in_coinc()): pTable.data[row+1]=[ "%s"%(cSngl.ifo), "%s"%(cSngl.time), "%s"%(cSngl.snr), "%s"%(cSngl.chisqr), "%s"%(cSngl.mass1), "%s"%(cSngl.mass2), "%s"%(cSngl.mchirp) ] #Write the tables into the Wiki object wikiPage.putText("Coincident Trigger Event Information: %s\n"\ %(stfu_pipe.gpsTimeToReadableDate(wikiCoinc.time))) wikiPage.insertTable(cTable) wikiPage.putText("Corresponding Coincident Single IFO Trigger Information\n") wikiPage.insertTable(pTable) #Generate a table of contents to appear after candidate params table wikiPage.tableOfContents(3) #Begin including each checklist item as section with subsections wikiPage.section("Follow-up Checklist") #Put each checklist item wikiPage.subsection("Checklist Summary") wikiPage.subsubsection("Does this candidate pass this checklist?") wikiPage.subsubsection("Answer") wikiPage.subsubsection("Relevant Information and Comments") wikiPage.insertHR() # #First real checklist item wikiPage.subsection("#0 False Alarm Probability") wikiPage.subsubsection("Question") wikiPage.putText("What is the false alarm rate associated with this candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") farTable=wikiPage.wikiTable(2,1) farTable.setTableStyle("background-color: yellow; text-align center;") farTable.data[0][0]="False Alarm Rate" farTable.data[1][0]="%s"%(wikiCoinc.far) wikiPage.insertTable(farTable) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#1 Data Quality Flags") wikiPage.subsubsection("Question") wikiPage.putText("Can the data quality flags coincident with this candidate be safely disregarded?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPath=os.path.split(wikiFilename)[0] dqFileList=wikiFileFinder.get_findFlags() if len(dqFileList) != 1: sys.stdout.write("Warning: DQ flags data product import problem.\n") print "Found %i files."%len(dqFileList) for mf in dqFileList: print mf for myFile in dqFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") 
wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#2 Veto Investigations") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate survive the veto investigations performed at its time?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") vetoFileList=wikiFileFinder.get_findVetos() if len(vetoFileList) != 1: sys.stdout.write("Warning: Veto flags data product import problem.\n") for myFile in vetoFileList:print myFile for myFile in vetoFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#3 IFO Status") wikiPage.subsubsection("Question") wikiPage.putText("Are the interferometers operating normally with a reasonable level of sensitivity around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") #Add link to Daily Stats if wikiCoinc.time <= endOfS5: statsLink=wikiPage.makeExternalLink("http://blue.ligo-wa.caltech.edu/scirun/S5/DailyStatistics/",\ "S5 Daily Stats Page") else: statsLink="This should be a link to S6 Daily Stats!\n" wikiPage.putText(statsLink) #Link figures of merit #Get link for all members of wikiCoinc wikiPage.putText("Figures of Merit\n") wikiPage.putText("UTC Time of trigger :%s"%(stfu_pipe.gpsTimeToReadableDate(wikiCoinc.time))) if wikiCoinc.time > endOfS5: fomLinks=dict() elems=0 for wikiSngl in wikiCoinc.sngls: if not(wikiSngl.ifo.upper().rstrip().lstrip() == 'V1'): fomLinks[wikiSngl.ifo]=stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo) elems=elems+len(fomLinks[wikiSngl.ifo]) else: for myLabel,myLink,myThumb in stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo): wikiPage.putText("%s\n"%(wikiPage.makeExternalLink(myLink,myLabel))) cols=4 rows=(elems/3)+1 fTable=wikiPage.wikiTable(rows,cols) fTable.data[0]=["IFO,Shift","FOM1","FOM2","FOM3"] currentIndex=0 for myIFOKey in fomLinks.keys(): for label,link,thumb in fomLinks[myIFOKey]: myRow=currentIndex/int(3)+1 myCol=currentIndex%int(3)+1 fTable.data[myRow][0]=label thumbURL=thumb fTable.data[myRow][myCol]="%s"%(wikiPage.linkedRemoteImage(thumb,link)) currentIndex=currentIndex+1 wikiPage.insertTable(fTable) else: wikiPage.putText("Can not automatically fetch S5 FOM links.") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#4 Candidate Appearance") wikiPage.subsubsection("Question") wikiPage.putText("Do the Qscan figures show what we would expect for a gravitational-wave event?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() for sngl in wikiCoinc.sngls: frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'hoft') frametype=__patchFrameTypeDef__(frametype,sngl.ifo,sngl.time) indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*/%s/*/%s/*index.html"%(frametype,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened.png"\ %(sngl.time,channelName)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened?thumb.png"\ %(sngl.time,channelName)) # 
#Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("GW data channel scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >= 1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >= 1 if enoughImage and enoughIndex: wikiPage.insertQscanTable(imageDict,\ thumbDict,\ indexDict) else: sys.stdout.write("Warning: Candidate appearance plot import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#5 Seismic Plots") wikiPage.subsubsection("Question") wikiPage.putText("Is the seismic activity insignificant around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") # imageDict,indexDict,thumbDict,zValueDict = dict(),dict(),dict(),dict() imageDictAQ,indexDictAQ,thumbDictAQ,zValueDictAQ = dict(),dict(),dict(),dict() filesOmega=wikiFileFinder.get_RDS_R_L1_SEIS() filesAnalyze=wikiFileFinder.get_analyzeQscan_SEIS() for sngl in wikiCoinc.sngls_in_coinc(): indexDict[sngl.ifo],imageDict[sngl.ifo],thumbDict[sngl.ifo],zValueDict[sngl.ifo]=list(),list(),list(),list() indexDictAQ[sngl.ifo],imageDictAQ[sngl.ifo],thumbDictAQ[sngl.ifo],zValueDictAQ[sngl.ifo]=list(),list(),list(),list() frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'rds') frametype=__patchFrameTypeDef__(frametype,sngl.ifo,sngl.time) if sngl.ifo == "V1": chankey = "Em_SE" else: chankey = "SEI" indexDict[sngl.ifo]=fnmatch.filter(filesOmega,\ "*/%s_*/%s/*index.html"%(frametype,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(filesOmega,\ "*/%s_*/%s/*%s*_512.00_spectrogram_whitened.png"%\ (frametype,sngl.time,chankey)) thumbDict[sngl.ifo]=fnmatch.filter(filesOmega,\ "*/%s_*/%s/*%s*_512.00_spectrogram_whitened?thumb.png"%\ (frametype,sngl.time,chankey)) #Search for corresponding Omega summary.txt file zValueDict[sngl.ifo]=list() for zFile in fnmatch.filter(filesOmega,\ "*/%s_*/%s/*summary.txt"%(frametype,sngl.time)): for chan in wikiFileFinder.__readSummary__(zFile): if chankey in chan[0]: zValueDict[sngl.ifo].append(chan) if len(zValueDict[sngl.ifo]) == 0: sys.stdout.write("Omega scan summary file not found or seen empty for %s. ...continuing...\n"%sngl.ifo) #Search for analyzeQscan files timeString=str(float(sngl.time)).replace(".","_") indexDictAQ[sngl.ifo]=fnmatch.filter(filesAnalyze,\ "*_%s_%s_*.html"%(sngl.ifo,timeString)) imageDictAQ[sngl.ifo]=fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_%s*_z_scat-unspecified-gpstime.png"\ %(sngl.ifo,timeString,chankey)) thumbDictAQ[sngl.ifo]=fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_%s*_z_scat-unspecified-gpstime_thumb.png"\ %(sngl.ifo,timeString,chankey)) #Load of analyzeQscan z file if available zValueDictAQ[sngl.ifo]=list() for zFile in fnmatch.filter(filesAnalyze,\ "*_%s_%s_*.txt"%(sngl.ifo,timeString)): for chan in wikiFileFinder.__readSummary__(zFile): if chankey in chan[0]: zValueDictAQ[sngl.ifo].append(chan) if len(zValueDictAQ[sngl.ifo]) == 0: sys.stdout.write("AnalyzeQscan summary file not or empty for %s. 
...continuing...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Seismic scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: Seismic plots product import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#6 Other environmental causes") wikiPage.subsubsection("Question") wikiPage.putText("Were the environmental disturbances (other than seismic) insignificant at the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict,indexDict,thumbDict,zValueDict = dict(),dict(),dict(),dict() imageDictAQ,indexDictAQ,thumbDictAQ,zValueDictAQ = dict(),dict(),dict(),dict() #Select only PEM channels filesOmega=wikiFileFinder.get_RDS_R_L1() filesAnalyze=wikiFileFinder.get_analyzeQscan_RDS() for sngl in wikiCoinc.sngls_in_coinc(): indexDict[sngl.ifo],imageDict[sngl.ifo],thumbDict[sngl.ifo],zValueDict[sngl.ifo]=list(),list(),list(),list() indexDictAQ[sngl.ifo],imageDictAQ[sngl.ifo],thumbDictAQ[sngl.ifo],zValueDictAQ[sngl.ifo]=list(),list(),list(),list() frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'rds') frametype=__patchFrameTypeDef__(frametype,sngl.ifo,sngl.time) if sngl.ifo == "V1": chankeyseis = "Em_SE" chankeyenv = "Em_" else: chankeyseis = "SEI" chankeyenv = "PEM" for myFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*html"%(frametype,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*_16.00_spectrogram_whitened.png"%\ (frametype,sngl.time)): if chankeyenv in myFile and not chankeyseis in myFile: imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (frametype,sngl.time)): if chankeyenv in myFile and not chankeyseis in myFile: thumbDict[sngl.ifo].append(myFile) #Search for corresponding Omega summary.txt file zValueDict[sngl.ifo]=list() for zFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*summary.txt"%(frametype,sngl.time)): for chan in wikiFileFinder.__readSummary__(zFile): if chankeyenv in chan[0] and not chankeyseis in chan[0]: zValueDict[sngl.ifo].append(chan) if len(zValueDict[sngl.ifo]) == 0: sys.stdout.write("Omega scan summary file not found or seen empty for %s. 
...continuing...\n"%sngl.ifo) #Select associated analyzeQscans timeString=str(float(sngl.time)).replace(".","_") for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if chankeyenv in myFile and not chankeyseis in myFile: imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if chankeyenv in myFile and not chankeyseis in myFile: thumbDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueDictAQ[sngl.ifo]=list() for zFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)): for chan in wikiFileFinder.__readSummary__(zFile): if chankeyenv in chan[0] and not chankeyseis in chan[0]: zValueDictAQ[sngl.ifo].append(chan) if len(zValueDictAQ[sngl.ifo]) == 0: sys.stdout.write("AnalyzeQscan summary file not or empty for %s. ...continuing...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(imageDict[sngl.ifo]) < 1: wikiPage.putText("PEM scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: PEM plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#7 Auxiliary degree of freedom") wikiPage.subsubsection("Question") wikiPage.putText("Were the auxiliary channel transients coincident with the candidate insignificant?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict,indexDict,thumbDict,zValueDict = dict(),dict(),dict(),dict() imageDictAQ,indexDictAQ,thumbDictAQ,zValueDictAQ = dict(),dict(),dict(),dict() #Select only AUX channels filesOmega=wikiFileFinder.get_RDS_R_L1() filesAnalyze=wikiFileFinder.get_analyzeQscan_RDS() for sngl in wikiCoinc.sngls: frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'rds') frametype=__patchFrameTypeDef__(frametype,sngl.ifo,sngl.time) if sngl.ifo == "V1": chankeyseis = "Em_SE" chankeyenv = "Em_" else: chankeyseis = "SEI" chankeyenv = "PEM" indexDict[sngl.ifo],imageDict[sngl.ifo],thumbDict[sngl.ifo],zValueDict[sngl.ifo]=list(),list(),list(),list() indexDictAQ[sngl.ifo],imageDictAQ[sngl.ifo],thumbDictAQ[sngl.ifo],zValueDictAQ[sngl.ifo]=list(),list(),list(),list() for myFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*html"%(frametype,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*_16.00_spectrogram_whitened.png"%\ (frametype,sngl.time)): if not chankeyenv in myFile or not chankeyseis in myFile: imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesOmega,\ 
"*/%s/*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (frametype,sngl.time)): if not chankeyenv in myFile and not chankeyseis in myFile: thumbDict[sngl.ifo].append(myFile) zValueDict[sngl.ifo]=list() for zFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*summary.txt"%(frametype,sngl.time)): for chan in wikiFileFinder.__readSummary__(zFile): if not chankeyenv in chan[0] and not chankeyseis in chan[0]: zValueDict[sngl.ifo].append(chan) if len(zValueDict[sngl.ifo]) == 0: sys.stdout.write("Omega scan summary file not found or seen empty for %s. ...continuing...\n"%sngl.ifo) #Select associated analyzeQscans timeString=str(float(sngl.time)).replace(".","_") for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if not chankeyenv in myFile or not chankeyseis in myFile: imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if not chankeyenv in myFile and not chankeyseis in myFile: thumbDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueDictAQ[sngl.ifo]=list() for zFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)): for chan in wikiFileFinder.__readSummary__(zFile): if not chankeyenv in chan[0] and not chankeyseis in chan[0]: zValueDictAQ[sngl.ifo].append(chan) if len(zValueDictAQ[sngl.ifo]) == 0: sys.stdout.write("AnalyzeQscan summary file not or empty for %s. ...continuing...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Other scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: AUX plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#8 Electronic Log Book") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the comments posted by the sci-mons or the operators in the e-log?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiLinkLHOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"H1"), "Hanford eLog") wikiLinkLLOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"L1"), "Livingston eLog") wikiPage.putText("%s\n\n%s\n\n"%(wikiLinkLHOlog,wikiLinkLLOlog)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#9 Glitch Report") wikiPage.subsubsection("Question") wikiPage.putText("Were the 
instruments behaving normally according to the weekly glitch report?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") if int(wikiCoinc.time) >= endOfS5: wikiLinkGlitch=wikiPage.makeExternalLink( "https://www.lsc-group.phys.uwm.edu/twiki/bin/view/DetChar/GlitchStudies", "Glitch Reports for S6" ) else: wikiLinkGlitch=wikiPage.makeExternalLink( "http://www.lsc-group.phys.uwm.edu/glitch/investigations/s5index.html#shift", "Glitch Reports for S5" ) wikiPage.putText("%s\n"%(wikiLinkGlitch)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#10 Snr versus time") wikiPage.subsubsection("Question") wikiPage.putText("Is this trigger significant in a SNR versus time plot of all triggers in its analysis chunk?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#11 Parameters of the candidate") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate have a high likelihood of being a gravitational-wave according to its parameters?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Effective Distance Ratio Test\n") effDList=wikiFileFinder.get_effDRatio() if len(effDList) != 1: sys.stdout.write("Warning: Effective Distance Test import problem.\n") for myFile in effDList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#12 Snr and Chisq") wikiPage.subsubsection("Question") wikiPage.putText("Are the SNR and CHISQ time series consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") # #Put plots SNR and Chi sqr # indexList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*.html") thumbList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_snr-*thumb.png") thumbList.extend(fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_chisq-*thumb.png")) thumbList.sort() indexList=[file2URL.convert(x) for x in indexList] thumbList=[file2URL.convert(x) for x in thumbList] #Two thumb types possible "_thumb.png" or ".thumb.png" imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] ifoCount=len(wikiCoinc.sngls) rowLabel={"SNR":1,"CHISQ":2} rowCount=len(rowLabel) colCount=ifoCount if len(indexList) >= 1: snrTable=wikiPage.wikiTable(rowCount+1,colCount+1) for i,sngl in enumerate(wikiCoinc.sngls): myIndex="" for indexFile in indexList: if indexFile.__contains__("_pipe_%s_FOLLOWUP_"%sngl.ifo): myIndex=indexFile if myIndex=="": snrTable.data[0][i+1]=" %s "%sngl.ifo else: snrTable.data[0][i+1]=wikiPage.makeExternalLink(myIndex,sngl.ifo) for col,sngl in enumerate(wikiCoinc.sngls): for row,label in enumerate(rowLabel.keys()): snrTable.data[row+1][0]=label for k,image in enumerate(imageList): if (image.__contains__("_%s-"%label.lower()) \ and image.__contains__("pipe_%s_FOLLOWUP"%sngl.ifo)): snrTable.data[row+1][col+1]=" %s 
"%(wikiPage.linkedRemoteImage(thumbList[k],thumbList[k])) wikiPage.insertTable(snrTable) else: sys.stdout.write("Warning: SNR and CHISQ plots not found.\n") wikiPage.putText("SNR and CHISQ plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#13 Template bank veto") wikiPage.subsubsection("Question") wikiPage.putText("Is the bank veto value consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#14 Coherent studies") wikiPage.subsubsection("Question") wikiPage.putText("Are the triggers found in multiple interferometers coherent with each other?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") indexList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),"*.html") if len(indexList) >= 1: myIndex=file2URL.convert(indexList[0]) wikiPage.putText(wikiPage.makeExternalLink(myIndex,\ "%s Coherence Study Results"%(wikiCoinc.ifos))) thumbList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),\ "PLOT_CHIA_%s_snr-squared*thumb.png"%(wikiCoinc.time)) imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] rowCount=len(imageList) colCount=1 cohSnrTimeTable=wikiPage.wikiTable(rowCount+1,colCount) cohSnrTimeTable.data[0][0]="%s Coherent SNR Squared Times Series"%(wikiCoinc.ifos) for i,image in enumerate(imageList): cohSnrTimeTable.data[i+1][0]=wikiPage.linkedRemoteImage(image,thumbList[i]) wikiPage.insertTable(cohSnrTimeTable) else: sys.stdout.write("Warning: Coherent plotting jobs not found.\n") wikiPage.putText("Coherent Studies plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#15 Segmentation Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in segmentation?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#16 Calibration Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in calibration that are consistent with systematic uncertainties?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #
e9e5a10e9706a2981c26188d2ab4026bcf14d7e1 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/e9e5a10e9706a2981c26188d2ab4026bcf14d7e1/makeCheckListWiki.py
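prepareChecklist above leans heavily on fnmatch.filter to pick data products out of flat file lists. A minimal sketch of that globbing style (paths are made up):

import fnmatch

files = [
    "/scans/H1_RDS/900000000/a_16.00_spectrogram_whitened.png",
    "/scans/H1_RDS/900000000/index.html",
]
images = fnmatch.filter(files, "*_16.00_spectrogram_whitened.png")
indexes = fnmatch.filter(files, "*index.html")
# images holds the .png entry, indexes the .html entry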
stdin, out, err = os.popen3(command) pid, status = os.wait() if status != 0:
p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=isinstance(command, str)) out, err = p.communicate() if p.returncode != 0:
def make_external_call(command, show_stdout=False, show_command=False): """ Run a program on the shell and print informative messages on failure. """ if show_command: print command stdin, out, err = os.popen3(command) pid, status = os.wait() if status != 0: print >>sys.stderr, "External call failed." print >>sys.stderr, " status: %d" % status print >>sys.stderr, " stdout: %s" % out.read() print >>sys.stderr, " stderr: %s" % err.read() print >>sys.stderr, " command: %s" % command sys.exit(status) if show_stdout: print out.read() stdin.close() out.close() err.close()
79ded5e1e800a978bb85c465f48b4b83a8274ddb /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/79ded5e1e800a978bb85c465f48b4b83a8274ddb/inspiralutils.py
print >>sys.stderr, " status: %d" % status print >>sys.stderr, " stdout: %s" % out.read() print >>sys.stderr, " stderr: %s" % err.read() print >>sys.stderr, " command: %s" % command sys.exit(status)
print >>sys.stderr, " stdout: %s" % out print >>sys.stderr, " stderr: %s" % err raise subprocess.CalledProcessError(p.returncode, command)
def make_external_call(command, show_stdout=False, show_command=False): """ Run a program on the shell and print informative messages on failure. """ if show_command: print command stdin, out, err = os.popen3(command) pid, status = os.wait() if status != 0: print >>sys.stderr, "External call failed." print >>sys.stderr, " status: %d" % status print >>sys.stderr, " stdout: %s" % out.read() print >>sys.stderr, " stderr: %s" % err.read() print >>sys.stderr, " command: %s" % command sys.exit(status) if show_stdout: print out.read() stdin.close() out.close() err.close()
79ded5e1e800a978bb85c465f48b4b83a8274ddb /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/79ded5e1e800a978bb85c465f48b4b83a8274ddb/inspiralutils.py
print out.read() stdin.close() out.close() err.close()
print out
def make_external_call(command, show_stdout=False, show_command=False): """ Run a program on the shell and print informative messages on failure. """ if show_command: print command stdin, out, err = os.popen3(command) pid, status = os.wait() if status != 0: print >>sys.stderr, "External call failed." print >>sys.stderr, " status: %d" % status print >>sys.stderr, " stdout: %s" % out.read() print >>sys.stderr, " stderr: %s" % err.read() print >>sys.stderr, " command: %s" % command sys.exit(status) if show_stdout: print out.read() stdin.close() out.close() err.close()
79ded5e1e800a978bb85c465f48b4b83a8274ddb /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/79ded5e1e800a978bb85c465f48b4b83a8274ddb/inspiralutils.py
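The three rows above migrate make_external_call from os.popen3, which makes the child's exit status awkward to recover, to subprocess. A self-contained sketch of the replacement pattern:

import subprocess

def make_external_call(command, show_stdout=False):
    # shell=True only when handed a string, mirroring the change above
    p = subprocess.Popen(command, stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE,
                         shell=isinstance(command, str))
    out, err = p.communicate()
    if p.returncode != 0:
        raise subprocess.CalledProcessError(p.returncode, command)
    if show_stdout:
        print out
    return out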
def gridsky(resolution):
def gridsky(resolution,shifted=False):
def gridsky(resolution): """ grid the sky up into roughly square regions resolution is the length of a side the points get placed at the center of the squares and to first order each square has an area of resolution^2 """ latitude = 0.0 longitude = pi ds = pi*resolution/180.0 points = [(latitude-0.5*pi, longitude)] while latitude <= pi: latitude += ds longitude = 0.0 points.append((latitude-0.5*pi, longitude)) while longitude <= 2.0*pi: longitude += ds / abs(sin(latitude)) points.append((latitude-0.5*pi, longitude)) #there's some slop so get rid of it and only focus on points on the sphere sphpts = [] for pt in points: if pt[0] > pi/2 or pt[0] < -pi/2 \ or pt[1] > 2*pi or pt[1] < 0: pass else: sphpts.append(pt) return sphpts
f92b8a09994e6eca850ba21adf1061202a15b274 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/f92b8a09994e6eca850ba21adf1061202a15b274/skylocutils.py
longitude = pi
longitude = 0.0
def gridsky(resolution): """ grid the sky up into roughly square regions resolution is the length of a side the points get placed at the center of the squares and to first order each square has an area of resolution^2 """ latitude = 0.0 longitude = pi ds = pi*resolution/180.0 points = [(latitude-0.5*pi, longitude)] while latitude <= pi: latitude += ds longitude = 0.0 points.append((latitude-0.5*pi, longitude)) while longitude <= 2.0*pi: longitude += ds / abs(sin(latitude)) points.append((latitude-0.5*pi, longitude)) #there's some slop so get rid of it and only focus on points on the sphere sphpts = [] for pt in points: if pt[0] > pi/2 or pt[0] < -pi/2 \ or pt[1] > 2*pi or pt[1] < 0: pass else: sphpts.append(pt) return sphpts
f92b8a09994e6eca850ba21adf1061202a15b274 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/f92b8a09994e6eca850ba21adf1061202a15b274/skylocutils.py
points = [(latitude-0.5*pi, longitude)]
points = [] if shifted: dlat = 0.0 else: dlat = 0.5*pi
def gridsky(resolution): """ grid the sky up into roughly square regions resolution is the length of a side the points get placed at the center of the squares and to first order each square has an area of resolution^2 """ latitude = 0.0 longitude = pi ds = pi*resolution/180.0 points = [(latitude-0.5*pi, longitude)] while latitude <= pi: latitude += ds longitude = 0.0 points.append((latitude-0.5*pi, longitude)) while longitude <= 2.0*pi: longitude += ds / abs(sin(latitude)) points.append((latitude-0.5*pi, longitude)) #there's some slop so get rid of it and only focus on points on the sphere sphpts = [] for pt in points: if pt[0] > pi/2 or pt[0] < -pi/2 \ or pt[1] > 2*pi or pt[1] < 0: pass else: sphpts.append(pt) return sphpts
f92b8a09994e6eca850ba21adf1061202a15b274 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/f92b8a09994e6eca850ba21adf1061202a15b274/skylocutils.py
points.append((latitude-0.5*pi, longitude))
points.append((latitude-dlat, longitude))
def gridsky(resolution): """ grid the sky up into roughly square regions resolution is the length of a side the points get placed at the center of the squares and to first order each square has an area of resolution^2 """ latitude = 0.0 longitude = pi ds = pi*resolution/180.0 points = [(latitude-0.5*pi, longitude)] while latitude <= pi: latitude += ds longitude = 0.0 points.append((latitude-0.5*pi, longitude)) while longitude <= 2.0*pi: longitude += ds / abs(sin(latitude)) points.append((latitude-0.5*pi, longitude)) #there's some slop so get rid of it and only focus on points on the sphere sphpts = [] for pt in points: if pt[0] > pi/2 or pt[0] < -pi/2 \ or pt[1] > 2*pi or pt[1] < 0: pass else: sphpts.append(pt) return sphpts
f92b8a09994e6eca850ba21adf1061202a15b274 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/f92b8a09994e6eca850ba21adf1061202a15b274/skylocutils.py
points.append((latitude-0.5*pi, longitude))
points.append((latitude-dlat, longitude)) points.append((0.0-dlat,0.0))
def gridsky(resolution): """ grid the sky up into roughly square regions resolution is the length of a side the points get placed at the center of the squares and to first order each square has an area of resolution^2 """ latitude = 0.0 longitude = pi ds = pi*resolution/180.0 points = [(latitude-0.5*pi, longitude)] while latitude <= pi: latitude += ds longitude = 0.0 points.append((latitude-0.5*pi, longitude)) while longitude <= 2.0*pi: longitude += ds / abs(sin(latitude)) points.append((latitude-0.5*pi, longitude)) #there's some slop so get rid of it and only focus on points on the sphere sphpts = [] for pt in points: if pt[0] > pi/2 or pt[0] < -pi/2 \ or pt[1] > 2*pi or pt[1] < 0: pass else: sphpts.append(pt) return sphpts
f92b8a09994e6eca850ba21adf1061202a15b274 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/f92b8a09994e6eca850ba21adf1061202a15b274/skylocutils.py
if pt[0] > pi/2 or pt[0] < -pi/2 \ or pt[1] > 2*pi or pt[1] < 0:
if pt[0] > latmax or pt[0] < latmin or pt[1] > 2*pi or pt[1] < 0.0:
def gridsky(resolution): """ grid the sky up into roughly square regions resolution is the length of a side the points get placed at the center of the squares and to first order each square has an area of resolution^2 """ latitude = 0.0 longitude = pi ds = pi*resolution/180.0 points = [(latitude-0.5*pi, longitude)] while latitude <= pi: latitude += ds longitude = 0.0 points.append((latitude-0.5*pi, longitude)) while longitude <= 2.0*pi: longitude += ds / abs(sin(latitude)) points.append((latitude-0.5*pi, longitude)) #there's some slop so get rid of it and only focus on points on the sphere sphpts = [] for pt in points: if pt[0] > pi/2 or pt[0] < -pi/2 \ or pt[1] > 2*pi or pt[1] < 0: pass else: sphpts.append(pt) return sphpts
f92b8a09994e6eca850ba21adf1061202a15b274 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/f92b8a09994e6eca850ba21adf1061202a15b274/skylocutils.py
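Reassembled from the six gridsky rows above, the new function looks roughly like the sketch below. The placement of the final (0.0-dlat, 0.0) point and the latmin/latmax bounds are assumptions; the rows reference them without defining them.

from math import pi, sin

def gridsky(resolution, shifted=False):
    # assumed bounds: shifted grids keep latitudes in [0, pi],
    # unshifted ones in [-pi/2, pi/2]
    if shifted:
        dlat, latmin, latmax = 0.0, 0.0, pi
    else:
        dlat, latmin, latmax = 0.5 * pi, -0.5 * pi, 0.5 * pi
    ds = pi * resolution / 180.0
    points = []
    latitude = 0.0
    while latitude <= pi:
        latitude += ds
        longitude = 0.0
        points.append((latitude - dlat, longitude))
        while longitude <= 2.0 * pi:
            longitude += ds / abs(sin(latitude))
            points.append((latitude - dlat, longitude))
    points.append((0.0 - dlat, 0.0))
    # trim the slop: keep only points that landed on the sphere
    return [pt for pt in points
            if latmin <= pt[0] <= latmax and 0.0 <= pt[1] <= 2 * pi]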
takes the two grids (lists of lat/lon tuples) and returns a dictionary
takes the two grids (lists of lat/lon tuples) and returns (1) a dictionary
def map_grids(coarsegrid,finegrid,coarseres=4.0): """ takes the two grids (lists of lat/lon tuples) and returns a dictionary where the points in the coarse grid are the keys and lists of tuples of points in the fine grid are the values """ fgtemp = finegrid[:] coarsedict = {} ds = coarseres*pi/180.0 for cpt in coarsegrid: flist = [] for fpt in fgtemp: if (cpt[0]-fpt[0])*(cpt[0]-fpt[0]) <= ds*ds/4.0 and \ (cpt[1]-fpt[1])*(cpt[1]-fpt[1])*sin(cpt[0])*sin(cpt[0]) \ <= ds*ds/4.0: flist.append(fpt) coarsedict[cpt] = flist for rpt in flist: fgtemp.remove(rpt) return coarsedict, fgtemp
f92b8a09994e6eca850ba21adf1061202a15b274 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/f92b8a09994e6eca850ba21adf1061202a15b274/skylocutils.py
points in the fine grid are the values
points in the fine grid are the values and (2) (for debugging purposes) returns a list of points in the fine grid that didn't make it. NB: *** should work alright if the resolution isn't too coarse (because it uses the infinitesimal form of the metric); 5 is at the upper end of ok *** there is a fudge factor (epsilon) in the distance computation to help account for not using the integrated distance and to help with floating point comparisons
def map_grids(coarsegrid,finegrid,coarseres=4.0): """ takes the two grids (lists of lat/lon tuples) and returns a dictionary where the points in the coarse grid are the keys and lists of tuples of points in the fine grid are the values """ fgtemp = finegrid[:] coarsedict = {} ds = coarseres*pi/180.0 for cpt in coarsegrid: flist = [] for fpt in fgtemp: if (cpt[0]-fpt[0])*(cpt[0]-fpt[0]) <= ds*ds/4.0 and \ (cpt[1]-fpt[1])*(cpt[1]-fpt[1])*sin(cpt[0])*sin(cpt[0]) \ <= ds*ds/4.0: flist.append(fpt) coarsedict[cpt] = flist for rpt in flist: fgtemp.remove(rpt) return coarsedict, fgtemp
f92b8a09994e6eca850ba21adf1061202a15b274 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/f92b8a09994e6eca850ba21adf1061202a15b274/skylocutils.py
if (cpt[0]-fpt[0])*(cpt[0]-fpt[0]) <= ds*ds/4.0 and \ (cpt[1]-fpt[1])*(cpt[1]-fpt[1])*sin(cpt[0])*sin(cpt[0]) \ <= ds*ds/4.0:
if (cpt[0]-fpt[0])*(cpt[0]-fpt[0])-ds*ds/4.0 <= epsilon and \ (cpt[1]-fpt[1])*(cpt[1]-fpt[1])*sin(cpt[0])*sin(cpt[0])-ds*ds/4.0 <= epsilon:
def map_grids(coarsegrid,finegrid,coarseres=4.0): """ takes the two grids (lists of lat/lon tuples) and returns a dictionary where the points in the coarse grid are the keys and lists of tuples of points in the fine grid are the values """ fgtemp = finegrid[:] coarsedict = {} ds = coarseres*pi/180.0 for cpt in coarsegrid: flist = [] for fpt in fgtemp: if (cpt[0]-fpt[0])*(cpt[0]-fpt[0]) <= ds*ds/4.0 and \ (cpt[1]-fpt[1])*(cpt[1]-fpt[1])*sin(cpt[0])*sin(cpt[0]) \ <= ds*ds/4.0: flist.append(fpt) coarsedict[cpt] = flist for rpt in flist: fgtemp.remove(rpt) return coarsedict, fgtemp
f92b8a09994e6eca850ba21adf1061202a15b274 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/f92b8a09994e6eca850ba21adf1061202a15b274/skylocutils.py
first_col = [pt for pt in coarsegrid if pt[1] == 0.0] for cpt in first_col: flist = [] for fpt in fgtemp: if (cpt[0]-fpt[0])*(cpt[0]-fpt[0])-ds*ds/4.0 <= epsilon and \ (2*pi-fpt[1])*(2*pi-fpt[1])*sin(cpt[0])*sin(cpt[0])-ds*ds/4.0 <= epsilon: coarsedict[cpt].append(fpt) flist.append(fpt) for rpt in flist: fgtemp.remove(rpt)
def map_grids(coarsegrid,finegrid,coarseres=4.0): """ takes the two grids (lists of lat/lon tuples) and returns a dictionary where the points in the coarse grid are the keys and lists of tuples of points in the fine grid are the values """ fgtemp = finegrid[:] coarsedict = {} ds = coarseres*pi/180.0 for cpt in coarsegrid: flist = [] for fpt in fgtemp: if (cpt[0]-fpt[0])*(cpt[0]-fpt[0]) <= ds*ds/4.0 and \ (cpt[1]-fpt[1])*(cpt[1]-fpt[1])*sin(cpt[0])*sin(cpt[0]) \ <= ds*ds/4.0: flist.append(fpt) coarsedict[cpt] = flist for rpt in flist: fgtemp.remove(rpt) return coarsedict, fgtemp
f92b8a09994e6eca850ba21adf1061202a15b274 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/f92b8a09994e6eca850ba21adf1061202a15b274/skylocutils.py
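Combining the four map_grids rows above: the distance tests gain a fudge factor epsilon, and fine-grid points near longitude 2*pi get wrapped back onto the coarse points in the longitude-0 column. A sketch, with an assumed value for epsilon (the rows use it without defining it):

from math import pi, sin

def map_grids(coarsegrid, finegrid, coarseres=4.0):
    epsilon = 1.0e-6  # assumed; absorbs float slop in the comparisons
    fgtemp = finegrid[:]
    coarsedict = {}
    ds = coarseres * pi / 180.0
    for cpt in coarsegrid:
        flist = []
        for fpt in fgtemp:
            if (cpt[0] - fpt[0]) ** 2 - ds * ds / 4.0 <= epsilon and \
               (cpt[1] - fpt[1]) ** 2 * sin(cpt[0]) ** 2 - ds * ds / 4.0 <= epsilon:
                flist.append(fpt)
        coarsedict[cpt] = flist
        for rpt in flist:
            fgtemp.remove(rpt)
    # wrap-around: leftover fine points near longitude 2*pi belong to
    # the coarse points in the first (longitude == 0) column
    for cpt in [pt for pt in coarsegrid if pt[1] == 0.0]:
        flist = []
        for fpt in fgtemp:
            if (cpt[0] - fpt[0]) ** 2 - ds * ds / 4.0 <= epsilon and \
               (2 * pi - fpt[1]) ** 2 * sin(cpt[0]) ** 2 - ds * ds / 4.0 <= epsilon:
                coarsedict[cpt].append(fpt)
                flist.append(fpt)
        for rpt in flist:
            fgtemp.remove(rpt)
    return coarsedict, fgtemp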
" --segment_url https://segdb.ligo.caltech.edu"+\
" --segment-url https://segdb.ligo.caltech.edu"+\
def grab_segments(start,end,flag): """ Returns a segmentlist containing the segments during which the given flag was active in the given period. """ #== construct segment query segment_cmd = "ligolw_segment_query --query-segments"+\ " --segment_url https://segdb.ligo.caltech.edu"+\ " --include-segments "+flag+\ " --gps-start-time "+str(start)+\ " --gps-end-time "+str(end)+\ ''' | ligolw_print -t segment -c start_time -c end_time --delimiter " "''' #== run segment query segs = GetCommandOutput(segment_cmd) #== construct segments as structure seglist=[] segs=segs.split('\n') for seg in segs: if seg=='': continue try: [seg_start,seg_end]=seg.split(' ') seglist.append(segment(int(seg_start),int(seg_end))) except: continue seglist = segmentlist([seglist]) return seglist
3ff03be0c8cc77ecae9200efb1d883d6c9166fdc /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/3ff03be0c8cc77ecae9200efb1d883d6c9166fdc/dqSegmentUtils.py
" --gps-start-time "+str(start)+\ " --gps-end-time "+str(end)+\
" --gps-start-time "+str(int(start))+\ " --gps-end-time "+str(int(end))+\
def grab_segments(start,end,flag): """ Returns a segmentlist containing the segments during which the given flag was active in the given period. """ #== construct segment query segment_cmd = "ligolw_segment_query --query-segments"+\ " --segment_url https://segdb.ligo.caltech.edu"+\ " --include-segments "+flag+\ " --gps-start-time "+str(start)+\ " --gps-end-time "+str(end)+\ ''' | ligolw_print -t segment -c start_time -c end_time --delimiter " "''' #== run segment query segs = GetCommandOutput(segment_cmd) #== construct segments as structure seglist=[] segs=segs.split('\n') for seg in segs: if seg=='': continue try: [seg_start,seg_end]=seg.split(' ') seglist.append(segment(int(seg_start),int(seg_end))) except: continue seglist = segmentlist([seglist]) return seglist
3ff03be0c8cc77ecae9200efb1d883d6c9166fdc /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/3ff03be0c8cc77ecae9200efb1d883d6c9166fdc/dqSegmentUtils.py
segs = GetCommandOutput(segment_cmd)
segs,status = GetCommandOutput(segment_cmd)
def grab_segments(start,end,flag): """ Returns a segmentlist containing the segments during which the given flag was active in the given period. """ #== construct segment query segment_cmd = "ligolw_segment_query --query-segments"+\ " --segment_url https://segdb.ligo.caltech.edu"+\ " --include-segments "+flag+\ " --gps-start-time "+str(start)+\ " --gps-end-time "+str(end)+\ ''' | ligolw_print -t segment -c start_time -c end_time --delimiter " "''' #== run segment query segs = GetCommandOutput(segment_cmd) #== construct segments as structure seglist=[] segs=segs.split('\n') for seg in segs: if seg=='': continue try: [seg_start,seg_end]=seg.split(' ') seglist.append(segment(int(seg_start),int(seg_end))) except: continue seglist = segmentlist([seglist]) return seglist
3ff03be0c8cc77ecae9200efb1d883d6c9166fdc /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/3ff03be0c8cc77ecae9200efb1d883d6c9166fdc/dqSegmentUtils.py
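GetCommandOutput is a pipeline helper; from the updated call site it evidently returns an (output, status) pair rather than output alone. A stdlib sketch of such a runner, shown with subprocess; the function name is kept only for resemblance.

import subprocess

def get_command_output(cmd):
    """Run a shell command and return (stdout_text, exit_status),
    the pair the updated call site unpacks."""
    proc = subprocess.Popen(cmd, shell=True,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE)
    out, _ = proc.communicate()
    return out.decode(), proc.returncode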
segs=segs.split('\n') for seg in segs: if seg=='': continue try: [seg_start,seg_end]=seg.split(' ') seglist.append(segment(int(seg_start),int(seg_end))) except: continue
if status==0: segs=segs.split('\n') for seg in segs: if seg=='': continue try: [seg_start,seg_end]=seg.split(' ') seglist.append(segment(int(seg_start),int(seg_end))) except: continue seglist = segmentlist(seglist) else: print >>sys.stderr, "Warning: Call to ligolw_segment_query failed with "+\ "command:" print >>sys.stderr, "\n"+segment_cmd+"\n"
def grab_segments(start,end,flag): """ Returns a segmentlist containing the segments during which the given flag was active in the given period. """ #== construct segment query segment_cmd = "ligolw_segment_query --query-segments"+\ " --segment_url https://segdb.ligo.caltech.edu"+\ " --include-segments "+flag+\ " --gps-start-time "+str(start)+\ " --gps-end-time "+str(end)+\ ''' | ligolw_print -t segment -c start_time -c end_time --delimiter " "''' #== run segment query segs = GetCommandOutput(segment_cmd) #== construct segments as structure seglist=[] segs=segs.split('\n') for seg in segs: if seg=='': continue try: [seg_start,seg_end]=seg.split(' ') seglist.append(segment(int(seg_start),int(seg_end))) except: continue seglist = segmentlist([seglist]) return seglist
3ff03be0c8cc77ecae9200efb1d883d6c9166fdc /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/3ff03be0c8cc77ecae9200efb1d883d6c9166fdc/dqSegmentUtils.py
seglist = segmentlist([seglist])
def grab_segments(start,end,flag): """ Returns a segmentlist containing the segments during which the given flag was active in the given period. """ #== construct segment query segment_cmd = "ligolw_segment_query --query-segments"+\ " --segment_url https://segdb.ligo.caltech.edu"+\ " --include-segments "+flag+\ " --gps-start-time "+str(start)+\ " --gps-end-time "+str(end)+\ ''' | ligolw_print -t segment -c start_time -c end_time --delimiter " "''' #== run segment query segs = GetCommandOutput(segment_cmd) #== construct segments as structure seglist=[] segs=segs.split('\n') for seg in segs: if seg=='': continue try: [seg_start,seg_end]=seg.split(' ') seglist.append(segment(int(seg_start),int(seg_end))) except: continue seglist = segmentlist([seglist]) return seglist
3ff03be0c8cc77ecae9200efb1d883d6c9166fdc /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/3ff03be0c8cc77ecae9200efb1d883d6c9166fdc/dqSegmentUtils.py
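The last two records belong together: parsing is now guarded by a zero exit status, and the removed segmentlist([seglist]) call, which nested the entire list inside a single bogus element, gives way to segmentlist(seglist). A sketch of the corrected parse, assuming the segment/segmentlist types from glue.segments that this module already relies on.

from glue.segments import segment, segmentlist  # assumed, per this module

def parse_segment_lines(text, status):
    """Turn 'start end' lines from ligolw_print into a segmentlist,
    trusting the output only when the query exited cleanly."""
    seglist = []
    if status == 0:
        for line in text.split('\n'):
            if line == '':
                continue
            try:
                seg_start, seg_end = line.split(' ')
                seglist.append(segment(int(seg_start), int(seg_end)))
            except ValueError:
                continue
    # segmentlist(seglist), not segmentlist([seglist])
    return segmentlist(seglist).coalesce()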
try: cPickle.dump(self.__backgroundDict__,file(pickleLocale,'w')) except: sys.stdout.write("Problem saving pickle of DQ information.") sys.stdout.write("Trying to place pickle in your home directory.")
if not backgroundPickle:
def createDQbackground(self,ifoEpochList=list(),pickleLocale=None): """ Two inputs a list of tuples (ifo,epochname) for each instrument. Also a place to save the potential pickle to for quick access later. """ if type(ifoEpochList) != type(list()): raise Exception, \ "Invalid input argument ifoEpochList,%s type(%s)"\ %(ifoEpochList,type(ifoEpochList)) #Make sure epoch exists for reach ifo for ifo,epoch in ifoEpochList: if ifo not in runEpochs.keys(): raise Exception, "Bad ifo specified, %s"%ifo if epoch not in runEpochs[ifo].keys(): raise Exception, "Bad ifo epoch specified, %s:%s"%(ifo,epoch) #If pickle location given try to load that pickle first. backgroundPickle=False if pickleLocale!=None: #If pickle file exists read it if not make sure we can #generate it properly otherwise skip creating background if os.path.isfile(pickleLocale): try: self.__backgroundDict__=cPickle.load(file(pickleLocale,'r')) backgroundPickle=True except: backgroundPickle=False sys.stderr.write("Error importing the pickle file! %s\n"\ %(pickleLocale)) return for (ifo,epoch) in ifoEpochList: if (ifo.upper().strip(),epoch.upper().strip()) \ not in self.__backgroundDict__["ifoepoch"]: raise Exception,\ "Invalid ifo and epoch information in \
6dd8d677924d136a0f3b5fb060f859178f7cc82c /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/6dd8d677924d136a0f3b5fb060f859178f7cc82c/fu_utils.py
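The added guard makes regeneration conditional on the backgroundPickle flag set during the load attempt. A minimal sketch of that load-else-rebuild caching pattern; rebuild() is a hypothetical stand-in for the DQ background query.

import os
import cPickle  # "pickle" on Python 3

def load_or_rebuild(pickle_path, rebuild):
    """Return the cached background if the pickle loads; otherwise
    call rebuild() and cache its result for next time."""
    have_pickle = False
    background = None
    if pickle_path is not None and os.path.isfile(pickle_path):
        try:
            background = cPickle.load(open(pickle_path, 'rb'))
            have_pickle = True
        except Exception:
            have_pickle = False
    if not have_pickle:
        background = rebuild()
        if pickle_path is not None:
            cPickle.dump(background, open(pickle_path, 'wb'))
    return background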
cPickle.dump(self.__backgroundDict__, file(home_dir()+"/"+os.path.basename(pickleLocale),'w'))
cPickle.dump(self.__backgroundDict__,file(pickleLocale,'w'))
def createDQbackground(self,ifoEpochList=list(),pickleLocale=None): """ Two inputs a list of tuples (ifo,epochname) for each instrument. Also a place to save the potential pickle to for quick access later. """ if type(ifoEpochList) != type(list()): raise Exception, \ "Invalid input argument ifoEpochList,%s type(%s)"\ %(ifoEpochList,type(ifoEpochList)) #Make sure epoch exists for reach ifo for ifo,epoch in ifoEpochList: if ifo not in runEpochs.keys(): raise Exception, "Bad ifo specified, %s"%ifo if epoch not in runEpochs[ifo].keys(): raise Exception, "Bad ifo epoch specified, %s:%s"%(ifo,epoch) #If pickle location given try to load that pickle first. backgroundPickle=False if pickleLocale!=None: #If pickle file exists read it if not make sure we can #generate it properly otherwise skip creating background if os.path.isfile(pickleLocale): try: self.__backgroundDict__=cPickle.load(file(pickleLocale,'r')) backgroundPickle=True except: backgroundPickle=False sys.stderr.write("Error importing the pickle file! %s\n"\ %(pickleLocale)) return for (ifo,epoch) in ifoEpochList: if (ifo.upper().strip(),epoch.upper().strip()) \ not in self.__backgroundDict__["ifoepoch"]: raise Exception,\ "Invalid ifo and epoch information in \
6dd8d677924d136a0f3b5fb060f859178f7cc82c /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/6dd8d677924d136a0f3b5fb060f859178f7cc82c/fu_utils.py
sys.stdout.write("Really ignoring pickle generation now!\n")
sys.stdout.write("Problem saving pickle of DQ information.") sys.stdout.write("Trying to place pickle in your home directory.") try: cPickle.dump(self.__backgroundDict__, file(home_dir()+"/"+os.path.basename(pickleLocale),'w')) except: sys.stdout.write("Really ignoring pickle generation now!\n")
def createDQbackground(self,ifoEpochList=list(),pickleLocale=None): """ Two inputs a list of tuples (ifo,epochname) for each instrument. Also a place to save the potential pickle to for quick access later. """ if type(ifoEpochList) != type(list()): raise Exception, \ "Invalid input argument ifoEpochList,%s type(%s)"\ %(ifoEpochList,type(ifoEpochList)) #Make sure epoch exists for reach ifo for ifo,epoch in ifoEpochList: if ifo not in runEpochs.keys(): raise Exception, "Bad ifo specified, %s"%ifo if epoch not in runEpochs[ifo].keys(): raise Exception, "Bad ifo epoch specified, %s:%s"%(ifo,epoch) #If pickle location given try to load that pickle first. backgroundPickle=False if pickleLocale!=None: #If pickle file exists read it if not make sure we can #generate it properly otherwise skip creating background if os.path.isfile(pickleLocale): try: self.__backgroundDict__=cPickle.load(file(pickleLocale,'r')) backgroundPickle=True except: backgroundPickle=False sys.stderr.write("Error importing the pickle file! %s\n"\ %(pickleLocale)) return for (ifo,epoch) in ifoEpochList: if (ifo.upper().strip(),epoch.upper().strip()) \ not in self.__backgroundDict__["ifoepoch"]: raise Exception,\ "Invalid ifo and epoch information in \
6dd8d677924d136a0f3b5fb060f859178f7cc82c /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/6dd8d677924d136a0f3b5fb060f859178f7cc82c/fu_utils.py
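The block added here retries the failed dump in the user's home directory before giving up entirely. home_dir() is the pipeline's own helper; os.path.expanduser stands in for it in this sketch.

import os
import sys
import cPickle  # "pickle" on Python 3

def save_pickle_with_fallback(data, pickle_path):
    """Dump data to pickle_path, falling back to the home directory,
    then giving up quietly, as the block above does."""
    try:
        cPickle.dump(data, open(pickle_path, 'wb'))
    except (IOError, OSError):
        sys.stdout.write("Problem saving pickle of DQ information.\n")
        sys.stdout.write("Trying to place pickle in your home directory.\n")
        try:
            fallback = os.path.join(os.path.expanduser("~"),
                                    os.path.basename(pickle_path))
            cPickle.dump(data, open(fallback, 'wb'))
        except (IOError, OSError):
            sys.stdout.write("Really ignoring pickle generation now!\n")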
def insert(connection, urls, preserve_ids = False, verbose = False):
def insert_from_urls(connection, urls, preserve_ids = False, verbose = False):
def insert(connection, urls, preserve_ids = False, verbose = False): """ Iterate over a sequence of URLs and parse and insert each one into the database the dbtables.DBTable class is currently connected to. """ if not preserve_ids: # enable ID remapping dbtables.idmap_create(connection) dbtables.DBTable.append = dbtables.DBTable._remapping_append else: # disable ID remapping dbtables.DBTable.append = dbtables.DBTable._append for n, url in enumerate(urls): # load document (if enabled, row IDs are reassigned on # input) if verbose: print >>sys.stderr, "%d/%d:" % (n + 1, len(urls)), xmldoc = utils.load_url(url, verbose = verbose, gz = (url or "stdin").endswith(".gz")) # update references to row IDs if not preserve_ids: update_ids(xmldoc, connection, verbose) # delete cursors xmldoc.unlink() connection.commit() dbtables.build_indexes(connection, verbose)
50dc3e487cb3a2bfe02787ec6f9e175a28daafe5 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/50dc3e487cb3a2bfe02787ec6f9e175a28daafe5/ligolw_sqlite.py
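The rename marks this loader as the URL-driven one, alongside the in-RAM insert_from_xmldoc in the next record. Purely as illustration, a hypothetical front end that the freed-up name insert could serve; nothing in the record says such a dispatcher was actually added.

def insert(connection, sources, preserve_ids=False, verbose=False):
    """Hypothetical dispatcher: send URL strings to insert_from_urls
    and already-parsed documents to insert_from_xmldoc."""
    urls = [s for s in sources if isinstance(s, str)]
    xmldocs = [s for s in sources if not isinstance(s, str)]
    if urls:
        insert_from_urls(connection, urls,
                         preserve_ids=preserve_ids, verbose=verbose)
    for xmldoc in xmldocs:
        insert_from_xmldoc(connection, xmldoc,
                           preserve_ids=preserve_ids, verbose=verbose)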
table_elems = xmldoc.getElementsByTagName(ligolw.Table.tagName) for tbl in table_elems:
for tbl in xmldoc.getElementsByTagName(ligolw.Table.tagName):
def insert_from_xmldoc(connection, xmldoc, preserve_ids = False, verbose = False): """ Insert the tables from an in-ram XML document into the database at the given connection. """ if not preserve_ids: # enable ID remapping dbtables.idmap_create(connection) dbtables.DBTable.append = dbtables.DBTable._remapping_append else: # disable ID remapping dbtables.DBTable.append = dbtables.DBTable._append table_elems = xmldoc.getElementsByTagName(ligolw.Table.tagName) for tbl in table_elems: dbtab = dbtables.DBTable(tbl.attributes, connection=connection) for elem in tbl.childNodes: if isinstance(elem, dbtables.table.TableStream): dbtab._end_of_columns() dbtab.appendChild(type(elem)(elem.attributes)) for row in tbl: dbtab.append(row) dbtab._end_of_rows() if not preserve_ids: update_ids(dbtables.get_xml(connection), connection, verbose) dbtables.build_indexes(connection, verbose)
50dc3e487cb3a2bfe02787ec6f9e175a28daafe5 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/50dc3e487cb3a2bfe02787ec6f9e175a28daafe5/ligolw_sqlite.py
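The whole change in this record is dropping the temporary table_elems binding and iterating the matched node list directly. The same pattern with the stdlib DOM, so it runs without glue.ligolw installed:

from xml.dom import minidom

doc = minidom.parseString(
    "<LIGO_LW><Table Name='process'/><Table Name='sngl_burst'/></LIGO_LW>")

# Iterate the match directly; no throwaway name in scope.
for tbl in doc.getElementsByTagName("Table"):
    print(tbl.getAttribute("Name"))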
for n, (coinc_event_id, time_slide_id) in enumerate(database.connection.cursor().execute("SELECT coinc_event_id, time_slide_id FROM coinc_event WHERE coinc_def_id == ?", (database.bb_definer_id,))): if verbose and not n % 200: print >>sys.stderr, "\t%.1f%%\r" % (100.0 * n / n_coincs), events = map(database.sngl_burst_table.row_from_cols, cursor.execute("""SELECT * FROM coinc_burst_map WHERE coinc_event_id == ?""", (coinc_event_id,))) cursor.execute("""
def get_likelihood_ratio(coinc_event_id, time_slide_id, row_from_cols = database.sngl_burst_table.row_from_cols, cursor = cursor, time_slides = time_slides, params_func = params_func, params_func_extra_args = params_func_extra_args): events = map(row_from_cols, cursor.execute("""SELECT * FROM coinc_burst_map WHERE coinc_event_id == ?""", (coinc_event_id,))) return likelihood_ratio(params_func, events, time_slides[ilwd.get_ilwdchar(time_slide_id)], *params_func_extra_args) database.connection.create_function("likelihood_ratio", 2, get_likelihood_ratio) database.connection.cursor().execute("""
def ligolw_burca2(database, likelihood_ratio, params_func, verbose = False, params_func_extra_args = ()): """ Assigns likelihood ratio values to excess power coincidences. database is pylal.SnglBurstUtils.CoincDatabase instance, and likelihood_ratio is a LikelihoodRatio class instance. """ # # Find document parts. # time_slides = database.time_slide_table.as_dict() # # Iterate over all coincs, assigning likelihood ratios to # burst+burst coincs if the document contains them. # if verbose: print >>sys.stderr, "computing likelihood ratios ..." n_coincs = len(database.coinc_table) cursor = database.connection.cursor() cursor.execute("""
9f53ea4d76a94a23ce55054885f31fd96d4b31a4 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/9f53ea4d76a94a23ce55054885f31fd96d4b31a4/ligolw_burca2.py
likelihood = ?
likelihood = likelihood_ratio(coinc_event_id, time_slide_id)
def ligolw_burca2(database, likelihood_ratio, params_func, verbose = False, params_func_extra_args = ()): """ Assigns likelihood ratio values to excess power coincidences. database is pylal.SnglBurstUtils.CoincDatabase instance, and likelihood_ratio is a LikelihoodRatio class instance. """ # # Find document parts. # time_slides = database.time_slide_table.as_dict() # # Iterate over all coincs, assigning likelihood ratios to # burst+burst coincs if the document contains them. # if verbose: print >>sys.stderr, "computing likelihood ratios ..." n_coincs = len(database.coinc_table) cursor = database.connection.cursor() cursor.execute("""
9f53ea4d76a94a23ce55054885f31fd96d4b31a4 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/9f53ea4d76a94a23ce55054885f31fd96d4b31a4/ligolw_burca2.py
coinc_event_id == ? """, (likelihood_ratio(params_func, events, time_slides[time_slide_id], *params_func_extra_args), coinc_event_id))
coinc_def_id == ? """, (database.bb_definer_id,))
def ligolw_burca2(database, likelihood_ratio, params_func, verbose = False, params_func_extra_args = ()): """ Assigns likelihood ratio values to excess power coincidences. database is pylal.SnglBurstUtils.CoincDatabase instance, and likelihood_ratio is a LikelihoodRatio class instance. """ # # Find document parts. # time_slides = database.time_slide_table.as_dict() # # Iterate over all coincs, assigning likelihood ratios to # burst+burst coincs if the document contains them. # if verbose: print >>sys.stderr, "computing likelihood ratios ..." n_coincs = len(database.coinc_table) cursor = database.connection.cursor() cursor.execute("""
9f53ea4d76a94a23ce55054885f31fd96d4b31a4 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/9f53ea4d76a94a23ce55054885f31fd96d4b31a4/ligolw_burca2.py
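Taken together, these three records move the per-coincidence Python loop into SQLite: the ranking callable is registered with connection.create_function, and a single UPDATE drives the per-row calls (hence the coinc_def_id binding replacing the per-event one). Note the callback must return the ratio for the UPDATE to record it. A toy, self-contained version of the pattern using only the stdlib:

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE coinc_event "
             "(coinc_event_id INTEGER, snr REAL, likelihood REAL)")
conn.executemany("INSERT INTO coinc_event VALUES (?, ?, NULL)",
                 [(1, 8.0), (2, 12.5)])

def likelihood_ratio(snr):
    # Toy stand-in for the real ranking statistic.
    return snr * snr / 2.0

# Register the Python callable as a one-argument SQL function and let
# SQLite call it once per matching row.
conn.create_function("likelihood_ratio", 1, likelihood_ratio)
conn.execute("UPDATE coinc_event SET likelihood = likelihood_ratio(snr)")
for row in conn.execute("SELECT coinc_event_id, likelihood FROM coinc_event"):
    print(row)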
"ISI-OMC_DISPPF_H1_IN1_DAQ",\ "PEM-LVEA2_V2",\ "OMC-ASC_POS_X_IN1_DAQ",\ "OMC-ASC_POS_Y_IN1_DAQ",\ "OMC-QPD3_SUM_IN1_DAQ",\ "OMC-QPD1_SUM_IN1_DAQ",\ "OMC-QPD2_SUM_IN1_DAQ",\ "OMC-QPD4_SUM_IN1_DAQ"\
"XX:ISI-OMC_DISPPF_H1_IN1_DAQ",\ "XX:PEM-LVEA2_V2",\ "XX:OMC-ASC_POS_X_IN1_DAQ",\ "XX:OMC-ASC_POS_Y_IN1_DAQ",\ "XX:OMC-QPD3_SUM_IN1_DAQ",\ "XX:OMC-QPD1_SUM_IN1_DAQ",\ "XX:OMC-QPD2_SUM_IN1_DAQ",\ "XX:OMC-QPD4_SUM_IN1_DAQ"\
def __filenameToChannelList__(self,filenameList=[]): """ This method attempts to construct a set of simplified channel names based of a list of image filenames. """ #Parsed filename channel list specialChannelList=[ "ISI-OMC_DISPPF_H1_IN1_DAQ",\ "PEM-LVEA2_V2",\ "OMC-ASC_POS_X_IN1_DAQ",\ "OMC-ASC_POS_Y_IN1_DAQ",\ "OMC-QPD3_SUM_IN1_DAQ",\ "OMC-QPD1_SUM_IN1_DAQ",\ "OMC-QPD2_SUM_IN1_DAQ",\ "OMC-QPD4_SUM_IN1_DAQ"\ ] fileBasenames=[os.path.basename(x) for x in filenameList] startREG=re.compile('_[H,V,L][0,1,2][:,-,_]') stopREG=re.compile('_(?=[0-9,a-z])') channelNames=[[x,re.split(stopREG,re.split(startREG,x).pop().strip())[0].strip()]\ for x in filenameList] #Correct badly parsed names finalChannelList=list() for myURL,myName in channelNames: for specialName in specialChannelList: if myURL.__contains__(specialName): finalChannelList.append(specialName) else: finalChannelList.append(myName) return [str(x).strip() for x in finalChannelList]
fb84ac33aba836b8fcb61075cfaec34ced2846c0 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/fb84ac33aba836b8fcb61075cfaec34ced2846c0/makeCheckListWiki.py
return [str(x).strip() for x in finalChannelList]
return ["XX:"+str(x).strip() for x in finalChannelList]
def __filenameToChannelList__(self,filenameList=[]): """ This method attempts to construct a set of simplified channel names based of a list of image filenames. """ #Parsed filename channel list specialChannelList=[ "ISI-OMC_DISPPF_H1_IN1_DAQ",\ "PEM-LVEA2_V2",\ "OMC-ASC_POS_X_IN1_DAQ",\ "OMC-ASC_POS_Y_IN1_DAQ",\ "OMC-QPD3_SUM_IN1_DAQ",\ "OMC-QPD1_SUM_IN1_DAQ",\ "OMC-QPD2_SUM_IN1_DAQ",\ "OMC-QPD4_SUM_IN1_DAQ"\ ] fileBasenames=[os.path.basename(x) for x in filenameList] startREG=re.compile('_[H,V,L][0,1,2][:,-,_]') stopREG=re.compile('_(?=[0-9,a-z])') channelNames=[[x,re.split(stopREG,re.split(startREG,x).pop().strip())[0].strip()]\ for x in filenameList] #Correct badly parsed names finalChannelList=list() for myURL,myName in channelNames: for specialName in specialChannelList: if myURL.__contains__(specialName): finalChannelList.append(specialName) else: finalChannelList.append(myName) return [str(x).strip() for x in finalChannelList]
fb84ac33aba836b8fcb61075cfaec34ced2846c0 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/fb84ac33aba836b8fcb61075cfaec34ced2846c0/makeCheckListWiki.py
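Both records prefix the parsed channel names with the placeholder IFO tag "XX:", since the real instrument is not recoverable from the filename alone. A trimmed, runnable sketch of the same parse: the character classes are cleaned up (the commas in the originals matched literal commas), and the special-case list is omitted because its correction loop, as written, appends one entry per special name rather than one per file.

import os
import re

START = re.compile(r'_[HVL][012][-:_]')   # e.g. matches "_H1_"
STOP = re.compile(r'_(?=[0-9a-z])')       # underscore before digits/lowercase

def filename_to_channel(path):
    """Guess a channel name from a scan image filename and prefix it
    with the placeholder IFO tag, as the records above do."""
    tail = re.split(START, os.path.basename(path)).pop().strip()
    name = re.split(STOP, tail)[0].strip()
    return "XX:" + name

print(filename_to_channel(
    "scans_H1_OMC-QPD1_SUM_IN1_DAQ_16.00_spectrogram.png"))
# prints: XX:OMC-QPD1_SUM_IN1_DAQ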
imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict()
imageDict,indexDict,thumbDict,zValueDict = dict(),dict(),dict(),dict() imageDictAQ,indexDictAQ,thumbDictAQ,zValueDictAQ = dict(),dict(),dict(),dict() filesOmega=wikiFileFinder.get_RDS_R_L1_SEIS() filesAnalyze=wikiFileFinder.get_analyzeQscan_SEIS()
def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.exists(wikiFilename) and maxCount < 15: sys.stdout.write("File %s already exists.\n"%\ os.path.split(wikiFilename)[1]) wikiFilename=wikiFilename+".wiki" maxCount=maxCount+1 # #Create the wikipage object etc # wikiPage=wiki(wikiFilename) # # Create top two trigger params tables # cTable=wikiPage.wikiTable(2,9) cTable.data=[ ["Trigger Type", "Rank", "FAR", "SNR", "IFOS(Coinc)", "Instruments(Active)", "Coincidence Time (s)", "Total Mass (mSol)", "Chirp Mass (mSol)" ], ["%s"%(wikiCoinc.type), "%s"%(wikiCoinc.rank), "%s"%(wikiCoinc.far), "%s"%(wikiCoinc.snr), "%s"%(wikiCoinc.ifos), "%s"%(wikiCoinc.instruments), "%s"%(wikiCoinc.time), "%s"%(wikiCoinc.mass), "%s"%(wikiCoinc.mchirp) ] ] pTable=wikiPage.wikiTable(len(wikiCoinc.sngls_in_coinc())+1,7) pTable.data[0]=[ "IFO", "GPS Time(s)", "SNR", "CHISQR", "Mass 1", "Mass 2", "Chirp Mass" ] for row,cSngl in enumerate(wikiCoinc.sngls_in_coinc()): pTable.data[row+1]=[ "%s"%(cSngl.ifo), "%s"%(cSngl.time), "%s"%(cSngl.snr), "%s"%(cSngl.chisqr), "%s"%(cSngl.mass1), "%s"%(cSngl.mass2), "%s"%(cSngl.mchirp) ] #Write the tables into the Wiki object wikiPage.putText("Coincident Trigger Event Information: %s\n"\ %(stfu_pipe.gpsTimeToReadableDate(wikiCoinc.time))) wikiPage.insertTable(cTable) wikiPage.putText("Corresponding Coincident Single IFO Trigger Information\n") wikiPage.insertTable(pTable) #Generate a table of contents to appear after candidate params table wikiPage.tableOfContents(3) #Begin including each checklist item as section with subsections wikiPage.section("Follow-up Checklist") #Put each checklist item wikiPage.subsection("Checklist Summary") wikiPage.subsubsection("Does this candidate pass this checklist?") wikiPage.subsubsection("Answer") wikiPage.subsubsection("Relevant Information and Comments") wikiPage.insertHR() # #First real checklist item wikiPage.subsection("#0 False Alarm Probability") wikiPage.subsubsection("Question") wikiPage.putText("What is the false alarm rate associated with this candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") farTable=wikiPage.wikiTable(2,1) farTable.setTableStyle("background-color: yellow; text-align center;") farTable.data[0][0]="False Alarm Rate" farTable.data[1][0]="%s"%(wikiCoinc.far) wikiPage.insertTable(farTable) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#1 Data Quality Flags") wikiPage.subsubsection("Question") wikiPage.putText("Can the data quality flags coincident with this candidate be safely disregarded?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPath=os.path.split(wikiFilename)[0] dqFileList=wikiFileFinder.get_findFlags() if len(dqFileList) != 1: sys.stdout.write("Warning: DQ flags data product import problem.\n") print "Found %i files."%len(dqFileList) for mf in dqFileList: print mf for myFile in dqFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item 
wikiPage.subsection("#2 Veto Investigations") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate survive the veto investigations performed at its time?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") vetoFileList=wikiFileFinder.get_findVetos() if len(vetoFileList) != 1: sys.stdout.write("Warning: Veto flags data product import problem.\n") for myFile in vetoFileList:print myFile for myFile in vetoFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#3 IFO Status") wikiPage.subsubsection("Question") wikiPage.putText("Are the interferometers operating normally with a reasonable level of sensitivity around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") #Add link to Daily Stats if wikiCoinc.time <= endOfS5: statsLink=wikiPage.makeExternalLink("http://blue.ligo-wa.caltech.edu/scirun/S5/DailyStatistics/",\ "S5 Daily Stats Page") else: statsLink="This should be a link to S6 Daily Stats!\n" wikiPage.putText(statsLink) #Link figures of merit #Get link for all members of wikiCoinc wikiPage.putText("Figures of Merit\n") if wikiCoinc.time > endOfS5: fomLinks=dict() elems=0 for wikiSngl in wikiCoinc.sngls: if not(wikiSngl.ifo.upper().rstrip().lstrip() == 'V1'): fomLinks[wikiSngl.ifo]=stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo) elems=elems+len(fomLinks[wikiSngl.ifo]) else: for myLabel,myLink,myThumb in stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo): wikiPage.putText("%s\n"%(wikiPage.makeExternalLink(myLink,myLabel))) cols=4 rows=(elems/3)+1 fTable=wikiPage.wikiTable(rows,cols) fTable.data[0]=["IFO,Shift","FOM1","FOM2","FOM3"] currentIndex=0 for myIFOKey in fomLinks.keys(): for label,link,thumb in fomLinks[myIFOKey]: myRow=currentIndex/int(3)+1 myCol=currentIndex%int(3)+1 fTable.data[myRow][0]=label thumbURL=thumb fTable.data[myRow][myCol]="%s"%(wikiPage.linkedRemoteImage(thumb,link)) currentIndex=currentIndex+1 wikiPage.insertTable(fTable) else: wikiPage.putText("Can not automatically fetch S5 FOM links.") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#4 Candidate Appearance") wikiPage.subsubsection("Question") wikiPage.putText("Do the Qscan figures show what we would expect for a gravitational-wave event?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() for sngl in wikiCoinc.sngls: frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'hoft') indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*/%s/*/%s/*index.html"%(frametype,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened.png"\ %(sngl.time,channelName)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened?thumb.png"\ %(sngl.time,channelName)) # #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] if 
len(indexDict[sngl.ifo]) < 1: wikiPage.putText("GW data channel scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >= 1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >= 1 if enoughImage and enoughIndex: wikiPage.insertQscanTable(imageDict,\ thumbDict,\ indexDict) else: sys.stdout.write("Warning: Candidate appearance plot import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#5 Seismic Plots") wikiPage.subsubsection("Question") wikiPage.putText("Is the seismic activity insignificant around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() # for sngl in wikiCoinc.sngls_in_coinc(): indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*index.html"%(sngl.ifo,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)) #Search for corresponding Omega summary.txt file zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if (len(zValueFiles) > 0): for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse only keeping SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. ...skipping...\n"%sngl.ifo) #Search for analyzeQscan files #/L1-analyseQscan_L1_932797512_687_seis_rds_L1_SEI-ETMX_X_z_scat-unspecified-gpstime.png timeString=str(float(sngl.time)).replace(".","_") zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*_%s_%s_*.txt"%(sngl.ifo,timeString)) indexDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*_%s_%s_*.html"%(sngl.ifo,timeString)) thumbDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime_thumb.png"\ %(sngl.ifo,timeString)) imageDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime.png"\ %(sngl.ifo,timeString)) #Process zValue ranking file if found for IFO zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) #Reparse keeping SEI channels tmpList=list() for chan in zValueDictAQ[sngl.ifo]: if "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Analyze Qscan Z ranking file not found for %s. 
...skipping...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Seismic scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: Seismic plots product import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#6 Other environmental causes") wikiPage.subsubsection("Question") wikiPage.putText("Were the environmental disturbances (other than seismic) insignificant at the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() #Select only PEM channels for sngl in wikiCoinc.sngls_in_coinc(): imageDict[sngl.ifo]=list() indexDict[sngl.ifo]=list() thumbDict[sngl.ifo]=list() for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) #Search for corresponding Omega summary.txt file zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse only keeping PEM and not SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if "PEM" in chan[0] and not "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. 
...skipping...\n"%sngl.ifo) #Select associated analyzeQscans imageDictAQ[sngl.ifo]=list() indexDictAQ[sngl.ifo]=list() thumbDictAQ[sngl.ifo]=list() timeString=str(float(sngl.time)).replace(".","_") for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)) zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) for chan in zValueDictAQ[sngl.ifo]: if "PEM" in chan[0] and not "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Analyze Qscan Z ranking file not found for %s. ...skipping...\n"%sngl.ifo) #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime_thumb.png #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime.png for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(imageDict[sngl.ifo]) < 1: wikiPage.putText("PEM scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: PEM plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#7 Auxiliary degree of freedom") wikiPage.subsubsection("Question") wikiPage.putText("Were the auxiliary channel transients coincident with the candidate insignificant?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() #Select only AUX channels for sngl in wikiCoinc.sngls: imageDict[sngl.ifo]=list() indexDict[sngl.ifo]=list() thumbDict[sngl.ifo]=list() for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in 
fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse NOT keeping PEM or SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if not "PEM" in chan[0] or not "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. ...skipping...\n"%sngl.ifo) #Select associated analyzeQscans imageDictAQ[sngl.ifo]=list() indexDictAQ[sngl.ifo]=list() thumbDictAQ[sngl.ifo]=list() timeString=str(float(sngl.time)).replace(".","_") #H1-analyseQscan_H1_931176926_116_rds-unspecified-gpstime.html for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)) #Process zValue ranking file if found for IFO zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) #Reparse NOT keeping PEM or SEI channels tmpList=list() for chan in zValueDictAQ[sngl.ifo]: if not "PEM" in chan[0] or not "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Z ranking file not found for %s. ...skipping...\n"%sngl.ifo) #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime_thumb.png #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime.png for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Other scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: AUX plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#8 Electronic Log Book") wikiPage.subsubsection("Question") 
wikiPage.putText("Were the instruments behaving normally according to the comments posted by the sci-mons or the operators in the e-log?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiLinkLHOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"H1"), "Hanford eLog") wikiLinkLLOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"L1"), "Livingston eLog") wikiPage.putText("%s\n\n%s\n\n"%(wikiLinkLHOlog,wikiLinkLLOlog)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#9 Glitch Report") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the weekly glitch report?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") if int(wikiCoinc.time) >= endOfS5: wikiLinkGlitch=wikiPage.makeExternalLink( "https://www.lsc-group.phys.uwm.edu/twiki/bin/view/DetChar/GlitchStudies", "Glitch Reports for S6" ) else: wikiLinkGlitch=wikiPage.makeExternalLink( "http://www.lsc-group.phys.uwm.edu/glitch/investigations/s5index.html#shift", "Glitch Reports for S5" ) wikiPage.putText("%s\n"%(wikiLinkGlitch)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#10 Snr versus time") wikiPage.subsubsection("Question") wikiPage.putText("Is this trigger significant in a SNR versus time plot of all triggers in its analysis chunk?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#11 Parameters of the candidate") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate have a high likelihood of being a gravitational-wave according to its parameters?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Effective Distance Ratio Test\n") effDList=wikiFileFinder.get_effDRatio() if len(effDList) != 1: sys.stdout.write("Warning: Effective Distance Test import problem.\n") for myFile in effDList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#12 Snr and Chisq") wikiPage.subsubsection("Question") wikiPage.putText("Are the SNR and CHISQ time series consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") # #Put plots SNR and Chi sqr # indexList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*.html") thumbList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_snr-*thumb.png") thumbList.extend(fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_chisq-*thumb.png")) thumbList.sort() indexList=[file2URL.convert(x) for x in indexList] thumbList=[file2URL.convert(x) for x in thumbList] #Two thumb types possible "_thumb.png" or ".thumb.png" imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] 
ifoCount=len(wikiCoinc.sngls) rowLabel={"SNR":1,"CHISQ":2} rowCount=len(rowLabel) colCount=ifoCount if len(indexList) >= 1: snrTable=wikiPage.wikiTable(rowCount+1,colCount+1) for i,sngl in enumerate(wikiCoinc.sngls): myIndex="" for indexFile in indexList: if indexFile.__contains__("_pipe_%s_FOLLOWUP_"%sngl.ifo): myIndex=indexFile if myIndex=="": snrTable.data[0][i+1]=" %s "%sngl.ifo else: snrTable.data[0][i+1]=wikiPage.makeExternalLink(myIndex,sngl.ifo) for col,sngl in enumerate(wikiCoinc.sngls): for row,label in enumerate(rowLabel.keys()): snrTable.data[row+1][0]=label for k,image in enumerate(imageList): if (image.__contains__("_%s-"%label.lower()) \ and image.__contains__("pipe_%s_FOLLOWUP"%sngl.ifo)): snrTable.data[row+1][col+1]=" %s "%(wikiPage.linkedRemoteImage(thumbList[k],thumbList[k])) wikiPage.insertTable(snrTable) else: sys.stdout.write("Warning: SNR and CHISQ plots not found.\n") wikiPage.putText("SNR and CHISQ plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#13 Template bank veto") wikiPage.subsubsection("Question") wikiPage.putText("Is the bank veto value consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#14 Coherent studies") wikiPage.subsubsection("Question") wikiPage.putText("Are the triggers found in multiple interferometers coherent with each other?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") indexList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),"*.html") if len(indexList) >= 1: myIndex=file2URL.convert(indexList[0]) wikiPage.putText(wikiPage.makeExternalLink(myIndex,\ "%s Coherence Study Results"%(wikiCoinc.ifos))) thumbList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),\ "PLOT_CHIA_%s_snr-squared*thumb.png"%(wikiCoinc.time)) imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] rowCount=len(imageList) colCount=1 cohSnrTimeTable=wikiPage.wikiTable(rowCount+1,colCount) cohSnrTimeTable.data[0][0]="%s Coherent SNR Squared Times Series"%(wikiCoinc.ifos) for i,image in enumerate(imageList): cohSnrTimeTable.data[i+1][0]=wikiPage.linkedRemoteImage(image,thumbList[i]) wikiPage.insertTable(cohSnrTimeTable) else: sys.stdout.write("Warning: Coherent plotting jobs not found.\n") wikiPage.putText("Coherent Studies plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#15 Segmentation Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in segmentation?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#16 Calibration Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in calibration that are 
consistent with systematic uncertainties?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #
fb84ac33aba836b8fcb61075cfaec34ced2846c0 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/fb84ac33aba836b8fcb61075cfaec34ced2846c0/makeCheckListWiki.py
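Besides collapsing the eight dict() initializations into two tuple assignments, this record hoists the two file-finder calls out of the per-detector loop so each directory scan runs once. A schematic of that shape; finder and sngls are hypothetical stand-ins for the wikiFileFinder and coincidence objects above.

import fnmatch

def collect_seismic_products(finder, sngls):
    """Index Omega-scan products per detector with one disk scan."""
    imageDict, indexDict, thumbDict, zValueDict = dict(), dict(), dict(), dict()
    filesOmega = finder.get_RDS_R_L1_SEIS()        # scanned once,
    filesAnalyze = finder.get_analyzeQscan_SEIS()  # not once per IFO
    for sngl in sngls:
        indexDict[sngl.ifo] = fnmatch.filter(
            filesOmega, "*/%s_RDS_*/%s/*index.html" % (sngl.ifo, sngl.time))
        imageDict[sngl.ifo] = fnmatch.filter(
            filesOmega,
            "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened.png"
            % (sngl.ifo, sngl.time))
        thumbDict[sngl.ifo] = fnmatch.filter(
            filesAnalyze,
            "*%s-*_SEI*_z_scat-unspecified-gpstime_thumb.png" % sngl.ifo)
        zValueDict[sngl.ifo] = []  # filled from summary.txt in the real code
    return imageDict, indexDict, thumbDict, zValueDict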
indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\
indexDict[sngl.ifo],imageDict[sngl.ifo],thumbDict[sngl.ifo],zValueDict[sngl.ifo]=list(),list(),list(),list() indexDictAQ[sngl.ifo],imageDictAQ[sngl.ifo],thumbDictAQ[sngl.ifo],zValueDictAQ[sngl.ifo]=list(),list(),list(),list() indexDict[sngl.ifo]=fnmatch.filter(filesOmega,\
def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.exists(wikiFilename) and maxCount < 15: sys.stdout.write("File %s already exists.\n"%\ os.path.split(wikiFilename)[1]) wikiFilename=wikiFilename+".wiki" maxCount=maxCount+1 # #Create the wikipage object etc # wikiPage=wiki(wikiFilename) # # Create top two trigger params tables # cTable=wikiPage.wikiTable(2,9) cTable.data=[ ["Trigger Type", "Rank", "FAR", "SNR", "IFOS(Coinc)", "Instruments(Active)", "Coincidence Time (s)", "Total Mass (mSol)", "Chirp Mass (mSol)" ], ["%s"%(wikiCoinc.type), "%s"%(wikiCoinc.rank), "%s"%(wikiCoinc.far), "%s"%(wikiCoinc.snr), "%s"%(wikiCoinc.ifos), "%s"%(wikiCoinc.instruments), "%s"%(wikiCoinc.time), "%s"%(wikiCoinc.mass), "%s"%(wikiCoinc.mchirp) ] ] pTable=wikiPage.wikiTable(len(wikiCoinc.sngls_in_coinc())+1,7) pTable.data[0]=[ "IFO", "GPS Time(s)", "SNR", "CHISQR", "Mass 1", "Mass 2", "Chirp Mass" ] for row,cSngl in enumerate(wikiCoinc.sngls_in_coinc()): pTable.data[row+1]=[ "%s"%(cSngl.ifo), "%s"%(cSngl.time), "%s"%(cSngl.snr), "%s"%(cSngl.chisqr), "%s"%(cSngl.mass1), "%s"%(cSngl.mass2), "%s"%(cSngl.mchirp) ] #Write the tables into the Wiki object wikiPage.putText("Coincident Trigger Event Information: %s\n"\ %(stfu_pipe.gpsTimeToReadableDate(wikiCoinc.time))) wikiPage.insertTable(cTable) wikiPage.putText("Corresponding Coincident Single IFO Trigger Information\n") wikiPage.insertTable(pTable) #Generate a table of contents to appear after candidate params table wikiPage.tableOfContents(3) #Begin including each checklist item as section with subsections wikiPage.section("Follow-up Checklist") #Put each checklist item wikiPage.subsection("Checklist Summary") wikiPage.subsubsection("Does this candidate pass this checklist?") wikiPage.subsubsection("Answer") wikiPage.subsubsection("Relevant Information and Comments") wikiPage.insertHR() # #First real checklist item wikiPage.subsection("#0 False Alarm Probability") wikiPage.subsubsection("Question") wikiPage.putText("What is the false alarm rate associated with this candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") farTable=wikiPage.wikiTable(2,1) farTable.setTableStyle("background-color: yellow; text-align center;") farTable.data[0][0]="False Alarm Rate" farTable.data[1][0]="%s"%(wikiCoinc.far) wikiPage.insertTable(farTable) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#1 Data Quality Flags") wikiPage.subsubsection("Question") wikiPage.putText("Can the data quality flags coincident with this candidate be safely disregarded?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPath=os.path.split(wikiFilename)[0] dqFileList=wikiFileFinder.get_findFlags() if len(dqFileList) != 1: sys.stdout.write("Warning: DQ flags data product import problem.\n") print "Found %i files."%len(dqFileList) for mf in dqFileList: print mf for myFile in dqFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item 
wikiPage.subsection("#2 Veto Investigations") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate survive the veto investigations performed at its time?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") vetoFileList=wikiFileFinder.get_findVetos() if len(vetoFileList) != 1: sys.stdout.write("Warning: Veto flags data product import problem.\n") for myFile in vetoFileList:print myFile for myFile in vetoFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#3 IFO Status") wikiPage.subsubsection("Question") wikiPage.putText("Are the interferometers operating normally with a reasonable level of sensitivity around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") #Add link to Daily Stats if wikiCoinc.time <= endOfS5: statsLink=wikiPage.makeExternalLink("http://blue.ligo-wa.caltech.edu/scirun/S5/DailyStatistics/",\ "S5 Daily Stats Page") else: statsLink="This should be a link to S6 Daily Stats!\n" wikiPage.putText(statsLink) #Link figures of merit #Get link for all members of wikiCoinc wikiPage.putText("Figures of Merit\n") if wikiCoinc.time > endOfS5: fomLinks=dict() elems=0 for wikiSngl in wikiCoinc.sngls: if not(wikiSngl.ifo.upper().rstrip().lstrip() == 'V1'): fomLinks[wikiSngl.ifo]=stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo) elems=elems+len(fomLinks[wikiSngl.ifo]) else: for myLabel,myLink,myThumb in stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo): wikiPage.putText("%s\n"%(wikiPage.makeExternalLink(myLink,myLabel))) cols=4 rows=(elems/3)+1 fTable=wikiPage.wikiTable(rows,cols) fTable.data[0]=["IFO,Shift","FOM1","FOM2","FOM3"] currentIndex=0 for myIFOKey in fomLinks.keys(): for label,link,thumb in fomLinks[myIFOKey]: myRow=currentIndex/int(3)+1 myCol=currentIndex%int(3)+1 fTable.data[myRow][0]=label thumbURL=thumb fTable.data[myRow][myCol]="%s"%(wikiPage.linkedRemoteImage(thumb,link)) currentIndex=currentIndex+1 wikiPage.insertTable(fTable) else: wikiPage.putText("Can not automatically fetch S5 FOM links.") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#4 Candidate Appearance") wikiPage.subsubsection("Question") wikiPage.putText("Do the Qscan figures show what we would expect for a gravitational-wave event?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() for sngl in wikiCoinc.sngls: frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'hoft') indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*/%s/*/%s/*index.html"%(frametype,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened.png"\ %(sngl.time,channelName)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened?thumb.png"\ %(sngl.time,channelName)) # #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] if 
fb84ac33aba836b8fcb61075cfaec34ced2846c0 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/fb84ac33aba836b8fcb61075cfaec34ced2846c0/makeCheckListWiki.py
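The prepareChecklist context below filters Omega summary and analyzeQscan Z-rank rows by channel class (SEI, PEM, everything else) by rebuilding a tmpList in a hand-written loop before each table insert. Not part of the source: a minimal sketch of that reparse step as a comprehension, which makes the list reset implicit so stale entries cannot carry over from the previous checklist section. The helper name keep_channels and its keyword arguments are hypothetical; rows are assumed to be the (channel, zvalue, ...) tuples returned by __readSummary__/__readZranks__.

# Sketch only, not from the patch: filter channel rows by required and
# excluded name fragments in one expression.
def keep_channels(rows, require=(), exclude=()):
    """Keep rows whose channel name (rows[i][0]) contains every string
    in `require` and none of the strings in `exclude`."""
    return [row for row in rows
            if all(tag in row[0] for tag in require)
            and not any(tag in row[0] for tag in exclude)]

# Usage corresponding to the loops in the context below (hypothetical):
#   zValueDict[sngl.ifo] = keep_channels(zValueDict[sngl.ifo], require=("SEI",))
#   zValueDict[sngl.ifo] = keep_channels(zValueDict[sngl.ifo], require=("PEM",), exclude=("SEI",))
#   zValueDict[sngl.ifo] = keep_channels(zValueDict[sngl.ifo], exclude=("PEM", "SEI"))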
imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\
imageDict[sngl.ifo]=fnmatch.filter(filesOmega,\
def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.exists(wikiFilename) and maxCount < 15: sys.stdout.write("File %s already exists.\n"%\ os.path.split(wikiFilename)[1]) wikiFilename=wikiFilename+".wiki" maxCount=maxCount+1 # #Create the wikipage object etc # wikiPage=wiki(wikiFilename) # # Create top two trigger params tables # cTable=wikiPage.wikiTable(2,9) cTable.data=[ ["Trigger Type", "Rank", "FAR", "SNR", "IFOS(Coinc)", "Instruments(Active)", "Coincidence Time (s)", "Total Mass (mSol)", "Chirp Mass (mSol)" ], ["%s"%(wikiCoinc.type), "%s"%(wikiCoinc.rank), "%s"%(wikiCoinc.far), "%s"%(wikiCoinc.snr), "%s"%(wikiCoinc.ifos), "%s"%(wikiCoinc.instruments), "%s"%(wikiCoinc.time), "%s"%(wikiCoinc.mass), "%s"%(wikiCoinc.mchirp) ] ] pTable=wikiPage.wikiTable(len(wikiCoinc.sngls_in_coinc())+1,7) pTable.data[0]=[ "IFO", "GPS Time(s)", "SNR", "CHISQR", "Mass 1", "Mass 2", "Chirp Mass" ] for row,cSngl in enumerate(wikiCoinc.sngls_in_coinc()): pTable.data[row+1]=[ "%s"%(cSngl.ifo), "%s"%(cSngl.time), "%s"%(cSngl.snr), "%s"%(cSngl.chisqr), "%s"%(cSngl.mass1), "%s"%(cSngl.mass2), "%s"%(cSngl.mchirp) ] #Write the tables into the Wiki object wikiPage.putText("Coincident Trigger Event Information: %s\n"\ %(stfu_pipe.gpsTimeToReadableDate(wikiCoinc.time))) wikiPage.insertTable(cTable) wikiPage.putText("Corresponding Coincident Single IFO Trigger Information\n") wikiPage.insertTable(pTable) #Generate a table of contents to appear after candidate params table wikiPage.tableOfContents(3) #Begin including each checklist item as section with subsections wikiPage.section("Follow-up Checklist") #Put each checklist item wikiPage.subsection("Checklist Summary") wikiPage.subsubsection("Does this candidate pass this checklist?") wikiPage.subsubsection("Answer") wikiPage.subsubsection("Relevant Information and Comments") wikiPage.insertHR() # #First real checklist item wikiPage.subsection("#0 False Alarm Probability") wikiPage.subsubsection("Question") wikiPage.putText("What is the false alarm rate associated with this candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") farTable=wikiPage.wikiTable(2,1) farTable.setTableStyle("background-color: yellow; text-align center;") farTable.data[0][0]="False Alarm Rate" farTable.data[1][0]="%s"%(wikiCoinc.far) wikiPage.insertTable(farTable) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#1 Data Quality Flags") wikiPage.subsubsection("Question") wikiPage.putText("Can the data quality flags coincident with this candidate be safely disregarded?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPath=os.path.split(wikiFilename)[0] dqFileList=wikiFileFinder.get_findFlags() if len(dqFileList) != 1: sys.stdout.write("Warning: DQ flags data product import problem.\n") print "Found %i files."%len(dqFileList) for mf in dqFileList: print mf for myFile in dqFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item 
wikiPage.subsection("#2 Veto Investigations") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate survive the veto investigations performed at its time?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") vetoFileList=wikiFileFinder.get_findVetos() if len(vetoFileList) != 1: sys.stdout.write("Warning: Veto flags data product import problem.\n") for myFile in vetoFileList:print myFile for myFile in vetoFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#3 IFO Status") wikiPage.subsubsection("Question") wikiPage.putText("Are the interferometers operating normally with a reasonable level of sensitivity around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") #Add link to Daily Stats if wikiCoinc.time <= endOfS5: statsLink=wikiPage.makeExternalLink("http://blue.ligo-wa.caltech.edu/scirun/S5/DailyStatistics/",\ "S5 Daily Stats Page") else: statsLink="This should be a link to S6 Daily Stats!\n" wikiPage.putText(statsLink) #Link figures of merit #Get link for all members of wikiCoinc wikiPage.putText("Figures of Merit\n") if wikiCoinc.time > endOfS5: fomLinks=dict() elems=0 for wikiSngl in wikiCoinc.sngls: if not(wikiSngl.ifo.upper().rstrip().lstrip() == 'V1'): fomLinks[wikiSngl.ifo]=stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo) elems=elems+len(fomLinks[wikiSngl.ifo]) else: for myLabel,myLink,myThumb in stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo): wikiPage.putText("%s\n"%(wikiPage.makeExternalLink(myLink,myLabel))) cols=4 rows=(elems/3)+1 fTable=wikiPage.wikiTable(rows,cols) fTable.data[0]=["IFO,Shift","FOM1","FOM2","FOM3"] currentIndex=0 for myIFOKey in fomLinks.keys(): for label,link,thumb in fomLinks[myIFOKey]: myRow=currentIndex/int(3)+1 myCol=currentIndex%int(3)+1 fTable.data[myRow][0]=label thumbURL=thumb fTable.data[myRow][myCol]="%s"%(wikiPage.linkedRemoteImage(thumb,link)) currentIndex=currentIndex+1 wikiPage.insertTable(fTable) else: wikiPage.putText("Can not automatically fetch S5 FOM links.") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#4 Candidate Appearance") wikiPage.subsubsection("Question") wikiPage.putText("Do the Qscan figures show what we would expect for a gravitational-wave event?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() for sngl in wikiCoinc.sngls: frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'hoft') indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*/%s/*/%s/*index.html"%(frametype,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened.png"\ %(sngl.time,channelName)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened?thumb.png"\ %(sngl.time,channelName)) # #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] if 
len(indexDict[sngl.ifo]) < 1: wikiPage.putText("GW data channel scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >= 1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >= 1 if enoughImage and enoughIndex: wikiPage.insertQscanTable(imageDict,\ thumbDict,\ indexDict) else: sys.stdout.write("Warning: Candidate appearance plot import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#5 Seismic Plots") wikiPage.subsubsection("Question") wikiPage.putText("Is the seismic activity insignificant around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() # for sngl in wikiCoinc.sngls_in_coinc(): indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*index.html"%(sngl.ifo,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)) #Search for corresponding Omega summary.txt file zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if (len(zValueFiles) > 0): for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse only keeping SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not found for %s. ...skipping...\n"%sngl.ifo) #Search for analyzeQscan files #/L1-analyseQscan_L1_932797512_687_seis_rds_L1_SEI-ETMX_X_z_scat-unspecified-gpstime.png timeString=str(float(sngl.time)).replace(".","_") zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*_%s_%s_*.txt"%(sngl.ifo,timeString)) indexDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*_%s_%s_*.html"%(sngl.ifo,timeString)) thumbDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime_thumb.png"\ %(sngl.ifo,timeString)) imageDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime.png"\ %(sngl.ifo,timeString)) #Process zValue ranking file if found for IFO zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) #Reparse keeping SEI channels tmpList=list() for chan in zValueDictAQ[sngl.ifo]: if "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Analyze Qscan Z ranking file not found for %s. 
...skipping...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Seismic scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: Seismic plots product import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#6 Other environmental causes") wikiPage.subsubsection("Question") wikiPage.putText("Were the environmental disturbances (other than seismic) insignificant at the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() #Select only PEM channels for sngl in wikiCoinc.sngls_in_coinc(): imageDict[sngl.ifo]=list() indexDict[sngl.ifo]=list() thumbDict[sngl.ifo]=list() for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) #Search for corresponding Omega summary.txt file zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse only keeping PEM and not SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if "PEM" in chan[0] and not "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not found for %s. 
...skipping...\n"%sngl.ifo) #Select associated analyzeQscans imageDictAQ[sngl.ifo]=list() indexDictAQ[sngl.ifo]=list() thumbDictAQ[sngl.ifo]=list() timeString=str(float(sngl.time)).replace(".","_") for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)) zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) #Reparse only keeping PEM and not SEI channels tmpList=list() for chan in zValueDictAQ[sngl.ifo]: if "PEM" in chan[0] and not "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Analyze Qscan Z ranking file not found for %s. ...skipping...\n"%sngl.ifo) #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime_thumb.png #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime.png for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(imageDict[sngl.ifo]) < 1: wikiPage.putText("PEM scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: PEM plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#7 Auxiliary degree of freedom") wikiPage.subsubsection("Question") wikiPage.putText("Were the auxiliary channel transients coincident with the candidate insignificant?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() #Select only AUX channels for sngl in wikiCoinc.sngls: imageDict[sngl.ifo]=list() indexDict[sngl.ifo]=list() thumbDict[sngl.ifo]=list() for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in 
fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse NOT keeping PEM or SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if not "PEM" in chan[0] and not "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not found for %s. ...skipping...\n"%sngl.ifo) #Select associated analyzeQscans imageDictAQ[sngl.ifo]=list() indexDictAQ[sngl.ifo]=list() thumbDictAQ[sngl.ifo]=list() timeString=str(float(sngl.time)).replace(".","_") #H1-analyseQscan_H1_931176926_116_rds-unspecified-gpstime.html for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)) #Process zValue ranking file if found for IFO zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) #Reparse NOT keeping PEM or SEI channels tmpList=list() for chan in zValueDictAQ[sngl.ifo]: if not "PEM" in chan[0] and not "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Z ranking file not found for %s. ...skipping...\n"%sngl.ifo) #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime_thumb.png #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime.png for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Other scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: AUX plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#8 Electronic Log Book") wikiPage.subsubsection("Question") 
wikiPage.putText("Were the instruments behaving normally according to the comments posted by the sci-mons or the operators in the e-log?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiLinkLHOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"H1"), "Hanford eLog") wikiLinkLLOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"L1"), "Livingston eLog") wikiPage.putText("%s\n\n%s\n\n"%(wikiLinkLHOlog,wikiLinkLLOlog)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#9 Glitch Report") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the weekly glitch report?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") if int(wikiCoinc.time) >= endOfS5: wikiLinkGlitch=wikiPage.makeExternalLink( "https://www.lsc-group.phys.uwm.edu/twiki/bin/view/DetChar/GlitchStudies", "Glitch Reports for S6" ) else: wikiLinkGlitch=wikiPage.makeExternalLink( "http://www.lsc-group.phys.uwm.edu/glitch/investigations/s5index.html#shift", "Glitch Reports for S5" ) wikiPage.putText("%s\n"%(wikiLinkGlitch)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#10 Snr versus time") wikiPage.subsubsection("Question") wikiPage.putText("Is this trigger significant in a SNR versus time plot of all triggers in its analysis chunk?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#11 Parameters of the candidate") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate have a high likelihood of being a gravitational-wave according to its parameters?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Effective Distance Ratio Test\n") effDList=wikiFileFinder.get_effDRatio() if len(effDList) != 1: sys.stdout.write("Warning: Effective Distance Test import problem.\n") for myFile in effDList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#12 Snr and Chisq") wikiPage.subsubsection("Question") wikiPage.putText("Are the SNR and CHISQ time series consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") # #Put plots SNR and Chi sqr # indexList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*.html") thumbList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_snr-*thumb.png") thumbList.extend(fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_chisq-*thumb.png")) thumbList.sort() indexList=[file2URL.convert(x) for x in indexList] thumbList=[file2URL.convert(x) for x in thumbList] #Two thumb types possible "_thumb.png" or ".thumb.png" imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] 
ifoCount=len(wikiCoinc.sngls) rowLabel={"SNR":1,"CHISQ":2} rowCount=len(rowLabel) colCount=ifoCount if len(indexList) >= 1: snrTable=wikiPage.wikiTable(rowCount+1,colCount+1) for i,sngl in enumerate(wikiCoinc.sngls): myIndex="" for indexFile in indexList: if indexFile.__contains__("_pipe_%s_FOLLOWUP_"%sngl.ifo): myIndex=indexFile if myIndex=="": snrTable.data[0][i+1]=" %s "%sngl.ifo else: snrTable.data[0][i+1]=wikiPage.makeExternalLink(myIndex,sngl.ifo) for col,sngl in enumerate(wikiCoinc.sngls): for row,label in enumerate(rowLabel.keys()): snrTable.data[row+1][0]=label for k,image in enumerate(imageList): if (image.__contains__("_%s-"%label.lower()) \ and image.__contains__("pipe_%s_FOLLOWUP"%sngl.ifo)): snrTable.data[row+1][col+1]=" %s "%(wikiPage.linkedRemoteImage(thumbList[k],thumbList[k])) wikiPage.insertTable(snrTable) else: sys.stdout.write("Warning: SNR and CHISQ plots not found.\n") wikiPage.putText("SNR and CHISQ plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#13 Template bank veto") wikiPage.subsubsection("Question") wikiPage.putText("Is the bank veto value consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#14 Coherent studies") wikiPage.subsubsection("Question") wikiPage.putText("Are the triggers found in multiple interferometers coherent with each other?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") indexList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),"*.html") if len(indexList) >= 1: myIndex=file2URL.convert(indexList[0]) wikiPage.putText(wikiPage.makeExternalLink(myIndex,\ "%s Coherence Study Results"%(wikiCoinc.ifos))) thumbList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),\ "PLOT_CHIA_%s_snr-squared*thumb.png"%(wikiCoinc.time)) imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] rowCount=len(imageList) colCount=1 cohSnrTimeTable=wikiPage.wikiTable(rowCount+1,colCount) cohSnrTimeTable.data[0][0]="%s Coherent SNR Squared Times Series"%(wikiCoinc.ifos) for i,image in enumerate(imageList): cohSnrTimeTable.data[i+1][0]=wikiPage.linkedRemoteImage(image,thumbList[i]) wikiPage.insertTable(cohSnrTimeTable) else: sys.stdout.write("Warning: Coherent plotting jobs not found.\n") wikiPage.putText("Coherent Studies plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#15 Segmentation Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in segmentation?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#16 Calibration Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in calibration that are 
consistent with systematic uncertainties?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #
fb84ac33aba836b8fcb61075cfaec34ced2846c0 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/fb84ac33aba836b8fcb61075cfaec34ced2846c0/makeCheckListWiki.py
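The hunks on either side of this record replace repeated calls to wikiFileFinder.get_RDS_R_L1_SEIS() inside the seismic-plot loop with a precomputed list, filesOmega. A minimal sketch of that pattern, assuming filesOmega is bound once per coincidence before the per-IFO loop; only the variable name filesOmega and the finder call come from the hunks, the rest abbreviates the surrounding prepareChecklist code.

import fnmatch

# Sketch: list the Omega/RDS products once, then run every pattern
# against the cached list instead of re-invoking the finder per filter.
filesOmega = wikiFileFinder.get_RDS_R_L1_SEIS()
for sngl in wikiCoinc.sngls_in_coinc():
    indexDict[sngl.ifo] = fnmatch.filter(filesOmega,
        "*/%s_RDS_*/%s/*index.html" % (sngl.ifo, sngl.time))
    imageDict[sngl.ifo] = fnmatch.filter(filesOmega,
        "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened.png" % (sngl.ifo, sngl.time))
    thumbDict[sngl.ifo] = fnmatch.filter(filesOmega,
        "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened?thumb.png" % (sngl.ifo, sngl.time))

The shown hunks change the image and thumbnail filters; presumably the index filter follows the same substitution, since all three patterns match against the same file listing.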
thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\
thumbDict[sngl.ifo]=fnmatch.filter(filesOmega,\
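Not in the patch: prepareChecklist repeats the same readiness test (enoughImage/enoughIndex built from list comprehensions plus .count(True)) before every insertQscanTable and insertAnalyzeQscanTable call. A hedged sketch of that test as a helper; the name have_scan_products is hypothetical.

# Sketch: the checklist only embeds a product table when at least one
# IFO contributed both an image and an index page. any() replaces the
# comprehension-plus-count idiom used inline in prepareChecklist.
def have_scan_products(imageDict, indexDict):
    enoughImage = any(len(files) > 0 for files in imageDict.values())
    enoughIndex = any(len(files) > 0 for files in indexDict.values())
    return enoughImage and enoughIndex

# Usage corresponding to checklist item #4 (hypothetical):
# if have_scan_products(imageDict, indexDict):
#     wikiPage.insertQscanTable(imageDict, thumbDict, indexDict)
# else:
#     sys.stdout.write("Warning: Candidate appearance plot import problem.\n")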
ifoCount=len(wikiCoinc.sngls) rowLabel={"SNR":1,"CHISQ":2} rowCount=len(rowLabel) colCount=ifoCount if len(indexList) >= 1: snrTable=wikiPage.wikiTable(rowCount+1,colCount+1) for i,sngl in enumerate(wikiCoinc.sngls): myIndex="" for indexFile in indexList: if indexFile.__contains__("_pipe_%s_FOLLOWUP_"%sngl.ifo): myIndex=indexFile if myIndex=="": snrTable.data[0][i+1]=" %s "%sngl.ifo else: snrTable.data[0][i+1]=wikiPage.makeExternalLink(myIndex,sngl.ifo) for col,sngl in enumerate(wikiCoinc.sngls): for row,label in enumerate(rowLabel.keys()): snrTable.data[row+1][0]=label for k,image in enumerate(imageList): if (image.__contains__("_%s-"%label.lower()) \ and image.__contains__("pipe_%s_FOLLOWUP"%sngl.ifo)): snrTable.data[row+1][col+1]=" %s "%(wikiPage.linkedRemoteImage(thumbList[k],thumbList[k])) wikiPage.insertTable(snrTable) else: sys.stdout.write("Warning: SNR and CHISQ plots not found.\n") wikiPage.putText("SNR and CHISQ plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#13 Template bank veto") wikiPage.subsubsection("Question") wikiPage.putText("Is the bank veto value consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#14 Coherent studies") wikiPage.subsubsection("Question") wikiPage.putText("Are the triggers found in multiple interferometers coherent with each other?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") indexList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),"*.html") if len(indexList) >= 1: myIndex=file2URL.convert(indexList[0]) wikiPage.putText(wikiPage.makeExternalLink(myIndex,\ "%s Coherence Study Results"%(wikiCoinc.ifos))) thumbList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),\ "PLOT_CHIA_%s_snr-squared*thumb.png"%(wikiCoinc.time)) imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] rowCount=len(imageList) colCount=1 cohSnrTimeTable=wikiPage.wikiTable(rowCount+1,colCount) cohSnrTimeTable.data[0][0]="%s Coherent SNR Squared Times Series"%(wikiCoinc.ifos) for i,image in enumerate(imageList): cohSnrTimeTable.data[i+1][0]=wikiPage.linkedRemoteImage(image,thumbList[i]) wikiPage.insertTable(cohSnrTimeTable) else: sys.stdout.write("Warning: Coherent plotting jobs not found.\n") wikiPage.putText("Coherent Studies plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#15 Segmentation Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in segmentation?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#16 Calibration Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in calibration that are 
consistent with systematic uncertainties?")
    wikiPage.subsubsection("Answer")
    wikiPage.putText("Edit Here")
    wikiPage.subsubsection("Relevant Information")
    wikiPage.putText("Plots and pipeline data go here!")
    wikiPage.subsubsection("Investigator Comments")
    wikiPage.putText("Edit Here")
    wikiPage.insertHR()
    #
    #
fb84ac33aba836b8fcb61075cfaec34ced2846c0 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/fb84ac33aba836b8fcb61075cfaec34ced2846c0/makeCheckListWiki.py
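A minimal standalone sketch of the analyzeQscan filename matching used throughout the context above, which keys every lookup off a GPS-time token built with str(float(sngl.time)).replace(".","_"). The trigger values here are hypothetical stand-ins for sngl.ifo/sngl.time; the candidate filenames are copied from comments embedded in the context itself:

import fnmatch

sngl_ifo = "H1"             # hypothetical trigger IFO
sngl_time = 931176926.116   # hypothetical trigger GPS time

# Decimal point becomes an underscore, matching names like
# H1-analyseQscan_H1_931176926_116_rds-unspecified-gpstime.html
timeString = str(float(sngl_time)).replace(".", "_")

candidates = [
    "H1-analyseQscan_H1_931176926_116_rds-unspecified-gpstime.html",
    "H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime.png",
    "L1-analyseQscan_L1_932797512_687_seis_rds-unspecified-gpstime.html",
]
# Same pattern shape the context uses to collect analyzeQscan index pages
print(fnmatch.filter(candidates, "*%s-*_%s_*html" % (sngl_ifo, timeString)))
# -> ['H1-analyseQscan_H1_931176926_116_rds-unspecified-gpstime.html']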
zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\
                           "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time))
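For reference, a self-contained sketch of what this removed filter selects. A hand-made file list stands in for wikiFileFinder.get_RDS_R_L1_SEIS(), and the directory layout is assumed from the pattern itself, so the paths are hypothetical:

import fnmatch

sngl_ifo, sngl_time = "L1", 932797512   # hypothetical trigger
files = [
    "/scans/L1_RDS_R_L1_SEIS/932797512/summary.txt",   # wanted
    "/scans/L1_RDS_R_L1_SEIS/932797512/index.html",    # wrong suffix
    "/scans/H1_RDS_R_L1_SEIS/932797512/summary.txt",   # wrong IFO
]
zValueFiles = fnmatch.filter(files,
                             "*/%s_RDS_*/%s/*summary.txt" % (sngl_ifo, sngl_time))
print(zValueFiles)   # -> ['/scans/L1_RDS_R_L1_SEIS/932797512/summary.txt']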
def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.exists(wikiFilename) and maxCount < 15: sys.stdout.write("File %s already exists.\n"%\ os.path.split(wikiFilename)[1]) wikiFilename=wikiFilename+".wiki" maxCount=maxCount+1 # #Create the wikipage object etc # wikiPage=wiki(wikiFilename) # # Create top two trigger params tables # cTable=wikiPage.wikiTable(2,9) cTable.data=[ ["Trigger Type", "Rank", "FAR", "SNR", "IFOS(Coinc)", "Instruments(Active)", "Coincidence Time (s)", "Total Mass (mSol)", "Chirp Mass (mSol)" ], ["%s"%(wikiCoinc.type), "%s"%(wikiCoinc.rank), "%s"%(wikiCoinc.far), "%s"%(wikiCoinc.snr), "%s"%(wikiCoinc.ifos), "%s"%(wikiCoinc.instruments), "%s"%(wikiCoinc.time), "%s"%(wikiCoinc.mass), "%s"%(wikiCoinc.mchirp) ] ] pTable=wikiPage.wikiTable(len(wikiCoinc.sngls_in_coinc())+1,7) pTable.data[0]=[ "IFO", "GPS Time(s)", "SNR", "CHISQR", "Mass 1", "Mass 2", "Chirp Mass" ] for row,cSngl in enumerate(wikiCoinc.sngls_in_coinc()): pTable.data[row+1]=[ "%s"%(cSngl.ifo), "%s"%(cSngl.time), "%s"%(cSngl.snr), "%s"%(cSngl.chisqr), "%s"%(cSngl.mass1), "%s"%(cSngl.mass2), "%s"%(cSngl.mchirp) ] #Write the tables into the Wiki object wikiPage.putText("Coincident Trigger Event Information: %s\n"\ %(stfu_pipe.gpsTimeToReadableDate(wikiCoinc.time))) wikiPage.insertTable(cTable) wikiPage.putText("Corresponding Coincident Single IFO Trigger Information\n") wikiPage.insertTable(pTable) #Generate a table of contents to appear after candidate params table wikiPage.tableOfContents(3) #Begin including each checklist item as section with subsections wikiPage.section("Follow-up Checklist") #Put each checklist item wikiPage.subsection("Checklist Summary") wikiPage.subsubsection("Does this candidate pass this checklist?") wikiPage.subsubsection("Answer") wikiPage.subsubsection("Relevant Information and Comments") wikiPage.insertHR() # #First real checklist item wikiPage.subsection("#0 False Alarm Probability") wikiPage.subsubsection("Question") wikiPage.putText("What is the false alarm rate associated with this candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") farTable=wikiPage.wikiTable(2,1) farTable.setTableStyle("background-color: yellow; text-align center;") farTable.data[0][0]="False Alarm Rate" farTable.data[1][0]="%s"%(wikiCoinc.far) wikiPage.insertTable(farTable) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#1 Data Quality Flags") wikiPage.subsubsection("Question") wikiPage.putText("Can the data quality flags coincident with this candidate be safely disregarded?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPath=os.path.split(wikiFilename)[0] dqFileList=wikiFileFinder.get_findFlags() if len(dqFileList) != 1: sys.stdout.write("Warning: DQ flags data product import problem.\n") print "Found %i files."%len(dqFileList) for mf in dqFileList: print mf for myFile in dqFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item 
wikiPage.subsection("#2 Veto Investigations") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate survive the veto investigations performed at its time?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") vetoFileList=wikiFileFinder.get_findVetos() if len(vetoFileList) != 1: sys.stdout.write("Warning: Veto flags data product import problem.\n") for myFile in vetoFileList:print myFile for myFile in vetoFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#3 IFO Status") wikiPage.subsubsection("Question") wikiPage.putText("Are the interferometers operating normally with a reasonable level of sensitivity around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") #Add link to Daily Stats if wikiCoinc.time <= endOfS5: statsLink=wikiPage.makeExternalLink("http://blue.ligo-wa.caltech.edu/scirun/S5/DailyStatistics/",\ "S5 Daily Stats Page") else: statsLink="This should be a link to S6 Daily Stats!\n" wikiPage.putText(statsLink) #Link figures of merit #Get link for all members of wikiCoinc wikiPage.putText("Figures of Merit\n") if wikiCoinc.time > endOfS5: fomLinks=dict() elems=0 for wikiSngl in wikiCoinc.sngls: if not(wikiSngl.ifo.upper().rstrip().lstrip() == 'V1'): fomLinks[wikiSngl.ifo]=stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo) elems=elems+len(fomLinks[wikiSngl.ifo]) else: for myLabel,myLink,myThumb in stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo): wikiPage.putText("%s\n"%(wikiPage.makeExternalLink(myLink,myLabel))) cols=4 rows=(elems/3)+1 fTable=wikiPage.wikiTable(rows,cols) fTable.data[0]=["IFO,Shift","FOM1","FOM2","FOM3"] currentIndex=0 for myIFOKey in fomLinks.keys(): for label,link,thumb in fomLinks[myIFOKey]: myRow=currentIndex/int(3)+1 myCol=currentIndex%int(3)+1 fTable.data[myRow][0]=label thumbURL=thumb fTable.data[myRow][myCol]="%s"%(wikiPage.linkedRemoteImage(thumb,link)) currentIndex=currentIndex+1 wikiPage.insertTable(fTable) else: wikiPage.putText("Can not automatically fetch S5 FOM links.") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#4 Candidate Appearance") wikiPage.subsubsection("Question") wikiPage.putText("Do the Qscan figures show what we would expect for a gravitational-wave event?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() for sngl in wikiCoinc.sngls: frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'hoft') indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*/%s/*/%s/*index.html"%(frametype,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened.png"\ %(sngl.time,channelName)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened?thumb.png"\ %(sngl.time,channelName)) # #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] if 
len(indexDict[sngl.ifo]) < 1: wikiPage.putText("GW data channel scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >= 1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >= 1 if enoughImage and enoughIndex: wikiPage.insertQscanTable(imageDict,\ thumbDict,\ indexDict) else: sys.stdout.write("Warning: Candidate appearance plot import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#5 Seismic Plots") wikiPage.subsubsection("Question") wikiPage.putText("Is the seismic activity insignificant around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() # for sngl in wikiCoinc.sngls_in_coinc(): indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*index.html"%(sngl.ifo,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)) #Search for corresponding Omega summary.txt file zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if (len(zValueFiles) > 0): for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse only keeping SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. ...skipping...\n"%sngl.ifo) #Search for analyzeQscan files #/L1-analyseQscan_L1_932797512_687_seis_rds_L1_SEI-ETMX_X_z_scat-unspecified-gpstime.png timeString=str(float(sngl.time)).replace(".","_") zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*_%s_%s_*.txt"%(sngl.ifo,timeString)) indexDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*_%s_%s_*.html"%(sngl.ifo,timeString)) thumbDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime_thumb.png"\ %(sngl.ifo,timeString)) imageDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime.png"\ %(sngl.ifo,timeString)) #Process zValue ranking file if found for IFO zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) #Reparse keeping SEI channels tmpList=list() for chan in zValueDictAQ[sngl.ifo]: if "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Analyze Qscan Z ranking file not found for %s. 
...skipping...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Seismic scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: Seismic plots product import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#6 Other environmental causes") wikiPage.subsubsection("Question") wikiPage.putText("Were the environmental disturbances (other than seismic) insignificant at the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() #Select only PEM channels for sngl in wikiCoinc.sngls_in_coinc(): imageDict[sngl.ifo]=list() indexDict[sngl.ifo]=list() thumbDict[sngl.ifo]=list() for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) #Search for corresponding Omega summary.txt file zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse only keeping PEM and not SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if "PEM" in chan[0] and not "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. 
...skipping...\n"%sngl.ifo) #Select associated analyzeQscans imageDictAQ[sngl.ifo]=list() indexDictAQ[sngl.ifo]=list() thumbDictAQ[sngl.ifo]=list() timeString=str(float(sngl.time)).replace(".","_") for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)) zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) for chan in zValueDictAQ[sngl.ifo]: if "PEM" in chan[0] and not "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Analyze Qscan Z ranking file not found for %s. ...skipping...\n"%sngl.ifo) #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime_thumb.png #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime.png for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(imageDict[sngl.ifo]) < 1: wikiPage.putText("PEM scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: PEM plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#7 Auxiliary degree of freedom") wikiPage.subsubsection("Question") wikiPage.putText("Were the auxiliary channel transients coincident with the candidate insignificant?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() #Select only AUX channels for sngl in wikiCoinc.sngls: imageDict[sngl.ifo]=list() indexDict[sngl.ifo]=list() thumbDict[sngl.ifo]=list() for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in 
fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse NOT keeping PEM or SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if not "PEM" in chan[0] or not "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. ...skipping...\n"%sngl.ifo) #Select associated analyzeQscans imageDictAQ[sngl.ifo]=list() indexDictAQ[sngl.ifo]=list() thumbDictAQ[sngl.ifo]=list() timeString=str(float(sngl.time)).replace(".","_") #H1-analyseQscan_H1_931176926_116_rds-unspecified-gpstime.html for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)) #Process zValue ranking file if found for IFO zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) #Reparse NOT keeping PEM or SEI channels tmpList=list() for chan in zValueDictAQ[sngl.ifo]: if not "PEM" in chan[0] or not "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Z ranking file not found for %s. ...skipping...\n"%sngl.ifo) #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime_thumb.png #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime.png for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Other scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: AUX plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#8 Electronic Log Book") wikiPage.subsubsection("Question") 
wikiPage.putText("Were the instruments behaving normally according to the comments posted by the sci-mons or the operators in the e-log?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiLinkLHOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"H1"), "Hanford eLog") wikiLinkLLOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"L1"), "Livingston eLog") wikiPage.putText("%s\n\n%s\n\n"%(wikiLinkLHOlog,wikiLinkLLOlog)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#9 Glitch Report") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the weekly glitch report?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") if int(wikiCoinc.time) >= endOfS5: wikiLinkGlitch=wikiPage.makeExternalLink( "https://www.lsc-group.phys.uwm.edu/twiki/bin/view/DetChar/GlitchStudies", "Glitch Reports for S6" ) else: wikiLinkGlitch=wikiPage.makeExternalLink( "http://www.lsc-group.phys.uwm.edu/glitch/investigations/s5index.html#shift", "Glitch Reports for S5" ) wikiPage.putText("%s\n"%(wikiLinkGlitch)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#10 Snr versus time") wikiPage.subsubsection("Question") wikiPage.putText("Is this trigger significant in a SNR versus time plot of all triggers in its analysis chunk?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#11 Parameters of the candidate") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate have a high likelihood of being a gravitational-wave according to its parameters?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Effective Distance Ratio Test\n") effDList=wikiFileFinder.get_effDRatio() if len(effDList) != 1: sys.stdout.write("Warning: Effective Distance Test import problem.\n") for myFile in effDList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#12 Snr and Chisq") wikiPage.subsubsection("Question") wikiPage.putText("Are the SNR and CHISQ time series consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") # #Put plots SNR and Chi sqr # indexList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*.html") thumbList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_snr-*thumb.png") thumbList.extend(fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_chisq-*thumb.png")) thumbList.sort() indexList=[file2URL.convert(x) for x in indexList] thumbList=[file2URL.convert(x) for x in thumbList] #Two thumb types possible "_thumb.png" or ".thumb.png" imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] 
ifoCount=len(wikiCoinc.sngls) rowLabel={"SNR":1,"CHISQ":2} rowCount=len(rowLabel) colCount=ifoCount if len(indexList) >= 1: snrTable=wikiPage.wikiTable(rowCount+1,colCount+1) for i,sngl in enumerate(wikiCoinc.sngls): myIndex="" for indexFile in indexList: if indexFile.__contains__("_pipe_%s_FOLLOWUP_"%sngl.ifo): myIndex=indexFile if myIndex=="": snrTable.data[0][i+1]=" %s "%sngl.ifo else: snrTable.data[0][i+1]=wikiPage.makeExternalLink(myIndex,sngl.ifo) for col,sngl in enumerate(wikiCoinc.sngls): for row,label in enumerate(rowLabel.keys()): snrTable.data[row+1][0]=label for k,image in enumerate(imageList): if (image.__contains__("_%s-"%label.lower()) \ and image.__contains__("pipe_%s_FOLLOWUP"%sngl.ifo)): snrTable.data[row+1][col+1]=" %s "%(wikiPage.linkedRemoteImage(thumbList[k],thumbList[k])) wikiPage.insertTable(snrTable) else: sys.stdout.write("Warning: SNR and CHISQ plots not found.\n") wikiPage.putText("SNR and CHISQ plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#13 Template bank veto") wikiPage.subsubsection("Question") wikiPage.putText("Is the bank veto value consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#14 Coherent studies") wikiPage.subsubsection("Question") wikiPage.putText("Are the triggers found in multiple interferometers coherent with each other?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") indexList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),"*.html") if len(indexList) >= 1: myIndex=file2URL.convert(indexList[0]) wikiPage.putText(wikiPage.makeExternalLink(myIndex,\ "%s Coherence Study Results"%(wikiCoinc.ifos))) thumbList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),\ "PLOT_CHIA_%s_snr-squared*thumb.png"%(wikiCoinc.time)) imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] rowCount=len(imageList) colCount=1 cohSnrTimeTable=wikiPage.wikiTable(rowCount+1,colCount) cohSnrTimeTable.data[0][0]="%s Coherent SNR Squared Times Series"%(wikiCoinc.ifos) for i,image in enumerate(imageList): cohSnrTimeTable.data[i+1][0]=wikiPage.linkedRemoteImage(image,thumbList[i]) wikiPage.insertTable(cohSnrTimeTable) else: sys.stdout.write("Warning: Coherent plotting jobs not found.\n") wikiPage.putText("Coherent Studies plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#15 Segmentation Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in segmentation?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#16 Calibration Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in calibration that are 
consistent with systematic uncertainties?")
    wikiPage.subsubsection("Answer")
    wikiPage.putText("Edit Here")
    wikiPage.subsubsection("Relevant Information")
    wikiPage.putText("Plots and pipeline data go here!")
    wikiPage.subsubsection("Investigator Comments")
    wikiPage.putText("Edit Here")
    wikiPage.insertHR()
    #
    #
fb84ac33aba836b8fcb61075cfaec34ced2846c0 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/fb84ac33aba836b8fcb61075cfaec34ced2846c0/makeCheckListWiki.py
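The context above classifies Omega and analyzeQscan products into seismic, PEM, and auxiliary channels with substring tests on upper-cased names. A minimal sketch of those predicates, with hypothetical channel names. Note that the AUX branches test not "PEM" in s or not "SEI" in s, which by De Morgan only rejects names containing both substrings, so PEM-only and SEI-only products also pass; the comment "NOT keeping PEM or SEI channels" likely intends the 'and' form shown here:

def is_sei(name):
    """Seismic: the context keeps anything whose name contains SEI."""
    return "SEI" in name.upper()

def is_pem(name):
    """PEM-but-not-seismic, as in checklist item #6."""
    return "PEM" in name.upper() and "SEI" not in name.upper()

def is_aux_strict(name):
    """Neither PEM nor SEI -- the test the AUX branch likely intends."""
    return "PEM" not in name.upper() and "SEI" not in name.upper()

# Hypothetical channel names exercising the three classes
for chan in ("L1:SEI-ETMX_X", "H0:PEM-MY_SEISX", "H1:PEM-BSC5_MIC", "H1:SUS-ETMY_COIL"):
    print("%s sei=%s pem=%s aux=%s"
          % (chan, is_sei(chan), is_pem(chan), is_aux_strict(chan)))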
if (len(zValueFiles) > 0):
    for zFile in zValueFiles:
        zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile))
    tmpList=list()
    for chan in zValueDict[sngl.ifo]:
for zFile in fnmatch.filter(filesOmega,\
                            "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)):
    for chan in wikiFileFinder.__readSummary__(zFile):
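Read together, this rem/add pair replaces the build-a-list, guard-on-length, reparse-into-tmpList shape with a single pass that streams straight from fnmatch.filter over a pre-fetched file list (filesOmega). A sketch of the resulting shape, with readSummary and keep standing in for wikiFileFinder.__readSummary__ and a channel predicate; one trade-off is that the explicit "summary file not found" message from the old else branch has no natural home in the streamed form:

import fnmatch

def collect_z_values(filesOmega, ifo, time, readSummary, keep):
    """Gather summary.txt channel rows for one IFO/time, keeping only
    rows whose channel name (row[0]) passes keep()."""
    rows = []
    pattern = "*/%s_RDS_*/%s/*summary.txt" % (ifo, time)
    for zFile in fnmatch.filter(filesOmega, pattern):
        for chan in readSummary(zFile):
            if keep(chan[0]):
                rows.append(chan)
    return rows

Under those assumptions, something like collect_z_values(filesOmega, sngl.ifo, sngl.time, wikiFileFinder.__readSummary__, is_sei) would reproduce the seismic branch in one call.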
def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.exists(wikiFilename) and maxCount < 15: sys.stdout.write("File %s already exists.\n"%\ os.path.split(wikiFilename)[1]) wikiFilename=wikiFilename+".wiki" maxCount=maxCount+1 # #Create the wikipage object etc # wikiPage=wiki(wikiFilename) # # Create top two trigger params tables # cTable=wikiPage.wikiTable(2,9) cTable.data=[ ["Trigger Type", "Rank", "FAR", "SNR", "IFOS(Coinc)", "Instruments(Active)", "Coincidence Time (s)", "Total Mass (mSol)", "Chirp Mass (mSol)" ], ["%s"%(wikiCoinc.type), "%s"%(wikiCoinc.rank), "%s"%(wikiCoinc.far), "%s"%(wikiCoinc.snr), "%s"%(wikiCoinc.ifos), "%s"%(wikiCoinc.instruments), "%s"%(wikiCoinc.time), "%s"%(wikiCoinc.mass), "%s"%(wikiCoinc.mchirp) ] ] pTable=wikiPage.wikiTable(len(wikiCoinc.sngls_in_coinc())+1,7) pTable.data[0]=[ "IFO", "GPS Time(s)", "SNR", "CHISQR", "Mass 1", "Mass 2", "Chirp Mass" ] for row,cSngl in enumerate(wikiCoinc.sngls_in_coinc()): pTable.data[row+1]=[ "%s"%(cSngl.ifo), "%s"%(cSngl.time), "%s"%(cSngl.snr), "%s"%(cSngl.chisqr), "%s"%(cSngl.mass1), "%s"%(cSngl.mass2), "%s"%(cSngl.mchirp) ] #Write the tables into the Wiki object wikiPage.putText("Coincident Trigger Event Information: %s\n"\ %(stfu_pipe.gpsTimeToReadableDate(wikiCoinc.time))) wikiPage.insertTable(cTable) wikiPage.putText("Corresponding Coincident Single IFO Trigger Information\n") wikiPage.insertTable(pTable) #Generate a table of contents to appear after candidate params table wikiPage.tableOfContents(3) #Begin including each checklist item as section with subsections wikiPage.section("Follow-up Checklist") #Put each checklist item wikiPage.subsection("Checklist Summary") wikiPage.subsubsection("Does this candidate pass this checklist?") wikiPage.subsubsection("Answer") wikiPage.subsubsection("Relevant Information and Comments") wikiPage.insertHR() # #First real checklist item wikiPage.subsection("#0 False Alarm Probability") wikiPage.subsubsection("Question") wikiPage.putText("What is the false alarm rate associated with this candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") farTable=wikiPage.wikiTable(2,1) farTable.setTableStyle("background-color: yellow; text-align center;") farTable.data[0][0]="False Alarm Rate" farTable.data[1][0]="%s"%(wikiCoinc.far) wikiPage.insertTable(farTable) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#1 Data Quality Flags") wikiPage.subsubsection("Question") wikiPage.putText("Can the data quality flags coincident with this candidate be safely disregarded?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPath=os.path.split(wikiFilename)[0] dqFileList=wikiFileFinder.get_findFlags() if len(dqFileList) != 1: sys.stdout.write("Warning: DQ flags data product import problem.\n") print "Found %i files."%len(dqFileList) for mf in dqFileList: print mf for myFile in dqFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item 
wikiPage.subsection("#2 Veto Investigations") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate survive the veto investigations performed at its time?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") vetoFileList=wikiFileFinder.get_findVetos() if len(vetoFileList) != 1: sys.stdout.write("Warning: Veto flags data product import problem.\n") for myFile in vetoFileList:print myFile for myFile in vetoFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#3 IFO Status") wikiPage.subsubsection("Question") wikiPage.putText("Are the interferometers operating normally with a reasonable level of sensitivity around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") #Add link to Daily Stats if wikiCoinc.time <= endOfS5: statsLink=wikiPage.makeExternalLink("http://blue.ligo-wa.caltech.edu/scirun/S5/DailyStatistics/",\ "S5 Daily Stats Page") else: statsLink="This should be a link to S6 Daily Stats!\n" wikiPage.putText(statsLink) #Link figures of merit #Get link for all members of wikiCoinc wikiPage.putText("Figures of Merit\n") if wikiCoinc.time > endOfS5: fomLinks=dict() elems=0 for wikiSngl in wikiCoinc.sngls: if not(wikiSngl.ifo.upper().rstrip().lstrip() == 'V1'): fomLinks[wikiSngl.ifo]=stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo) elems=elems+len(fomLinks[wikiSngl.ifo]) else: for myLabel,myLink,myThumb in stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo): wikiPage.putText("%s\n"%(wikiPage.makeExternalLink(myLink,myLabel))) cols=4 rows=(elems/3)+1 fTable=wikiPage.wikiTable(rows,cols) fTable.data[0]=["IFO,Shift","FOM1","FOM2","FOM3"] currentIndex=0 for myIFOKey in fomLinks.keys(): for label,link,thumb in fomLinks[myIFOKey]: myRow=currentIndex/int(3)+1 myCol=currentIndex%int(3)+1 fTable.data[myRow][0]=label thumbURL=thumb fTable.data[myRow][myCol]="%s"%(wikiPage.linkedRemoteImage(thumb,link)) currentIndex=currentIndex+1 wikiPage.insertTable(fTable) else: wikiPage.putText("Can not automatically fetch S5 FOM links.") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#4 Candidate Appearance") wikiPage.subsubsection("Question") wikiPage.putText("Do the Qscan figures show what we would expect for a gravitational-wave event?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() for sngl in wikiCoinc.sngls: frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'hoft') indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*/%s/*/%s/*index.html"%(frametype,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened.png"\ %(sngl.time,channelName)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened?thumb.png"\ %(sngl.time,channelName)) # #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] if 
len(indexDict[sngl.ifo]) < 1: wikiPage.putText("GW data channel scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >= 1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >= 1 if enoughImage and enoughIndex: wikiPage.insertQscanTable(imageDict,\ thumbDict,\ indexDict) else: sys.stdout.write("Warning: Candidate appearance plot import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#5 Seismic Plots") wikiPage.subsubsection("Question") wikiPage.putText("Is the seismic activity insignificant around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() # for sngl in wikiCoinc.sngls_in_coinc(): indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*index.html"%(sngl.ifo,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)) #Search for corresponding Omega summary.txt file zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if (len(zValueFiles) > 0): for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse only keeping SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. ...skipping...\n"%sngl.ifo) #Search for analyzeQscan files #/L1-analyseQscan_L1_932797512_687_seis_rds_L1_SEI-ETMX_X_z_scat-unspecified-gpstime.png timeString=str(float(sngl.time)).replace(".","_") zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*_%s_%s_*.txt"%(sngl.ifo,timeString)) indexDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*_%s_%s_*.html"%(sngl.ifo,timeString)) thumbDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime_thumb.png"\ %(sngl.ifo,timeString)) imageDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime.png"\ %(sngl.ifo,timeString)) #Process zValue ranking file if found for IFO zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) #Reparse keeping SEI channels tmpList=list() for chan in zValueDictAQ[sngl.ifo]: if "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Analyze Qscan Z ranking file not found for %s. 
...skipping...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Seismic scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: Seismic plots product import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#6 Other environmental causes") wikiPage.subsubsection("Question") wikiPage.putText("Were the environmental disturbances (other than seismic) insignificant at the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() #Select only PEM channels for sngl in wikiCoinc.sngls_in_coinc(): imageDict[sngl.ifo]=list() indexDict[sngl.ifo]=list() thumbDict[sngl.ifo]=list() for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) #Search for corresponding Omega summary.txt file zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse only keeping PEM and not SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if "PEM" in chan[0] and not "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. 
...skipping...\n"%sngl.ifo) #Select associated analyzeQscans imageDictAQ[sngl.ifo]=list() indexDictAQ[sngl.ifo]=list() thumbDictAQ[sngl.ifo]=list() timeString=str(float(sngl.time)).replace(".","_") for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)) zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) for chan in zValueDictAQ[sngl.ifo]: if "PEM" in chan[0] and not "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Analyze Qscan Z ranking file not found for %s. ...skipping...\n"%sngl.ifo) #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime_thumb.png #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime.png for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(imageDict[sngl.ifo]) < 1: wikiPage.putText("PEM scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: PEM plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#7 Auxiliary degree of freedom") wikiPage.subsubsection("Question") wikiPage.putText("Were the auxiliary channel transients coincident with the candidate insignificant?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() #Select only AUX channels for sngl in wikiCoinc.sngls: imageDict[sngl.ifo]=list() indexDict[sngl.ifo]=list() thumbDict[sngl.ifo]=list() for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in 
fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse NOT keeping PEM or SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if not "PEM" in chan[0] or not "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. ...skipping...\n"%sngl.ifo) #Select associated analyzeQscans imageDictAQ[sngl.ifo]=list() indexDictAQ[sngl.ifo]=list() thumbDictAQ[sngl.ifo]=list() timeString=str(float(sngl.time)).replace(".","_") #H1-analyseQscan_H1_931176926_116_rds-unspecified-gpstime.html for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)) #Process zValue ranking file if found for IFO zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) #Reparse NOT keeping PEM or SEI channels tmpList=list() for chan in zValueDictAQ[sngl.ifo]: if not "PEM" in chan[0] or not "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Z ranking file not found for %s. ...skipping...\n"%sngl.ifo) #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime_thumb.png #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime.png for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Other scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: AUX plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#8 Electronic Log Book") wikiPage.subsubsection("Question") 
wikiPage.putText("Were the instruments behaving normally according to the comments posted by the sci-mons or the operators in the e-log?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiLinkLHOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"H1"), "Hanford eLog") wikiLinkLLOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"L1"), "Livingston eLog") wikiPage.putText("%s\n\n%s\n\n"%(wikiLinkLHOlog,wikiLinkLLOlog)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#9 Glitch Report") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the weekly glitch report?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") if int(wikiCoinc.time) >= endOfS5: wikiLinkGlitch=wikiPage.makeExternalLink( "https://www.lsc-group.phys.uwm.edu/twiki/bin/view/DetChar/GlitchStudies", "Glitch Reports for S6" ) else: wikiLinkGlitch=wikiPage.makeExternalLink( "http://www.lsc-group.phys.uwm.edu/glitch/investigations/s5index.html#shift", "Glitch Reports for S5" ) wikiPage.putText("%s\n"%(wikiLinkGlitch)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#10 Snr versus time") wikiPage.subsubsection("Question") wikiPage.putText("Is this trigger significant in a SNR versus time plot of all triggers in its analysis chunk?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#11 Parameters of the candidate") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate have a high likelihood of being a gravitational-wave according to its parameters?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Effective Distance Ratio Test\n") effDList=wikiFileFinder.get_effDRatio() if len(effDList) != 1: sys.stdout.write("Warning: Effective Distance Test import problem.\n") for myFile in effDList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#12 Snr and Chisq") wikiPage.subsubsection("Question") wikiPage.putText("Are the SNR and CHISQ time series consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") # #Put plots SNR and Chi sqr # indexList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*.html") thumbList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_snr-*thumb.png") thumbList.extend(fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_chisq-*thumb.png")) thumbList.sort() indexList=[file2URL.convert(x) for x in indexList] thumbList=[file2URL.convert(x) for x in thumbList] #Two thumb types possible "_thumb.png" or ".thumb.png" imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] 
ifoCount=len(wikiCoinc.sngls) rowLabel={"SNR":1,"CHISQ":2} rowCount=len(rowLabel) colCount=ifoCount if len(indexList) >= 1: snrTable=wikiPage.wikiTable(rowCount+1,colCount+1) for i,sngl in enumerate(wikiCoinc.sngls): myIndex="" for indexFile in indexList: if indexFile.__contains__("_pipe_%s_FOLLOWUP_"%sngl.ifo): myIndex=indexFile if myIndex=="": snrTable.data[0][i+1]=" %s "%sngl.ifo else: snrTable.data[0][i+1]=wikiPage.makeExternalLink(myIndex,sngl.ifo) for col,sngl in enumerate(wikiCoinc.sngls): for row,label in enumerate(rowLabel.keys()): snrTable.data[row+1][0]=label for k,image in enumerate(imageList): if (image.__contains__("_%s-"%label.lower()) \ and image.__contains__("pipe_%s_FOLLOWUP"%sngl.ifo)): snrTable.data[row+1][col+1]=" %s "%(wikiPage.linkedRemoteImage(thumbList[k],thumbList[k])) wikiPage.insertTable(snrTable) else: sys.stdout.write("Warning: SNR and CHISQ plots not found.\n") wikiPage.putText("SNR and CHISQ plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#13 Template bank veto") wikiPage.subsubsection("Question") wikiPage.putText("Is the bank veto value consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#14 Coherent studies") wikiPage.subsubsection("Question") wikiPage.putText("Are the triggers found in multiple interferometers coherent with each other?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") indexList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),"*.html") if len(indexList) >= 1: myIndex=file2URL.convert(indexList[0]) wikiPage.putText(wikiPage.makeExternalLink(myIndex,\ "%s Coherence Study Results"%(wikiCoinc.ifos))) thumbList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),\ "PLOT_CHIA_%s_snr-squared*thumb.png"%(wikiCoinc.time)) imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] rowCount=len(imageList) colCount=1 cohSnrTimeTable=wikiPage.wikiTable(rowCount+1,colCount) cohSnrTimeTable.data[0][0]="%s Coherent SNR Squared Times Series"%(wikiCoinc.ifos) for i,image in enumerate(imageList): cohSnrTimeTable.data[i+1][0]=wikiPage.linkedRemoteImage(image,thumbList[i]) wikiPage.insertTable(cohSnrTimeTable) else: sys.stdout.write("Warning: Coherent plotting jobs not found.\n") wikiPage.putText("Coherent Studies plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#15 Segmentation Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in segmentation?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#16 Calibration Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in calibration that are 
consistent with systematic uncertainties?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #
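A note on the thumbnail handling in the context above: full-size image names are derived from thumbnail names by stripping either of the two suffix conventions the inline comment mentions ("_thumb.png" or ".thumb.png"). A minimal, self-contained sketch of that mapping; the file names used here are illustrative, not taken from a real run:

    # Two thumbnail naming conventions occur in the plot products:
    # "foo_thumb.png" and "foo.thumb.png"; both map back to "foo.png".
    def thumb_to_image(name):
        return name.replace("_thumb.png", ".png").replace(".thumb.png", ".png")

    assert thumb_to_image("PLOT_CHIA_900000000_snr-squared_thumb.png") == \
           "PLOT_CHIA_900000000_snr-squared.png"
    assert thumb_to_image("H1_chisq-0.thumb.png") == "H1_chisq-0.png"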
fb84ac33aba836b8fcb61075cfaec34ced2846c0 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/fb84ac33aba836b8fcb61075cfaec34ced2846c0/makeCheckListWiki.py
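The analyzeQscan products in the record above are selected with fnmatch globs keyed on the IFO name and the trigger GPS time, with the decimal point replaced by an underscore so the time can appear inside file names. A minimal sketch of that selection, assuming a flat list of candidate paths as returned by the finder methods; the helper name and the example paths are illustrative:

    import fnmatch

    def select_analyzeQscan_products(paths, ifo, gps_time):
        # GPS times are embedded in file names with "." -> "_",
        # e.g. 931176926.116 -> "931176926_116".
        time_string = str(float(gps_time)).replace(".", "_")
        index_pages = fnmatch.filter(paths, "*%s-*_%s_*html" % (ifo, time_string))
        z_rank_files = fnmatch.filter(paths, "*%s-*_%s_*txt" % (ifo, time_string))
        return index_pages, z_rank_files

    paths = ["H1-analyseQscan_H1_931176926_116_rds-unspecified-gpstime.html",
             "H1-analyseQscan_H1_931176926_116_rds_ranking.txt"]
    print select_analyzeQscan_products(paths, "H1", 931176926.116)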
tmpList.append(chan)
    zValueDict[sngl.ifo]=tmpList
  else:
    sys.stdout.write("Omega scan summary file not for for %s. ...skipping...\n"%sngl.ifo)
zValueDict[sngl.ifo].append(chan)
    if len(zValueDict[sngl.ifo]) == 0:
      sys.stdout.write("Omega scan summary file not found or empty for %s. ...continuing...\n"%sngl.ifo)
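The change recorded in this rem/add pair replaces the rebuild-into-a-temporary-list pattern, whose warning fired only when no summary file existed, with an in-place append followed by an emptiness check, so the warning now also fires when summary files exist but contain no matching channels. A minimal sketch of the two behaviours; the function names are illustrative:

    import sys

    def keep_sei_old(z_values, files_found):
        # Removed version: filter through a temporary list; warn only
        # when no summary file was found at all.
        if files_found:
            tmpList = []
            for chan in z_values:
                if "SEI" in chan[0]:
                    tmpList.append(chan)
            return tmpList
        sys.stdout.write("summary file not found ...skipping...\n")
        return []

    def keep_sei_new(z_values):
        # Added version: append matches directly; warn whenever the
        # result is empty, whether the file was missing or merely had
        # no matching channels.
        kept = [chan for chan in z_values if "SEI" in chan[0]]
        if len(kept) == 0:
            sys.stdout.write("summary file not found or empty ...continuing...\n")
        return kept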
def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.exists(wikiFilename) and maxCount < 15: sys.stdout.write("File %s already exists.\n"%\ os.path.split(wikiFilename)[1]) wikiFilename=wikiFilename+".wiki" maxCount=maxCount+1 # #Create the wikipage object etc # wikiPage=wiki(wikiFilename) # # Create top two trigger params tables # cTable=wikiPage.wikiTable(2,9) cTable.data=[ ["Trigger Type", "Rank", "FAR", "SNR", "IFOS(Coinc)", "Instruments(Active)", "Coincidence Time (s)", "Total Mass (mSol)", "Chirp Mass (mSol)" ], ["%s"%(wikiCoinc.type), "%s"%(wikiCoinc.rank), "%s"%(wikiCoinc.far), "%s"%(wikiCoinc.snr), "%s"%(wikiCoinc.ifos), "%s"%(wikiCoinc.instruments), "%s"%(wikiCoinc.time), "%s"%(wikiCoinc.mass), "%s"%(wikiCoinc.mchirp) ] ] pTable=wikiPage.wikiTable(len(wikiCoinc.sngls_in_coinc())+1,7) pTable.data[0]=[ "IFO", "GPS Time(s)", "SNR", "CHISQR", "Mass 1", "Mass 2", "Chirp Mass" ] for row,cSngl in enumerate(wikiCoinc.sngls_in_coinc()): pTable.data[row+1]=[ "%s"%(cSngl.ifo), "%s"%(cSngl.time), "%s"%(cSngl.snr), "%s"%(cSngl.chisqr), "%s"%(cSngl.mass1), "%s"%(cSngl.mass2), "%s"%(cSngl.mchirp) ] #Write the tables into the Wiki object wikiPage.putText("Coincident Trigger Event Information: %s\n"\ %(stfu_pipe.gpsTimeToReadableDate(wikiCoinc.time))) wikiPage.insertTable(cTable) wikiPage.putText("Corresponding Coincident Single IFO Trigger Information\n") wikiPage.insertTable(pTable) #Generate a table of contents to appear after candidate params table wikiPage.tableOfContents(3) #Begin including each checklist item as section with subsections wikiPage.section("Follow-up Checklist") #Put each checklist item wikiPage.subsection("Checklist Summary") wikiPage.subsubsection("Does this candidate pass this checklist?") wikiPage.subsubsection("Answer") wikiPage.subsubsection("Relevant Information and Comments") wikiPage.insertHR() # #First real checklist item wikiPage.subsection("#0 False Alarm Probability") wikiPage.subsubsection("Question") wikiPage.putText("What is the false alarm rate associated with this candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") farTable=wikiPage.wikiTable(2,1) farTable.setTableStyle("background-color: yellow; text-align center;") farTable.data[0][0]="False Alarm Rate" farTable.data[1][0]="%s"%(wikiCoinc.far) wikiPage.insertTable(farTable) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#1 Data Quality Flags") wikiPage.subsubsection("Question") wikiPage.putText("Can the data quality flags coincident with this candidate be safely disregarded?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPath=os.path.split(wikiFilename)[0] dqFileList=wikiFileFinder.get_findFlags() if len(dqFileList) != 1: sys.stdout.write("Warning: DQ flags data product import problem.\n") print "Found %i files."%len(dqFileList) for mf in dqFileList: print mf for myFile in dqFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item 
wikiPage.subsection("#2 Veto Investigations") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate survive the veto investigations performed at its time?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") vetoFileList=wikiFileFinder.get_findVetos() if len(vetoFileList) != 1: sys.stdout.write("Warning: Veto flags data product import problem.\n") for myFile in vetoFileList:print myFile for myFile in vetoFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#3 IFO Status") wikiPage.subsubsection("Question") wikiPage.putText("Are the interferometers operating normally with a reasonable level of sensitivity around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") #Add link to Daily Stats if wikiCoinc.time <= endOfS5: statsLink=wikiPage.makeExternalLink("http://blue.ligo-wa.caltech.edu/scirun/S5/DailyStatistics/",\ "S5 Daily Stats Page") else: statsLink="This should be a link to S6 Daily Stats!\n" wikiPage.putText(statsLink) #Link figures of merit #Get link for all members of wikiCoinc wikiPage.putText("Figures of Merit\n") if wikiCoinc.time > endOfS5: fomLinks=dict() elems=0 for wikiSngl in wikiCoinc.sngls: if not(wikiSngl.ifo.upper().rstrip().lstrip() == 'V1'): fomLinks[wikiSngl.ifo]=stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo) elems=elems+len(fomLinks[wikiSngl.ifo]) else: for myLabel,myLink,myThumb in stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo): wikiPage.putText("%s\n"%(wikiPage.makeExternalLink(myLink,myLabel))) cols=4 rows=(elems/3)+1 fTable=wikiPage.wikiTable(rows,cols) fTable.data[0]=["IFO,Shift","FOM1","FOM2","FOM3"] currentIndex=0 for myIFOKey in fomLinks.keys(): for label,link,thumb in fomLinks[myIFOKey]: myRow=currentIndex/int(3)+1 myCol=currentIndex%int(3)+1 fTable.data[myRow][0]=label thumbURL=thumb fTable.data[myRow][myCol]="%s"%(wikiPage.linkedRemoteImage(thumb,link)) currentIndex=currentIndex+1 wikiPage.insertTable(fTable) else: wikiPage.putText("Can not automatically fetch S5 FOM links.") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#4 Candidate Appearance") wikiPage.subsubsection("Question") wikiPage.putText("Do the Qscan figures show what we would expect for a gravitational-wave event?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() for sngl in wikiCoinc.sngls: frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'hoft') indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*/%s/*/%s/*index.html"%(frametype,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened.png"\ %(sngl.time,channelName)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened?thumb.png"\ %(sngl.time,channelName)) # #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] if 
len(indexDict[sngl.ifo]) < 1: wikiPage.putText("GW data channel scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >= 1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >= 1 if enoughImage and enoughIndex: wikiPage.insertQscanTable(imageDict,\ thumbDict,\ indexDict) else: sys.stdout.write("Warning: Candidate appearance plot import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#5 Seismic Plots") wikiPage.subsubsection("Question") wikiPage.putText("Is the seismic activity insignificant around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() # for sngl in wikiCoinc.sngls_in_coinc(): indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*index.html"%(sngl.ifo,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)) #Search for corresponding Omega summary.txt file zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if (len(zValueFiles) > 0): for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse only keeping SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. ...skipping...\n"%sngl.ifo) #Search for analyzeQscan files #/L1-analyseQscan_L1_932797512_687_seis_rds_L1_SEI-ETMX_X_z_scat-unspecified-gpstime.png timeString=str(float(sngl.time)).replace(".","_") zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*_%s_%s_*.txt"%(sngl.ifo,timeString)) indexDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*_%s_%s_*.html"%(sngl.ifo,timeString)) thumbDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime_thumb.png"\ %(sngl.ifo,timeString)) imageDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime.png"\ %(sngl.ifo,timeString)) #Process zValue ranking file if found for IFO zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) #Reparse keeping SEI channels tmpList=list() for chan in zValueDictAQ[sngl.ifo]: if "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Analyze Qscan Z ranking file not found for %s. 
...skipping...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Seismic scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: Seismic plots product import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#6 Other environmental causes") wikiPage.subsubsection("Question") wikiPage.putText("Were the environmental disturbances (other than seismic) insignificant at the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() #Select only PEM channels for sngl in wikiCoinc.sngls_in_coinc(): imageDict[sngl.ifo]=list() indexDict[sngl.ifo]=list() thumbDict[sngl.ifo]=list() for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) #Search for corresponding Omega summary.txt file zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse only keeping PEM and not SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if "PEM" in chan[0] and not "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. 
...skipping...\n"%sngl.ifo) #Select associated analyzeQscans imageDictAQ[sngl.ifo]=list() indexDictAQ[sngl.ifo]=list() thumbDictAQ[sngl.ifo]=list() timeString=str(float(sngl.time)).replace(".","_") for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)) zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) for chan in zValueDictAQ[sngl.ifo]: if "PEM" in chan[0] and not "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Analyze Qscan Z ranking file not found for %s. ...skipping...\n"%sngl.ifo) #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime_thumb.png #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime.png for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(imageDict[sngl.ifo]) < 1: wikiPage.putText("PEM scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: PEM plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#7 Auxiliary degree of freedom") wikiPage.subsubsection("Question") wikiPage.putText("Were the auxiliary channel transients coincident with the candidate insignificant?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() #Select only AUX channels for sngl in wikiCoinc.sngls: imageDict[sngl.ifo]=list() indexDict[sngl.ifo]=list() thumbDict[sngl.ifo]=list() for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in 
fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse NOT keeping PEM or SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if not "PEM" in chan[0] or not "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. ...skipping...\n"%sngl.ifo) #Select associated analyzeQscans imageDictAQ[sngl.ifo]=list() indexDictAQ[sngl.ifo]=list() thumbDictAQ[sngl.ifo]=list() timeString=str(float(sngl.time)).replace(".","_") #H1-analyseQscan_H1_931176926_116_rds-unspecified-gpstime.html for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)) #Process zValue ranking file if found for IFO zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) #Reparse NOT keeping PEM or SEI channels tmpList=list() for chan in zValueDictAQ[sngl.ifo]: if not "PEM" in chan[0] or not "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Z ranking file not found for %s. ...skipping...\n"%sngl.ifo) #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime_thumb.png #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime.png for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Other scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: AUX plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#8 Electronic Log Book") wikiPage.subsubsection("Question") 
wikiPage.putText("Were the instruments behaving normally according to the comments posted by the sci-mons or the operators in the e-log?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiLinkLHOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"H1"), "Hanford eLog") wikiLinkLLOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"L1"), "Livingston eLog") wikiPage.putText("%s\n\n%s\n\n"%(wikiLinkLHOlog,wikiLinkLLOlog)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#9 Glitch Report") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the weekly glitch report?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") if int(wikiCoinc.time) >= endOfS5: wikiLinkGlitch=wikiPage.makeExternalLink( "https://www.lsc-group.phys.uwm.edu/twiki/bin/view/DetChar/GlitchStudies", "Glitch Reports for S6" ) else: wikiLinkGlitch=wikiPage.makeExternalLink( "http://www.lsc-group.phys.uwm.edu/glitch/investigations/s5index.html#shift", "Glitch Reports for S5" ) wikiPage.putText("%s\n"%(wikiLinkGlitch)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#10 Snr versus time") wikiPage.subsubsection("Question") wikiPage.putText("Is this trigger significant in a SNR versus time plot of all triggers in its analysis chunk?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#11 Parameters of the candidate") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate have a high likelihood of being a gravitational-wave according to its parameters?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Effective Distance Ratio Test\n") effDList=wikiFileFinder.get_effDRatio() if len(effDList) != 1: sys.stdout.write("Warning: Effective Distance Test import problem.\n") for myFile in effDList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#12 Snr and Chisq") wikiPage.subsubsection("Question") wikiPage.putText("Are the SNR and CHISQ time series consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") # #Put plots SNR and Chi sqr # indexList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*.html") thumbList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_snr-*thumb.png") thumbList.extend(fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_chisq-*thumb.png")) thumbList.sort() indexList=[file2URL.convert(x) for x in indexList] thumbList=[file2URL.convert(x) for x in thumbList] #Two thumb types possible "_thumb.png" or ".thumb.png" imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] 
ifoCount=len(wikiCoinc.sngls) rowLabel={"SNR":1,"CHISQ":2} rowCount=len(rowLabel) colCount=ifoCount if len(indexList) >= 1: snrTable=wikiPage.wikiTable(rowCount+1,colCount+1) for i,sngl in enumerate(wikiCoinc.sngls): myIndex="" for indexFile in indexList: if indexFile.__contains__("_pipe_%s_FOLLOWUP_"%sngl.ifo): myIndex=indexFile if myIndex=="": snrTable.data[0][i+1]=" %s "%sngl.ifo else: snrTable.data[0][i+1]=wikiPage.makeExternalLink(myIndex,sngl.ifo) for col,sngl in enumerate(wikiCoinc.sngls): for row,label in enumerate(rowLabel.keys()): snrTable.data[row+1][0]=label for k,image in enumerate(imageList): if (image.__contains__("_%s-"%label.lower()) \ and image.__contains__("pipe_%s_FOLLOWUP"%sngl.ifo)): snrTable.data[row+1][col+1]=" %s "%(wikiPage.linkedRemoteImage(thumbList[k],thumbList[k])) wikiPage.insertTable(snrTable) else: sys.stdout.write("Warning: SNR and CHISQ plots not found.\n") wikiPage.putText("SNR and CHISQ plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#13 Template bank veto") wikiPage.subsubsection("Question") wikiPage.putText("Is the bank veto value consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#14 Coherent studies") wikiPage.subsubsection("Question") wikiPage.putText("Are the triggers found in multiple interferometers coherent with each other?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") indexList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),"*.html") if len(indexList) >= 1: myIndex=file2URL.convert(indexList[0]) wikiPage.putText(wikiPage.makeExternalLink(myIndex,\ "%s Coherence Study Results"%(wikiCoinc.ifos))) thumbList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),\ "PLOT_CHIA_%s_snr-squared*thumb.png"%(wikiCoinc.time)) imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] rowCount=len(imageList) colCount=1 cohSnrTimeTable=wikiPage.wikiTable(rowCount+1,colCount) cohSnrTimeTable.data[0][0]="%s Coherent SNR Squared Times Series"%(wikiCoinc.ifos) for i,image in enumerate(imageList): cohSnrTimeTable.data[i+1][0]=wikiPage.linkedRemoteImage(image,thumbList[i]) wikiPage.insertTable(cohSnrTimeTable) else: sys.stdout.write("Warning: Coherent plotting jobs not found.\n") wikiPage.putText("Coherent Studies plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#15 Segmentation Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in segmentation?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#16 Calibration Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in calibration that are 
consistent with systematic uncertainties?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #
fb84ac33aba836b8fcb61075cfaec34ced2846c0 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/fb84ac33aba836b8fcb61075cfaec34ced2846c0/makeCheckListWiki.py
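One caveat visible in both context copies above: the AUX-channel passes keep files with the test `not "PEM" in name or not "SEI" in name`, which by De Morgan's law rejects only names containing both substrings, whereas the inline comments state the intent as "NOT keeping PEM or SEI channels". Excluding either kind requires `and`, or equivalently a negated disjunction, as this sketch with illustrative channel names shows:

    names = ["L1_PEM-EX_MAGX", "L1_SEI-ETMX_X", "L1_ASC-QPDX", "H0_PEM-MY_SEISX"]

    # As written: rejects only names containing both "PEM" and "SEI".
    as_written = [n for n in names if not "PEM" in n or not "SEI" in n]

    # As the comments intend: keeps only names containing neither.
    as_intended = [n for n in names if not ("PEM" in n or "SEI" in n)]

    print as_written    # ['L1_PEM-EX_MAGX', 'L1_SEI-ETMX_X', 'L1_ASC-QPDX']
    print as_intended   # ['L1_ASC-QPDX']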
def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.exists(wikiFilename) and maxCount < 15: sys.stdout.write("File %s already exists.\n"%\ os.path.split(wikiFilename)[1]) wikiFilename=wikiFilename+".wiki" maxCount=maxCount+1 # #Create the wikipage object etc # wikiPage=wiki(wikiFilename) # # Create top two trigger params tables # cTable=wikiPage.wikiTable(2,9) cTable.data=[ ["Trigger Type", "Rank", "FAR", "SNR", "IFOS(Coinc)", "Instruments(Active)", "Coincidence Time (s)", "Total Mass (mSol)", "Chirp Mass (mSol)" ], ["%s"%(wikiCoinc.type), "%s"%(wikiCoinc.rank), "%s"%(wikiCoinc.far), "%s"%(wikiCoinc.snr), "%s"%(wikiCoinc.ifos), "%s"%(wikiCoinc.instruments), "%s"%(wikiCoinc.time), "%s"%(wikiCoinc.mass), "%s"%(wikiCoinc.mchirp) ] ] pTable=wikiPage.wikiTable(len(wikiCoinc.sngls_in_coinc())+1,7) pTable.data[0]=[ "IFO", "GPS Time(s)", "SNR", "CHISQR", "Mass 1", "Mass 2", "Chirp Mass" ] for row,cSngl in enumerate(wikiCoinc.sngls_in_coinc()): pTable.data[row+1]=[ "%s"%(cSngl.ifo), "%s"%(cSngl.time), "%s"%(cSngl.snr), "%s"%(cSngl.chisqr), "%s"%(cSngl.mass1), "%s"%(cSngl.mass2), "%s"%(cSngl.mchirp) ] #Write the tables into the Wiki object wikiPage.putText("Coincident Trigger Event Information: %s\n"\ %(stfu_pipe.gpsTimeToReadableDate(wikiCoinc.time))) wikiPage.insertTable(cTable) wikiPage.putText("Corresponding Coincident Single IFO Trigger Information\n") wikiPage.insertTable(pTable) #Generate a table of contents to appear after candidate params table wikiPage.tableOfContents(3) #Begin including each checklist item as section with subsections wikiPage.section("Follow-up Checklist") #Put each checklist item wikiPage.subsection("Checklist Summary") wikiPage.subsubsection("Does this candidate pass this checklist?") wikiPage.subsubsection("Answer") wikiPage.subsubsection("Relevant Information and Comments") wikiPage.insertHR() # #First real checklist item wikiPage.subsection("#0 False Alarm Probability") wikiPage.subsubsection("Question") wikiPage.putText("What is the false alarm rate associated with this candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") farTable=wikiPage.wikiTable(2,1) farTable.setTableStyle("background-color: yellow; text-align center;") farTable.data[0][0]="False Alarm Rate" farTable.data[1][0]="%s"%(wikiCoinc.far) wikiPage.insertTable(farTable) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#1 Data Quality Flags") wikiPage.subsubsection("Question") wikiPage.putText("Can the data quality flags coincident with this candidate be safely disregarded?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPath=os.path.split(wikiFilename)[0] dqFileList=wikiFileFinder.get_findFlags() if len(dqFileList) != 1: sys.stdout.write("Warning: DQ flags data product import problem.\n") print "Found %i files."%len(dqFileList) for mf in dqFileList: print mf for myFile in dqFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item 
wikiPage.subsection("#2 Veto Investigations") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate survive the veto investigations performed at its time?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") vetoFileList=wikiFileFinder.get_findVetos() if len(vetoFileList) != 1: sys.stdout.write("Warning: Veto flags data product import problem.\n") for myFile in vetoFileList:print myFile for myFile in vetoFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#3 IFO Status") wikiPage.subsubsection("Question") wikiPage.putText("Are the interferometers operating normally with a reasonable level of sensitivity around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") #Add link to Daily Stats if wikiCoinc.time <= endOfS5: statsLink=wikiPage.makeExternalLink("http://blue.ligo-wa.caltech.edu/scirun/S5/DailyStatistics/",\ "S5 Daily Stats Page") else: statsLink="This should be a link to S6 Daily Stats!\n" wikiPage.putText(statsLink) #Link figures of merit #Get link for all members of wikiCoinc wikiPage.putText("Figures of Merit\n") if wikiCoinc.time > endOfS5: fomLinks=dict() elems=0 for wikiSngl in wikiCoinc.sngls: if not(wikiSngl.ifo.upper().rstrip().lstrip() == 'V1'): fomLinks[wikiSngl.ifo]=stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo) elems=elems+len(fomLinks[wikiSngl.ifo]) else: for myLabel,myLink,myThumb in stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo): wikiPage.putText("%s\n"%(wikiPage.makeExternalLink(myLink,myLabel))) cols=4 rows=(elems/3)+1 fTable=wikiPage.wikiTable(rows,cols) fTable.data[0]=["IFO,Shift","FOM1","FOM2","FOM3"] currentIndex=0 for myIFOKey in fomLinks.keys(): for label,link,thumb in fomLinks[myIFOKey]: myRow=currentIndex/int(3)+1 myCol=currentIndex%int(3)+1 fTable.data[myRow][0]=label thumbURL=thumb fTable.data[myRow][myCol]="%s"%(wikiPage.linkedRemoteImage(thumb,link)) currentIndex=currentIndex+1 wikiPage.insertTable(fTable) else: wikiPage.putText("Can not automatically fetch S5 FOM links.") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#4 Candidate Appearance") wikiPage.subsubsection("Question") wikiPage.putText("Do the Qscan figures show what we would expect for a gravitational-wave event?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() for sngl in wikiCoinc.sngls: frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'hoft') indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*/%s/*/%s/*index.html"%(frametype,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened.png"\ %(sngl.time,channelName)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened?thumb.png"\ %(sngl.time,channelName)) # #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] if 
len(indexDict[sngl.ifo]) < 1: wikiPage.putText("GW data channel scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >= 1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >= 1 if enoughImage and enoughIndex: wikiPage.insertQscanTable(imageDict,\ thumbDict,\ indexDict) else: sys.stdout.write("Warning: Candidate appearance plot import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#5 Seismic Plots") wikiPage.subsubsection("Question") wikiPage.putText("Is the seismic activity insignificant around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() # for sngl in wikiCoinc.sngls_in_coinc(): indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*index.html"%(sngl.ifo,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)) #Search for corresponding Omega summary.txt file zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if (len(zValueFiles) > 0): for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse only keeping SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. ...skipping...\n"%sngl.ifo) #Search for analyzeQscan files #/L1-analyseQscan_L1_932797512_687_seis_rds_L1_SEI-ETMX_X_z_scat-unspecified-gpstime.png timeString=str(float(sngl.time)).replace(".","_") zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*_%s_%s_*.txt"%(sngl.ifo,timeString)) indexDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*_%s_%s_*.html"%(sngl.ifo,timeString)) thumbDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime_thumb.png"\ %(sngl.ifo,timeString)) imageDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime.png"\ %(sngl.ifo,timeString)) #Process zValue ranking file if found for IFO zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) #Reparse keeping SEI channels tmpList=list() for chan in zValueDictAQ[sngl.ifo]: if "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Analyze Qscan Z ranking file not found for %s. 
...skipping...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Seismic scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: Seismic plots product import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#6 Other environmental causes") wikiPage.subsubsection("Question") wikiPage.putText("Were the environmental disturbances (other than seismic) insignificant at the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() #Select only PEM channels for sngl in wikiCoinc.sngls_in_coinc(): imageDict[sngl.ifo]=list() indexDict[sngl.ifo]=list() thumbDict[sngl.ifo]=list() for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) #Search for corresponding Omega summary.txt file zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse only keeping PEM and not SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if "PEM" in chan[0] and not "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. 
...skipping...\n"%sngl.ifo) #Select associated analyzeQscans imageDictAQ[sngl.ifo]=list() indexDictAQ[sngl.ifo]=list() thumbDictAQ[sngl.ifo]=list() timeString=str(float(sngl.time)).replace(".","_") for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)) zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) for chan in zValueDictAQ[sngl.ifo]: if "PEM" in chan[0] and not "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Analyze Qscan Z ranking file not found for %s. ...skipping...\n"%sngl.ifo) #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime_thumb.png #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime.png for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(imageDict[sngl.ifo]) < 1: wikiPage.putText("PEM scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: PEM plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#7 Auxiliary degree of freedom") wikiPage.subsubsection("Question") wikiPage.putText("Were the auxiliary channel transients coincident with the candidate insignificant?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() #Select only AUX channels for sngl in wikiCoinc.sngls: imageDict[sngl.ifo]=list() indexDict[sngl.ifo]=list() thumbDict[sngl.ifo]=list() for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in 
fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse NOT keeping PEM or SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if not "PEM" in chan[0] or not "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. ...skipping...\n"%sngl.ifo) #Select associated analyzeQscans imageDictAQ[sngl.ifo]=list() indexDictAQ[sngl.ifo]=list() thumbDictAQ[sngl.ifo]=list() timeString=str(float(sngl.time)).replace(".","_") #H1-analyseQscan_H1_931176926_116_rds-unspecified-gpstime.html for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)) #Process zValue ranking file if found for IFO zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) #Reparse NOT keeping PEM or SEI channels tmpList=list() for chan in zValueDictAQ[sngl.ifo]: if not "PEM" in chan[0] or not "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Z ranking file not found for %s. ...skipping...\n"%sngl.ifo) #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime_thumb.png #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime.png for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Other scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: AUX plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#8 Electronic Log Book") wikiPage.subsubsection("Question") 
wikiPage.putText("Were the instruments behaving normally according to the comments posted by the sci-mons or the operators in the e-log?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiLinkLHOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"H1"), "Hanford eLog") wikiLinkLLOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"L1"), "Livingston eLog") wikiPage.putText("%s\n\n%s\n\n"%(wikiLinkLHOlog,wikiLinkLLOlog)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#9 Glitch Report") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the weekly glitch report?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") if int(wikiCoinc.time) >= endOfS5: wikiLinkGlitch=wikiPage.makeExternalLink( "https://www.lsc-group.phys.uwm.edu/twiki/bin/view/DetChar/GlitchStudies", "Glitch Reports for S6" ) else: wikiLinkGlitch=wikiPage.makeExternalLink( "http://www.lsc-group.phys.uwm.edu/glitch/investigations/s5index.html#shift", "Glitch Reports for S5" ) wikiPage.putText("%s\n"%(wikiLinkGlitch)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#10 Snr versus time") wikiPage.subsubsection("Question") wikiPage.putText("Is this trigger significant in a SNR versus time plot of all triggers in its analysis chunk?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#11 Parameters of the candidate") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate have a high likelihood of being a gravitational-wave according to its parameters?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Effective Distance Ratio Test\n") effDList=wikiFileFinder.get_effDRatio() if len(effDList) != 1: sys.stdout.write("Warning: Effective Distance Test import problem.\n") for myFile in effDList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#12 Snr and Chisq") wikiPage.subsubsection("Question") wikiPage.putText("Are the SNR and CHISQ time series consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") # #Put plots SNR and Chi sqr # indexList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*.html") thumbList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_snr-*thumb.png") thumbList.extend(fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_chisq-*thumb.png")) thumbList.sort() indexList=[file2URL.convert(x) for x in indexList] thumbList=[file2URL.convert(x) for x in thumbList] #Two thumb types possible "_thumb.png" or ".thumb.png" imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] 
ifoCount=len(wikiCoinc.sngls) rowLabel={"SNR":1,"CHISQ":2} rowCount=len(rowLabel) colCount=ifoCount if len(indexList) >= 1: snrTable=wikiPage.wikiTable(rowCount+1,colCount+1) for i,sngl in enumerate(wikiCoinc.sngls): myIndex="" for indexFile in indexList: if indexFile.__contains__("_pipe_%s_FOLLOWUP_"%sngl.ifo): myIndex=indexFile if myIndex=="": snrTable.data[0][i+1]=" %s "%sngl.ifo else: snrTable.data[0][i+1]=wikiPage.makeExternalLink(myIndex,sngl.ifo) for col,sngl in enumerate(wikiCoinc.sngls): for row,label in enumerate(rowLabel.keys()): snrTable.data[row+1][0]=label for k,image in enumerate(imageList): if (image.__contains__("_%s-"%label.lower()) \ and image.__contains__("pipe_%s_FOLLOWUP"%sngl.ifo)): snrTable.data[row+1][col+1]=" %s "%(wikiPage.linkedRemoteImage(thumbList[k],thumbList[k])) wikiPage.insertTable(snrTable) else: sys.stdout.write("Warning: SNR and CHISQ plots not found.\n") wikiPage.putText("SNR and CHISQ plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#13 Template bank veto") wikiPage.subsubsection("Question") wikiPage.putText("Is the bank veto value consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#14 Coherent studies") wikiPage.subsubsection("Question") wikiPage.putText("Are the triggers found in multiple interferometers coherent with each other?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") indexList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),"*.html") if len(indexList) >= 1: myIndex=file2URL.convert(indexList[0]) wikiPage.putText(wikiPage.makeExternalLink(myIndex,\ "%s Coherence Study Results"%(wikiCoinc.ifos))) thumbList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),\ "PLOT_CHIA_%s_snr-squared*thumb.png"%(wikiCoinc.time)) imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] rowCount=len(imageList) colCount=1 cohSnrTimeTable=wikiPage.wikiTable(rowCount+1,colCount) cohSnrTimeTable.data[0][0]="%s Coherent SNR Squared Times Series"%(wikiCoinc.ifos) for i,image in enumerate(imageList): cohSnrTimeTable.data[i+1][0]=wikiPage.linkedRemoteImage(image,thumbList[i]) wikiPage.insertTable(cohSnrTimeTable) else: sys.stdout.write("Warning: Coherent plotting jobs not found.\n") wikiPage.putText("Coherent Studies plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#15 Segmentation Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in segmentation?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#16 Calibration Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in calibration that are 
consistent with systematic uncertainties?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #
fb84ac33aba836b8fcb61075cfaec34ced2846c0 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/fb84ac33aba836b8fcb61075cfaec34ced2846c0/makeCheckListWiki.py
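A note on the thumbnail handling in the checklist-generation context above: the code derives full-size image names from thumbnail names with a double replace, because two thumbnail suffix conventions occur in the scan trees ("_thumb.png" and ".thumb.png"). Below is a minimal sketch of that mapping using the same replace chain as the source; the sample file names are illustrative only.

# Sketch of the thumb-to-image mapping used in the context above.
# Both thumbnail conventions collapse to the same full-size ".png" name.
def thumb_to_image(name):
    return name.replace("_thumb.png", ".png").replace(".thumb.png", ".png")

assert thumb_to_image("E_16.00_spectrogram_whitened_thumb.png") == \
       "E_16.00_spectrogram_whitened.png"
assert thumb_to_image("E_16.00_spectrogram_whitened.thumb.png") == \
       "E_16.00_spectrogram_whitened.png"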
zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\
                           "*_%s_%s_*.txt"%(sngl.ifo,timeString))
indexDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\
indexDictAQ[sngl.ifo]=fnmatch.filter(filesAnalyze,\
def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.exists(wikiFilename) and maxCount < 15: sys.stdout.write("File %s already exists.\n"%\ os.path.split(wikiFilename)[1]) wikiFilename=wikiFilename+".wiki" maxCount=maxCount+1 # #Create the wikipage object etc # wikiPage=wiki(wikiFilename) # # Create top two trigger params tables # cTable=wikiPage.wikiTable(2,9) cTable.data=[ ["Trigger Type", "Rank", "FAR", "SNR", "IFOS(Coinc)", "Instruments(Active)", "Coincidence Time (s)", "Total Mass (mSol)", "Chirp Mass (mSol)" ], ["%s"%(wikiCoinc.type), "%s"%(wikiCoinc.rank), "%s"%(wikiCoinc.far), "%s"%(wikiCoinc.snr), "%s"%(wikiCoinc.ifos), "%s"%(wikiCoinc.instruments), "%s"%(wikiCoinc.time), "%s"%(wikiCoinc.mass), "%s"%(wikiCoinc.mchirp) ] ] pTable=wikiPage.wikiTable(len(wikiCoinc.sngls_in_coinc())+1,7) pTable.data[0]=[ "IFO", "GPS Time(s)", "SNR", "CHISQR", "Mass 1", "Mass 2", "Chirp Mass" ] for row,cSngl in enumerate(wikiCoinc.sngls_in_coinc()): pTable.data[row+1]=[ "%s"%(cSngl.ifo), "%s"%(cSngl.time), "%s"%(cSngl.snr), "%s"%(cSngl.chisqr), "%s"%(cSngl.mass1), "%s"%(cSngl.mass2), "%s"%(cSngl.mchirp) ] #Write the tables into the Wiki object wikiPage.putText("Coincident Trigger Event Information: %s\n"\ %(stfu_pipe.gpsTimeToReadableDate(wikiCoinc.time))) wikiPage.insertTable(cTable) wikiPage.putText("Corresponding Coincident Single IFO Trigger Information\n") wikiPage.insertTable(pTable) #Generate a table of contents to appear after candidate params table wikiPage.tableOfContents(3) #Begin including each checklist item as section with subsections wikiPage.section("Follow-up Checklist") #Put each checklist item wikiPage.subsection("Checklist Summary") wikiPage.subsubsection("Does this candidate pass this checklist?") wikiPage.subsubsection("Answer") wikiPage.subsubsection("Relevant Information and Comments") wikiPage.insertHR() # #First real checklist item wikiPage.subsection("#0 False Alarm Probability") wikiPage.subsubsection("Question") wikiPage.putText("What is the false alarm rate associated with this candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") farTable=wikiPage.wikiTable(2,1) farTable.setTableStyle("background-color: yellow; text-align center;") farTable.data[0][0]="False Alarm Rate" farTable.data[1][0]="%s"%(wikiCoinc.far) wikiPage.insertTable(farTable) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#1 Data Quality Flags") wikiPage.subsubsection("Question") wikiPage.putText("Can the data quality flags coincident with this candidate be safely disregarded?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPath=os.path.split(wikiFilename)[0] dqFileList=wikiFileFinder.get_findFlags() if len(dqFileList) != 1: sys.stdout.write("Warning: DQ flags data product import problem.\n") print "Found %i files."%len(dqFileList) for mf in dqFileList: print mf for myFile in dqFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item 
wikiPage.subsection("#2 Veto Investigations") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate survive the veto investigations performed at its time?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") vetoFileList=wikiFileFinder.get_findVetos() if len(vetoFileList) != 1: sys.stdout.write("Warning: Veto flags data product import problem.\n") for myFile in vetoFileList:print myFile for myFile in vetoFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#3 IFO Status") wikiPage.subsubsection("Question") wikiPage.putText("Are the interferometers operating normally with a reasonable level of sensitivity around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") #Add link to Daily Stats if wikiCoinc.time <= endOfS5: statsLink=wikiPage.makeExternalLink("http://blue.ligo-wa.caltech.edu/scirun/S5/DailyStatistics/",\ "S5 Daily Stats Page") else: statsLink="This should be a link to S6 Daily Stats!\n" wikiPage.putText(statsLink) #Link figures of merit #Get link for all members of wikiCoinc wikiPage.putText("Figures of Merit\n") if wikiCoinc.time > endOfS5: fomLinks=dict() elems=0 for wikiSngl in wikiCoinc.sngls: if not(wikiSngl.ifo.upper().rstrip().lstrip() == 'V1'): fomLinks[wikiSngl.ifo]=stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo) elems=elems+len(fomLinks[wikiSngl.ifo]) else: for myLabel,myLink,myThumb in stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo): wikiPage.putText("%s\n"%(wikiPage.makeExternalLink(myLink,myLabel))) cols=4 rows=(elems/3)+1 fTable=wikiPage.wikiTable(rows,cols) fTable.data[0]=["IFO,Shift","FOM1","FOM2","FOM3"] currentIndex=0 for myIFOKey in fomLinks.keys(): for label,link,thumb in fomLinks[myIFOKey]: myRow=currentIndex/int(3)+1 myCol=currentIndex%int(3)+1 fTable.data[myRow][0]=label thumbURL=thumb fTable.data[myRow][myCol]="%s"%(wikiPage.linkedRemoteImage(thumb,link)) currentIndex=currentIndex+1 wikiPage.insertTable(fTable) else: wikiPage.putText("Can not automatically fetch S5 FOM links.") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#4 Candidate Appearance") wikiPage.subsubsection("Question") wikiPage.putText("Do the Qscan figures show what we would expect for a gravitational-wave event?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() for sngl in wikiCoinc.sngls: frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'hoft') indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*/%s/*/%s/*index.html"%(frametype,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened.png"\ %(sngl.time,channelName)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened?thumb.png"\ %(sngl.time,channelName)) # #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] if 
len(indexDict[sngl.ifo]) < 1: wikiPage.putText("GW data channel scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >= 1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >= 1 if enoughImage and enoughIndex: wikiPage.insertQscanTable(imageDict,\ thumbDict,\ indexDict) else: sys.stdout.write("Warning: Candidate appearance plot import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#5 Seismic Plots") wikiPage.subsubsection("Question") wikiPage.putText("Is the seismic activity insignificant around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() # for sngl in wikiCoinc.sngls_in_coinc(): indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*index.html"%(sngl.ifo,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)) #Search for corresponding Omega summary.txt file zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if (len(zValueFiles) > 0): for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse only keeping SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. ...skipping...\n"%sngl.ifo) #Search for analyzeQscan files #/L1-analyseQscan_L1_932797512_687_seis_rds_L1_SEI-ETMX_X_z_scat-unspecified-gpstime.png timeString=str(float(sngl.time)).replace(".","_") zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*_%s_%s_*.txt"%(sngl.ifo,timeString)) indexDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*_%s_%s_*.html"%(sngl.ifo,timeString)) thumbDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime_thumb.png"\ %(sngl.ifo,timeString)) imageDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime.png"\ %(sngl.ifo,timeString)) #Process zValue ranking file if found for IFO zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) #Reparse keeping SEI channels tmpList=list() for chan in zValueDictAQ[sngl.ifo]: if "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Analyze Qscan Z ranking file not found for %s. 
...skipping...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Seismic scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: Seismic plots product import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#6 Other environmental causes") wikiPage.subsubsection("Question") wikiPage.putText("Were the environmental disturbances (other than seismic) insignificant at the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() #Select only PEM channels for sngl in wikiCoinc.sngls_in_coinc(): imageDict[sngl.ifo]=list() indexDict[sngl.ifo]=list() thumbDict[sngl.ifo]=list() for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) #Search for corresponding Omega summary.txt file zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse only keeping PEM and not SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if "PEM" in chan[0] and not "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. 
...skipping...\n"%sngl.ifo) #Select associated analyzeQscans imageDictAQ[sngl.ifo]=list() indexDictAQ[sngl.ifo]=list() thumbDictAQ[sngl.ifo]=list() timeString=str(float(sngl.time)).replace(".","_") for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)) zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) for chan in zValueDictAQ[sngl.ifo]: if "PEM" in chan[0] and not "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Analyze Qscan Z ranking file not found for %s. ...skipping...\n"%sngl.ifo) #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime_thumb.png #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime.png for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(imageDict[sngl.ifo]) < 1: wikiPage.putText("PEM scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: PEM plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#7 Auxiliary degree of freedom") wikiPage.subsubsection("Question") wikiPage.putText("Were the auxiliary channel transients coincident with the candidate insignificant?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() #Select only AUX channels for sngl in wikiCoinc.sngls: imageDict[sngl.ifo]=list() indexDict[sngl.ifo]=list() thumbDict[sngl.ifo]=list() for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in 
fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse NOT keeping PEM or SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if not "PEM" in chan[0] or not "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. ...skipping...\n"%sngl.ifo) #Select associated analyzeQscans imageDictAQ[sngl.ifo]=list() indexDictAQ[sngl.ifo]=list() thumbDictAQ[sngl.ifo]=list() timeString=str(float(sngl.time)).replace(".","_") #H1-analyseQscan_H1_931176926_116_rds-unspecified-gpstime.html for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)) #Process zValue ranking file if found for IFO zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) #Reparse NOT keeping PEM or SEI channels tmpList=list() for chan in zValueDictAQ[sngl.ifo]: if not "PEM" in chan[0] or not "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Z ranking file not found for %s. ...skipping...\n"%sngl.ifo) #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime_thumb.png #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime.png for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Other scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: AUX plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#8 Electronic Log Book") wikiPage.subsubsection("Question") 
wikiPage.putText("Were the instruments behaving normally according to the comments posted by the sci-mons or the operators in the e-log?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiLinkLHOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"H1"), "Hanford eLog") wikiLinkLLOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"L1"), "Livingston eLog") wikiPage.putText("%s\n\n%s\n\n"%(wikiLinkLHOlog,wikiLinkLLOlog)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#9 Glitch Report") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the weekly glitch report?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") if int(wikiCoinc.time) >= endOfS5: wikiLinkGlitch=wikiPage.makeExternalLink( "https://www.lsc-group.phys.uwm.edu/twiki/bin/view/DetChar/GlitchStudies", "Glitch Reports for S6" ) else: wikiLinkGlitch=wikiPage.makeExternalLink( "http://www.lsc-group.phys.uwm.edu/glitch/investigations/s5index.html#shift", "Glitch Reports for S5" ) wikiPage.putText("%s\n"%(wikiLinkGlitch)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#10 Snr versus time") wikiPage.subsubsection("Question") wikiPage.putText("Is this trigger significant in a SNR versus time plot of all triggers in its analysis chunk?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#11 Parameters of the candidate") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate have a high likelihood of being a gravitational-wave according to its parameters?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Effective Distance Ratio Test\n") effDList=wikiFileFinder.get_effDRatio() if len(effDList) != 1: sys.stdout.write("Warning: Effective Distance Test import problem.\n") for myFile in effDList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#12 Snr and Chisq") wikiPage.subsubsection("Question") wikiPage.putText("Are the SNR and CHISQ time series consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") # #Put plots SNR and Chi sqr # indexList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*.html") thumbList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_snr-*thumb.png") thumbList.extend(fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_chisq-*thumb.png")) thumbList.sort() indexList=[file2URL.convert(x) for x in indexList] thumbList=[file2URL.convert(x) for x in thumbList] #Two thumb types possible "_thumb.png" or ".thumb.png" imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] 
ifoCount=len(wikiCoinc.sngls) rowLabel={"SNR":1,"CHISQ":2} rowCount=len(rowLabel) colCount=ifoCount if len(indexList) >= 1: snrTable=wikiPage.wikiTable(rowCount+1,colCount+1) for i,sngl in enumerate(wikiCoinc.sngls): myIndex="" for indexFile in indexList: if indexFile.__contains__("_pipe_%s_FOLLOWUP_"%sngl.ifo): myIndex=indexFile if myIndex=="": snrTable.data[0][i+1]=" %s "%sngl.ifo else: snrTable.data[0][i+1]=wikiPage.makeExternalLink(myIndex,sngl.ifo) for col,sngl in enumerate(wikiCoinc.sngls): for row,label in enumerate(rowLabel.keys()): snrTable.data[row+1][0]=label for k,image in enumerate(imageList): if (image.__contains__("_%s-"%label.lower()) \ and image.__contains__("pipe_%s_FOLLOWUP"%sngl.ifo)): snrTable.data[row+1][col+1]=" %s "%(wikiPage.linkedRemoteImage(thumbList[k],thumbList[k])) wikiPage.insertTable(snrTable) else: sys.stdout.write("Warning: SNR and CHISQ plots not found.\n") wikiPage.putText("SNR and CHISQ plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#13 Template bank veto") wikiPage.subsubsection("Question") wikiPage.putText("Is the bank veto value consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#14 Coherent studies") wikiPage.subsubsection("Question") wikiPage.putText("Are the triggers found in multiple interferometers coherent with each other?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") indexList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),"*.html") if len(indexList) >= 1: myIndex=file2URL.convert(indexList[0]) wikiPage.putText(wikiPage.makeExternalLink(myIndex,\ "%s Coherence Study Results"%(wikiCoinc.ifos))) thumbList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),\ "PLOT_CHIA_%s_snr-squared*thumb.png"%(wikiCoinc.time)) imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] rowCount=len(imageList) colCount=1 cohSnrTimeTable=wikiPage.wikiTable(rowCount+1,colCount) cohSnrTimeTable.data[0][0]="%s Coherent SNR Squared Times Series"%(wikiCoinc.ifos) for i,image in enumerate(imageList): cohSnrTimeTable.data[i+1][0]=wikiPage.linkedRemoteImage(image,thumbList[i]) wikiPage.insertTable(cohSnrTimeTable) else: sys.stdout.write("Warning: Coherent plotting jobs not found.\n") wikiPage.putText("Coherent Studies plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#15 Segmentation Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in segmentation?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#16 Calibration Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in calibration that are 
consistent with systematic uncertainties?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #
fb84ac33aba836b8fcb61075cfaec34ced2846c0 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/fb84ac33aba836b8fcb61075cfaec34ced2846c0/makeCheckListWiki.py
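The hunk above (and the matching one below) is a caching refactor: the removed lines call wikiFileFinder.get_analyzeQscan_SEIS() once per fnmatch.filter, while the added lines filter a list named filesAnalyze, presumably the result of a single get_analyzeQscan_SEIS() call made earlier in the function (that assignment is not shown in this hunk). A minimal sketch of the pattern, with a hypothetical stand-in for the listing call and example-only values:

import fnmatch

def get_analyzeQscan_SEIS():
    # Hypothetical stand-in for wikiFileFinder.get_analyzeQscan_SEIS(),
    # which in the real pipeline rescans a directory tree on every call.
    return ["L1-analyseQscan_L1_932797512_687_seis_rds_L1_SEI-ETMX_X_z_scat-unspecified-gpstime.png",
            "L1-analyseQscan_L1_932797512_687_seis_rds-unspecified-gpstime.html",
            "L1-analyseQscan_L1_932797512_687_seis_rds-unspecified-gpstime.txt"]

ifo, timeString = "L1", "932797512_687"     # example values only
filesAnalyze = get_analyzeQscan_SEIS()      # list the tree once ...
zValueFiles = fnmatch.filter(filesAnalyze,  # ... then reuse it for every filter
                             "*_%s_%s_*.txt"%(ifo,timeString))
indexFiles  = fnmatch.filter(filesAnalyze,
                             "*_%s_%s_*.html"%(ifo,timeString))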
thumbDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\
imageDictAQ[sngl.ifo]=fnmatch.filter(filesAnalyze,\
                                     "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime.png"\
                                     %(sngl.ifo,timeString))
thumbDictAQ[sngl.ifo]=fnmatch.filter(filesAnalyze,\
fb84ac33aba836b8fcb61075cfaec34ced2846c0 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/fb84ac33aba836b8fcb61075cfaec34ced2846c0/makeCheckListWiki.py
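One more idiom worth flagging in the contexts above: several thumbnail globs end in "?thumb.png" (for example "*_z_scat-unspecified-gpstime?thumb.png"). In fnmatch, "?" matches exactly one character, so a single pattern catches both the "_thumb.png" and ".thumb.png" naming conventions while still excluding the full-size image. A small self-contained check; the file names are illustrative only:

import fnmatch

names = ["H1_scan_z_scat-unspecified-gpstime_thumb.png",   # "_thumb" convention
         "H1_scan_z_scat-unspecified-gpstime.thumb.png",   # ".thumb" convention
         "H1_scan_z_scat-unspecified-gpstime.png"]         # full-size image
hits = fnmatch.filter(names, "*_z_scat-unspecified-gpstime?thumb.png")
assert hits == names[:2]  # "?" matches "_" or "."; the full-size image is excluded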