rem (stringlengths 1-322k) | add (stringlengths 0-2.05M) | context (stringlengths 4-228k) | meta (stringlengths 156-215) |
---|---|---|---|
(home_dirs()+"/romain/followupbackgrounds/omega/VSR1b/background/background_937800015_947260815.cache",935798415,999999999,"V1") | (home_dirs()+"/romain/followupbackgrounds/omega/VSR2b/background/background_937800015_947260815.cache",935798415,999999999,"V1") | def figure_out_cache(time,ifo): cacheList=( (home_dirs()+"/romain/followupbackgrounds/omega/S5/background/background_815155213_875232014.cache",815155213,875232014,"H1H2L1"), (home_dirs()+"/romain/followupbackgrounds/omega/S6a/background/background_931035296_935798415.cache",931035296,935798415,"H1L1"), (home_dirs()+"/romain/followupbackgrounds/omega/S6b/background/background_937800015_944587815.cache",935798415,999999999,"H1L1"), (home_dirs()+"/romain/followupbackgrounds/omega/VSR1b/background/background_937800015_947260815.cache",935798415,999999999,"V1") ) foundCache = "" for cacheFile,start,stop,ifos in cacheList: if ((start<=time) and (time<stop) and ifo in ifos): foundCache = cacheFile break if 'phy.syr.edu' in get_hostname(): foundCache = foundCache.replace("romain","rgouaty") if foundCache == "": print ifo, time, " not found in method stfu_pipe.figure_out_cache" else: if not os.path.isfile(foundCache): print "file " + foundCache + " not found" foundCache = "" return foundCache | b726adecb7be59dfbc873941cdbe5900132f4046 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/b726adecb7be59dfbc873941cdbe5900132f4046/stfu_pipe.py |
self.offset_vectors = offset_vectors | self.offset_vectors = list(offset_vectors) self.offset_vectors.sort(key = lambda offset_vector: sorted(offset_vector.items())) | def set_offset_vectors(self, offset_vectors): """ Set the list of offset vectors to be considered when deciding the bins in which each file belongs. Must be called before packing any files. The input is a list of dictionaries, each mapping instruments to offsets. """ self.offset_vectors = offset_vectors min_offset = min(min(offset_vector.values()) for offset_vector in offset_vectors) max_offset = max(max(offset_vector.values()) for offset_vector in offset_vectors) # largest gap that can conceivably be closed by the time # slides self.max_gap = max_offset - min_offset assert self.max_gap >= 0 | d665ff5b1086411540dd407fbadfcc4d28eb43e6 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/d665ff5b1086411540dd407fbadfcc4d28eb43e6/ligolw_cafe.py |
def split_bins(cafepacker, extentlimit): """ Split bins of stored in CafePacker until each bin has an extent no longer than extentlimit. """ | def split_bins(cafepacker, extentlimit, verbose = False): """ Split bins in CafePacker so that each bin has an extent no longer than extentlimit. """ | def split_bins(cafepacker, extentlimit): """ Split bins of stored in CafePacker until each bin has an extent no longer than extentlimit. """ # # loop overall the bins in cafepacker.bins. we pop items out of # cafepacker.bins and append new ones to the end so need a while loop # checking the extent of each bin in cafepacker.bins until all bins are # done being split # idx = 0 while idx < len(cafepacker.bins): if abs(cafepacker.bins[idx].extent) <= extentlimit: # # bin doesn't need splitting so move to next # idx += 1 continue # # split this bin so pop it out of the list # bigbin = cafepacker.bins.pop(idx) # # calculate the central time of the union of all the input # files in the bin # splittime = lsctables.LIGOTimeGPS(bigbin.extent[0] + (bigbin.extent[1] - bigbin.extent[0])/2) # # split the segmentlistdict at this time # splitseglistdict = segments.segmentlistdict() for key in bigbin.size.keys(): splitseglistdict[key] = segments.segmentlist([segments.segment(-segments.infinity(),splittime)]) # # create bins for the first and second halves # bin1 = LALCacheBin() bin1.size = bigbin.size & splitseglistdict bin1.extent = bigbin.extent & splitseglistdict.values()[0][0] bin2 = LALCacheBin() bin2.size = bigbin.size & ~splitseglistdict bin2.extent = bigbin.extent & (~splitseglistdict.values()[0])[0] # # remove unused keys from the smaller bins' segmentlistdicts # newsize = segments.segmentlistdict() for key in bin1.size.keys(): if len(bin1.size[key]): newsize[key] = bin1.size[key] bin1.size = newsize newsize = segments.segmentlistdict() for key in bin2.size.keys(): if len(bin2.size[key]): newsize[key] = bin2.size[key] bin2.size = newsize # # find which of the objects in bigbin.objects intersect the two # smaller bins # for cache in bigbin.objects: thisseglistdict = cache.to_segmentlistdict() coinc1 = 0 coinc2 = 0 for offset_vector in cafepacker.offset_vectors: # # loop over offset vectors updating the smaller # bins and the object we are checking # bin1.size.offsets.update(offset_vector) bin2.size.offsets.update(offset_vector) thisseglistdict.offsets.update(offset_vector) if not coinc1 and bin1.size.is_coincident(thisseglistdict, keys = offset_vector.keys()): # # object is coicident with bin1 # coinc1 = 1 bin1.objects.append(cache) if not coinc2 and bin2.size.is_coincident(thisseglistdict, keys = offset_vector.keys()): # # object is coincident with bin2 # coinc2 = 1 bin2.objects.append(cache) # # end loop if known to be coincident with both # bins # if coinc1 and coinc2: break # # clear offsets applied to object # thisseglistdict.offsets.clear() # # clear offsets applied to bins # bin1.size.offsets.clear() bin2.size.offsets.clear() # # append smaller bins to list of bins # cafepacker.bins.append(bin1) cafepacker.bins.append(bin2) # # do not increment idx as we popped the large bin out of # cafepacker.bins # # # sort the bins in cafepacker # cafepacker.bins.sort() return cafepacker | d665ff5b1086411540dd407fbadfcc4d28eb43e6 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/d665ff5b1086411540dd407fbadfcc4d28eb43e6/ligolw_cafe.py |
if abs(cafepacker.bins[idx].extent) <= extentlimit: | origbin = cafepacker.bins[idx] n = int(math.ceil(float(abs(origbin.extent)) / extentlimit)) if n <= 1: | def split_bins(cafepacker, extentlimit): """ Split bins of stored in CafePacker until each bin has an extent no longer than extentlimit. """ # # loop overall the bins in cafepacker.bins. we pop items out of # cafepacker.bins and append new ones to the end so need a while loop # checking the extent of each bin in cafepacker.bins until all bins are # done being split # idx = 0 while idx < len(cafepacker.bins): if abs(cafepacker.bins[idx].extent) <= extentlimit: # # bin doesn't need splitting so move to next # idx += 1 continue # # split this bin so pop it out of the list # bigbin = cafepacker.bins.pop(idx) # # calculate the central time of the union of all the input # files in the bin # splittime = lsctables.LIGOTimeGPS(bigbin.extent[0] + (bigbin.extent[1] - bigbin.extent[0])/2) # # split the segmentlistdict at this time # splitseglistdict = segments.segmentlistdict() for key in bigbin.size.keys(): splitseglistdict[key] = segments.segmentlist([segments.segment(-segments.infinity(),splittime)]) # # create bins for the first and second halves # bin1 = LALCacheBin() bin1.size = bigbin.size & splitseglistdict bin1.extent = bigbin.extent & splitseglistdict.values()[0][0] bin2 = LALCacheBin() bin2.size = bigbin.size & ~splitseglistdict bin2.extent = bigbin.extent & (~splitseglistdict.values()[0])[0] # # remove unused keys from the smaller bins' segmentlistdicts # newsize = segments.segmentlistdict() for key in bin1.size.keys(): if len(bin1.size[key]): newsize[key] = bin1.size[key] bin1.size = newsize newsize = segments.segmentlistdict() for key in bin2.size.keys(): if len(bin2.size[key]): newsize[key] = bin2.size[key] bin2.size = newsize # # find which of the objects in bigbin.objects intersect the two # smaller bins # for cache in bigbin.objects: thisseglistdict = cache.to_segmentlistdict() coinc1 = 0 coinc2 = 0 for offset_vector in cafepacker.offset_vectors: # # loop over offset vectors updating the smaller # bins and the object we are checking # bin1.size.offsets.update(offset_vector) bin2.size.offsets.update(offset_vector) thisseglistdict.offsets.update(offset_vector) if not coinc1 and bin1.size.is_coincident(thisseglistdict, keys = offset_vector.keys()): # # object is coicident with bin1 # coinc1 = 1 bin1.objects.append(cache) if not coinc2 and bin2.size.is_coincident(thisseglistdict, keys = offset_vector.keys()): # # object is coincident with bin2 # coinc2 = 1 bin2.objects.append(cache) # # end loop if known to be coincident with both # bins # if coinc1 and coinc2: break # # clear offsets applied to object # thisseglistdict.offsets.clear() # # clear offsets applied to bins # bin1.size.offsets.clear() bin2.size.offsets.clear() # # append smaller bins to list of bins # cafepacker.bins.append(bin1) cafepacker.bins.append(bin2) # # do not increment idx as we popped the large bin out of # cafepacker.bins # # # sort the bins in cafepacker # cafepacker.bins.sort() return cafepacker | d665ff5b1086411540dd407fbadfcc4d28eb43e6 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/d665ff5b1086411540dd407fbadfcc4d28eb43e6/ligolw_cafe.py |
bigbin = cafepacker.bins.pop(idx) splittime = lsctables.LIGOTimeGPS(bigbin.extent[0] + (bigbin.extent[1] - bigbin.extent[0])/2) splitseglistdict = segments.segmentlistdict() for key in bigbin.size.keys(): splitseglistdict[key] = segments.segmentlist([segments.segment(-segments.infinity(),splittime)]) bin1 = LALCacheBin() bin1.size = bigbin.size & splitseglistdict bin1.extent = bigbin.extent & splitseglistdict.values()[0][0] bin2 = LALCacheBin() bin2.size = bigbin.size & ~splitseglistdict bin2.extent = bigbin.extent & (~splitseglistdict.values()[0])[0] newsize = segments.segmentlistdict() for key in bin1.size.keys(): if len(bin1.size[key]): newsize[key] = bin1.size[key] bin1.size = newsize newsize = segments.segmentlistdict() for key in bin2.size.keys(): if len(bin2.size[key]): newsize[key] = bin2.size[key] bin2.size = newsize for cache in bigbin.objects: thisseglistdict = cache.to_segmentlistdict() coinc1 = 0 coinc2 = 0 for offset_vector in cafepacker.offset_vectors: | splits = [-segments.infinity()] + [lsctables.LIGOTimeGPS(origbin.extent[0] + i * float(origbin.extent[1] - origbin.extent[0]) / n) for i in range(1, n)] + [+segments.infinity()] if verbose: print >>sys.stderr, "\tsplitting cache spanning %s at %s" % (str(origbin.extent), ", ".join(str(split) for split in splits[1:-1])) splits = [segments.segmentlist([segments.segment(*bounds)]) for bounds in zip(splits[:-1], splits[1:])] splits = [segments.segmentlistdict.fromkeys(origbin.size, seglist) for seglist in splits] newbins = [] for split in splits: newbins.append(LALCacheBin()) newbins[-1].size = origbin.size & split for key in tuple(newbins[-1].size): if not newbins[-1].size[key]: del newbins[-1].size[key] newbins[-1].extent = newbins[-1].size.extent_all() for bin in newbins: for cache_entry in origbin.objects: | def split_bins(cafepacker, extentlimit): """ Split bins of stored in CafePacker until each bin has an extent no longer than extentlimit. """ # # loop overall the bins in cafepacker.bins. we pop items out of # cafepacker.bins and append new ones to the end so need a while loop # checking the extent of each bin in cafepacker.bins until all bins are # done being split # idx = 0 while idx < len(cafepacker.bins): if abs(cafepacker.bins[idx].extent) <= extentlimit: # # bin doesn't need splitting so move to next # idx += 1 continue # # split this bin so pop it out of the list # bigbin = cafepacker.bins.pop(idx) # # calculate the central time of the union of all the input # files in the bin # splittime = lsctables.LIGOTimeGPS(bigbin.extent[0] + (bigbin.extent[1] - bigbin.extent[0])/2) # # split the segmentlistdict at this time # splitseglistdict = segments.segmentlistdict() for key in bigbin.size.keys(): splitseglistdict[key] = segments.segmentlist([segments.segment(-segments.infinity(),splittime)]) # # create bins for the first and second halves # bin1 = LALCacheBin() bin1.size = bigbin.size & splitseglistdict bin1.extent = bigbin.extent & splitseglistdict.values()[0][0] bin2 = LALCacheBin() bin2.size = bigbin.size & ~splitseglistdict bin2.extent = bigbin.extent & (~splitseglistdict.values()[0])[0] # # remove unused keys from the smaller bins' segmentlistdicts # newsize = segments.segmentlistdict() for key in bin1.size.keys(): if len(bin1.size[key]): newsize[key] = bin1.size[key] bin1.size = newsize newsize = segments.segmentlistdict() for key in bin2.size.keys(): if len(bin2.size[key]): newsize[key] = bin2.size[key] bin2.size = newsize # # find which of the objects in bigbin.objects intersect the two # smaller bins # for cache in bigbin.objects: thisseglistdict = cache.to_segmentlistdict() coinc1 = 0 coinc2 = 0 for offset_vector in cafepacker.offset_vectors: # # loop over offset vectors updating the smaller # bins and the object we are checking # bin1.size.offsets.update(offset_vector) bin2.size.offsets.update(offset_vector) thisseglistdict.offsets.update(offset_vector) if not coinc1 and bin1.size.is_coincident(thisseglistdict, keys = offset_vector.keys()): # # object is coicident with bin1 # coinc1 = 1 bin1.objects.append(cache) if not coinc2 and bin2.size.is_coincident(thisseglistdict, keys = offset_vector.keys()): # # object is coincident with bin2 # coinc2 = 1 bin2.objects.append(cache) # # end loop if known to be coincident with both # bins # if coinc1 and coinc2: break # # clear offsets applied to object # thisseglistdict.offsets.clear() # # clear offsets applied to bins # bin1.size.offsets.clear() bin2.size.offsets.clear() # # append smaller bins to list of bins # cafepacker.bins.append(bin1) cafepacker.bins.append(bin2) # # do not increment idx as we popped the large bin out of # cafepacker.bins # # # sort the bins in cafepacker # cafepacker.bins.sort() return cafepacker | d665ff5b1086411540dd407fbadfcc4d28eb43e6 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/d665ff5b1086411540dd407fbadfcc4d28eb43e6/ligolw_cafe.py |
bin1.size.offsets.update(offset_vector) bin2.size.offsets.update(offset_vector) thisseglistdict.offsets.update(offset_vector) if not coinc1 and bin1.size.is_coincident(thisseglistdict, keys = offset_vector.keys()): | if cache_entry.segment.protract(cafepacker.max_gap).disjoint(bin.extent): continue cache_entry_segs = cache_entry.to_segmentlistdict() for offset_vector in cafepacker.offset_vectors: cache_entry_segs.offsets.update(offset_vector) | def split_bins(cafepacker, extentlimit): """ Split bins of stored in CafePacker until each bin has an extent no longer than extentlimit. """ # # loop overall the bins in cafepacker.bins. we pop items out of # cafepacker.bins and append new ones to the end so need a while loop # checking the extent of each bin in cafepacker.bins until all bins are # done being split # idx = 0 while idx < len(cafepacker.bins): if abs(cafepacker.bins[idx].extent) <= extentlimit: # # bin doesn't need splitting so move to next # idx += 1 continue # # split this bin so pop it out of the list # bigbin = cafepacker.bins.pop(idx) # # calculate the central time of the union of all the input # files in the bin # splittime = lsctables.LIGOTimeGPS(bigbin.extent[0] + (bigbin.extent[1] - bigbin.extent[0])/2) # # split the segmentlistdict at this time # splitseglistdict = segments.segmentlistdict() for key in bigbin.size.keys(): splitseglistdict[key] = segments.segmentlist([segments.segment(-segments.infinity(),splittime)]) # # create bins for the first and second halves # bin1 = LALCacheBin() bin1.size = bigbin.size & splitseglistdict bin1.extent = bigbin.extent & splitseglistdict.values()[0][0] bin2 = LALCacheBin() bin2.size = bigbin.size & ~splitseglistdict bin2.extent = bigbin.extent & (~splitseglistdict.values()[0])[0] # # remove unused keys from the smaller bins' segmentlistdicts # newsize = segments.segmentlistdict() for key in bin1.size.keys(): if len(bin1.size[key]): newsize[key] = bin1.size[key] bin1.size = newsize newsize = segments.segmentlistdict() for key in bin2.size.keys(): if len(bin2.size[key]): newsize[key] = bin2.size[key] bin2.size = newsize # # find which of the objects in bigbin.objects intersect the two # smaller bins # for cache in bigbin.objects: thisseglistdict = cache.to_segmentlistdict() coinc1 = 0 coinc2 = 0 for offset_vector in cafepacker.offset_vectors: # # loop over offset vectors updating the smaller # bins and the object we are checking # bin1.size.offsets.update(offset_vector) bin2.size.offsets.update(offset_vector) thisseglistdict.offsets.update(offset_vector) if not coinc1 and bin1.size.is_coincident(thisseglistdict, keys = offset_vector.keys()): # # object is coicident with bin1 # coinc1 = 1 bin1.objects.append(cache) if not coinc2 and bin2.size.is_coincident(thisseglistdict, keys = offset_vector.keys()): # # object is coincident with bin2 # coinc2 = 1 bin2.objects.append(cache) # # end loop if known to be coincident with both # bins # if coinc1 and coinc2: break # # clear offsets applied to object # thisseglistdict.offsets.clear() # # clear offsets applied to bins # bin1.size.offsets.clear() bin2.size.offsets.clear() # # append smaller bins to list of bins # cafepacker.bins.append(bin1) cafepacker.bins.append(bin2) # # do not increment idx as we popped the large bin out of # cafepacker.bins # # # sort the bins in cafepacker # cafepacker.bins.sort() return cafepacker | d665ff5b1086411540dd407fbadfcc4d28eb43e6 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/d665ff5b1086411540dd407fbadfcc4d28eb43e6/ligolw_cafe.py |
coinc1 = 1 bin1.objects.append(cache) if not coinc2 and bin2.size.is_coincident(thisseglistdict, keys = offset_vector.keys()): coinc2 = 1 bin2.objects.append(cache) if coinc1 and coinc2: break thisseglistdict.offsets.clear() bin1.size.offsets.clear() bin2.size.offsets.clear() cafepacker.bins.append(bin1) cafepacker.bins.append(bin2) cafepacker.bins.sort() return cafepacker | if cache_entry_segs.intersects_segment(bin.extent): bin.objects.append(cache_entry) break cafepacker.bins[idx:idx+1] = newbins idx += len(newbins) | def split_bins(cafepacker, extentlimit): """ Split bins of stored in CafePacker until each bin has an extent no longer than extentlimit. """ # # loop overall the bins in cafepacker.bins. we pop items out of # cafepacker.bins and append new ones to the end so need a while loop # checking the extent of each bin in cafepacker.bins until all bins are # done being split # idx = 0 while idx < len(cafepacker.bins): if abs(cafepacker.bins[idx].extent) <= extentlimit: # # bin doesn't need splitting so move to next # idx += 1 continue # # split this bin so pop it out of the list # bigbin = cafepacker.bins.pop(idx) # # calculate the central time of the union of all the input # files in the bin # splittime = lsctables.LIGOTimeGPS(bigbin.extent[0] + (bigbin.extent[1] - bigbin.extent[0])/2) # # split the segmentlistdict at this time # splitseglistdict = segments.segmentlistdict() for key in bigbin.size.keys(): splitseglistdict[key] = segments.segmentlist([segments.segment(-segments.infinity(),splittime)]) # # create bins for the first and second halves # bin1 = LALCacheBin() bin1.size = bigbin.size & splitseglistdict bin1.extent = bigbin.extent & splitseglistdict.values()[0][0] bin2 = LALCacheBin() bin2.size = bigbin.size & ~splitseglistdict bin2.extent = bigbin.extent & (~splitseglistdict.values()[0])[0] # # remove unused keys from the smaller bins' segmentlistdicts # newsize = segments.segmentlistdict() for key in bin1.size.keys(): if len(bin1.size[key]): newsize[key] = bin1.size[key] bin1.size = newsize newsize = segments.segmentlistdict() for key in bin2.size.keys(): if len(bin2.size[key]): newsize[key] = bin2.size[key] bin2.size = newsize # # find which of the objects in bigbin.objects intersect the two # smaller bins # for cache in bigbin.objects: thisseglistdict = cache.to_segmentlistdict() coinc1 = 0 coinc2 = 0 for offset_vector in cafepacker.offset_vectors: # # loop over offset vectors updating the smaller # bins and the object we are checking # bin1.size.offsets.update(offset_vector) bin2.size.offsets.update(offset_vector) thisseglistdict.offsets.update(offset_vector) if not coinc1 and bin1.size.is_coincident(thisseglistdict, keys = offset_vector.keys()): # # object is coicident with bin1 # coinc1 = 1 bin1.objects.append(cache) if not coinc2 and bin2.size.is_coincident(thisseglistdict, keys = offset_vector.keys()): # # object is coincident with bin2 # coinc2 = 1 bin2.objects.append(cache) # # end loop if known to be coincident with both # bins # if coinc1 and coinc2: break # # clear offsets applied to object # thisseglistdict.offsets.clear() # # clear offsets applied to bins # bin1.size.offsets.clear() bin2.size.offsets.clear() # # append smaller bins to list of bins # cafepacker.bins.append(bin1) cafepacker.bins.append(bin2) # # do not increment idx as we popped the large bin out of # cafepacker.bins # # # sort the bins in cafepacker # cafepacker.bins.sort() return cafepacker | d665ff5b1086411540dd407fbadfcc4d28eb43e6 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/d665ff5b1086411540dd407fbadfcc4d28eb43e6/ligolw_cafe.py |
pattern = "%%s%%0%dd.cache" % int(log10(len(bins)) + 1) | pattern = "%%s%%0%dd.cache" % int(math.log10(len(bins)) + 1) | def write_caches(base, bins, instruments, verbose = False): filenames = [] if len(bins): pattern = "%%s%%0%dd.cache" % int(log10(len(bins)) + 1) for n, bin in enumerate(bins): filename = pattern % (base, n) filenames.append(filename) if verbose: print >>sys.stderr, "writing %s ..." % filename f = file(filename, "w") for cacheentry in bin.objects: if instruments & set(cacheentry.to_segmentlistdict().keys()): print >>f, str(cacheentry) return filenames | d665ff5b1086411540dd407fbadfcc4d28eb43e6 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/d665ff5b1086411540dd407fbadfcc4d28eb43e6/ligolw_cafe.py |
html_filename = prefix + opts.suffix +".html" | html_filename = prefix + opts.suffix +".html" | def write_html_output(opts, args, fnameList, tagLists, \ doThumb=True, mapList = [],\ comment=None, CoincSummTable=None,\ html_tag = '', add_box_flag=False): """ @param opts: The options from the calling code @param args: The args from the calling code @param fnameList: A list of the filenames @param tagLists: A list for the tags, getting added to the links @param doThumb: Uses the _thumb file as the sourcs for the images @param mapList: A list of dictionaries to create the image maps @html_tag: tag to add to html filename @add_box_flag: Adds _OPEN_BOX to the html file name if any of the files in filelist have "_OPEN_BOX" in their name. Otherwise, will add "_CLOSED_BOX" to the file name. These flags go between opts.prefix and opts.suffix """ prefix = opts.prefix # add the html_tag if desired if html_tag != '': prefix += '_' + html_tag # add the box-flag to the prefix if desired if add_box_flag: box_flag = '' if any(fname for fname in fnameList if 'OPEN_BOX' in fname): box_flag ='_OPEN_BOX' else: box_flag = '_CLOSED_BOX' # add the box flag to the prefix prefix += box_flag # -- the HTML document and output cache file # -- initialise the web page calling init_page page, extra = init_markup_page(opts) page.h1(opts.name + " results") page.p(prefix + opts.suffix) page.hr() # -- filename html_filename = prefix + opts.suffix +".html" if opts.output_path: html_filename = opts.output_path + html_filename html_file = file(html_filename, "w") # loop over the contents for tag,filename in zip(tagLists,fnameList): # set the correct name for linking (two '//' does not bother) fname = "Images/" + os.path.basename(filename) # set the thumbnail pictures if required if doThumb: fname_thumb = fname[:-4] + "_thumb.png" else: fname_thumb =fname # add the image to tge page page.a(extra.img(src=[fname_thumb], width=400, \ alt=tag, border="2"), title=tag, href=[ fname]) page.add("<hr/>") # add maps to this page if len(mapList)>0: m=0 for mapDict in mapList: m+=1 page.add( mapDict['text']+'<br>' ) page.add( '<IMG src="%s" width=800px '\ 'usemap="#map%d">' % ( mapDict['object'], m) ) page.add( '<MAP name="map%d"> <P>' % m ) n=0 for px, py, link in zip( mapDict['xCoords'], \ mapDict['yCoords'], \ mapDict['links']): n+=1 page.add( '<area href="%s" shape="circle" '\ 'coords="%d, %d, 5"> Point%d</a>' %\ ( link, px, py, n) ) page.add('</P></MAP></OBJECT><br>') page.add("<hr/>") if opts.enable_output: if comment is not None: page.add("<div> "+comment+"</div>") page.hr() if CoincSummTable is not None: page.add(CoincSummTable) page.hr() text = writeProcessParams( opts.name, opts.version, args) page.add(text) html_file.write(page(False)) html_file.close() return html_filename | fa6ee53541fd0c20802a47ce5fd5ed270fff5deb /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/fa6ee53541fd0c20802a47ce5fd5ed270fff5deb/InspiralUtils.py |
qscanList = stfu_pipe.getParamsFromCache(eval("opts.qscan_cache_" + type_string),type) | qscanList = getParamsFromCache(eval("opts.qscan_cache_" + type_string),type) | def getQscanTable(opts,type): summary = readSummaryFiles() if "FG" in type: type_string = "foreground" elif "BG" in type: type_string = "background" if eval("opts.qscan_cache_" + type_string): qscanList = stfu_pipe.getParamsFromCache(eval("opts.qscan_cache_" + type_string),type) else: try: inputPath = eval("opts." + type_string + "_input_path") qscanList = parseDirectoryList(inputPath) except: print >> sys.stderr, "cannot get input path for " + type_string print >> sys.stderr, "specify at least one of the following options:" print >> sys.stderr, "--qscan-cache, --" + type_string + "-input-path" sys.exit(1) table = summary.parseQscanList(qscanList) # perform a sanity check if not (len(table['channel_name']) == len(table['qscan_dir'])): print >> sys.stderr, "the length of channel_name does not match the length of qscan_dir in the " + type_string + " table" print >> sys.stderr, "check for data corruption in the qscan summary files in the " + type_string + " table" sys.exit(1) return table | 42c544601128c09e8288999f42f6491129a468cb /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/42c544601128c09e8288999f42f6491129a468cb/analyseQscan.py |
candidates_path = stfu_pipe.getParamsFromCache(opts.qscan_cache_foreground,type,ifo,time_string) | candidates_path = getParamsFromCache(opts.qscan_cache_foreground,type,ifo,time_string) | def plotHistogram(chan,opts,distribution,histoList,binList,percentiles=None,candidate=None,candidateRank=None): parameter = distribution.split('-')[0] step = binList[1] - binList[0] counter = sum(histoList) xlimInf = min(binList) if candidate and not parameter == 'dt': xlimSup = max(max(binList),candidate + 2.0) else: xlimSup = max(binList) pylab.figure() # semilogy(bins + step/2., z_dist+0.0001, 'r^',markerfacecolor="b",markersize=12) # plot(bins + step/2., z_dist) pylab.bar(binList[0:len(binList)-1], histoList, width=step, bottom=0) if percentiles: line1 = pylab.axvline(x=percentiles[0], ymin=0, ymax=max(histoList), color='g', label='50th percentile', linewidth=2, linestyle='--') line2 = pylab.axvline(x=percentiles[1], ymin=0, ymax=max(histoList), color='m', label='95th percentile', linewidth=2, linestyle='--') line3 = pylab.axvline(x=percentiles[2], ymin=0, ymax=max(histoList), color='r', label='99th percentile', linewidth=2, linestyle='--') if parameter == 'dt': pylab.axvline(x=-percentiles[0], ymin=0, ymax=max(histoList), color='g', label='50th percentile', linewidth=2, linestyle='--') pylab.axvline(x=-percentiles[1], ymin=0, ymax=max(histoList), color='m', label='95th percentile', linewidth=2, linestyle='--') pylab.axvline(x=-percentiles[2], ymin=0, ymax=max(histoList), color='r', label='99th percentile', linewidth=2, linestyle='--') if candidate: line0 = pylab.axvline(x=candidate, ymin=0, ymax=max(histoList), color='k', label='candidate value (%s percentile)' % (candidateRank), linewidth=2, linestyle='-') if percentiles and candidate: pylab.legend((line0,line1,line2,line3),('candidate','50%','95%','99%'),loc = 'upper right') if percentiles and not candidate: pylab.legend((line1,line2,line3),('50%','95%','99%'),loc = 'upper right') pylab.xlim(xlimInf,xlimSup) pylab.xlabel(parameter + ' value',size='large') # ylabel(r'#',size='x-large') pylab.grid() pylab.title("Histogram of the " + parameter + " value for " + chan + ', Statistics = ' + str(counter)) figText = chan.split(':')[0] + '_' + chan.split(':')[1] + '_' + parameter + '_dist' figFileName = InspiralUtils.set_figure_name(opts,figText) InspiralUtils.savefig_pylal(figFileName) pylab.close() return figFileName | 42c544601128c09e8288999f42f6491129a468cb /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/42c544601128c09e8288999f42f6491129a468cb/analyseQscan.py |
filename = "%s-STRING_LIKELIHOOD_%s-%d-%d.xml.gz" % (cache_entry.observatory, cache_entry.description, int(cache_entry.segment[0]), int(abs(cache_entry.segment))) | filename = os.path.join(self.output_dir, "%s-STRING_LIKELIHOOD_%s-%d-%d.xml.gz" % (cache_entry.observatory, cache_entry.description, int(cache_entry.segment[0]), int(abs(cache_entry.segment)))) | def set_output(self, description): if self.output_cache: raise AttributeError, "cannot change attributes after computing output cache" cache_entry = power.make_cache_entry(self.input_cache, description, "") filename = "%s-STRING_LIKELIHOOD_%s-%d-%d.xml.gz" % (cache_entry.observatory, cache_entry.description, int(cache_entry.segment[0]), int(abs(cache_entry.segment))) self.add_var_opt("output", filename) cache_entry.url = "file://localhost" + os.path.abspath(filename) del self.output_cache[:] self.output_cache.append(cache_entry) return filename | 7a4f024f8dc816ab58a8babf63ba6bf956b5262e /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/7a4f024f8dc816ab58a8babf63ba6bf956b5262e/cosmicstring.py |
self.triggers_dir = power.get_triggers_dir(config_parser) | self.output_dir = power.get_triggers_dir(config_parser) | def __init__(self,config_parser): """ config_parser = ConfigParser object from which options are read. """ pipeline.CondorDAGJob.__init__(self, power.get_universe(config_parser), power.get_executable(config_parser, "lalapps_StringSearch")) pipeline.AnalysisJob.__init__(self, config_parser) self.add_ini_opts(config_parser, "lalapps_StringSearch") self.set_stdout_file(os.path.join(power.get_out_dir(config_parser), "lalapps_StringSearch-$(cluster)-$(process).out")) self.set_stderr_file(os.path.join(power.get_out_dir(config_parser), "lalapps_StringSearch-$(cluster)-$(process).err")) self.set_sub_file("lalapps_StringSearch.sub") self.add_condor_cmd("Requirements", "Memory > 1100") | 7a4f024f8dc816ab58a8babf63ba6bf956b5262e /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/7a4f024f8dc816ab58a8babf63ba6bf956b5262e/cosmicstring.py |
self.triggers_dir = self.job().triggers_dir | self.output_dir = os.path.join(os.getcwd(), self.job().output_dir) | def __init__(self,job): """ job = A CondorDAGJob that can run an instance of lalapps_StringSearch. """ pipeline.CondorDAGNode.__init__(self,job) pipeline.AnalysisNode.__init__(self) self.__usertag = job.get_config('pipeline','user_tag') self.output_cache = [] self.triggers_dir = self.job().triggers_dir | 7a4f024f8dc816ab58a8babf63ba6bf956b5262e /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/7a4f024f8dc816ab58a8babf63ba6bf956b5262e/cosmicstring.py |
self.set_output(os.path.join(self.triggers_dir, "%s-STRINGSEARCH_%s-%d-%d.xml.gz" % (self.get_ifo(), self.__usertag, int(self.get_start()), int(self.get_end()) - int(self.get_start())))) | self.set_output(os.path.join(self.output_dir, "%s-STRINGSEARCH_%s-%d-%d.xml.gz" % (self.get_ifo(), self.__usertag, int(self.get_start()), int(self.get_end()) - int(self.get_start())))) | def get_output(self): """ Returns the file name of output from the ring code. This must be kept synchronized with the name of the output file in ring.c. """ if self._AnalysisNode__output is None: if None in (self.get_start(), self.get_end(), self.get_ifo(), self.__usertag): raise ValueError, "start time, end time, ifo, or user tag has not been set" seg = segments.segment(LIGOTimeGPS(self.get_start()), LIGOTimeGPS(self.get_end())) self.set_output(os.path.join(self.triggers_dir, "%s-STRINGSEARCH_%s-%d-%d.xml.gz" % (self.get_ifo(), self.__usertag, int(self.get_start()), int(self.get_end()) - int(self.get_start())))) | 7a4f024f8dc816ab58a8babf63ba6bf956b5262e /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/7a4f024f8dc816ab58a8babf63ba6bf956b5262e/cosmicstring.py |
self.coincs = [tuple(event.event_id for event in sorted(double, lambda a, b: cmp(a.ifo, b.ifo))) for double in CoincidentNTuples(eventlists, event_comparefunc, offset_instruments, thresholds, verbose = verbose)] self.coincs.sort() | self.coincs = sorted(tuple(event.event_id for event in sorted(double, lambda a, b: cmp(a.ifo, b.ifo))) for double in CoincidentNTuples(eventlists, event_comparefunc, offset_instruments, thresholds, verbose = verbose)) | def get_coincs(self, eventlists, event_comparefunc, thresholds, verbose = False): # # has this node already been visited? if so, return the # answer we already know # | da3b5dcf0ac7a0a533ee6fcab63714a7944dfe42 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/da3b5dcf0ac7a0a533ee6fcab63714a7944dfe42/snglcoinc.py |
self.unused_coincs = reduce(lambda a, b: a & b, (component.unused_coincs for component in self.components)) | reduce(lambda a, b: a | b, (set(component.coincs) for component in self.components)) | self.unused_coincs = reduce(lambda a, b: a | b, (set(component.get_coincs(eventlists, event_comparefunc, thresholds, verbose = verbose)) for component in self.components)) self.unused_coincs |= reduce(lambda a, b: a & b, (component.unused_coincs for component in self.components)) | def get_coincs(self, eventlists, event_comparefunc, thresholds, verbose = False): # # has this node already been visited? if so, return the # answer we already know # | da3b5dcf0ac7a0a533ee6fcab63714a7944dfe42 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/da3b5dcf0ac7a0a533ee6fcab63714a7944dfe42/snglcoinc.py |
raise GitInvocationError, 'failed to run "%s"' % command | raise GitInvocationError, 'failed to run "%s"' % " ".join(command) | def check_call_out(command): """ Run the given command (with shell=False) and return the output as a string. Strip the output of enclosing whitespace. If the return code is non-zero, throw GitInvocationError. """ # start external command process p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE) # get outputs out, _ = p.communicate() # throw exception if process failed if p.returncode != 0: raise GitInvocationError, 'failed to run "%s"' % command return out.strip() | a2bba870c57a91341f69046ff001819aca781dd8 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/a2bba870c57a91341f69046ff001819aca781dd8/determine_git_version.py |
raise ValueError r'param-name cannot have "\n","\t", "DROP", or "DELETE" in it' | raise ValueError, r'param-name cannot have "\n","\t", "DROP", or "DELETE" in it' | def __init__( self, table_name, table_param, param_ranges_opt, verbose = False ): """ Parse --param-ranges option. Creates self.param which is the table_name and the table_param appended together (with a '.') and self.param_ranges, which is a list of tuples that give the lower parameter value, whether it is an open or closed boundary, and the same for the upper parameter. For example, if table_name is coinc_inspiral, table_param is mchirp and param_ranges_opt is '[2,8);[8,17]' will get: self.param = 'coinc_inspiral.mchirp' self.param_ranges = [ ( ('>=',2.0), ('<',8.0) ), ( ('>=',8.0), ('<=', 17.0) ) ] | db054ba913f1595e68475e1eccc579881eedb7e8 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/db054ba913f1595e68475e1eccc579881eedb7e8/ligolw_sqlutils.py |
hwInjNode.set_output_file(os.path.join(hw_inj_dir, outfilename)) | hwInjNode.set_output_file(outfilename) | def hwinj_page_setup(cp,ifos,veto_categories,hw_inj_dir): """ run ligolw_cbc_hardware injection page, soring the input and output in the subdirectory hardware_injection_summary """ hwInjNodes = [] hwinj_length = cp.getint("input","gps-end-time") - cp.getint("input","gps-start-time") hwInjJob = inspiral.HWinjPageJob(cp) veto_categories.append(None) for veto in veto_categories: if cp.get("pipeline","user-tag"): usertag = cp.get("pipeline", "user-tag") + "_" + "FULL_DATA" else: usertag = "FULL_DATA" if veto: usertag += "_CAT_" + str(veto) + "_VETO" cacheFile = hipe_cache( ifos, usertag, cp.getint("input", "gps-start-time"), cp.getint("input", "gps-end-time") ) if not os.path.isfile(os.path.join("full_data", cacheFile)): print>>sys.stderr, "WARNING: Cache file FULL_DATA/" + cacheFile print>>sys.stderr, "does not exist! This might cause later failures." outfilename = os.path.join(hw_inj_dir, ''.join(ifos) + '-HWINJ_SUMMARY') if veto: outfilename += '_CAT_' + str(veto) outfilename += '-' + cp.get("input","gps-start-time") + '-' + str(hwinj_length) + '.html' hwInjNode = inspiral.HWinjPageNode(hwInjJob) hwInjNode.set_start(cp.get("input","gps-start-time")) hwInjNode.set_end(cp.get("input","gps-end-time")) hwInjNode.set_input_cache(os.path.join('full_data', cacheFile)) hwInjNode.set_cache_string('*COIRE_SECOND*') hwInjNode.set_source_xml(os.path.join(hw_inj_dir,cp.get("hardware-injections", "hwinj-def-file"))) hwInjNode.set_segment_dir(hw_inj_dir) hwInjNode.set_output_file(os.path.join(hw_inj_dir, outfilename)) hwInjNode.add_var_opt('analyze-injections','') for ifo in ifos: hwInjNode.add_var_opt(ifo.lower()+'-injections','') hwInjNodes.append(hwInjNode) return hwInjNodes | 7c958f7f3caa1fa4c2fb0a978e2c88fd0201960c /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/7c958f7f3caa1fa4c2fb0a978e2c88fd0201960c/inspiralutils.py |
if pidfile_pid.isdigit() and glue.utils.pid_exists(int(pidfile_pid)): raise RuntimeError, ("pidfile %s contains pid (%s) of a running " "process" % (lockfile, pidfile_pid)) else: print ("pidfile %s contains stale pid %s; writing new lock" % (lockfile, pidfile_pid)) | if pidfile_pid.isdigit(): if glue.utils.pid_exists(int(pidfile_pid)): raise RuntimeError, ("pidfile %s contains pid (%s) of a running " "process" % (lockfile, pidfile_pid)) else: print ("pidfile %s contains stale pid %s; writing new lock" % (lockfile, pidfile_pid)) | def get_lock(lockfile): """ Tries to write a lockfile containing the current pid. Excepts if the lockfile already contains the pid of a running process. Although this should prevent a lock from being granted twice, it can theoretically deny a lock unjustly in the unlikely event that the original process is gone but another unrelated process has been assigned the same pid by the OS. """ pidfile = open(lockfile, "a+") # here we do some meta-locking by getting an exclusive lock on the # pidfile before reading it, to prevent two daemons from seeing a # stale lock at the same time, and both trying to run try: fcntl.flock(pidfile.fileno(), fcntl.LOCK_EX|fcntl.LOCK_NB) except IOError,e: raise RuntimeError, "failed to lock %s: %s" % (lockfile, e) # we got the file lock, so check the pid therein pidfile.seek(0) pidfile_pid = pidfile.readline().strip() if pidfile_pid.isdigit() and glue.utils.pid_exists(int(pidfile_pid)): raise RuntimeError, ("pidfile %s contains pid (%s) of a running " "process" % (lockfile, pidfile_pid)) else: print ("pidfile %s contains stale pid %s; writing new lock" % (lockfile, pidfile_pid)) # the pidfile didn't exist or was stale, so grab a new lock pidfile.truncate(0) pidfile.write("%d\n" % os.getpid()) pidfile.close() # should be entirely unecessary, but paranoia always served me well confirm_lock(lockfile) return True | 92643f48d0ca124ff21dcb6607a2650036792d4e /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/92643f48d0ca124ff21dcb6607a2650036792d4e/pidfile.py |
os.path.join("bin", "search_upper_limit_by_s1_s2"), | os.path.join("bin", "search_upper_limit_by_s1z_s2z"), os.path.join("bin", "search_volume_by_s1z_s2z"), | def run(self): # remove the automatically generated user env scripts for script in ["pylal-user-env.sh", "pylal-user-env.csh"]: log.info("removing " + script ) try: os.unlink(os.path.join("etc", script)) except: pass | e4ec28f741e70913dea624c706c78235785d2e96 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/e4ec28f741e70913dea624c706c78235785d2e96/setup.py |
plot(getinjpar(injection,0),getinjpar(injection,1),'go',scalex=False,scaley=False) | plot([getinjpar(injection,0)],[getinjpar(injection,1)],'go',scalex=False,scaley=False) | def plot2Dkernel(xdat,ydat,Nx,Ny): xax=linspace(min(xdat),max(xdat),Nx) yax=linspace(min(ydat),max(ydat),Ny) x,y=numpy.meshgrid(xax,yax) samp=array([xdat,ydat]) kde=stats.kde.gaussian_kde(samp) grid_coords = numpy.append(x.reshape(-1,1),y.reshape(-1,1),axis=1) z = kde(grid_coords.T) z = z.reshape(Nx,Ny) asp=xax.ptp()/yax.ptp() | 0831130907772a52407e021196488edd437a353b /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/0831130907772a52407e021196488edd437a353b/OddsPostProc.py |
plot(getinjpar(injection,5),getinjpar(injection,6),'go',scalex=False,scaley=False) | print 'getinjpar(5),getinjpar(6) = %f,%f\n'%(getinjpar(injection,5),getinjpar(injection,6)) plot([getinjpar(injection,5)],[getinjpar(injection,6)],'go',scalex=False,scaley=False) | def plot2Dkernel(xdat,ydat,Nx,Ny): xax=linspace(min(xdat),max(xdat),Nx) yax=linspace(min(ydat),max(ydat),Ny) x,y=numpy.meshgrid(xax,yax) samp=array([xdat,ydat]) kde=stats.kde.gaussian_kde(samp) grid_coords = numpy.append(x.reshape(-1,1),y.reshape(-1,1),axis=1) z = kde(grid_coords.T) z = z.reshape(Nx,Ny) asp=xax.ptp()/yax.ptp() | 0831130907772a52407e021196488edd437a353b /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/0831130907772a52407e021196488edd437a353b/OddsPostProc.py |
if injection and getinjpar(injection,7)<max(pos[:,7]) and getinjpar(injection,7)>min(pos[:,7]) and getinjpar(injection,8)<max(pos[:,8]) and getinjpar(injection,8)>min(pos[:,8]): plot(getinjpar(injection,7),getinjpar(injection,8),'go',scalex=False,scaley=False) | if injection and getinjpar(injection,7)<max(pos[:,7]) and getinjpar(injection,7)>min(pos[:,7]) and getinjpar(injection,8)<max(pos[:,8]) and getinjpar(injection,8)>min(pos[:,8]): plot([getinjpar(injection,7)],[getinjpar(injection,8)],'go',scalex=False,scaley=False) | def plot2Dkernel(xdat,ydat,Nx,Ny): xax=linspace(min(xdat),max(xdat),Nx) yax=linspace(min(ydat),max(ydat),Ny) x,y=numpy.meshgrid(xax,yax) samp=array([xdat,ydat]) kde=stats.kde.gaussian_kde(samp) grid_coords = numpy.append(x.reshape(-1,1),y.reshape(-1,1),axis=1) z = kde(grid_coords.T) z = z.reshape(Nx,Ny) asp=xax.ptp()/yax.ptp() | 0831130907772a52407e021196488edd437a353b /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/0831130907772a52407e021196488edd437a353b/OddsPostProc.py |
plot(injection.mass1,injection.mass2,'go',scalex=False,scaley=False) | plot([injection.mass1],[injection.mass2],'go',scalex=False,scaley=False) | def plot2Dkernel(xdat,ydat,Nx,Ny): xax=linspace(min(xdat),max(xdat),Nx) yax=linspace(min(ydat),max(ydat),Ny) x,y=numpy.meshgrid(xax,yax) samp=array([xdat,ydat]) kde=stats.kde.gaussian_kde(samp) grid_coords = numpy.append(x.reshape(-1,1),y.reshape(-1,1),axis=1) z = kde(grid_coords.T) z = z.reshape(Nx,Ny) asp=xax.ptp()/yax.ptp() | 0831130907772a52407e021196488edd437a353b /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/0831130907772a52407e021196488edd437a353b/OddsPostProc.py |
plot(injection.mass1,injection.distance,'go',scalex=False,scaley=False) | plot([injection.mass1],[injection.distance],'go',scalex=False,scaley=False) | def plot2Dkernel(xdat,ydat,Nx,Ny): xax=linspace(min(xdat),max(xdat),Nx) yax=linspace(min(ydat),max(ydat),Ny) x,y=numpy.meshgrid(xax,yax) samp=array([xdat,ydat]) kde=stats.kde.gaussian_kde(samp) grid_coords = numpy.append(x.reshape(-1,1),y.reshape(-1,1),axis=1) z = kde(grid_coords.T) z = z.reshape(Nx,Ny) asp=xax.ptp()/yax.ptp() | 0831130907772a52407e021196488edd437a353b /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/0831130907772a52407e021196488edd437a353b/OddsPostProc.py |
plot(getinjpar(injection,4),getinjpar(injection,8),'go',scalex=False,scaley=False) | plot([getinjpar(injection,4)],[getinjpar(injection,8)],'go',scalex=False,scaley=False) | def plot2Dkernel(xdat,ydat,Nx,Ny): xax=linspace(min(xdat),max(xdat),Nx) yax=linspace(min(ydat),max(ydat),Ny) x,y=numpy.meshgrid(xax,yax) samp=array([xdat,ydat]) kde=stats.kde.gaussian_kde(samp) grid_coords = numpy.append(x.reshape(-1,1),y.reshape(-1,1),axis=1) z = kde(grid_coords.T) z = z.reshape(Nx,Ny) asp=xax.ptp()/yax.ptp() | 0831130907772a52407e021196488edd437a353b /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/0831130907772a52407e021196488edd437a353b/OddsPostProc.py |
plot(getinjpar(injection,i),getinjpar(injection,j),'go',scalex=False,scaley=False) | plot([getinjpar(injection,i)],[getinjpar(injection,j)],'go',scalex=False,scaley=False) | def plot2Dkernel(xdat,ydat,Nx,Ny): xax=linspace(min(xdat),max(xdat),Nx) yax=linspace(min(ydat),max(ydat),Ny) x,y=numpy.meshgrid(xax,yax) samp=array([xdat,ydat]) kde=stats.kde.gaussian_kde(samp) grid_coords = numpy.append(x.reshape(-1,1),y.reshape(-1,1),axis=1) z = kde(grid_coords.T) z = z.reshape(Nx,Ny) asp=xax.ptp()/yax.ptp() | 0831130907772a52407e021196488edd437a353b /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/0831130907772a52407e021196488edd437a353b/OddsPostProc.py |
htmlfile.write('<td width=30%><img width=100% src="skymap.png"></td>') htmlfile.write('<td width=30%><img width=100% src="m2dist.png"></td>') | if opts.skyres is not None: htmlfile.write('<td width=30%><img width=100% src="skymap.png"></td>') else: htmlfile.write('<td width=30%><img width=100% src="psiiota.png"></td>') htmlfile.write('<td width=30%><img width=100% src="Diota.png"></td>') | def plot2Dkernel(xdat,ydat,Nx,Ny): xax=linspace(min(xdat),max(xdat),Nx) yax=linspace(min(ydat),max(ydat),Ny) x,y=numpy.meshgrid(xax,yax) samp=array([xdat,ydat]) kde=stats.kde.gaussian_kde(samp) grid_coords = numpy.append(x.reshape(-1,1),y.reshape(-1,1),axis=1) z = kde(grid_coords.T) z = z.reshape(Nx,Ny) asp=xax.ptp()/yax.ptp() | 0831130907772a52407e021196488edd437a353b /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/0831130907772a52407e021196488edd437a353b/OddsPostProc.py |
gkde=stats.gaussian_kde(pos[:,i]) ind=linspace(min(pos[:,i]),max(pos[:,i]),101) kdepdf=gkde.evaluate(ind) plot(ind,kdepdf,label='density estimate') | if size(unique(pos[:,i]))>1: gkde=stats.gaussian_kde(pos[:,i]) ind=linspace(min(pos[:,i]),max(pos[:,i]),101) kdepdf=gkde.evaluate(ind) plot(ind,kdepdf,label='density estimate') | def plot2Dkernel(xdat,ydat,Nx,Ny): xax=linspace(min(xdat),max(xdat),Nx) yax=linspace(min(ydat),max(ydat),Ny) x,y=numpy.meshgrid(xax,yax) samp=array([xdat,ydat]) kde=stats.kde.gaussian_kde(samp) grid_coords = numpy.append(x.reshape(-1,1),y.reshape(-1,1),axis=1) z = kde(grid_coords.T) z = z.reshape(Nx,Ny) asp=xax.ptp()/yax.ptp() | 0831130907772a52407e021196488edd437a353b /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/0831130907772a52407e021196488edd437a353b/OddsPostProc.py |
shiftString=shift | shiftString=shiftLabel[shift] | def getFOMLinks(gpsTime=int(0),ifo=("default")): """ Simple method returns a list of links to FOMs ordered by FOM # The list is 2D ie: [['ifo,shift',LINKtoImage,LinktoThumb],['ifo,shift',LinktoImage,LinkToThumb]...] images marked [Eve,Owl,Day] via [p3,p2,p1] in filenames this methd only for S6 and later IFO naming start dates: There were three naming conventions mixed, then p1,p2,p3 and lastly Day,Eve,Owl LHO: 20090724 :: 932428815 LLO: 20090708 :: 931046415 """ urls={ "DEFAULT":"http://www.ligo.caltech.edu/~pshawhan/scilinks.html", "V1":"http://wwwcascina.virgo.infn.it/DetectorOperations/index.htm", "L1":"https://llocds.ligo-la.caltech.edu/scirun/S6/robofom/%s/%s%s_FOM%i%s.gif", "H1":"http://lhocds.ligo-wa.caltech.edu/scirun/S6/robofom/%s/%s%s_FOM%i%s.gif", "H2":"http://lhocds.ligo-wa.caltech.edu/scirun/S6/robofom/%s/%s%s_FOM%i%s.gif" } ifoTag=ifo.upper() shiftDuration=8; #Give the IFO and shift start hour as integer shiftStandardTime={'L1':{'day':14,'eve':22,'owl':6}, 'H1':{'day':16,'eve':0,'owl':8}, 'H2':{'day':16,'eve':0,'owl':8}, 'V1':{'day':6,'eve':14,'owl':22}} shiftOrder=['day','eve','owl'] shiftLabel={'day':'p1','eve':'p3','owl':'p2'} outputURLs=list() if ((ifo==None) or (gpsTime==None)): sys.stdout.write("getFOMLinks called incorrectly \ | ec919a691dca5cdb69170a2ea0a8c295483bfc2f /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/ec919a691dca5cdb69170a2ea0a8c295483bfc2f/stfu_pipe.py |
|| def update_ids(xmldoc, connection, verbose = False): """ For internal use only. """ table_elems = xmldoc.getElementsByTagName(ligolw.Table.tagName) for i, tbl in enumerate(table_elems): if verbose: print >>sys.stderr, "updating IDs: %d%%\r" % (100 * i / len(table_elems)), tbl.applyKeyMapping() if verbose: print >>sys.stderr, "updating IDs: 100%" # reset ID mapping for next document dbtables.idmap_reset(connection) | acb14f4da9f7d6adcfd5633d711e4650897950a7 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/acb14f4da9f7d6adcfd5633d711e4650897950a7/ligolw_sqlite.py
self.set_stderr_file('logs/' + exec_namee + '-$(cluster)-$(process).err') | self.set_stderr_file('logs/' + exec_name + '-$(cluster)-$(process).err') | def __init__(self, cp, dax = False): """ cp: ConfigParser object from which options are read. """ exec_name = "mvsc_get_doubles" universe = "vanilla" executable = cp.get('condor',exec_name) pipeline.CondorDAGJob.__init__(self, universe, executable) pipeline.AnalysisJob.__init__(self, cp, dax) self.add_condor_cmd('getenv','True') self.add_condor_cmd('environment',"KMP_LIBRARY=serial;MKL_SERIAL=yes") self.set_stdout_file('logs/' + exec_name + '-$(cluster)-$(process).out') self.set_stderr_file('logs/' + exec_namee + '-$(cluster)-$(process).err') self.set_sub_file(exec_name + '.sub') | 05168fcc33be29301cc0a9702cd5e8c93d815a6e /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/05168fcc33be29301cc0a9702cd5e8c93d815a6e/inspiral.py |
for i in range(number): | for i in range(self.number): | def finalize(self): """ finalize the mvsc_get_doubles node """ self.add_var_opt("instruments", self.instruments) self.add_var_opt("trainingstr", self.trainingstr) self.add_var_opt("testingstr", self.testingstr) self.add_var_opt("zerolagstr", self.zerolagstr) for database in self.databases: self.add_file_arg(database) ifos = self.instruments.strip().split(',') ifos.sort() self.out_file_group = {} for i in range(number): trainingname = ''.join(ifos) + '_set' + str(i) + '_' + str(self.trainingstr) + '.pat' testingname = ''.join(ifos) + '_set' + str(i) + '_' + str(self.testingstr) + '.pat' infoname = ''.join(ifos) + '_set' + str(i) + '_' + str(self.testingstr) + '_info.pat' self.out_file_group[i] = ((trainingname), (testingname)) self.add_output_file(trainingname) self.add_output_file(testingname) self.add_output_file(infoname) self.zerolag_file = [''.join(ifos) + '_' + str(self.zerolagstr) + '.pat'] self.add_output_file(''.join(ifos) + '_' + str(self.zerolagstr) + '.pat') self.add_output_file(''.join(ifos) + '_' + str(self.zerolagstr) + '_info.pat') | 05168fcc33be29301cc0a9702cd5e8c93d815a6e /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/05168fcc33be29301cc0a9702cd5e8c93d815a6e/inspiral.py |
self.set_stderr_file('logs/' + exec_namee + '-$(cluster)-$(process).err') | self.set_stderr_file('logs/' + exec_name + '-$(cluster)-$(process).err') | def __init__(self, cp, dax = False): """ cp: ConfigParser object from which options are read. """ exec_name = "mvsc_train_forest" universe = "vanilla" executable = cp.get('condor',exec_name) pipeline.CondorDAGJob.__init__(self, universe, executable) pipeline.AnalysisJob.__init__(self, cp, dax) self.add_condor_cmd('getenv','True') self.add_condor_cmd('environment',"KMP_LIBRARY=serial;MKL_SERIAL=yes") self.set_stdout_file('logs/' + exec_name + '-$(cluster)-$(process).out') self.set_stderr_file('logs/' + exec_namee + '-$(cluster)-$(process).err') self.set_sub_file(exec_name + '.sub') | 05168fcc33be29301cc0a9702cd5e8c93d815a6e /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/05168fcc33be29301cc0a9702cd5e8c93d815a6e/inspiral.py |
""" trainingfile: take a single file to train with """ self.trainingfile = trainingfile self.add_input_file(self.trainingfile) | """ trainingfile: take a single file to train with """ self.trainingfile = trainingfile self.add_input_file(self.trainingfile) | def add_training_file(self, trainingfile): | 05168fcc33be29301cc0a9702cd5e8c93d815a6e /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/05168fcc33be29301cc0a9702cd5e8c93d815a6e/inspiral.py |
def __init__(self, cp, dax = False): | def __init__(self, cp, dax = False): | def __init__(self, cp, dax = False): | 05168fcc33be29301cc0a9702cd5e8c93d815a6e /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/05168fcc33be29301cc0a9702cd5e8c93d815a6e/inspiral.py |
self.set_stderr_file('logs/' + exec_namee + '-$(cluster)-$(process).err') | self.set_stderr_file('logs/' + exec_name + '-$(cluster)-$(process).err') | def __init__(self, cp, dax = False): | 05168fcc33be29301cc0a9702cd5e8c93d815a6e /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/05168fcc33be29301cc0a9702cd5e8c93d815a6e/inspiral.py |
self.add_file_arg("-A -a 1 %s %s %s" % (self.trainedforest, self.file_to_rank, self.ranked_file)) | self.add_file_arg("-A -a 4 %s %s %s" % (self.trainedforest, self.file_to_rank, self.ranked_file)) | def finalize(self): """ finalize the MvscUseForestNode """ self.ranked_file = self.file_to_rank.replace('.pat','.dat') self.add_file_arg("-A -a 1 %s %s %s" % (self.trainedforest, self.file_to_rank, self.ranked_file)) self.add_output_file(self.ranked_file) | 05168fcc33be29301cc0a9702cd5e8c93d815a6e /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/05168fcc33be29301cc0a9702cd5e8c93d815a6e/inspiral.py |
class MvscUpdateSqlJob(pipeline.CondorDAGJob): | class MvscUpdateSqlJob(pipeline.AnalysisJob, pipeline.CondorDAGJob): | def finalize(self): """ finalize the MvscUseForestNode """ self.ranked_file = self.file_to_rank.replace('.pat','.dat') self.add_file_arg("-A -a 1 %s %s %s" % (self.trainedforest, self.file_to_rank, self.ranked_file)) self.add_output_file(self.ranked_file) | 05168fcc33be29301cc0a9702cd5e8c93d815a6e /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/05168fcc33be29301cc0a9702cd5e8c93d815a6e/inspiral.py |
self.set_stderr_file('logs/' + exec_namee + '-$(cluster)-$(process).err') | self.set_stderr_file('logs/' + exec_name + '-$(cluster)-$(process).err') | def __init__(self, cp, dax = False): """ cp: ConfigParser object from which options are read. """ exec_name = "mvsc_update_sql" universe = "vanilla" executable = cp.get('condor',exec_name) pipeline.CondorDAGJob.__init__(self, universe, executable) pipeline.AnalysisJob.__init__(self, cp, dax) self.add_condor_cmd('getenv','True') self.add_condor_cmd('environment',"KMP_LIBRARY=serial;MKL_SERIAL=yes") self.set_stdout_file('logs/' + exec_name + '-$(cluster)-$(process).out') self.set_stderr_file('logs/' + exec_namee + '-$(cluster)-$(process).err') self.set_sub_file(exec_name + '.sub') | 05168fcc33be29301cc0a9702cd5e8c93d815a6e /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/05168fcc33be29301cc0a9702cd5e8c93d815a6e/inspiral.py |
class MvscUpdateSqlNode(pipeline.CondorDAGNode): | class MvscUpdateSqlNode(pipeline.AnalysisNode, pipeline.CondorDAGNode): | def __init__(self, cp, dax = False): """ cp: ConfigParser object from which options are read. """ exec_name = "mvsc_update_sql" universe = "vanilla" executable = cp.get('condor',exec_name) pipeline.CondorDAGJob.__init__(self, universe, executable) pipeline.AnalysisJob.__init__(self, cp, dax) self.add_condor_cmd('getenv','True') self.add_condor_cmd('environment',"KMP_LIBRARY=serial;MKL_SERIAL=yes") self.set_stdout_file('logs/' + exec_name + '-$(cluster)-$(process).out') self.set_stderr_file('logs/' + exec_namee + '-$(cluster)-$(process).err') self.set_sub_file(exec_name + '.sub') | 05168fcc33be29301cc0a9702cd5e8c93d815a6e /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/05168fcc33be29301cc0a9702cd5e8c93d815a6e/inspiral.py |
|| def get_delta_t_rss(latitude,longitude,coinc,reference_frequency=None): """ returns the rss timing error for a particular location in the sky (longitude,latitude) """ earth_center = (0.0,0.0,0.0) tref = {} tgeo={} for ifo in coinc.ifo_list: if reference_frequency: tFromRefFreq = get_signal_duration(ifo,coinc,reference_frequency) tref[ifo] = LIGOTimeGPS(int(tFromRefFreq), 1.e9*(tFromRefFreq-int(tFromRefFreq))) else: tref[ifo] = 0.0 #compute the geocentric time from each trigger tgeo[ifo] = coinc_dat.gps[ifo] - tref[ifo] - \ LIGOTimeGPS(0,1.0e9*date.XLALArrivalTimeDiff(detector_locations[ifo],\ earth_center,longitude,latitude,coinc.gps[ifo])) #compute differences in these geocentric times time={} delta_t_rms = 0.0 for ifos in coinc.ifo_coincs: time[ifos[0]+ifos[1]] = 1.0e-9*date.XLALGPSToINT8NS(tgeo[ifos[0]] - tgeo[ifos[1]]) delta_t_rms += time[ifos[0]+ifos[1]] * time[ifos[0]+ifos[1]] return sqrt(delta_t_rms) | 82d9b847712430a808c0151d33158a9f2c80ba8b /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/82d9b847712430a808c0151d33158a9f2c80ba8b/skylocutils.py
if (cpt[0]-fpt[0])*(cpt[0]-fpt[0]) <= coarseres/2 and \ (cpt[1]-fpt[1])*(cpt[1]-fpt[1])*abs(sin(fpt[1])) <= coarseres/2: | if (cpt[0]-fpt[0])*(cpt[0]-fpt[0]) <= ds*ds/4 and \ (cpt[1]-fpt[1])*(cpt[1]-fpt[1])*abs(sin(fpt[1]))*abs(sin(fpt[1])) \ <= ds*ds/4: | def map_grids(coarsegrid,finegrid,coarseres=4.0): """ takes the two grids (lists of lat/lon tuples) and returns a dictionary where the points in the coarse grid are the keys and lists of tuples of points in the fine grid are the values """ fgtemp = finegrid coarsedict = {} for cpt in coarsegrid: flist = [] for fpt in fgtemp: if (cpt[0]-fpt[0])*(cpt[0]-fpt[0]) <= coarseres/2 and \ (cpt[1]-fpt[1])*(cpt[1]-fpt[1])*abs(sin(fpt[1])) <= coarseres/2: flist.append(fpt) coarsedict[cpt] = flist for rpt in flist: fgtemp.remove(pt) return coarsedict | 82d9b847712430a808c0151d33158a9f2c80ba8b /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/82d9b847712430a808c0151d33158a9f2c80ba8b/skylocutils.py |
fgtemp.remove(pt) return coarsedict | fgtemp.remove(rpt) return coarsedict, fgtemp | def map_grids(coarsegrid,finegrid,coarseres=4.0): """ takes the two grids (lists of lat/lon tuples) and returns a dictionary where the points in the coarse grid are the keys and lists of tuples of points in the fine grid are the values """ fgtemp = finegrid coarsedict = {} for cpt in coarsegrid: flist = [] for fpt in fgtemp: if (cpt[0]-fpt[0])*(cpt[0]-fpt[0]) <= coarseres/2 and \ (cpt[1]-fpt[1])*(cpt[1]-fpt[1])*abs(sin(fpt[1])) <= coarseres/2: flist.append(fpt) coarsedict[cpt] = flist for rpt in flist: fgtemp.remove(pt) return coarsedict | 82d9b847712430a808c0151d33158a9f2c80ba8b /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/82d9b847712430a808c0151d33158a9f2c80ba8b/skylocutils.py |
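The corrected map_grids above fixes three things at once: the window test compares against ds*ds/4 with a sin-squared latitude weight, the inner loop removes the actual loop variable rpt (the original removed the undefined name pt), and the unmatched fine-grid points are returned alongside the mapping. A self-contained sketch of the corrected logic, where ds is assumed to be the coarse-grid spacing in the same units as the (lon, lat) tuples:

from math import sin

def map_grids_sketch(coarsegrid, finegrid, ds=4.0):
    fgtemp = list(finegrid)
    coarsedict = {}
    for cpt in coarsegrid:
        flist = [fpt for fpt in fgtemp
                 if (cpt[0] - fpt[0]) ** 2 <= ds * ds / 4.0
                 and (cpt[1] - fpt[1]) ** 2 * sin(fpt[1]) ** 2 <= ds * ds / 4.0]
        coarsedict[cpt] = flist
        for rpt in flist:
            fgtemp.remove(rpt)   # patched: remove rpt, not the undefined pt
    return coarsedict, fgtemp    # patched: leftover fine points come back too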
pipeline.CondorDAGJob.__init__(self,self.__universe,self.__executable) self.setupJob(name=self.name,dir=dir,cp=cp,tag_base=tag_base) | self.setupJob(name=self.name,dir=dir,cp=cp,tag_base=tag_base) | def __init__(self,opts,cp,dir='',tag_base=''): """ """ self.__executable = string.strip(cp.get('fu-condor','plotmcmc')) self.name = os.path.split(self.__executable.rstrip('/'))[1] self.__universe = "vanilla" | 3b80bb445789f475126530607b67bd534325dde5 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/3b80bb445789f475126530607b67bd534325dde5/stfu_pipe.py |
if '||' in line: tab.append(line.split('||')) | if '||' in line: tab.append(line.split('||')[1:]) | def wiki_table_parse(file): #FIXME assumes table files of the form # === title === # ||data||data|| # ||data||data|| tabs = [] titles = [] tab = [] for line in open(file).readlines(): if '===' in line: titles.append(line.replace("=","")) if tab: tabs.append(tab) tab = [] if '||' in line: tab.append(line.split('||')) tabs.append(tab) return tabs, titles | e0cf194d65477330c5eb4cd1396384f15444095e /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/e0cf194d65477330c5eb4cd1396384f15444095e/cbcwebpage.py |
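The slice added above exists because a MoinMoin-style table row starts with the delimiter itself, so str.split yields a spurious leading empty field. A quick demonstration:

row = "||data1||data2||\n"
print(row.split("||"))       # ['', 'data1', 'data2', '\n'] -- empty first cell
print(row.split("||")[1:])   # ['data1', 'data2', '\n'] -- what the table wants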
self.tr | self.add('<tr>') | def __init__(self, two_d_data, title="", caption="", tag="table", num="1"): markup.page.__init__(self, mode="strict_html") self.add("<br>") if title: self.b("%s. %s" %(num, title.upper()) ) self.table() for row in two_d_data: self.tr tdstr = "" for col in row: tdstr += "<td>%s</td>" % (str(col),) self.add(tdstr) self.tr.close() self.table.close() if self.caption: self.i("%s. %s" %(num, caption)) self.add("<br>") | e0cf194d65477330c5eb4cd1396384f15444095e /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/e0cf194d65477330c5eb4cd1396384f15444095e/cbcwebpage.py |
self.tr.close() | self.add('</tr>') | def __init__(self, two_d_data, title="", caption="", tag="table", num="1"): markup.page.__init__(self, mode="strict_html") self.add("<br>") if title: self.b("%s. %s" %(num, title.upper()) ) self.table() for row in two_d_data: self.tr tdstr = "" for col in row: tdstr += "<td>%s</td>" % (str(col),) self.add(tdstr) self.tr.close() self.table.close() if self.caption: self.i("%s. %s" %(num, caption)) self.add("<br>") | e0cf194d65477330c5eb4cd1396384f15444095e /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/e0cf194d65477330c5eb4cd1396384f15444095e/cbcwebpage.py |
htmlfile.write('<img src="'+paramnames[i]+'.png"><img src="'+paramnames[i]+'_samps.png><br>') | htmlfile.write('<img src="'+paramnames[i]+'.png"><img src="'+paramnames[i]+'_samps.png"><br>') | def plot2Dkernel(xdat,ydat,Nx,Ny): xax=linspace(min(xdat),max(xdat),Nx) yax=linspace(min(ydat),max(ydat),Ny) x,y=numpy.meshgrid(xax,yax) samp=array([xdat,ydat]) kde=stats.kde.gaussian_kde(samp) grid_coords = numpy.append(x.reshape(-1,1),y.reshape(-1,1),axis=1) z = kde(grid_coords.T) z = z.reshape(Nx,Ny) asp=xax.ptp()/yax.ptp() | c11ca7c9b15f66f09585f574c7f59c4643aef25c /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/c11ca7c9b15f66f09585f574c7f59c4643aef25c/OddsPostProc.py |
cacheSelected = cacheSelected.sieve(segment=segments.segment(math.floor(float(time)), math.ceil(float(time)))) | if math.floor(float(time)) != math.ceil(float(time)): cacheSelected = cacheSelected.sieve(segment=segments.segment(math.floor(float(time)), math.ceil(float(time)))) else: cacheSelected = cacheSelected.sieve(segment=segments.segment(math.floor(float(time))-0.5, math.floor(float(time))+0.5)) | def getParamsFromCache(fileName,type,ifo=None,time=None): qscanList = [] cacheList = lal.Cache.fromfile(open(fileName)) if not cacheList: return qscanList cacheSelected = cacheList.sieve(description=type,ifos=ifo) if time: cacheSelected = cacheSelected.sieve(segment=segments.segment(math.floor(float(time)), math.ceil(float(time)))) for cacheEntry in cacheSelected: path_output = cacheEntry.path() time_output = str(cacheEntry.segment[0]) type_output = cacheEntry.description ifo_output = cacheEntry.observatory qscanList.append([path_output,time_output,type_output,ifo_output]) return qscanList | 1d06c8f74136eb0c234bbd1f0b42590561f3cbb9 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/1d06c8f74136eb0c234bbd1f0b42590561f3cbb9/stfu_pipe.py |
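The special case added above handles integer GPS times: floor and ceil then coincide, making the original sieve window zero-width, so the patch recentres a one-second window on the time instead. The arithmetic in isolation:

import math

def sieve_window(t):
    lo, hi = math.floor(float(t)), math.ceil(float(t))
    if lo != hi:
        return (lo, hi)               # fractional time: unit-width window
    return (lo - 0.5, lo + 0.5)       # integral time: centred 1 s window

print(sieve_window(965348735.3))      # unit-width window around the time
print(sieve_window(965348735))        # widened instead of empty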
if triple_coinc is True: for ifo_3 in ifos: if ifos.index(ifo_3)>ifos.index(ifo_2): triples.append(ifo_1+ifo_2+ifo_3) | for ifo_3 in ifos: if ifos.index(ifo_3)>ifos.index(ifo_2): triples.append(ifo_1+ifo_2+ifo_3) | def coinc_segments(start,end,ifos): #== first, construct doubles and triples lists doubles=[] triples=[] for ifo_1 in ifos: for ifo_2 in ifos: if ifos.index(ifo_2)>ifos.index(ifo_1): doubles.append(ifo_1+ifo_2) if triple_coinc is True: for ifo_3 in ifos: if ifos.index(ifo_3)>ifos.index(ifo_2): triples.append(ifo_1+ifo_2+ifo_3) segments={} double_segments={} triple_segments={} #== grab science data for each ifo science_flag = {'H1':'H1:DMT-SCIENCE',\ 'H2':'H2:DMT-SCIENCE',\ 'L1':'L1:DMT-SCIENCE',\ 'V1':'V1:ITF_SCIENCEMODE'} for ifo in ifos: segments[ifo] = grab_segments(gps_start,\ gps_end,\ science_flag[ifo]) #== grab double-coincidence segments for double in doubles: ifo_1 = double[0:2] ifo_2 = double[2:4] double_segments[double] = segments[ifo_1] & segments[ifo_2] #== grab triple-coincidence segments for triple in triples: ifo_1=triple[0:2] ifo_2=triple[2:4] ifo_3=triple[4:6] #== grab concident segments triple_segments[triple] = segments[ifo_1] & segments[ifo_2] \ & segments[ifo_3] return segments,double_segments,triple_segments | 6952e29b535f5fb01b1e1257b1e74ed7ae6060b4 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/6952e29b535f5fb01b1e1257b1e74ed7ae6060b4/dqSegmentUtils.py |
segments[ifo] = grab_segments(gps_start,\ gps_end,\ | segments[ifo] = grab_segments(start,\ end,\ | def coinc_segments(start,end,ifos): #== first, construct doubles and triples lists doubles=[] triples=[] for ifo_1 in ifos: for ifo_2 in ifos: if ifos.index(ifo_2)>ifos.index(ifo_1): doubles.append(ifo_1+ifo_2) if triple_coinc is True: for ifo_3 in ifos: if ifos.index(ifo_3)>ifos.index(ifo_2): triples.append(ifo_1+ifo_2+ifo_3) segments={} double_segments={} triple_segments={} #== grab science data for each ifo science_flag = {'H1':'H1:DMT-SCIENCE',\ 'H2':'H2:DMT-SCIENCE',\ 'L1':'L1:DMT-SCIENCE',\ 'V1':'V1:ITF_SCIENCEMODE'} for ifo in ifos: segments[ifo] = grab_segments(gps_start,\ gps_end,\ science_flag[ifo]) #== grab double-coincidence segments for double in doubles: ifo_1 = double[0:2] ifo_2 = double[2:4] double_segments[double] = segments[ifo_1] & segments[ifo_2] #== grab triple-coincidence segments for triple in triples: ifo_1=triple[0:2] ifo_2=triple[2:4] ifo_3=triple[4:6] #== grab concident segments triple_segments[triple] = segments[ifo_1] & segments[ifo_2] \ & segments[ifo_3] return segments,double_segments,triple_segments | 6952e29b535f5fb01b1e1257b1e74ed7ae6060b4 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/6952e29b535f5fb01b1e1257b1e74ed7ae6060b4/dqSegmentUtils.py |
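Both rows above replace names that do not exist in the function's scope -- the undefined flag triple_coinc and the module-level-looking gps_start/gps_end -- with the function's own arguments. A toy reproduction of the failure mode being fixed:

def grab(start, end):
    return (gps_start, gps_end)   # wrong names: the parameters are start/end

try:
    grab(100, 200)
except NameError as e:
    print(e)                      # gps_start is not defined at call time

def grab_fixed(start, end):
    return (start, end)

print(grab_fixed(100, 200))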
y.segment_def_cdb = x.creator_db) AND \ NOT (segment.start_time > %s OR %s > segment.end_time) \ | y.segment_def_cdb = x.creator_db) \ | def getSciSegs(ifo=None, gpsStart=None, gpsStop=None, cut=bool(False), serverURL=None, segName="DMT-SCIENCE", seglenmin=None, segpading=0 | adc581022a281d7b14802f291e6460b387201eab /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/adc581022a281d7b14802f291e6460b387201eab/fu_utils.py |
sqlQuery=query01%(segName,ifo,gpsStop,gpsStart,gpsStop,gpsStart) | sqlQuery=query01%(segName,ifo,gpsStop,gpsStart) | def getSciSegs(ifo=None, gpsStart=None, gpsStop=None, cut=bool(False), serverURL=None, segName="DMT-SCIENCE", seglenmin=None, segpading=0 | adc581022a281d7b14802f291e6460b387201eab /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/adc581022a281d7b14802f291e6460b387201eab/fu_utils.py |
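This is the companion to the query edit above: two %s placeholders were removed from query01, so the argument tuple must shrink to match, otherwise %-formatting raises "not all arguments converted during string formatting". A tiny demonstration with a stand-in query string:

q = "flag=%s AND ifo=%s AND stop<=%s AND start>=%s"
print(q % ("DMT-SCIENCE", "H1", 999, 900))           # four slots, four args
try:
    q % ("DMT-SCIENCE", "H1", 999, 900, 999, 900)    # the old six-arg tuple
except TypeError as e:
    print(e)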
AND y.segment_def_cdb = x.creator_db \ | AND y.segment_def_cdb = x.creator_db \ | def __init__(self,LDBDServerURL=None,quiet=bool(False),pickle=None,blinded=False): """ This class setups of for connecting to a LDBD server specified at command line to do segment queries as part of the follow up pipeline. If the user does not specify the LDBD server to use the method will use the environment variable S6_SEGMENT_SERVER to determine who to query. The LDBD URL should be in the following form ldbd://myserver.domain.name:808080. You can specify the path to a background DQ pickle if the path is valid the class opens it otherwise it queries the segment DB and builds the pickle. Warning! recreating the DQ background is VERY slow. """ self.__connection__= None self.__engine__= None self.__installPath__=home_dir()+"/ctorres/followupbackgrounds/dq/" self.__blinded__=blinded self.__blindFlags__=[\ "DMT-INJECTION_INSPIRAL", "DMT-INJECTION"\ ] if pickle==None: self.__backgroundPickle__=None else: self.__backgroundPickle=pickle self.__backgroundPickle__=os.path.expanduser(self.__backgroundPickle__) self.__haveBackgroundDict__=bool(False) self.__havecategories__=bool(False) #A dict for a dict of GPStimes and list all flags seen self.__backgroundDict__=dict() self.__category__=dict() #Access a dict of dicts for a flag names with % stored self.__backgroundResults__=dict() self.__backgroundPoints__=int(1000) self.__columns__=["Ifo","Flag","Ver","Start","Offset",\ "Stop","Offset","Size","DQ Rank","Cat(s)"] #Dict should be a dict of lists self.__backgroundTimesDict__=dict() self.ifos=interferometers self.ifos.sort() self.triggerTime=int(-1) self.serverURL=defaultsegmentserver if LDBDServerURL==None: envServer=None envServer=os.getenv('S6_SEGMENT_SERVER') if envServer!=None: self.serverURL=envServer sys.stderr.write("Warning no LDBD Server URL specified \ | adc581022a281d7b14802f291e6460b387201eab /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/adc581022a281d7b14802f291e6460b387201eab/fu_utils.py |
extent = off_segs.extent() | extent = (off_segs | on_segs).extent() | def get_exttrig_trials(on_segs, off_segs, veto_files): """ Return a tuple of (off-source time bins, off-source veto mask, index of trial that is on source). The off-source veto mask is a one-dimensional boolean array where True means vetoed. @param on_segs: On-source segments @param off_segs: Off-source segments @param veto_files: List of filenames containing vetoes """ # Check that offsource length is a multiple of the onsource segment length trial_len = int(abs(on_segs)) if abs(off_segs) % trial_len != 0: raise ValueError, "The provided file's analysis segment is not "\ "divisible by the fold time." extent = off_segs.extent() # generate bins for trials num_trials = int(abs(extent)) // trial_len trial_bins = rate.LinearBins(extent[0], extent[1], num_trials) # incorporate veto file; in trial_veto_mask, True means vetoed. trial_veto_mask = numpy.zeros(num_trials, dtype=numpy.bool8) for veto_file in veto_files: new_veto_segs = segmentsUtils.fromsegwizard(open(veto_file), coltype=int) if new_veto_segs.intersects(on_segs): print >>sys.stderr, "warning: %s overlaps on-source segment" \ % veto_file trial_veto_mask |= rate.bins_spanned(trial_bins, new_veto_segs, dtype=numpy.bool8) # identify onsource trial index onsource_mask = rate.bins_spanned(trial_bins, on_segs, dtype=numpy.bool8) if sum(onsource_mask) != 1: raise ValueError, "on-source segment spans more or less than one trial" onsource_ind = numpy.arange(len(onsource_mask))[onsource_mask] return trial_bins, trial_veto_mask, onsource_ind | 3b88a1db0dc91661ad0cd027d8be3fd9fcede3d4 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/3b88a1db0dc91661ad0cd027d8be3fd9fcede3d4/grbsummary.py |
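The fix above takes the extent of the union of the on- and off-source segment lists, so the trial binning is guaranteed to span the on-source segment even when it lies outside the off-source extent. A sketch using glue.segments (already a dependency of this code):

from glue import segments

on_segs = segments.segmentlist([segments.segment(100, 110)])
off_segs = segments.segmentlist([segments.segment(0, 50),
                                 segments.segment(60, 100)])

print(off_segs.extent())              # spans 0..100, misses the on-source time
print((off_segs | on_segs).extent())  # spans 0..110, covers both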
if dY>=0 and dX>=0: | if dY>=0 and dX>0: | def scatterPointer(A=None,B=None): """ Expects two ordered pairs as tuples (x1,y1) and (x2,y2)... Then this defines an angle. From this angle we orient a triangle point and return this as a tuple of requested size. This can be plotted by the scatter plot to point in a particular orientation. The direction right(0,0)->(1,0) is angle 0 radians, then we rotate counter clockwise from there... What is returned in a three element tuple (numsides,style,angle) which can be put into a plot call via marker=X as a **kwarg """ if A == None or B == None: return (3,0,0) if( A != type(tuple()) and len(A) != 2) \ or \ ( B != type(tuple()) and len(B) != 2): return (3,0,0) # # Calculate orientation of triangle # Ang = arcsin(dY/dX) dY=float(B[-1]-A[-1]) dX=float(B[0]-A[0]) if dY>=0 and dX>=0: myAngle=arctan(dY/dX) elif dY>=0 and dX<0: myAngle=pi-arctan(dY/dX) elif dY<0 and dX<0: myAngle=arctan(dY/dX)+pi elif dY<0 and dX>0: myAngle=(2.0*pi)-arctan(dY/dX) else: myAngle=0 return (3,0,myAngle) | 0aae795f34e87fdf3988ad1f607c71be6b045d53 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/0aae795f34e87fdf3988ad1f607c71be6b045d53/followupPDSurface.py |
elif dY>=0 and dX<0: | elif dY>0 and dX<0: | def scatterPointer(A=None,B=None): """ Expects two ordered pairs as tuples (x1,y1) and (x2,y2)... Then this defines an angle. From this angle we orient a triangle point and return this as a tuple of requested size. This can be plotted by the scatter plot to point in a particular orientation. The direction right(0,0)->(1,0) is angle 0 radians, then we rotate counter clockwise from there... What is returned in a three element tuple (numsides,style,angle) which can be put into a plot call via marker=X as a **kwarg """ if A == None or B == None: return (3,0,0) if( A != type(tuple()) and len(A) != 2) \ or \ ( B != type(tuple()) and len(B) != 2): return (3,0,0) # # Calculate orientation of triangle # Ang = arcsin(dY/dX) dY=float(B[-1]-A[-1]) dX=float(B[0]-A[0]) if dY>=0 and dX>=0: myAngle=arctan(dY/dX) elif dY>=0 and dX<0: myAngle=pi-arctan(dY/dX) elif dY<0 and dX<0: myAngle=arctan(dY/dX)+pi elif dY<0 and dX>0: myAngle=(2.0*pi)-arctan(dY/dX) else: myAngle=0 return (3,0,myAngle) | 0aae795f34e87fdf3988ad1f607c71be6b045d53 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/0aae795f34e87fdf3988ad1f607c71be6b045d53/followupPDSurface.py |
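The two rows above tighten the quadrant boundaries of a hand-rolled angle computation. For comparison only (this is not the pipeline's code), math.atan2 folds all of the quadrant bookkeeping, including the axes where dX or dY is exactly zero, into one call:

from math import atan2, pi

def angle_to(a, b):
    """Counter-clockwise angle of the vector a->b, in [0, 2*pi)."""
    dy = float(b[1] - a[1])
    dx = float(b[0] - a[0])
    return atan2(dy, dx) % (2.0 * pi)

print(angle_to((0, 0), (1, 0)))    # 0.0
print(angle_to((0, 0), (0, 1)))    # pi/2
print(angle_to((0, 0), (-1, 0)))   # pi
print(angle_to((0, 0), (0, -1)))   # 3*pi/2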
|| def __init__(self,fStructure=None,myCoinc=None): if fStructure==None or myCoinc==None: print "Given None Types FS:%s Coinc:%s"%(type(fStructure),type(myCoinc)) return None else: self.fsys=fStructure self.coinc=myCoinc | cabde339221bbc8a748cfedf8ed7be0711b1e7f9 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/cabde339221bbc8a748cfedf8ed7be0711b1e7f9/makeCheckListWiki.py
if not sngl.ifo in frametype: frametype = sngl.ifo + "_" + frametype | frametype=__patchFrameTypeDef__(frametype,sngl.ifo,sngl.time) | def get_RDS_R_L1(self): """ """ tmpList=list() for sngl in self.coinc.sngls: #Determine file type frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'rds') if not sngl.ifo in frametype: frametype = sngl.ifo + "_" + frametype myMaskIndex="*/%s/*/%s/index.html"%(frametype,sngl.time) myMaskPNG="*/%s/*/%s/*.png"%(frametype,sngl.time) myMaskSummary="*/%s/*/%s/*summary.txt"%(frametype,sngl.time) tmpList.extend(fnmatch.filter(self.fsys,myMaskIndex)) tmpList.extend(fnmatch.filter(self.fsys,myMaskPNG)) tmpList.extend(fnmatch.filter(self.fsys,myMaskSummary)) return tmpList | cabde339221bbc8a748cfedf8ed7be0711b1e7f9 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/cabde339221bbc8a748cfedf8ed7be0711b1e7f9/makeCheckListWiki.py |
if not sngl.ifo in frametype: frametype = sngl.ifo + "_" + frametype | frametype=__patchFrameTypeDef__(frametype,sngl.ifo,sngl.time) | def get_RDS_R_L1_SEIS(self): """ """ tmpList=list() for sngl in self.coinc.sngls: #Determine file type frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'rds') if not sngl.ifo in frametype: frametype = sngl.ifo + "_" + frametype frametype = frametype + "_SEIS" myMaskIndex="*/%s*/%s/*.html"%(frametype,sngl.time) myMaskPNG="*/%s*/%s/*.png"%(frametype,sngl.time) myMaskSummary="*/%s*/%s/*summary.txt"%(frametype,sngl.time) tmpList.extend(fnmatch.filter(self.fsys,myMaskIndex)) tmpList.extend(fnmatch.filter(self.fsys,myMaskPNG)) tmpList.extend(fnmatch.filter(self.fsys,myMaskSummary)) return tmpList | cabde339221bbc8a748cfedf8ed7be0711b1e7f9 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/cabde339221bbc8a748cfedf8ed7be0711b1e7f9/makeCheckListWiki.py |
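Several rows in this section swap the inline IFO-prefixing of the frame type for a call to __patchFrameTypeDef__. That helper's definition does not appear here, so the sketch below is only a plausible reconstruction: it assumes the helper wraps the removed inline logic plus None-guards, and the gpstime argument suggests it may also branch on run epoch, which is unknown from this diff alone.

def __patchFrameTypeDef__(frametype=None, ifo=None, gpstime=None):
    # Hypothetical reconstruction -- behaviour beyond the removed inline
    # code (and any use of gpstime) is an assumption, not the real helper.
    if frametype is None or ifo is None:
        return frametype
    if not ifo in frametype:
        frametype = ifo + "_" + frametype
    return frametype

print(__patchFrameTypeDef__("R", "H1", 945348735))     # "H1_R"
print(__patchFrameTypeDef__("H1_RDS_R_L1", "H1", 0))   # unchanged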
if not sngl.ifo in frametype: frametype = sngl.ifo + "_" + frametype | frametype=__patchFrameTypeDef__(frametype,sngl.ifo,sngl.time) | def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.exists(wikiFilename) and maxCount < 15: sys.stdout.write("File %s already exists.\n"%\ os.path.split(wikiFilename)[1]) wikiFilename=wikiFilename+".wiki" maxCount=maxCount+1 sys.stdout.write("Available via browser for wiki upload at %s\n"\ %(file2URL.convert(wikiFilename))) # #Create the wikipage object etc # wikiPage=wiki(wikiFilename) # # Create top two trigger params tables # cTable=wikiPage.wikiTable(2,9) cTable.data=[ ["Trigger Type", "Rank", "FAR", "SNR", "IFOS(Coinc)", "Instruments(Active)", "Coincidence Time (s)", "Total Mass (mSol)", "Chirp Mass (mSol)" ], ["%s"%(wikiCoinc.type), "%s"%(wikiCoinc.rank), "%s"%(wikiCoinc.far), "%s"%(wikiCoinc.snr), "%s"%(wikiCoinc.ifos), "%s"%(wikiCoinc.instruments), "%s"%(wikiCoinc.time), "%s"%(wikiCoinc.mass), "%s"%(wikiCoinc.mchirp) ] ] pTable=wikiPage.wikiTable(len(wikiCoinc.sngls_in_coinc())+1,7) pTable.data[0]=[ "IFO", "GPS Time(s)", "SNR", "CHISQR", "Mass 1", "Mass 2", "Chirp Mass" ] for row,cSngl in enumerate(wikiCoinc.sngls_in_coinc()): pTable.data[row+1]=[ "%s"%(cSngl.ifo), "%s"%(cSngl.time), "%s"%(cSngl.snr), "%s"%(cSngl.chisqr), "%s"%(cSngl.mass1), "%s"%(cSngl.mass2), "%s"%(cSngl.mchirp) ] #Write the tables into the Wiki object wikiPage.putText("Coincident Trigger Event Information: %s\n"\ %(stfu_pipe.gpsTimeToReadableDate(wikiCoinc.time))) wikiPage.insertTable(cTable) wikiPage.putText("Corresponding Coincident Single IFO Trigger Information\n") wikiPage.insertTable(pTable) #Generate a table of contents to appear after candidate params table wikiPage.tableOfContents(3) #Begin including each checklist item as section with subsections wikiPage.section("Follow-up Checklist") #Put each checklist item wikiPage.subsection("Checklist Summary") wikiPage.subsubsection("Does this candidate pass this checklist?") wikiPage.subsubsection("Answer") wikiPage.subsubsection("Relevant Information and Comments") wikiPage.insertHR() # #First real checklist item wikiPage.subsection("#0 False Alarm Probability") wikiPage.subsubsection("Question") wikiPage.putText("What is the false alarm rate associated with this candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") farTable=wikiPage.wikiTable(2,1) farTable.setTableStyle("background-color: yellow; text-align center;") farTable.data[0][0]="False Alarm Rate" farTable.data[1][0]="%s"%(wikiCoinc.far) wikiPage.insertTable(farTable) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#1 Data Quality Flags") wikiPage.subsubsection("Question") wikiPage.putText("Can the data quality flags coincident with this candidate be safely disregarded?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPath=os.path.split(wikiFilename)[0] dqFileList=wikiFileFinder.get_findFlags() if len(dqFileList) != 1: sys.stdout.write("Warning: DQ flags data product import problem.\n") print "Found %i files."%len(dqFileList) for mf in dqFileList: 
print mf for myFile in dqFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#2 Veto Investigations") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate survive the veto investigations performed at its time?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") vetoFileList=wikiFileFinder.get_findVetos() if len(vetoFileList) != 1: sys.stdout.write("Warning: Veto flags data product import problem.\n") for myFile in vetoFileList:print myFile for myFile in vetoFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#3 IFO Status") wikiPage.subsubsection("Question") wikiPage.putText("Are the interferometers operating normally with a reasonable level of sensitivity around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") #Add link to Daily Stats if wikiCoinc.time <= endOfS5: statsLink=wikiPage.makeExternalLink("http://blue.ligo-wa.caltech.edu/scirun/S5/DailyStatistics/",\ "S5 Daily Stats Page") else: statsLink="This should be a link to S6 Daily Stats!\n" wikiPage.putText(statsLink) #Link figures of merit #Get link for all members of wikiCoinc wikiPage.putText("Figures of Merit\n") wikiPage.putText("UTC Time of trigger :%s"%(stfu_pipe.gpsTimeToReadableDate(wikiCoinc.time))) if wikiCoinc.time > endOfS5: fomLinks=dict() elems=0 for wikiSngl in wikiCoinc.sngls: if not(wikiSngl.ifo.upper().rstrip().lstrip() == 'V1'): fomLinks[wikiSngl.ifo]=stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo) elems=elems+len(fomLinks[wikiSngl.ifo]) else: for myLabel,myLink,myThumb in stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo): wikiPage.putText("%s\n"%(wikiPage.makeExternalLink(myLink,myLabel))) cols=4 rows=(elems/3)+1 fTable=wikiPage.wikiTable(rows,cols) fTable.data[0]=["IFO,Shift","FOM1","FOM2","FOM3"] currentIndex=0 for myIFOKey in fomLinks.keys(): for label,link,thumb in fomLinks[myIFOKey]: myRow=currentIndex/int(3)+1 myCol=currentIndex%int(3)+1 fTable.data[myRow][0]=label thumbURL=thumb fTable.data[myRow][myCol]="%s"%(wikiPage.linkedRemoteImage(thumb,link)) currentIndex=currentIndex+1 wikiPage.insertTable(fTable) else: wikiPage.putText("Can not automatically fetch S5 FOM links.") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#4 Candidate Appearance") wikiPage.subsubsection("Question") wikiPage.putText("Do the Qscan figures show what we would expect for a gravitational-wave event?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() for sngl in wikiCoinc.sngls: frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'hoft') if not sngl.ifo in frametype: frametype = sngl.ifo + "_" + frametype indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*/%s/*/%s/*index.html"%(frametype,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened.png"\ %(sngl.time,channelName)) 
thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened?thumb.png"\ %(sngl.time,channelName)) # #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("GW data channel scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >= 1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >= 1 if enoughImage and enoughIndex: wikiPage.insertQscanTable(imageDict,\ thumbDict,\ indexDict) else: sys.stdout.write("Warning: Candidate appearance plot import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#5 Seismic Plots") wikiPage.subsubsection("Question") wikiPage.putText("Is the seismic activity insignificant around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") # imageDict,indexDict,thumbDict,zValueDict = dict(),dict(),dict(),dict() imageDictAQ,indexDictAQ,thumbDictAQ,zValueDictAQ = dict(),dict(),dict(),dict() filesOmega=wikiFileFinder.get_RDS_R_L1_SEIS() filesAnalyze=wikiFileFinder.get_analyzeQscan_SEIS() for sngl in wikiCoinc.sngls_in_coinc(): indexDict[sngl.ifo],imageDict[sngl.ifo],thumbDict[sngl.ifo],zValueDict[sngl.ifo]=list(),list(),list(),list() indexDictAQ[sngl.ifo],imageDictAQ[sngl.ifo],thumbDictAQ[sngl.ifo],zValueDictAQ[sngl.ifo]=list(),list(),list(),list() frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'rds') if not sngl.ifo in frametype: frametype = sngl.ifo + "_" + frametype if sngl.ifo == "V1": chankey = "Em_SE" else: chankey = "SEI" indexDict[sngl.ifo]=fnmatch.filter(filesOmega,\ "*/%s_*/%s/*index.html"%(frametype,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(filesOmega,\ "*/%s_*/%s/*%s*_512.00_spectrogram_whitened.png"%\ (frametype,sngl.time,chankey)) thumbDict[sngl.ifo]=fnmatch.filter(filesOmega,\ "*/%s_*/%s/*%s*_512.00_spectrogram_whitened?thumb.png"%\ (frametype,sngl.time,chankey)) #Search for corresponding Omega summary.txt file zValueDict[sngl.ifo]=list() for zFile in fnmatch.filter(filesOmega,\ "*/%s_*/%s/*summary.txt"%(frametype,sngl.time)): for chan in wikiFileFinder.__readSummary__(zFile): if chankey in chan[0]: zValueDict[sngl.ifo].append(chan) if len(zValueDict[sngl.ifo]) == 0: sys.stdout.write("Omega scan summary file not or empty for %s. 
...continuing...\n"%sngl.ifo) #Search for analyzeQscan files timeString=str(float(sngl.time)).replace(".","_") indexDictAQ[sngl.ifo]=fnmatch.filter(filesAnalyze,\ "*_%s_%s_*.html"%(sngl.ifo,timeString)) imageDictAQ[sngl.ifo]=fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_%s*_z_scat-unspecified-gpstime.png"\ %(sngl.ifo,timeString,chankey)) thumbDictAQ[sngl.ifo]=fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_%s*_z_scat-unspecified-gpstime_thumb.png"\ %(sngl.ifo,timeString,chankey)) #Load of analyzeQscan z file if available zValueDictAQ[sngl.ifo]=list() for zFile in fnmatch.filter(filesAnalyze,\ "*_%s_%s_*.txt"%(sngl.ifo,timeString)): for chan in wikiFileFinder.__readSummary__(zFile): if chankey in chan[0]: zValueDictAQ[sngl.ifo].append(chan) if len(zValueDictAQ[sngl.ifo]) == 0: sys.stdout.write("AnalyzeQscan summary file not or empty for %s. ...continuing...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Seismic scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: Seismic plots product import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#6 Other environmental causes") wikiPage.subsubsection("Question") wikiPage.putText("Were the environmental disturbances (other than seismic) insignificant at the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict,indexDict,thumbDict,zValueDict = dict(),dict(),dict(),dict() imageDictAQ,indexDictAQ,thumbDictAQ,zValueDictAQ = dict(),dict(),dict(),dict() #Select only PEM channels filesOmega=wikiFileFinder.get_RDS_R_L1() filesAnalyze=wikiFileFinder.get_analyzeQscan_RDS() for sngl in wikiCoinc.sngls_in_coinc(): indexDict[sngl.ifo],imageDict[sngl.ifo],thumbDict[sngl.ifo],zValueDict[sngl.ifo]=list(),list(),list(),list() indexDictAQ[sngl.ifo],imageDictAQ[sngl.ifo],thumbDictAQ[sngl.ifo],zValueDictAQ[sngl.ifo]=list(),list(),list(),list() frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'rds') if not sngl.ifo in frametype: frametype = sngl.ifo + "_" + frametype if sngl.ifo == "V1": chankeyseis = "Em_SE" chankeyenv = "Em_" else: chankeyseis = "SEI" chankeyenv = "PEM" for myFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*html"%(frametype,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*_16.00_spectrogram_whitened.png"%\ (frametype,sngl.time)): if chankeyenv in myFile and not chankeyseis in myFile: imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ 
(frametype,sngl.time)): if chankeyenv in myFile and not chankeyseis in myFile: thumbDict[sngl.ifo].append(myFile) #Search for corresponding Omega summary.txt file zValueDict[sngl.ifo]=list() for zFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*summary.txt"%(frametype,sngl.time)): for chan in wikiFileFinder.__readSummary__(zFile): if chankeyenv in chan[0] and not chankeyseis in chan[0]: zValueDict[sngl.ifo].append(chan) if len(zValueDict[sngl.ifo]) == 0: sys.stdout.write("Omega scan summary file not or empty for %s. ...continuing...\n"%sngl.ifo) #Select associated analyzeQscans timeString=str(float(sngl.time)).replace(".","_") for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if chankeyenv in myFile and not chankeyseis in myFile: imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if chankeyenv in myFile and not chankeyseis in myFile: thumbDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueDictAQ[sngl.ifo]=list() for zFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)): for chan in wikiFileFinder.__readSummary__(zFile): if chankeyenv in chan[0] and not chankeyseis in chan[0]: zValueDictAQ[sngl.ifo].append(chan) if len(zValueDictAQ[sngl.ifo]) == 0: sys.stdout.write("AnalyzeQscan summary file not or empty for %s. ...continuing...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(imageDict[sngl.ifo]) < 1: wikiPage.putText("PEM scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: PEM plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#7 Auxiliary degree of freedom") wikiPage.subsubsection("Question") wikiPage.putText("Were the auxiliary channel transients coincident with the candidate insignificant?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict,indexDict,thumbDict,zValueDict = dict(),dict(),dict(),dict() imageDictAQ,indexDictAQ,thumbDictAQ,zValueDictAQ = dict(),dict(),dict(),dict() #Select only AUX channels filesOmega=wikiFileFinder.get_RDS_R_L1() filesAnalyze=wikiFileFinder.get_analyzeQscan_RDS() for sngl in wikiCoinc.sngls: frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'rds') if not sngl.ifo in frametype: frametype = sngl.ifo + "_" + frametype if sngl.ifo == "V1": chankeyseis = "Em_SE" chankeyenv = "Em_" else: chankeyseis = "SEI" chankeyenv = "PEM" 
indexDict[sngl.ifo],imageDict[sngl.ifo],thumbDict[sngl.ifo],zValueDict[sngl.ifo]=list(),list(),list(),list() indexDictAQ[sngl.ifo],imageDictAQ[sngl.ifo],thumbDictAQ[sngl.ifo],zValueDictAQ[sngl.ifo]=list(),list(),list(),list() for myFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*html"%(frametype,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*_16.00_spectrogram_whitened.png"%\ (frametype,sngl.time)): if not chankeyenv in myFile or not chankeyseis in myFile: imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (frametype,sngl.time)): if not chankeyenv in myFile and not chankeyseis in myFile: thumbDict[sngl.ifo].append(myFile) zValueDict[sngl.ifo]=list() for zFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*summary.txt"%(frametype,sngl.time)): for chan in wikiFileFinder.__readSummary__(zFile): if not chankeyenv in chan[0] and not chankeyseis in chan[0]: zValueDict[sngl.ifo].append(chan) if len(zValueDict[sngl.ifo]) == 0: sys.stdout.write("Omega scan summary file not or empty for %s. ...continuing...\n"%sngl.ifo) #Select associated analyzeQscans timeString=str(float(sngl.time)).replace(".","_") for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if not chankeyenv in myFile or not chankeyseis in myFile: imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if not chankeyenv in myFile and not chankeyseis in myFile: thumbDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueDictAQ[sngl.ifo]=list() for zFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)): for chan in wikiFileFinder.__readSummary__(zFile): if not chankeyenv in chan[0] and not chankeyseis in chan[0]: zValueDictAQ[sngl.ifo].append(chan) if len(zValueDictAQ[sngl.ifo]) == 0: sys.stdout.write("AnalyzeQscan summary file not or empty for %s. 
...continuing...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Other scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: AUX plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#8 Electronic Log Book") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the comments posted by the sci-mons or the operators in the e-log?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiLinkLHOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"H1"), "Hanford eLog") wikiLinkLLOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"L1"), "Livingston eLog") wikiPage.putText("%s\n\n%s\n\n"%(wikiLinkLHOlog,wikiLinkLLOlog)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#9 Glitch Report") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the weekly glitch report?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") if int(wikiCoinc.time) >= endOfS5: wikiLinkGlitch=wikiPage.makeExternalLink( "https://www.lsc-group.phys.uwm.edu/twiki/bin/view/DetChar/GlitchStudies", "Glitch Reports for S6" ) else: wikiLinkGlitch=wikiPage.makeExternalLink( "http://www.lsc-group.phys.uwm.edu/glitch/investigations/s5index.html#shift", "Glitch Reports for S5" ) wikiPage.putText("%s\n"%(wikiLinkGlitch)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#10 Snr versus time") wikiPage.subsubsection("Question") wikiPage.putText("Is this trigger significant in a SNR versus time plot of all triggers in its analysis chunk?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#11 Parameters of the candidate") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate have a high likelihood of being a gravitational-wave according to its parameters?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Effective 
Distance Ratio Test\n") effDList=wikiFileFinder.get_effDRatio() if len(effDList) != 1: sys.stdout.write("Warning: Effective Distance Test import problem.\n") for myFile in effDList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#12 Snr and Chisq") wikiPage.subsubsection("Question") wikiPage.putText("Are the SNR and CHISQ time series consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") # #Put plots SNR and Chi sqr # indexList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*.html") thumbList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_snr-*thumb.png") thumbList.extend(fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_chisq-*thumb.png")) thumbList.sort() indexList=[file2URL.convert(x) for x in indexList] thumbList=[file2URL.convert(x) for x in thumbList] #Two thumb types possible "_thumb.png" or ".thumb.png" imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] ifoCount=len(wikiCoinc.sngls) rowLabel={"SNR":1,"CHISQ":2} rowCount=len(rowLabel) colCount=ifoCount if len(indexList) >= 1: snrTable=wikiPage.wikiTable(rowCount+1,colCount+1) for i,sngl in enumerate(wikiCoinc.sngls): myIndex="" for indexFile in indexList: if indexFile.__contains__("_pipe_%s_FOLLOWUP_"%sngl.ifo): myIndex=indexFile if myIndex=="": snrTable.data[0][i+1]=" %s "%sngl.ifo else: snrTable.data[0][i+1]=wikiPage.makeExternalLink(myIndex,sngl.ifo) for col,sngl in enumerate(wikiCoinc.sngls): for row,label in enumerate(rowLabel.keys()): snrTable.data[row+1][0]=label for k,image in enumerate(imageList): if (image.__contains__("_%s-"%label.lower()) \ and image.__contains__("pipe_%s_FOLLOWUP"%sngl.ifo)): snrTable.data[row+1][col+1]=" %s "%(wikiPage.linkedRemoteImage(thumbList[k],thumbList[k])) wikiPage.insertTable(snrTable) else: sys.stdout.write("Warning: SNR and CHISQ plots not found.\n") wikiPage.putText("SNR and CHISQ plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#13 Template bank veto") wikiPage.subsubsection("Question") wikiPage.putText("Is the bank veto value consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#14 Coherent studies") wikiPage.subsubsection("Question") wikiPage.putText("Are the triggers found in multiple interferometers coherent with each other?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") indexList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),"*.html") if len(indexList) >= 1: myIndex=file2URL.convert(indexList[0]) wikiPage.putText(wikiPage.makeExternalLink(myIndex,\ "%s Coherence Study Results"%(wikiCoinc.ifos))) thumbList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),\ "PLOT_CHIA_%s_snr-squared*thumb.png"%(wikiCoinc.time)) imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] 
rowCount=len(imageList) colCount=1 cohSnrTimeTable=wikiPage.wikiTable(rowCount+1,colCount) cohSnrTimeTable.data[0][0]="%s Coherent SNR Squared Times Series"%(wikiCoinc.ifos) for i,image in enumerate(imageList): cohSnrTimeTable.data[i+1][0]=wikiPage.linkedRemoteImage(image,thumbList[i]) wikiPage.insertTable(cohSnrTimeTable) else: sys.stdout.write("Warning: Coherent plotting jobs not found.\n") wikiPage.putText("Coherent Studies plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#15 Segmentation Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in segmentation?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#16 Calibration Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in calibration that are consistent with systematic uncertainties?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # | cabde339221bbc8a748cfedf8ed7be0711b1e7f9 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/cabde339221bbc8a748cfedf8ed7be0711b1e7f9/makeCheckListWiki.py |
if not sngl.ifo in frametype: frametype = sngl.ifo + "_" + frametype | frametype=__patchFrameTypeDef__(frametype,sngl.ifo,sngl.time) | def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.exists(wikiFilename) and maxCount < 15: sys.stdout.write("File %s already exists.\n"%\ os.path.split(wikiFilename)[1]) wikiFilename=wikiFilename+".wiki" maxCount=maxCount+1 sys.stdout.write("Available via browser for wiki upload at %s\n"\ %(file2URL.convert(wikiFilename))) # #Create the wikipage object etc # wikiPage=wiki(wikiFilename) # # Create top two trigger params tables # cTable=wikiPage.wikiTable(2,9) cTable.data=[ ["Trigger Type", "Rank", "FAR", "SNR", "IFOS(Coinc)", "Instruments(Active)", "Coincidence Time (s)", "Total Mass (mSol)", "Chirp Mass (mSol)" ], ["%s"%(wikiCoinc.type), "%s"%(wikiCoinc.rank), "%s"%(wikiCoinc.far), "%s"%(wikiCoinc.snr), "%s"%(wikiCoinc.ifos), "%s"%(wikiCoinc.instruments), "%s"%(wikiCoinc.time), "%s"%(wikiCoinc.mass), "%s"%(wikiCoinc.mchirp) ] ] pTable=wikiPage.wikiTable(len(wikiCoinc.sngls_in_coinc())+1,7) pTable.data[0]=[ "IFO", "GPS Time(s)", "SNR", "CHISQR", "Mass 1", "Mass 2", "Chirp Mass" ] for row,cSngl in enumerate(wikiCoinc.sngls_in_coinc()): pTable.data[row+1]=[ "%s"%(cSngl.ifo), "%s"%(cSngl.time), "%s"%(cSngl.snr), "%s"%(cSngl.chisqr), "%s"%(cSngl.mass1), "%s"%(cSngl.mass2), "%s"%(cSngl.mchirp) ] #Write the tables into the Wiki object wikiPage.putText("Coincident Trigger Event Information: %s\n"\ %(stfu_pipe.gpsTimeToReadableDate(wikiCoinc.time))) wikiPage.insertTable(cTable) wikiPage.putText("Corresponding Coincident Single IFO Trigger Information\n") wikiPage.insertTable(pTable) #Generate a table of contents to appear after candidate params table wikiPage.tableOfContents(3) #Begin including each checklist item as section with subsections wikiPage.section("Follow-up Checklist") #Put each checklist item wikiPage.subsection("Checklist Summary") wikiPage.subsubsection("Does this candidate pass this checklist?") wikiPage.subsubsection("Answer") wikiPage.subsubsection("Relevant Information and Comments") wikiPage.insertHR() # #First real checklist item wikiPage.subsection("#0 False Alarm Probability") wikiPage.subsubsection("Question") wikiPage.putText("What is the false alarm rate associated with this candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") farTable=wikiPage.wikiTable(2,1) farTable.setTableStyle("background-color: yellow; text-align center;") farTable.data[0][0]="False Alarm Rate" farTable.data[1][0]="%s"%(wikiCoinc.far) wikiPage.insertTable(farTable) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#1 Data Quality Flags") wikiPage.subsubsection("Question") wikiPage.putText("Can the data quality flags coincident with this candidate be safely disregarded?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPath=os.path.split(wikiFilename)[0] dqFileList=wikiFileFinder.get_findFlags() if len(dqFileList) != 1: sys.stdout.write("Warning: DQ flags data product import problem.\n") print "Found %i files."%len(dqFileList) for mf in dqFileList: 
print mf for myFile in dqFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#2 Veto Investigations") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate survive the veto investigations performed at its time?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") vetoFileList=wikiFileFinder.get_findVetos() if len(vetoFileList) != 1: sys.stdout.write("Warning: Veto flags data product import problem.\n") for myFile in vetoFileList:print myFile for myFile in vetoFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#3 IFO Status") wikiPage.subsubsection("Question") wikiPage.putText("Are the interferometers operating normally with a reasonable level of sensitivity around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") #Add link to Daily Stats if wikiCoinc.time <= endOfS5: statsLink=wikiPage.makeExternalLink("http://blue.ligo-wa.caltech.edu/scirun/S5/DailyStatistics/",\ "S5 Daily Stats Page") else: statsLink="This should be a link to S6 Daily Stats!\n" wikiPage.putText(statsLink) #Link figures of merit #Get link for all members of wikiCoinc wikiPage.putText("Figures of Merit\n") wikiPage.putText("UTC Time of trigger :%s"%(stfu_pipe.gpsTimeToReadableDate(wikiCoinc.time))) if wikiCoinc.time > endOfS5: fomLinks=dict() elems=0 for wikiSngl in wikiCoinc.sngls: if not(wikiSngl.ifo.upper().rstrip().lstrip() == 'V1'): fomLinks[wikiSngl.ifo]=stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo) elems=elems+len(fomLinks[wikiSngl.ifo]) else: for myLabel,myLink,myThumb in stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo): wikiPage.putText("%s\n"%(wikiPage.makeExternalLink(myLink,myLabel))) cols=4 rows=(elems/3)+1 fTable=wikiPage.wikiTable(rows,cols) fTable.data[0]=["IFO,Shift","FOM1","FOM2","FOM3"] currentIndex=0 for myIFOKey in fomLinks.keys(): for label,link,thumb in fomLinks[myIFOKey]: myRow=currentIndex/int(3)+1 myCol=currentIndex%int(3)+1 fTable.data[myRow][0]=label thumbURL=thumb fTable.data[myRow][myCol]="%s"%(wikiPage.linkedRemoteImage(thumb,link)) currentIndex=currentIndex+1 wikiPage.insertTable(fTable) else: wikiPage.putText("Can not automatically fetch S5 FOM links.") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#4 Candidate Appearance") wikiPage.subsubsection("Question") wikiPage.putText("Do the Qscan figures show what we would expect for a gravitational-wave event?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() for sngl in wikiCoinc.sngls: frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'hoft') if not sngl.ifo in frametype: frametype = sngl.ifo + "_" + frametype indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*/%s/*/%s/*index.html"%(frametype,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened.png"\ %(sngl.time,channelName)) 
thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened?thumb.png"\ %(sngl.time,channelName)) # #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("GW data channel scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >= 1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >= 1 if enoughImage and enoughIndex: wikiPage.insertQscanTable(imageDict,\ thumbDict,\ indexDict) else: sys.stdout.write("Warning: Candidate appearance plot import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#5 Seismic Plots") wikiPage.subsubsection("Question") wikiPage.putText("Is the seismic activity insignificant around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") # imageDict,indexDict,thumbDict,zValueDict = dict(),dict(),dict(),dict() imageDictAQ,indexDictAQ,thumbDictAQ,zValueDictAQ = dict(),dict(),dict(),dict() filesOmega=wikiFileFinder.get_RDS_R_L1_SEIS() filesAnalyze=wikiFileFinder.get_analyzeQscan_SEIS() for sngl in wikiCoinc.sngls_in_coinc(): indexDict[sngl.ifo],imageDict[sngl.ifo],thumbDict[sngl.ifo],zValueDict[sngl.ifo]=list(),list(),list(),list() indexDictAQ[sngl.ifo],imageDictAQ[sngl.ifo],thumbDictAQ[sngl.ifo],zValueDictAQ[sngl.ifo]=list(),list(),list(),list() frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'rds') if not sngl.ifo in frametype: frametype = sngl.ifo + "_" + frametype if sngl.ifo == "V1": chankey = "Em_SE" else: chankey = "SEI" indexDict[sngl.ifo]=fnmatch.filter(filesOmega,\ "*/%s_*/%s/*index.html"%(frametype,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(filesOmega,\ "*/%s_*/%s/*%s*_512.00_spectrogram_whitened.png"%\ (frametype,sngl.time,chankey)) thumbDict[sngl.ifo]=fnmatch.filter(filesOmega,\ "*/%s_*/%s/*%s*_512.00_spectrogram_whitened?thumb.png"%\ (frametype,sngl.time,chankey)) #Search for corresponding Omega summary.txt file zValueDict[sngl.ifo]=list() for zFile in fnmatch.filter(filesOmega,\ "*/%s_*/%s/*summary.txt"%(frametype,sngl.time)): for chan in wikiFileFinder.__readSummary__(zFile): if chankey in chan[0]: zValueDict[sngl.ifo].append(chan) if len(zValueDict[sngl.ifo]) == 0: sys.stdout.write("Omega scan summary file not or empty for %s. 
...continuing...\n"%sngl.ifo) #Search for analyzeQscan files timeString=str(float(sngl.time)).replace(".","_") indexDictAQ[sngl.ifo]=fnmatch.filter(filesAnalyze,\ "*_%s_%s_*.html"%(sngl.ifo,timeString)) imageDictAQ[sngl.ifo]=fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_%s*_z_scat-unspecified-gpstime.png"\ %(sngl.ifo,timeString,chankey)) thumbDictAQ[sngl.ifo]=fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_%s*_z_scat-unspecified-gpstime_thumb.png"\ %(sngl.ifo,timeString,chankey)) #Load of analyzeQscan z file if available zValueDictAQ[sngl.ifo]=list() for zFile in fnmatch.filter(filesAnalyze,\ "*_%s_%s_*.txt"%(sngl.ifo,timeString)): for chan in wikiFileFinder.__readSummary__(zFile): if chankey in chan[0]: zValueDictAQ[sngl.ifo].append(chan) if len(zValueDictAQ[sngl.ifo]) == 0: sys.stdout.write("AnalyzeQscan summary file not or empty for %s. ...continuing...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Seismic scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: Seismic plots product import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#6 Other environmental causes") wikiPage.subsubsection("Question") wikiPage.putText("Were the environmental disturbances (other than seismic) insignificant at the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict,indexDict,thumbDict,zValueDict = dict(),dict(),dict(),dict() imageDictAQ,indexDictAQ,thumbDictAQ,zValueDictAQ = dict(),dict(),dict(),dict() #Select only PEM channels filesOmega=wikiFileFinder.get_RDS_R_L1() filesAnalyze=wikiFileFinder.get_analyzeQscan_RDS() for sngl in wikiCoinc.sngls_in_coinc(): indexDict[sngl.ifo],imageDict[sngl.ifo],thumbDict[sngl.ifo],zValueDict[sngl.ifo]=list(),list(),list(),list() indexDictAQ[sngl.ifo],imageDictAQ[sngl.ifo],thumbDictAQ[sngl.ifo],zValueDictAQ[sngl.ifo]=list(),list(),list(),list() frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'rds') if not sngl.ifo in frametype: frametype = sngl.ifo + "_" + frametype if sngl.ifo == "V1": chankeyseis = "Em_SE" chankeyenv = "Em_" else: chankeyseis = "SEI" chankeyenv = "PEM" for myFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*html"%(frametype,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*_16.00_spectrogram_whitened.png"%\ (frametype,sngl.time)): if chankeyenv in myFile and not chankeyseis in myFile: imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ 
(frametype,sngl.time)): if chankeyenv in myFile and not chankeyseis in myFile: thumbDict[sngl.ifo].append(myFile) #Search for corresponding Omega summary.txt file zValueDict[sngl.ifo]=list() for zFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*summary.txt"%(frametype,sngl.time)): for chan in wikiFileFinder.__readSummary__(zFile): if chankeyenv in chan[0] and not chankeyseis in chan[0]: zValueDict[sngl.ifo].append(chan) if len(zValueDict[sngl.ifo]) == 0: sys.stdout.write("Omega scan summary file not or empty for %s. ...continuing...\n"%sngl.ifo) #Select associated analyzeQscans timeString=str(float(sngl.time)).replace(".","_") for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if chankeyenv in myFile and not chankeyseis in myFile: imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if chankeyenv in myFile and not chankeyseis in myFile: thumbDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueDictAQ[sngl.ifo]=list() for zFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)): for chan in wikiFileFinder.__readSummary__(zFile): if chankeyenv in chan[0] and not chankeyseis in chan[0]: zValueDictAQ[sngl.ifo].append(chan) if len(zValueDictAQ[sngl.ifo]) == 0: sys.stdout.write("AnalyzeQscan summary file not or empty for %s. ...continuing...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(imageDict[sngl.ifo]) < 1: wikiPage.putText("PEM scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: PEM plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#7 Auxiliary degree of freedom") wikiPage.subsubsection("Question") wikiPage.putText("Were the auxiliary channel transients coincident with the candidate insignificant?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict,indexDict,thumbDict,zValueDict = dict(),dict(),dict(),dict() imageDictAQ,indexDictAQ,thumbDictAQ,zValueDictAQ = dict(),dict(),dict(),dict() #Select only AUX channels filesOmega=wikiFileFinder.get_RDS_R_L1() filesAnalyze=wikiFileFinder.get_analyzeQscan_RDS() for sngl in wikiCoinc.sngls: frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'rds') if not sngl.ifo in frametype: frametype = sngl.ifo + "_" + frametype if sngl.ifo == "V1": chankeyseis = "Em_SE" chankeyenv = "Em_" else: chankeyseis = "SEI" chankeyenv = "PEM" 
indexDict[sngl.ifo],imageDict[sngl.ifo],thumbDict[sngl.ifo],zValueDict[sngl.ifo]=list(),list(),list(),list() indexDictAQ[sngl.ifo],imageDictAQ[sngl.ifo],thumbDictAQ[sngl.ifo],zValueDictAQ[sngl.ifo]=list(),list(),list(),list() for myFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*html"%(frametype,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*_16.00_spectrogram_whitened.png"%\ (frametype,sngl.time)): if not chankeyenv in myFile and not chankeyseis in myFile: imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (frametype,sngl.time)): if not chankeyenv in myFile and not chankeyseis in myFile: thumbDict[sngl.ifo].append(myFile) zValueDict[sngl.ifo]=list() for zFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*summary.txt"%(frametype,sngl.time)): for chan in wikiFileFinder.__readSummary__(zFile): if not chankeyenv in chan[0] and not chankeyseis in chan[0]: zValueDict[sngl.ifo].append(chan) if len(zValueDict[sngl.ifo]) == 0: sys.stdout.write("Omega scan summary file not or empty for %s. ...continuing...\n"%sngl.ifo) #Select associated analyzeQscans timeString=str(float(sngl.time)).replace(".","_") for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if not chankeyenv in myFile and not chankeyseis in myFile: imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if not chankeyenv in myFile and not chankeyseis in myFile: thumbDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueDictAQ[sngl.ifo]=list() for zFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)): for chan in wikiFileFinder.__readSummary__(zFile): if not chankeyenv in chan[0] and not chankeyseis in chan[0]: zValueDictAQ[sngl.ifo].append(chan) if len(zValueDictAQ[sngl.ifo]) == 0: sys.stdout.write("AnalyzeQscan summary file not or empty for %s. 
...continuing...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Other scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: AUX plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#8 Electronic Log Book") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the comments posted by the sci-mons or the operators in the e-log?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiLinkLHOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"H1"), "Hanford eLog") wikiLinkLLOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"L1"), "Livingston eLog") wikiPage.putText("%s\n\n%s\n\n"%(wikiLinkLHOlog,wikiLinkLLOlog)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#9 Glitch Report") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the weekly glitch report?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") if int(wikiCoinc.time) >= endOfS5: wikiLinkGlitch=wikiPage.makeExternalLink( "https://www.lsc-group.phys.uwm.edu/twiki/bin/view/DetChar/GlitchStudies", "Glitch Reports for S6" ) else: wikiLinkGlitch=wikiPage.makeExternalLink( "http://www.lsc-group.phys.uwm.edu/glitch/investigations/s5index.html#shift", "Glitch Reports for S5" ) wikiPage.putText("%s\n"%(wikiLinkGlitch)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#10 Snr versus time") wikiPage.subsubsection("Question") wikiPage.putText("Is this trigger significant in a SNR versus time plot of all triggers in its analysis chunk?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#11 Parameters of the candidate") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate have a high likelihood of being a gravitational-wave according to its parameters?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Effective 
Distance Ratio Test\n") effDList=wikiFileFinder.get_effDRatio() if len(effDList) != 1: sys.stdout.write("Warning: Effective Distance Test import problem.\n") for myFile in effDList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#12 Snr and Chisq") wikiPage.subsubsection("Question") wikiPage.putText("Are the SNR and CHISQ time series consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") # #Put plots SNR and Chi sqr # indexList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*.html") thumbList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_snr-*thumb.png") thumbList.extend(fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_chisq-*thumb.png")) thumbList.sort() indexList=[file2URL.convert(x) for x in indexList] thumbList=[file2URL.convert(x) for x in thumbList] #Two thumb types possible "_thumb.png" or ".thumb.png" imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] ifoCount=len(wikiCoinc.sngls) rowLabel={"SNR":1,"CHISQ":2} rowCount=len(rowLabel) colCount=ifoCount if len(indexList) >= 1: snrTable=wikiPage.wikiTable(rowCount+1,colCount+1) for i,sngl in enumerate(wikiCoinc.sngls): myIndex="" for indexFile in indexList: if indexFile.__contains__("_pipe_%s_FOLLOWUP_"%sngl.ifo): myIndex=indexFile if myIndex=="": snrTable.data[0][i+1]=" %s "%sngl.ifo else: snrTable.data[0][i+1]=wikiPage.makeExternalLink(myIndex,sngl.ifo) for col,sngl in enumerate(wikiCoinc.sngls): for row,label in enumerate(rowLabel.keys()): snrTable.data[row+1][0]=label for k,image in enumerate(imageList): if (image.__contains__("_%s-"%label.lower()) \ and image.__contains__("pipe_%s_FOLLOWUP"%sngl.ifo)): snrTable.data[row+1][col+1]=" %s "%(wikiPage.linkedRemoteImage(thumbList[k],thumbList[k])) wikiPage.insertTable(snrTable) else: sys.stdout.write("Warning: SNR and CHISQ plots not found.\n") wikiPage.putText("SNR and CHISQ plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#13 Template bank veto") wikiPage.subsubsection("Question") wikiPage.putText("Is the bank veto value consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#14 Coherent studies") wikiPage.subsubsection("Question") wikiPage.putText("Are the triggers found in multiple interferometers coherent with each other?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") indexList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),"*.html") if len(indexList) >= 1: myIndex=file2URL.convert(indexList[0]) wikiPage.putText(wikiPage.makeExternalLink(myIndex,\ "%s Coherence Study Results"%(wikiCoinc.ifos))) thumbList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),\ "PLOT_CHIA_%s_snr-squared*thumb.png"%(wikiCoinc.time)) imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] 
rowCount=len(imageList) colCount=1 cohSnrTimeTable=wikiPage.wikiTable(rowCount+1,colCount) cohSnrTimeTable.data[0][0]="%s Coherent SNR Squared Time Series"%(wikiCoinc.ifos) for i,image in enumerate(imageList): cohSnrTimeTable.data[i+1][0]=wikiPage.linkedRemoteImage(image,thumbList[i]) wikiPage.insertTable(cohSnrTimeTable) else: sys.stdout.write("Warning: Coherent plotting jobs not found.\n") wikiPage.putText("Coherent Studies plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#15 Segmentation Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in segmentation?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#16 Calibration Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in calibration that are consistent with systematic uncertainties?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # | cabde339221bbc8a748cfedf8ed7be0711b1e7f9 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/cabde339221bbc8a748cfedf8ed7be0711b1e7f9/makeCheckListWiki.py
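The rem/add pair in this record (and the identical one two records down) replaces the inline ifo-prefix patch with a call to __patchFrameTypeDef__(frametype,sngl.ifo,sngl.time). The helper's body is not included in the record, so the sketch below is only a plausible reconstruction: it keeps the substring test the removed line performed and uses the new GPS-time argument to leave S5-era frame types untouched, an assumption suggested by the endOfS5 constant in the surrounding context.

def __patchFrameTypeDef__(frametype=None, ifo=None, gpstime=None):
    """Hypothetical sketch; the real helper in makeCheckListWiki.py may differ."""
    endOfS5 = 875232014  # GPS end of S5, same constant as in prepareChecklist
    if frametype is None or ifo is None:
        return frametype
    if gpstime is not None and float(gpstime) <= endOfS5:
        # Assumption: S5-era frame types carry no ifo prefix on disk.
        return frametype
    if ifo not in frametype:
        # Same patch the removed inline code applied.
        frametype = ifo + "_" + frametype
    return frametype

Whatever the real epoch logic is, folding it into one helper means every call site picks up future fixes automatically, which appears to be the point of the recorded change.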
sys.stdout.write("Omega scan summary file not or empty for %s. ...continuing...\n"%sngl.ifo) | sys.stdout.write("Omega scan summary file not found or seen empty for %s. ...continuing...\n"%sngl.ifo) | def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.exists(wikiFilename) and maxCount < 15: sys.stdout.write("File %s already exists.\n"%\ os.path.split(wikiFilename)[1]) wikiFilename=wikiFilename+".wiki" maxCount=maxCount+1 sys.stdout.write("Available via browser for wiki upload at %s\n"\ %(file2URL.convert(wikiFilename))) # #Create the wikipage object etc # wikiPage=wiki(wikiFilename) # # Create top two trigger params tables # cTable=wikiPage.wikiTable(2,9) cTable.data=[ ["Trigger Type", "Rank", "FAR", "SNR", "IFOS(Coinc)", "Instruments(Active)", "Coincidence Time (s)", "Total Mass (mSol)", "Chirp Mass (mSol)" ], ["%s"%(wikiCoinc.type), "%s"%(wikiCoinc.rank), "%s"%(wikiCoinc.far), "%s"%(wikiCoinc.snr), "%s"%(wikiCoinc.ifos), "%s"%(wikiCoinc.instruments), "%s"%(wikiCoinc.time), "%s"%(wikiCoinc.mass), "%s"%(wikiCoinc.mchirp) ] ] pTable=wikiPage.wikiTable(len(wikiCoinc.sngls_in_coinc())+1,7) pTable.data[0]=[ "IFO", "GPS Time(s)", "SNR", "CHISQR", "Mass 1", "Mass 2", "Chirp Mass" ] for row,cSngl in enumerate(wikiCoinc.sngls_in_coinc()): pTable.data[row+1]=[ "%s"%(cSngl.ifo), "%s"%(cSngl.time), "%s"%(cSngl.snr), "%s"%(cSngl.chisqr), "%s"%(cSngl.mass1), "%s"%(cSngl.mass2), "%s"%(cSngl.mchirp) ] #Write the tables into the Wiki object wikiPage.putText("Coincident Trigger Event Information: %s\n"\ %(stfu_pipe.gpsTimeToReadableDate(wikiCoinc.time))) wikiPage.insertTable(cTable) wikiPage.putText("Corresponding Coincident Single IFO Trigger Information\n") wikiPage.insertTable(pTable) #Generate a table of contents to appear after candidate params table wikiPage.tableOfContents(3) #Begin including each checklist item as section with subsections wikiPage.section("Follow-up Checklist") #Put each checklist item wikiPage.subsection("Checklist Summary") wikiPage.subsubsection("Does this candidate pass this checklist?") wikiPage.subsubsection("Answer") wikiPage.subsubsection("Relevant Information and Comments") wikiPage.insertHR() # #First real checklist item wikiPage.subsection("#0 False Alarm Probability") wikiPage.subsubsection("Question") wikiPage.putText("What is the false alarm rate associated with this candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") farTable=wikiPage.wikiTable(2,1) farTable.setTableStyle("background-color: yellow; text-align center;") farTable.data[0][0]="False Alarm Rate" farTable.data[1][0]="%s"%(wikiCoinc.far) wikiPage.insertTable(farTable) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#1 Data Quality Flags") wikiPage.subsubsection("Question") wikiPage.putText("Can the data quality flags coincident with this candidate be safely disregarded?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPath=os.path.split(wikiFilename)[0] dqFileList=wikiFileFinder.get_findFlags() if len(dqFileList) != 1: sys.stdout.write("Warning: DQ flags data product import 
problem.\n") print "Found %i files."%len(dqFileList) for mf in dqFileList: print mf for myFile in dqFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#2 Veto Investigations") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate survive the veto investigations performed at its time?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") vetoFileList=wikiFileFinder.get_findVetos() if len(vetoFileList) != 1: sys.stdout.write("Warning: Veto flags data product import problem.\n") for myFile in vetoFileList:print myFile for myFile in vetoFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#3 IFO Status") wikiPage.subsubsection("Question") wikiPage.putText("Are the interferometers operating normally with a reasonable level of sensitivity around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") #Add link to Daily Stats if wikiCoinc.time <= endOfS5: statsLink=wikiPage.makeExternalLink("http://blue.ligo-wa.caltech.edu/scirun/S5/DailyStatistics/",\ "S5 Daily Stats Page") else: statsLink="This should be a link to S6 Daily Stats!\n" wikiPage.putText(statsLink) #Link figures of merit #Get link for all members of wikiCoinc wikiPage.putText("Figures of Merit\n") wikiPage.putText("UTC Time of trigger :%s"%(stfu_pipe.gpsTimeToReadableDate(wikiCoinc.time))) if wikiCoinc.time > endOfS5: fomLinks=dict() elems=0 for wikiSngl in wikiCoinc.sngls: if not(wikiSngl.ifo.upper().rstrip().lstrip() == 'V1'): fomLinks[wikiSngl.ifo]=stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo) elems=elems+len(fomLinks[wikiSngl.ifo]) else: for myLabel,myLink,myThumb in stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo): wikiPage.putText("%s\n"%(wikiPage.makeExternalLink(myLink,myLabel))) cols=4 rows=(elems/3)+1 fTable=wikiPage.wikiTable(rows,cols) fTable.data[0]=["IFO,Shift","FOM1","FOM2","FOM3"] currentIndex=0 for myIFOKey in fomLinks.keys(): for label,link,thumb in fomLinks[myIFOKey]: myRow=currentIndex/int(3)+1 myCol=currentIndex%int(3)+1 fTable.data[myRow][0]=label thumbURL=thumb fTable.data[myRow][myCol]="%s"%(wikiPage.linkedRemoteImage(thumb,link)) currentIndex=currentIndex+1 wikiPage.insertTable(fTable) else: wikiPage.putText("Can not automatically fetch S5 FOM links.") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#4 Candidate Appearance") wikiPage.subsubsection("Question") wikiPage.putText("Do the Qscan figures show what we would expect for a gravitational-wave event?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() for sngl in wikiCoinc.sngls: frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'hoft') if not sngl.ifo in frametype: frametype = sngl.ifo + "_" + frametype indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*/%s/*/%s/*index.html"%(frametype,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ 
"*%s*_%s_16.00_spectrogram_whitened.png"\ %(sngl.time,channelName)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened?thumb.png"\ %(sngl.time,channelName)) # #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("GW data channel scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >= 1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >= 1 if enoughImage and enoughIndex: wikiPage.insertQscanTable(imageDict,\ thumbDict,\ indexDict) else: sys.stdout.write("Warning: Candidate appearance plot import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#5 Seismic Plots") wikiPage.subsubsection("Question") wikiPage.putText("Is the seismic activity insignificant around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") # imageDict,indexDict,thumbDict,zValueDict = dict(),dict(),dict(),dict() imageDictAQ,indexDictAQ,thumbDictAQ,zValueDictAQ = dict(),dict(),dict(),dict() filesOmega=wikiFileFinder.get_RDS_R_L1_SEIS() filesAnalyze=wikiFileFinder.get_analyzeQscan_SEIS() for sngl in wikiCoinc.sngls_in_coinc(): indexDict[sngl.ifo],imageDict[sngl.ifo],thumbDict[sngl.ifo],zValueDict[sngl.ifo]=list(),list(),list(),list() indexDictAQ[sngl.ifo],imageDictAQ[sngl.ifo],thumbDictAQ[sngl.ifo],zValueDictAQ[sngl.ifo]=list(),list(),list(),list() frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'rds') if not sngl.ifo in frametype: frametype = sngl.ifo + "_" + frametype if sngl.ifo == "V1": chankey = "Em_SE" else: chankey = "SEI" indexDict[sngl.ifo]=fnmatch.filter(filesOmega,\ "*/%s_*/%s/*index.html"%(frametype,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(filesOmega,\ "*/%s_*/%s/*%s*_512.00_spectrogram_whitened.png"%\ (frametype,sngl.time,chankey)) thumbDict[sngl.ifo]=fnmatch.filter(filesOmega,\ "*/%s_*/%s/*%s*_512.00_spectrogram_whitened?thumb.png"%\ (frametype,sngl.time,chankey)) #Search for corresponding Omega summary.txt file zValueDict[sngl.ifo]=list() for zFile in fnmatch.filter(filesOmega,\ "*/%s_*/%s/*summary.txt"%(frametype,sngl.time)): for chan in wikiFileFinder.__readSummary__(zFile): if chankey in chan[0]: zValueDict[sngl.ifo].append(chan) if len(zValueDict[sngl.ifo]) == 0: sys.stdout.write("Omega scan summary file not or empty for %s. 
...continuing...\n"%sngl.ifo) #Search for analyzeQscan files timeString=str(float(sngl.time)).replace(".","_") indexDictAQ[sngl.ifo]=fnmatch.filter(filesAnalyze,\ "*_%s_%s_*.html"%(sngl.ifo,timeString)) imageDictAQ[sngl.ifo]=fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_%s*_z_scat-unspecified-gpstime.png"\ %(sngl.ifo,timeString,chankey)) thumbDictAQ[sngl.ifo]=fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_%s*_z_scat-unspecified-gpstime_thumb.png"\ %(sngl.ifo,timeString,chankey)) #Load of analyzeQscan z file if available zValueDictAQ[sngl.ifo]=list() for zFile in fnmatch.filter(filesAnalyze,\ "*_%s_%s_*.txt"%(sngl.ifo,timeString)): for chan in wikiFileFinder.__readSummary__(zFile): if chankey in chan[0]: zValueDictAQ[sngl.ifo].append(chan) if len(zValueDictAQ[sngl.ifo]) == 0: sys.stdout.write("AnalyzeQscan summary file not or empty for %s. ...continuing...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Seismic scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: Seismic plots product import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#6 Other environmental causes") wikiPage.subsubsection("Question") wikiPage.putText("Were the environmental disturbances (other than seismic) insignificant at the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict,indexDict,thumbDict,zValueDict = dict(),dict(),dict(),dict() imageDictAQ,indexDictAQ,thumbDictAQ,zValueDictAQ = dict(),dict(),dict(),dict() #Select only PEM channels filesOmega=wikiFileFinder.get_RDS_R_L1() filesAnalyze=wikiFileFinder.get_analyzeQscan_RDS() for sngl in wikiCoinc.sngls_in_coinc(): indexDict[sngl.ifo],imageDict[sngl.ifo],thumbDict[sngl.ifo],zValueDict[sngl.ifo]=list(),list(),list(),list() indexDictAQ[sngl.ifo],imageDictAQ[sngl.ifo],thumbDictAQ[sngl.ifo],zValueDictAQ[sngl.ifo]=list(),list(),list(),list() frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'rds') if not sngl.ifo in frametype: frametype = sngl.ifo + "_" + frametype if sngl.ifo == "V1": chankeyseis = "Em_SE" chankeyenv = "Em_" else: chankeyseis = "SEI" chankeyenv = "PEM" for myFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*html"%(frametype,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*_16.00_spectrogram_whitened.png"%\ (frametype,sngl.time)): if chankeyenv in myFile and not chankeyseis in myFile: imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ 
(frametype,sngl.time)): if chankeyenv in myFile and not chankeyseis in myFile: thumbDict[sngl.ifo].append(myFile) #Search for corresponding Omega summary.txt file zValueDict[sngl.ifo]=list() for zFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*summary.txt"%(frametype,sngl.time)): for chan in wikiFileFinder.__readSummary__(zFile): if chankeyenv in chan[0] and not chankeyseis in chan[0]: zValueDict[sngl.ifo].append(chan) if len(zValueDict[sngl.ifo]) == 0: sys.stdout.write("Omega scan summary file not or empty for %s. ...continuing...\n"%sngl.ifo) #Select associated analyzeQscans timeString=str(float(sngl.time)).replace(".","_") for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if chankeyenv in myFile and not chankeyseis in myFile: imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if chankeyenv in myFile and not chankeyseis in myFile: thumbDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueDictAQ[sngl.ifo]=list() for zFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)): for chan in wikiFileFinder.__readSummary__(zFile): if chankeyenv in chan[0] and not chankeyseis in chan[0]: zValueDictAQ[sngl.ifo].append(chan) if len(zValueDictAQ[sngl.ifo]) == 0: sys.stdout.write("AnalyzeQscan summary file not or empty for %s. ...continuing...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(imageDict[sngl.ifo]) < 1: wikiPage.putText("PEM scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: PEM plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#7 Auxiliary degree of freedom") wikiPage.subsubsection("Question") wikiPage.putText("Were the auxiliary channel transients coincident with the candidate insignificant?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict,indexDict,thumbDict,zValueDict = dict(),dict(),dict(),dict() imageDictAQ,indexDictAQ,thumbDictAQ,zValueDictAQ = dict(),dict(),dict(),dict() #Select only AUX channels filesOmega=wikiFileFinder.get_RDS_R_L1() filesAnalyze=wikiFileFinder.get_analyzeQscan_RDS() for sngl in wikiCoinc.sngls: frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'rds') if not sngl.ifo in frametype: frametype = sngl.ifo + "_" + frametype if sngl.ifo == "V1": chankeyseis = "Em_SE" chankeyenv = "Em_" else: chankeyseis = "SEI" chankeyenv = "PEM" 
indexDict[sngl.ifo],imageDict[sngl.ifo],thumbDict[sngl.ifo],zValueDict[sngl.ifo]=list(),list(),list(),list() indexDictAQ[sngl.ifo],imageDictAQ[sngl.ifo],thumbDictAQ[sngl.ifo],zValueDictAQ[sngl.ifo]=list(),list(),list(),list() for myFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*html"%(frametype,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*_16.00_spectrogram_whitened.png"%\ (frametype,sngl.time)): if not chankeyenv in myFile and not chankeyseis in myFile: imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (frametype,sngl.time)): if not chankeyenv in myFile and not chankeyseis in myFile: thumbDict[sngl.ifo].append(myFile) zValueDict[sngl.ifo]=list() for zFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*summary.txt"%(frametype,sngl.time)): for chan in wikiFileFinder.__readSummary__(zFile): if not chankeyenv in chan[0] and not chankeyseis in chan[0]: zValueDict[sngl.ifo].append(chan) if len(zValueDict[sngl.ifo]) == 0: sys.stdout.write("Omega scan summary file not or empty for %s. ...continuing...\n"%sngl.ifo) #Select associated analyzeQscans timeString=str(float(sngl.time)).replace(".","_") for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if not chankeyenv in myFile and not chankeyseis in myFile: imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if not chankeyenv in myFile and not chankeyseis in myFile: thumbDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueDictAQ[sngl.ifo]=list() for zFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)): for chan in wikiFileFinder.__readSummary__(zFile): if not chankeyenv in chan[0] and not chankeyseis in chan[0]: zValueDictAQ[sngl.ifo].append(chan) if len(zValueDictAQ[sngl.ifo]) == 0: sys.stdout.write("AnalyzeQscan summary file not or empty for %s. 
...continuing...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Other scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: AUX plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#8 Electronic Log Book") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the comments posted by the sci-mons or the operators in the e-log?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiLinkLHOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"H1"), "Hanford eLog") wikiLinkLLOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"L1"), "Livingston eLog") wikiPage.putText("%s\n\n%s\n\n"%(wikiLinkLHOlog,wikiLinkLLOlog)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#9 Glitch Report") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the weekly glitch report?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") if int(wikiCoinc.time) >= endOfS5: wikiLinkGlitch=wikiPage.makeExternalLink( "https://www.lsc-group.phys.uwm.edu/twiki/bin/view/DetChar/GlitchStudies", "Glitch Reports for S6" ) else: wikiLinkGlitch=wikiPage.makeExternalLink( "http://www.lsc-group.phys.uwm.edu/glitch/investigations/s5index.html#shift", "Glitch Reports for S5" ) wikiPage.putText("%s\n"%(wikiLinkGlitch)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#10 Snr versus time") wikiPage.subsubsection("Question") wikiPage.putText("Is this trigger significant in a SNR versus time plot of all triggers in its analysis chunk?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#11 Parameters of the candidate") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate have a high likelihood of being a gravitational-wave according to its parameters?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Effective 
Distance Ratio Test\n") effDList=wikiFileFinder.get_effDRatio() if len(effDList) != 1: sys.stdout.write("Warning: Effective Distance Test import problem.\n") for myFile in effDList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#12 Snr and Chisq") wikiPage.subsubsection("Question") wikiPage.putText("Are the SNR and CHISQ time series consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") # #Put plots SNR and Chi sqr # indexList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*.html") thumbList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_snr-*thumb.png") thumbList.extend(fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_chisq-*thumb.png")) thumbList.sort() indexList=[file2URL.convert(x) for x in indexList] thumbList=[file2URL.convert(x) for x in thumbList] #Two thumb types possible "_thumb.png" or ".thumb.png" imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] ifoCount=len(wikiCoinc.sngls) rowLabel={"SNR":1,"CHISQ":2} rowCount=len(rowLabel) colCount=ifoCount if len(indexList) >= 1: snrTable=wikiPage.wikiTable(rowCount+1,colCount+1) for i,sngl in enumerate(wikiCoinc.sngls): myIndex="" for indexFile in indexList: if indexFile.__contains__("_pipe_%s_FOLLOWUP_"%sngl.ifo): myIndex=indexFile if myIndex=="": snrTable.data[0][i+1]=" %s "%sngl.ifo else: snrTable.data[0][i+1]=wikiPage.makeExternalLink(myIndex,sngl.ifo) for col,sngl in enumerate(wikiCoinc.sngls): for row,label in enumerate(rowLabel.keys()): snrTable.data[row+1][0]=label for k,image in enumerate(imageList): if (image.__contains__("_%s-"%label.lower()) \ and image.__contains__("pipe_%s_FOLLOWUP"%sngl.ifo)): snrTable.data[row+1][col+1]=" %s "%(wikiPage.linkedRemoteImage(thumbList[k],thumbList[k])) wikiPage.insertTable(snrTable) else: sys.stdout.write("Warning: SNR and CHISQ plots not found.\n") wikiPage.putText("SNR and CHISQ plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#13 Template bank veto") wikiPage.subsubsection("Question") wikiPage.putText("Is the bank veto value consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#14 Coherent studies") wikiPage.subsubsection("Question") wikiPage.putText("Are the triggers found in multiple interferometers coherent with each other?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") indexList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),"*.html") if len(indexList) >= 1: myIndex=file2URL.convert(indexList[0]) wikiPage.putText(wikiPage.makeExternalLink(myIndex,\ "%s Coherence Study Results"%(wikiCoinc.ifos))) thumbList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),\ "PLOT_CHIA_%s_snr-squared*thumb.png"%(wikiCoinc.time)) imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] 
rowCount=len(imageList) colCount=1 cohSnrTimeTable=wikiPage.wikiTable(rowCount+1,colCount) cohSnrTimeTable.data[0][0]="%s Coherent SNR Squared Time Series"%(wikiCoinc.ifos) for i,image in enumerate(imageList): cohSnrTimeTable.data[i+1][0]=wikiPage.linkedRemoteImage(image,thumbList[i]) wikiPage.insertTable(cohSnrTimeTable) else: sys.stdout.write("Warning: Coherent plotting jobs not found.\n") wikiPage.putText("Coherent Studies plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#15 Segmentation Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in segmentation?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#16 Calibration Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in calibration that are consistent with systematic uncertainties?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # | cabde339221bbc8a748cfedf8ed7be0711b1e7f9 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/cabde339221bbc8a748cfedf8ed7be0711b1e7f9/makeCheckListWiki.py
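All of the analyzeQscan lookups in these contexts key filenames off the trigger's GPS time with the decimal point swapped for an underscore. The snippet below isolates that pattern with stdlib fnmatch; the filenames are invented for illustration, and only the timeString construction and the glob shape mirror the code above.

import fnmatch

gpstime = 941002443.739
# analyzeQscan products embed the trigger time with '.' replaced by '_'
timeString = str(float(gpstime)).replace(".", "_")  # -> '941002443_739'

filesAnalyze = [  # invented example names
    "L1-analyzeQscan_L1_941002443_739_rds_z_scat-unspecified-gpstime.png",
    "L1-analyzeQscan_L1_941002443_739_rds_z_scat-unspecified-gpstime_thumb.png",
    "L1-analyzeQscan_L1_900000000_000_rds_z_scat-unspecified-gpstime.png",
]
images = fnmatch.filter(filesAnalyze,
                        "*%s-*_%s_*_z_scat-unspecified-gpstime.png" % ("L1", timeString))
assert images == [filesAnalyze[0]]  # only the matching-time full image survives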
if not sngl.ifo in frametype: frametype = sngl.ifo + "_" + frametype | frametype=__patchFrameTypeDef__(frametype,sngl.ifo,sngl.time) | def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.exists(wikiFilename) and maxCount < 15: sys.stdout.write("File %s already exists.\n"%\ os.path.split(wikiFilename)[1]) wikiFilename=wikiFilename+".wiki" maxCount=maxCount+1 sys.stdout.write("Available via browser for wiki upload at %s\n"\ %(file2URL.convert(wikiFilename))) # #Create the wikipage object etc # wikiPage=wiki(wikiFilename) # # Create top two trigger params tables # cTable=wikiPage.wikiTable(2,9) cTable.data=[ ["Trigger Type", "Rank", "FAR", "SNR", "IFOS(Coinc)", "Instruments(Active)", "Coincidence Time (s)", "Total Mass (mSol)", "Chirp Mass (mSol)" ], ["%s"%(wikiCoinc.type), "%s"%(wikiCoinc.rank), "%s"%(wikiCoinc.far), "%s"%(wikiCoinc.snr), "%s"%(wikiCoinc.ifos), "%s"%(wikiCoinc.instruments), "%s"%(wikiCoinc.time), "%s"%(wikiCoinc.mass), "%s"%(wikiCoinc.mchirp) ] ] pTable=wikiPage.wikiTable(len(wikiCoinc.sngls_in_coinc())+1,7) pTable.data[0]=[ "IFO", "GPS Time(s)", "SNR", "CHISQR", "Mass 1", "Mass 2", "Chirp Mass" ] for row,cSngl in enumerate(wikiCoinc.sngls_in_coinc()): pTable.data[row+1]=[ "%s"%(cSngl.ifo), "%s"%(cSngl.time), "%s"%(cSngl.snr), "%s"%(cSngl.chisqr), "%s"%(cSngl.mass1), "%s"%(cSngl.mass2), "%s"%(cSngl.mchirp) ] #Write the tables into the Wiki object wikiPage.putText("Coincident Trigger Event Information: %s\n"\ %(stfu_pipe.gpsTimeToReadableDate(wikiCoinc.time))) wikiPage.insertTable(cTable) wikiPage.putText("Corresponding Coincident Single IFO Trigger Information\n") wikiPage.insertTable(pTable) #Generate a table of contents to appear after candidate params table wikiPage.tableOfContents(3) #Begin including each checklist item as section with subsections wikiPage.section("Follow-up Checklist") #Put each checklist item wikiPage.subsection("Checklist Summary") wikiPage.subsubsection("Does this candidate pass this checklist?") wikiPage.subsubsection("Answer") wikiPage.subsubsection("Relevant Information and Comments") wikiPage.insertHR() # #First real checklist item wikiPage.subsection("#0 False Alarm Probability") wikiPage.subsubsection("Question") wikiPage.putText("What is the false alarm rate associated with this candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") farTable=wikiPage.wikiTable(2,1) farTable.setTableStyle("background-color: yellow; text-align center;") farTable.data[0][0]="False Alarm Rate" farTable.data[1][0]="%s"%(wikiCoinc.far) wikiPage.insertTable(farTable) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#1 Data Quality Flags") wikiPage.subsubsection("Question") wikiPage.putText("Can the data quality flags coincident with this candidate be safely disregarded?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPath=os.path.split(wikiFilename)[0] dqFileList=wikiFileFinder.get_findFlags() if len(dqFileList) != 1: sys.stdout.write("Warning: DQ flags data product import problem.\n") print "Found %i files."%len(dqFileList) for mf in dqFileList: 
print mf for myFile in dqFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#2 Veto Investigations") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate survive the veto investigations performed at its time?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") vetoFileList=wikiFileFinder.get_findVetos() if len(vetoFileList) != 1: sys.stdout.write("Warning: Veto flags data product import problem.\n") for myFile in vetoFileList:print myFile for myFile in vetoFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#3 IFO Status") wikiPage.subsubsection("Question") wikiPage.putText("Are the interferometers operating normally with a reasonable level of sensitivity around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") #Add link to Daily Stats if wikiCoinc.time <= endOfS5: statsLink=wikiPage.makeExternalLink("http://blue.ligo-wa.caltech.edu/scirun/S5/DailyStatistics/",\ "S5 Daily Stats Page") else: statsLink="This should be a link to S6 Daily Stats!\n" wikiPage.putText(statsLink) #Link figures of merit #Get link for all members of wikiCoinc wikiPage.putText("Figures of Merit\n") wikiPage.putText("UTC Time of trigger :%s"%(stfu_pipe.gpsTimeToReadableDate(wikiCoinc.time))) if wikiCoinc.time > endOfS5: fomLinks=dict() elems=0 for wikiSngl in wikiCoinc.sngls: if not(wikiSngl.ifo.upper().rstrip().lstrip() == 'V1'): fomLinks[wikiSngl.ifo]=stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo) elems=elems+len(fomLinks[wikiSngl.ifo]) else: for myLabel,myLink,myThumb in stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo): wikiPage.putText("%s\n"%(wikiPage.makeExternalLink(myLink,myLabel))) cols=4 rows=(elems/3)+1 fTable=wikiPage.wikiTable(rows,cols) fTable.data[0]=["IFO,Shift","FOM1","FOM2","FOM3"] currentIndex=0 for myIFOKey in fomLinks.keys(): for label,link,thumb in fomLinks[myIFOKey]: myRow=currentIndex/int(3)+1 myCol=currentIndex%int(3)+1 fTable.data[myRow][0]=label thumbURL=thumb fTable.data[myRow][myCol]="%s"%(wikiPage.linkedRemoteImage(thumb,link)) currentIndex=currentIndex+1 wikiPage.insertTable(fTable) else: wikiPage.putText("Can not automatically fetch S5 FOM links.") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#4 Candidate Appearance") wikiPage.subsubsection("Question") wikiPage.putText("Do the Qscan figures show what we would expect for a gravitational-wave event?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() for sngl in wikiCoinc.sngls: frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'hoft') if not sngl.ifo in frametype: frametype = sngl.ifo + "_" + frametype indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*/%s/*/%s/*index.html"%(frametype,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened.png"\ %(sngl.time,channelName)) 
thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened?thumb.png"\ %(sngl.time,channelName)) # #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("GW data channel scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >= 1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >= 1 if enoughImage and enoughIndex: wikiPage.insertQscanTable(imageDict,\ thumbDict,\ indexDict) else: sys.stdout.write("Warning: Candidate appearance plot import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#5 Seismic Plots") wikiPage.subsubsection("Question") wikiPage.putText("Is the seismic activity insignificant around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") # imageDict,indexDict,thumbDict,zValueDict = dict(),dict(),dict(),dict() imageDictAQ,indexDictAQ,thumbDictAQ,zValueDictAQ = dict(),dict(),dict(),dict() filesOmega=wikiFileFinder.get_RDS_R_L1_SEIS() filesAnalyze=wikiFileFinder.get_analyzeQscan_SEIS() for sngl in wikiCoinc.sngls_in_coinc(): indexDict[sngl.ifo],imageDict[sngl.ifo],thumbDict[sngl.ifo],zValueDict[sngl.ifo]=list(),list(),list(),list() indexDictAQ[sngl.ifo],imageDictAQ[sngl.ifo],thumbDictAQ[sngl.ifo],zValueDictAQ[sngl.ifo]=list(),list(),list(),list() frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'rds') if not sngl.ifo in frametype: frametype = sngl.ifo + "_" + frametype if sngl.ifo == "V1": chankey = "Em_SE" else: chankey = "SEI" indexDict[sngl.ifo]=fnmatch.filter(filesOmega,\ "*/%s_*/%s/*index.html"%(frametype,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(filesOmega,\ "*/%s_*/%s/*%s*_512.00_spectrogram_whitened.png"%\ (frametype,sngl.time,chankey)) thumbDict[sngl.ifo]=fnmatch.filter(filesOmega,\ "*/%s_*/%s/*%s*_512.00_spectrogram_whitened?thumb.png"%\ (frametype,sngl.time,chankey)) #Search for corresponding Omega summary.txt file zValueDict[sngl.ifo]=list() for zFile in fnmatch.filter(filesOmega,\ "*/%s_*/%s/*summary.txt"%(frametype,sngl.time)): for chan in wikiFileFinder.__readSummary__(zFile): if chankey in chan[0]: zValueDict[sngl.ifo].append(chan) if len(zValueDict[sngl.ifo]) == 0: sys.stdout.write("Omega scan summary file not or empty for %s. 
...continuing...\n"%sngl.ifo) #Search for analyzeQscan files timeString=str(float(sngl.time)).replace(".","_") indexDictAQ[sngl.ifo]=fnmatch.filter(filesAnalyze,\ "*_%s_%s_*.html"%(sngl.ifo,timeString)) imageDictAQ[sngl.ifo]=fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_%s*_z_scat-unspecified-gpstime.png"\ %(sngl.ifo,timeString,chankey)) thumbDictAQ[sngl.ifo]=fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_%s*_z_scat-unspecified-gpstime_thumb.png"\ %(sngl.ifo,timeString,chankey)) #Load of analyzeQscan z file if available zValueDictAQ[sngl.ifo]=list() for zFile in fnmatch.filter(filesAnalyze,\ "*_%s_%s_*.txt"%(sngl.ifo,timeString)): for chan in wikiFileFinder.__readSummary__(zFile): if chankey in chan[0]: zValueDictAQ[sngl.ifo].append(chan) if len(zValueDictAQ[sngl.ifo]) == 0: sys.stdout.write("AnalyzeQscan summary file not or empty for %s. ...continuing...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Seismic scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: Seismic plots product import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#6 Other environmental causes") wikiPage.subsubsection("Question") wikiPage.putText("Were the environmental disturbances (other than seismic) insignificant at the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict,indexDict,thumbDict,zValueDict = dict(),dict(),dict(),dict() imageDictAQ,indexDictAQ,thumbDictAQ,zValueDictAQ = dict(),dict(),dict(),dict() #Select only PEM channels filesOmega=wikiFileFinder.get_RDS_R_L1() filesAnalyze=wikiFileFinder.get_analyzeQscan_RDS() for sngl in wikiCoinc.sngls_in_coinc(): indexDict[sngl.ifo],imageDict[sngl.ifo],thumbDict[sngl.ifo],zValueDict[sngl.ifo]=list(),list(),list(),list() indexDictAQ[sngl.ifo],imageDictAQ[sngl.ifo],thumbDictAQ[sngl.ifo],zValueDictAQ[sngl.ifo]=list(),list(),list(),list() frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'rds') if not sngl.ifo in frametype: frametype = sngl.ifo + "_" + frametype if sngl.ifo == "V1": chankeyseis = "Em_SE" chankeyenv = "Em_" else: chankeyseis = "SEI" chankeyenv = "PEM" for myFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*html"%(frametype,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*_16.00_spectrogram_whitened.png"%\ (frametype,sngl.time)): if chankeyenv in myFile and not chankeyseis in myFile: imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ 
(frametype,sngl.time)): if chankeyenv in myFile and not chankeyseis in myFile: thumbDict[sngl.ifo].append(myFile) #Search for corresponding Omega summary.txt file zValueDict[sngl.ifo]=list() for zFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*summary.txt"%(frametype,sngl.time)): for chan in wikiFileFinder.__readSummary__(zFile): if chankeyenv in chan[0] and not chankeyseis in chan[0]: zValueDict[sngl.ifo].append(chan) if len(zValueDict[sngl.ifo]) == 0: sys.stdout.write("Omega scan summary file not or empty for %s. ...continuing...\n"%sngl.ifo) #Select associated analyzeQscans timeString=str(float(sngl.time)).replace(".","_") for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if chankeyenv in myFile and not chankeyseis in myFile: imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if chankeyenv in myFile and not chankeyseis in myFile: thumbDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueDictAQ[sngl.ifo]=list() for zFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)): for chan in wikiFileFinder.__readSummary__(zFile): if chankeyenv in chan[0] and not chankeyseis in chan[0]: zValueDictAQ[sngl.ifo].append(chan) if len(zValueDictAQ[sngl.ifo]) == 0: sys.stdout.write("AnalyzeQscan summary file not or empty for %s. ...continuing...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(imageDict[sngl.ifo]) < 1: wikiPage.putText("PEM scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: PEM plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#7 Auxiliary degree of freedom") wikiPage.subsubsection("Question") wikiPage.putText("Were the auxiliary channel transients coincident with the candidate insignificant?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict,indexDict,thumbDict,zValueDict = dict(),dict(),dict(),dict() imageDictAQ,indexDictAQ,thumbDictAQ,zValueDictAQ = dict(),dict(),dict(),dict() #Select only AUX channels filesOmega=wikiFileFinder.get_RDS_R_L1() filesAnalyze=wikiFileFinder.get_analyzeQscan_RDS() for sngl in wikiCoinc.sngls: frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'rds') if not sngl.ifo in frametype: frametype = sngl.ifo + "_" + frametype if sngl.ifo == "V1": chankeyseis = "Em_SE" chankeyenv = "Em_" else: chankeyseis = "SEI" chankeyenv = "PEM" 
indexDict[sngl.ifo],imageDict[sngl.ifo],thumbDict[sngl.ifo],zValueDict[sngl.ifo]=list(),list(),list(),list() indexDictAQ[sngl.ifo],imageDictAQ[sngl.ifo],thumbDictAQ[sngl.ifo],zValueDictAQ[sngl.ifo]=list(),list(),list(),list() for myFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*html"%(frametype,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*_16.00_spectrogram_whitened.png"%\ (frametype,sngl.time)): if not chankeyenv in myFile or not chankeyseis in myFile: imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (frametype,sngl.time)): if not chankeyenv in myFile and not chankeyseis in myFile: thumbDict[sngl.ifo].append(myFile) zValueDict[sngl.ifo]=list() for zFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*summary.txt"%(frametype,sngl.time)): for chan in wikiFileFinder.__readSummary__(zFile): if not chankeyenv in chan[0] and not chankeyseis in chan[0]: zValueDict[sngl.ifo].append(chan) if len(zValueDict[sngl.ifo]) == 0: sys.stdout.write("Omega scan summary file not or empty for %s. ...continuing...\n"%sngl.ifo) #Select associated analyzeQscans timeString=str(float(sngl.time)).replace(".","_") for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if not chankeyenv in myFile or not chankeyseis in myFile: imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if not chankeyenv in myFile and not chankeyseis in myFile: thumbDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueDictAQ[sngl.ifo]=list() for zFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)): for chan in wikiFileFinder.__readSummary__(zFile): if not chankeyenv in chan[0] and not chankeyseis in chan[0]: zValueDictAQ[sngl.ifo].append(chan) if len(zValueDictAQ[sngl.ifo]) == 0: sys.stdout.write("AnalyzeQscan summary file not or empty for %s. 
...continuing...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Other scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: AUX plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#8 Electronic Log Book") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the comments posted by the sci-mons or the operators in the e-log?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiLinkLHOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"H1"), "Hanford eLog") wikiLinkLLOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"L1"), "Livingston eLog") wikiPage.putText("%s\n\n%s\n\n"%(wikiLinkLHOlog,wikiLinkLLOlog)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#9 Glitch Report") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the weekly glitch report?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") if int(wikiCoinc.time) >= endOfS5: wikiLinkGlitch=wikiPage.makeExternalLink( "https://www.lsc-group.phys.uwm.edu/twiki/bin/view/DetChar/GlitchStudies", "Glitch Reports for S6" ) else: wikiLinkGlitch=wikiPage.makeExternalLink( "http://www.lsc-group.phys.uwm.edu/glitch/investigations/s5index.html#shift", "Glitch Reports for S5" ) wikiPage.putText("%s\n"%(wikiLinkGlitch)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#10 Snr versus time") wikiPage.subsubsection("Question") wikiPage.putText("Is this trigger significant in a SNR versus time plot of all triggers in its analysis chunk?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#11 Parameters of the candidate") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate have a high likelihood of being a gravitational-wave according to its parameters?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Effective 
Distance Ratio Test\n") effDList=wikiFileFinder.get_effDRatio() if len(effDList) != 1: sys.stdout.write("Warning: Effective Distance Test import problem.\n") for myFile in effDList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#12 Snr and Chisq") wikiPage.subsubsection("Question") wikiPage.putText("Are the SNR and CHISQ time series consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") # #Put plots SNR and Chi sqr # indexList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*.html") thumbList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_snr-*thumb.png") thumbList.extend(fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_chisq-*thumb.png")) thumbList.sort() indexList=[file2URL.convert(x) for x in indexList] thumbList=[file2URL.convert(x) for x in thumbList] #Two thumb types possible "_thumb.png" or ".thumb.png" imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] ifoCount=len(wikiCoinc.sngls) rowLabel={"SNR":1,"CHISQ":2} rowCount=len(rowLabel) colCount=ifoCount if len(indexList) >= 1: snrTable=wikiPage.wikiTable(rowCount+1,colCount+1) for i,sngl in enumerate(wikiCoinc.sngls): myIndex="" for indexFile in indexList: if indexFile.__contains__("_pipe_%s_FOLLOWUP_"%sngl.ifo): myIndex=indexFile if myIndex=="": snrTable.data[0][i+1]=" %s "%sngl.ifo else: snrTable.data[0][i+1]=wikiPage.makeExternalLink(myIndex,sngl.ifo) for col,sngl in enumerate(wikiCoinc.sngls): for row,label in enumerate(rowLabel.keys()): snrTable.data[row+1][0]=label for k,image in enumerate(imageList): if (image.__contains__("_%s-"%label.lower()) \ and image.__contains__("pipe_%s_FOLLOWUP"%sngl.ifo)): snrTable.data[row+1][col+1]=" %s "%(wikiPage.linkedRemoteImage(thumbList[k],thumbList[k])) wikiPage.insertTable(snrTable) else: sys.stdout.write("Warning: SNR and CHISQ plots not found.\n") wikiPage.putText("SNR and CHISQ plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#13 Template bank veto") wikiPage.subsubsection("Question") wikiPage.putText("Is the bank veto value consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#14 Coherent studies") wikiPage.subsubsection("Question") wikiPage.putText("Are the triggers found in multiple interferometers coherent with each other?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") indexList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),"*.html") if len(indexList) >= 1: myIndex=file2URL.convert(indexList[0]) wikiPage.putText(wikiPage.makeExternalLink(myIndex,\ "%s Coherence Study Results"%(wikiCoinc.ifos))) thumbList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),\ "PLOT_CHIA_%s_snr-squared*thumb.png"%(wikiCoinc.time)) imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] 
rowCount=len(imageList) colCount=1 cohSnrTimeTable=wikiPage.wikiTable(rowCount+1,colCount) cohSnrTimeTable.data[0][0]="%s Coherent SNR Squared Time Series"%(wikiCoinc.ifos) for i,image in enumerate(imageList): cohSnrTimeTable.data[i+1][0]=wikiPage.linkedRemoteImage(image,thumbList[i]) wikiPage.insertTable(cohSnrTimeTable) else: sys.stdout.write("Warning: Coherent plotting jobs not found.\n") wikiPage.putText("Coherent Studies plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#15 Segmentation Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in segmentation?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#16 Calibration Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in calibration that are consistent with systematic uncertainties?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # | cabde339221bbc8a748cfedf8ed7be0711b1e7f9 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/cabde339221bbc8a748cfedf8ed7be0711b1e7f9/makeCheckListWiki.py
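The record above (and the truncated record at the end of this dump, which carries the same rem/add pair) swaps the inline IFO prefixing of the frame type for a call to __patchFrameTypeDef__, whose definition is not part of the record. A minimal sketch consistent with the removed one-liner follows; the body is an assumption, since the record only shows the call site, and treating the gpstime argument as a hook for run-dependent prefixing is likewise guessed from the fact that sngl.time is passed in.

def __patchFrameTypeDef__(frametype=None, ifo=None, gpstime=None):
    """
    Sketch of the helper named in the 'add' column, assuming it
    subsumes the removed inline logic: prefix the frame type with
    the IFO name when the name is not already present.  The real
    helper also receives the trigger GPS time (sngl.time at the
    call site), presumably so the prefixing rule can differ
    between science runs; that behaviour is not recoverable from
    this record and is omitted here.
    """
    if frametype is None or ifo is None:
        return frametype
    if ifo not in frametype:
        return "%s_%s" % (ifo, frametype)
    return frametype

Under this sketch, __patchFrameTypeDef__("R", "V1", 960000000) returns "V1_R" for an arbitrary GPS time, matching what the removed line would have produced.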
sys.stdout.write("Omega scan summary file not or empty for %s. ...continuing...\n"%sngl.ifo) | sys.stdout.write("Omega scan summary file not found or seen empty for %s. ...continuing...\n"%sngl.ifo) | def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.exists(wikiFilename) and maxCount < 15: sys.stdout.write("File %s already exists.\n"%\ os.path.split(wikiFilename)[1]) wikiFilename=wikiFilename+".wiki" maxCount=maxCount+1 sys.stdout.write("Available via browser for wiki upload at %s\n"\ %(file2URL.convert(wikiFilename))) # #Create the wikipage object etc # wikiPage=wiki(wikiFilename) # # Create top two trigger params tables # cTable=wikiPage.wikiTable(2,9) cTable.data=[ ["Trigger Type", "Rank", "FAR", "SNR", "IFOS(Coinc)", "Instruments(Active)", "Coincidence Time (s)", "Total Mass (mSol)", "Chirp Mass (mSol)" ], ["%s"%(wikiCoinc.type), "%s"%(wikiCoinc.rank), "%s"%(wikiCoinc.far), "%s"%(wikiCoinc.snr), "%s"%(wikiCoinc.ifos), "%s"%(wikiCoinc.instruments), "%s"%(wikiCoinc.time), "%s"%(wikiCoinc.mass), "%s"%(wikiCoinc.mchirp) ] ] pTable=wikiPage.wikiTable(len(wikiCoinc.sngls_in_coinc())+1,7) pTable.data[0]=[ "IFO", "GPS Time(s)", "SNR", "CHISQR", "Mass 1", "Mass 2", "Chirp Mass" ] for row,cSngl in enumerate(wikiCoinc.sngls_in_coinc()): pTable.data[row+1]=[ "%s"%(cSngl.ifo), "%s"%(cSngl.time), "%s"%(cSngl.snr), "%s"%(cSngl.chisqr), "%s"%(cSngl.mass1), "%s"%(cSngl.mass2), "%s"%(cSngl.mchirp) ] #Write the tables into the Wiki object wikiPage.putText("Coincident Trigger Event Information: %s\n"\ %(stfu_pipe.gpsTimeToReadableDate(wikiCoinc.time))) wikiPage.insertTable(cTable) wikiPage.putText("Corresponding Coincident Single IFO Trigger Information\n") wikiPage.insertTable(pTable) #Generate a table of contents to appear after candidate params table wikiPage.tableOfContents(3) #Begin including each checklist item as section with subsections wikiPage.section("Follow-up Checklist") #Put each checklist item wikiPage.subsection("Checklist Summary") wikiPage.subsubsection("Does this candidate pass this checklist?") wikiPage.subsubsection("Answer") wikiPage.subsubsection("Relevant Information and Comments") wikiPage.insertHR() # #First real checklist item wikiPage.subsection("#0 False Alarm Probability") wikiPage.subsubsection("Question") wikiPage.putText("What is the false alarm rate associated with this candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") farTable=wikiPage.wikiTable(2,1) farTable.setTableStyle("background-color: yellow; text-align center;") farTable.data[0][0]="False Alarm Rate" farTable.data[1][0]="%s"%(wikiCoinc.far) wikiPage.insertTable(farTable) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#1 Data Quality Flags") wikiPage.subsubsection("Question") wikiPage.putText("Can the data quality flags coincident with this candidate be safely disregarded?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPath=os.path.split(wikiFilename)[0] dqFileList=wikiFileFinder.get_findFlags() if len(dqFileList) != 1: sys.stdout.write("Warning: DQ flags data product import 
problem.\n") print "Found %i files."%len(dqFileList) for mf in dqFileList: print mf for myFile in dqFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#2 Veto Investigations") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate survive the veto investigations performed at its time?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") vetoFileList=wikiFileFinder.get_findVetos() if len(vetoFileList) != 1: sys.stdout.write("Warning: Veto flags data product import problem.\n") for myFile in vetoFileList:print myFile for myFile in vetoFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#3 IFO Status") wikiPage.subsubsection("Question") wikiPage.putText("Are the interferometers operating normally with a reasonable level of sensitivity around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") #Add link to Daily Stats if wikiCoinc.time <= endOfS5: statsLink=wikiPage.makeExternalLink("http://blue.ligo-wa.caltech.edu/scirun/S5/DailyStatistics/",\ "S5 Daily Stats Page") else: statsLink="This should be a link to S6 Daily Stats!\n" wikiPage.putText(statsLink) #Link figures of merit #Get link for all members of wikiCoinc wikiPage.putText("Figures of Merit\n") wikiPage.putText("UTC Time of trigger :%s"%(stfu_pipe.gpsTimeToReadableDate(wikiCoinc.time))) if wikiCoinc.time > endOfS5: fomLinks=dict() elems=0 for wikiSngl in wikiCoinc.sngls: if not(wikiSngl.ifo.upper().rstrip().lstrip() == 'V1'): fomLinks[wikiSngl.ifo]=stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo) elems=elems+len(fomLinks[wikiSngl.ifo]) else: for myLabel,myLink,myThumb in stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo): wikiPage.putText("%s\n"%(wikiPage.makeExternalLink(myLink,myLabel))) cols=4 rows=(elems/3)+1 fTable=wikiPage.wikiTable(rows,cols) fTable.data[0]=["IFO,Shift","FOM1","FOM2","FOM3"] currentIndex=0 for myIFOKey in fomLinks.keys(): for label,link,thumb in fomLinks[myIFOKey]: myRow=currentIndex/int(3)+1 myCol=currentIndex%int(3)+1 fTable.data[myRow][0]=label thumbURL=thumb fTable.data[myRow][myCol]="%s"%(wikiPage.linkedRemoteImage(thumb,link)) currentIndex=currentIndex+1 wikiPage.insertTable(fTable) else: wikiPage.putText("Can not automatically fetch S5 FOM links.") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#4 Candidate Appearance") wikiPage.subsubsection("Question") wikiPage.putText("Do the Qscan figures show what we would expect for a gravitational-wave event?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() for sngl in wikiCoinc.sngls: frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'hoft') if not sngl.ifo in frametype: frametype = sngl.ifo + "_" + frametype indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*/%s/*/%s/*index.html"%(frametype,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ 
"*%s*_%s_16.00_spectrogram_whitened.png"\ %(sngl.time,channelName)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened?thumb.png"\ %(sngl.time,channelName)) # #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("GW data channel scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >= 1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >= 1 if enoughImage and enoughIndex: wikiPage.insertQscanTable(imageDict,\ thumbDict,\ indexDict) else: sys.stdout.write("Warning: Candidate appearance plot import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#5 Seismic Plots") wikiPage.subsubsection("Question") wikiPage.putText("Is the seismic activity insignificant around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") # imageDict,indexDict,thumbDict,zValueDict = dict(),dict(),dict(),dict() imageDictAQ,indexDictAQ,thumbDictAQ,zValueDictAQ = dict(),dict(),dict(),dict() filesOmega=wikiFileFinder.get_RDS_R_L1_SEIS() filesAnalyze=wikiFileFinder.get_analyzeQscan_SEIS() for sngl in wikiCoinc.sngls_in_coinc(): indexDict[sngl.ifo],imageDict[sngl.ifo],thumbDict[sngl.ifo],zValueDict[sngl.ifo]=list(),list(),list(),list() indexDictAQ[sngl.ifo],imageDictAQ[sngl.ifo],thumbDictAQ[sngl.ifo],zValueDictAQ[sngl.ifo]=list(),list(),list(),list() frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'rds') if not sngl.ifo in frametype: frametype = sngl.ifo + "_" + frametype if sngl.ifo == "V1": chankey = "Em_SE" else: chankey = "SEI" indexDict[sngl.ifo]=fnmatch.filter(filesOmega,\ "*/%s_*/%s/*index.html"%(frametype,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(filesOmega,\ "*/%s_*/%s/*%s*_512.00_spectrogram_whitened.png"%\ (frametype,sngl.time,chankey)) thumbDict[sngl.ifo]=fnmatch.filter(filesOmega,\ "*/%s_*/%s/*%s*_512.00_spectrogram_whitened?thumb.png"%\ (frametype,sngl.time,chankey)) #Search for corresponding Omega summary.txt file zValueDict[sngl.ifo]=list() for zFile in fnmatch.filter(filesOmega,\ "*/%s_*/%s/*summary.txt"%(frametype,sngl.time)): for chan in wikiFileFinder.__readSummary__(zFile): if chankey in chan[0]: zValueDict[sngl.ifo].append(chan) if len(zValueDict[sngl.ifo]) == 0: sys.stdout.write("Omega scan summary file not or empty for %s. 
...continuing...\n"%sngl.ifo) #Search for analyzeQscan files timeString=str(float(sngl.time)).replace(".","_") indexDictAQ[sngl.ifo]=fnmatch.filter(filesAnalyze,\ "*_%s_%s_*.html"%(sngl.ifo,timeString)) imageDictAQ[sngl.ifo]=fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_%s*_z_scat-unspecified-gpstime.png"\ %(sngl.ifo,timeString,chankey)) thumbDictAQ[sngl.ifo]=fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_%s*_z_scat-unspecified-gpstime_thumb.png"\ %(sngl.ifo,timeString,chankey)) #Load of analyzeQscan z file if available zValueDictAQ[sngl.ifo]=list() for zFile in fnmatch.filter(filesAnalyze,\ "*_%s_%s_*.txt"%(sngl.ifo,timeString)): for chan in wikiFileFinder.__readSummary__(zFile): if chankey in chan[0]: zValueDictAQ[sngl.ifo].append(chan) if len(zValueDictAQ[sngl.ifo]) == 0: sys.stdout.write("AnalyzeQscan summary file not or empty for %s. ...continuing...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Seismic scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: Seismic plots product import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#6 Other environmental causes") wikiPage.subsubsection("Question") wikiPage.putText("Were the environmental disturbances (other than seismic) insignificant at the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict,indexDict,thumbDict,zValueDict = dict(),dict(),dict(),dict() imageDictAQ,indexDictAQ,thumbDictAQ,zValueDictAQ = dict(),dict(),dict(),dict() #Select only PEM channels filesOmega=wikiFileFinder.get_RDS_R_L1() filesAnalyze=wikiFileFinder.get_analyzeQscan_RDS() for sngl in wikiCoinc.sngls_in_coinc(): indexDict[sngl.ifo],imageDict[sngl.ifo],thumbDict[sngl.ifo],zValueDict[sngl.ifo]=list(),list(),list(),list() indexDictAQ[sngl.ifo],imageDictAQ[sngl.ifo],thumbDictAQ[sngl.ifo],zValueDictAQ[sngl.ifo]=list(),list(),list(),list() frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'rds') if not sngl.ifo in frametype: frametype = sngl.ifo + "_" + frametype if sngl.ifo == "V1": chankeyseis = "Em_SE" chankeyenv = "Em_" else: chankeyseis = "SEI" chankeyenv = "PEM" for myFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*html"%(frametype,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*_16.00_spectrogram_whitened.png"%\ (frametype,sngl.time)): if chankeyenv in myFile and not chankeyseis in myFile: imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ 
(frametype,sngl.time)): if chankeyenv in myFile and not chankeyseis in myFile: thumbDict[sngl.ifo].append(myFile) #Search for corresponding Omega summary.txt file zValueDict[sngl.ifo]=list() for zFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*summary.txt"%(frametype,sngl.time)): for chan in wikiFileFinder.__readSummary__(zFile): if chankeyenv in chan[0] and not chankeyseis in chan[0]: zValueDict[sngl.ifo].append(chan) if len(zValueDict[sngl.ifo]) == 0: sys.stdout.write("Omega scan summary file not or empty for %s. ...continuing...\n"%sngl.ifo) #Select associated analyzeQscans timeString=str(float(sngl.time)).replace(".","_") for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if chankeyenv in myFile and not chankeyseis in myFile: imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if chankeyenv in myFile and not chankeyseis in myFile: thumbDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueDictAQ[sngl.ifo]=list() for zFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)): for chan in wikiFileFinder.__readSummary__(zFile): if chankeyenv in chan[0] and not chankeyseis in chan[0]: zValueDictAQ[sngl.ifo].append(chan) if len(zValueDictAQ[sngl.ifo]) == 0: sys.stdout.write("AnalyzeQscan summary file not or empty for %s. ...continuing...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(imageDict[sngl.ifo]) < 1: wikiPage.putText("PEM scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: PEM plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#7 Auxiliary degree of freedom") wikiPage.subsubsection("Question") wikiPage.putText("Were the auxiliary channel transients coincident with the candidate insignificant?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict,indexDict,thumbDict,zValueDict = dict(),dict(),dict(),dict() imageDictAQ,indexDictAQ,thumbDictAQ,zValueDictAQ = dict(),dict(),dict(),dict() #Select only AUX channels filesOmega=wikiFileFinder.get_RDS_R_L1() filesAnalyze=wikiFileFinder.get_analyzeQscan_RDS() for sngl in wikiCoinc.sngls: frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'rds') if not sngl.ifo in frametype: frametype = sngl.ifo + "_" + frametype if sngl.ifo == "V1": chankeyseis = "Em_SE" chankeyenv = "Em_" else: chankeyseis = "SEI" chankeyenv = "PEM" 
indexDict[sngl.ifo],imageDict[sngl.ifo],thumbDict[sngl.ifo],zValueDict[sngl.ifo]=list(),list(),list(),list() indexDictAQ[sngl.ifo],imageDictAQ[sngl.ifo],thumbDictAQ[sngl.ifo],zValueDictAQ[sngl.ifo]=list(),list(),list(),list() for myFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*html"%(frametype,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*_16.00_spectrogram_whitened.png"%\ (frametype,sngl.time)): if not chankeyenv in myFile or not chankeyseis in myFile: imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (frametype,sngl.time)): if not chankeyenv in myFile and not chankeyseis in myFile: thumbDict[sngl.ifo].append(myFile) zValueDict[sngl.ifo]=list() for zFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*summary.txt"%(frametype,sngl.time)): for chan in wikiFileFinder.__readSummary__(zFile): if not chankeyenv in chan[0] and not chankeyseis in chan[0]: zValueDict[sngl.ifo].append(chan) if len(zValueDict[sngl.ifo]) == 0: sys.stdout.write("Omega scan summary file not or empty for %s. ...continuing...\n"%sngl.ifo) #Select associated analyzeQscans timeString=str(float(sngl.time)).replace(".","_") for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if not chankeyenv in myFile or not chankeyseis in myFile: imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if not chankeyenv in myFile and not chankeyseis in myFile: thumbDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueDictAQ[sngl.ifo]=list() for zFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)): for chan in wikiFileFinder.__readSummary__(zFile): if not chankeyenv in chan[0] and not chankeyseis in chan[0]: zValueDictAQ[sngl.ifo].append(chan) if len(zValueDictAQ[sngl.ifo]) == 0: sys.stdout.write("AnalyzeQscan summary file not or empty for %s. 
...continuing...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Other scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: AUX plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#8 Electronic Log Book") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the comments posted by the sci-mons or the operators in the e-log?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiLinkLHOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"H1"), "Hanford eLog") wikiLinkLLOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"L1"), "Livingston eLog") wikiPage.putText("%s\n\n%s\n\n"%(wikiLinkLHOlog,wikiLinkLLOlog)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#9 Glitch Report") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the weekly glitch report?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") if int(wikiCoinc.time) >= endOfS5: wikiLinkGlitch=wikiPage.makeExternalLink( "https://www.lsc-group.phys.uwm.edu/twiki/bin/view/DetChar/GlitchStudies", "Glitch Reports for S6" ) else: wikiLinkGlitch=wikiPage.makeExternalLink( "http://www.lsc-group.phys.uwm.edu/glitch/investigations/s5index.html#shift", "Glitch Reports for S5" ) wikiPage.putText("%s\n"%(wikiLinkGlitch)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#10 Snr versus time") wikiPage.subsubsection("Question") wikiPage.putText("Is this trigger significant in a SNR versus time plot of all triggers in its analysis chunk?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#11 Parameters of the candidate") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate have a high likelihood of being a gravitational-wave according to its parameters?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Effective 
Distance Ratio Test\n") effDList=wikiFileFinder.get_effDRatio() if len(effDList) != 1: sys.stdout.write("Warning: Effective Distance Test import problem.\n") for myFile in effDList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#12 Snr and Chisq") wikiPage.subsubsection("Question") wikiPage.putText("Are the SNR and CHISQ time series consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") # #Put plots SNR and Chi sqr # indexList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*.html") thumbList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_snr-*thumb.png") thumbList.extend(fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_chisq-*thumb.png")) thumbList.sort() indexList=[file2URL.convert(x) for x in indexList] thumbList=[file2URL.convert(x) for x in thumbList] #Two thumb types possible "_thumb.png" or ".thumb.png" imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] ifoCount=len(wikiCoinc.sngls) rowLabel={"SNR":1,"CHISQ":2} rowCount=len(rowLabel) colCount=ifoCount if len(indexList) >= 1: snrTable=wikiPage.wikiTable(rowCount+1,colCount+1) for i,sngl in enumerate(wikiCoinc.sngls): myIndex="" for indexFile in indexList: if indexFile.__contains__("_pipe_%s_FOLLOWUP_"%sngl.ifo): myIndex=indexFile if myIndex=="": snrTable.data[0][i+1]=" %s "%sngl.ifo else: snrTable.data[0][i+1]=wikiPage.makeExternalLink(myIndex,sngl.ifo) for col,sngl in enumerate(wikiCoinc.sngls): for row,label in enumerate(rowLabel.keys()): snrTable.data[row+1][0]=label for k,image in enumerate(imageList): if (image.__contains__("_%s-"%label.lower()) \ and image.__contains__("pipe_%s_FOLLOWUP"%sngl.ifo)): snrTable.data[row+1][col+1]=" %s "%(wikiPage.linkedRemoteImage(thumbList[k],thumbList[k])) wikiPage.insertTable(snrTable) else: sys.stdout.write("Warning: SNR and CHISQ plots not found.\n") wikiPage.putText("SNR and CHISQ plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#13 Template bank veto") wikiPage.subsubsection("Question") wikiPage.putText("Is the bank veto value consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#14 Coherent studies") wikiPage.subsubsection("Question") wikiPage.putText("Are the triggers found in multiple interferometers coherent with each other?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") indexList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),"*.html") if len(indexList) >= 1: myIndex=file2URL.convert(indexList[0]) wikiPage.putText(wikiPage.makeExternalLink(myIndex,\ "%s Coherence Study Results"%(wikiCoinc.ifos))) thumbList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),\ "PLOT_CHIA_%s_snr-squared*thumb.png"%(wikiCoinc.time)) imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] 
rowCount=len(imageList) colCount=1 cohSnrTimeTable=wikiPage.wikiTable(rowCount+1,colCount) cohSnrTimeTable.data[0][0]="%s Coherent SNR Squared Time Series"%(wikiCoinc.ifos) for i,image in enumerate(imageList): cohSnrTimeTable.data[i+1][0]=wikiPage.linkedRemoteImage(image,thumbList[i]) wikiPage.insertTable(cohSnrTimeTable) else: sys.stdout.write("Warning: Coherent plotting jobs not found.\n") wikiPage.putText("Coherent Studies plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#15 Segmentation Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in segmentation?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#16 Calibration Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in calibration that are consistent with systematic uncertainties?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # | cabde339221bbc8a748cfedf8ed7be0711b1e7f9 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/cabde339221bbc8a748cfedf8ed7be0711b1e7f9/makeCheckListWiki.py
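Each row of this dump pairs a removed snippet, its replacement, the enclosing source context, and a commit hash plus file path. The record ending here is a pure message fix: it replaces the garbled status line "Omega scan summary file not or empty for %s." with "Omega scan summary file not found or seen empty for %s." Rebuilding the patched context from a row takes one guarded substitution. The sketch below uses apply_record as a hypothetical name for that step; rewriting only the first occurrence assumes one record per hunk, which is suggested, though not guaranteed, by the same rem/add pair appearing as two separate records in this dump.

def apply_record(rem, add, context):
    """
    Sketch: materialize the patched source for one record.  The
    removed snippet must occur verbatim in the context (true for
    these whitespace-flattened rows); only the first occurrence
    is rewritten, on the assumption that one record corresponds
    to one hunk of the underlying commit.
    """
    if rem not in context:
        raise ValueError("removed snippet not found in context")
    return context.replace(rem, add, 1)

Applied to the record above, apply_record rewrites the first of the flattened "not or empty" messages in prepareChecklist, consistent with the one-hunk-per-row reading.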
if not sngl.ifo in frametype: frametype = sngl.ifo + "_" + frametype | frametype=__patchFrameTypeDef__(frametype,sngl.ifo,sngl.time) | def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.exists(wikiFilename) and maxCount < 15: sys.stdout.write("File %s already exists.\n"%\ os.path.split(wikiFilename)[1]) wikiFilename=wikiFilename+".wiki" maxCount=maxCount+1 sys.stdout.write("Available via browser for wiki upload at %s\n"\ %(file2URL.convert(wikiFilename))) # #Create the wikipage object etc # wikiPage=wiki(wikiFilename) # # Create top two trigger params tables # cTable=wikiPage.wikiTable(2,9) cTable.data=[ ["Trigger Type", "Rank", "FAR", "SNR", "IFOS(Coinc)", "Instruments(Active)", "Coincidence Time (s)", "Total Mass (mSol)", "Chirp Mass (mSol)" ], ["%s"%(wikiCoinc.type), "%s"%(wikiCoinc.rank), "%s"%(wikiCoinc.far), "%s"%(wikiCoinc.snr), "%s"%(wikiCoinc.ifos), "%s"%(wikiCoinc.instruments), "%s"%(wikiCoinc.time), "%s"%(wikiCoinc.mass), "%s"%(wikiCoinc.mchirp) ] ] pTable=wikiPage.wikiTable(len(wikiCoinc.sngls_in_coinc())+1,7) pTable.data[0]=[ "IFO", "GPS Time(s)", "SNR", "CHISQR", "Mass 1", "Mass 2", "Chirp Mass" ] for row,cSngl in enumerate(wikiCoinc.sngls_in_coinc()): pTable.data[row+1]=[ "%s"%(cSngl.ifo), "%s"%(cSngl.time), "%s"%(cSngl.snr), "%s"%(cSngl.chisqr), "%s"%(cSngl.mass1), "%s"%(cSngl.mass2), "%s"%(cSngl.mchirp) ] #Write the tables into the Wiki object wikiPage.putText("Coincident Trigger Event Information: %s\n"\ %(stfu_pipe.gpsTimeToReadableDate(wikiCoinc.time))) wikiPage.insertTable(cTable) wikiPage.putText("Corresponding Coincident Single IFO Trigger Information\n") wikiPage.insertTable(pTable) #Generate a table of contents to appear after candidate params table wikiPage.tableOfContents(3) #Begin including each checklist item as section with subsections wikiPage.section("Follow-up Checklist") #Put each checklist item wikiPage.subsection("Checklist Summary") wikiPage.subsubsection("Does this candidate pass this checklist?") wikiPage.subsubsection("Answer") wikiPage.subsubsection("Relevant Information and Comments") wikiPage.insertHR() # #First real checklist item wikiPage.subsection("#0 False Alarm Probability") wikiPage.subsubsection("Question") wikiPage.putText("What is the false alarm rate associated with this candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") farTable=wikiPage.wikiTable(2,1) farTable.setTableStyle("background-color: yellow; text-align center;") farTable.data[0][0]="False Alarm Rate" farTable.data[1][0]="%s"%(wikiCoinc.far) wikiPage.insertTable(farTable) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#1 Data Quality Flags") wikiPage.subsubsection("Question") wikiPage.putText("Can the data quality flags coincident with this candidate be safely disregarded?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPath=os.path.split(wikiFilename)[0] dqFileList=wikiFileFinder.get_findFlags() if len(dqFileList) != 1: sys.stdout.write("Warning: DQ flags data product import problem.\n") print "Found %i files."%len(dqFileList) for mf in dqFileList: 
print mf for myFile in dqFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#2 Veto Investigations") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate survive the veto investigations performed at its time?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") vetoFileList=wikiFileFinder.get_findVetos() if len(vetoFileList) != 1: sys.stdout.write("Warning: Veto flags data product import problem.\n") for myFile in vetoFileList:print myFile for myFile in vetoFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#3 IFO Status") wikiPage.subsubsection("Question") wikiPage.putText("Are the interferometers operating normally with a reasonable level of sensitivity around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") #Add link to Daily Stats if wikiCoinc.time <= endOfS5: statsLink=wikiPage.makeExternalLink("http://blue.ligo-wa.caltech.edu/scirun/S5/DailyStatistics/",\ "S5 Daily Stats Page") else: statsLink="This should be a link to S6 Daily Stats!\n" wikiPage.putText(statsLink) #Link figures of merit #Get link for all members of wikiCoinc wikiPage.putText("Figures of Merit\n") wikiPage.putText("UTC Time of trigger :%s"%(stfu_pipe.gpsTimeToReadableDate(wikiCoinc.time))) if wikiCoinc.time > endOfS5: fomLinks=dict() elems=0 for wikiSngl in wikiCoinc.sngls: if not(wikiSngl.ifo.upper().rstrip().lstrip() == 'V1'): fomLinks[wikiSngl.ifo]=stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo) elems=elems+len(fomLinks[wikiSngl.ifo]) else: for myLabel,myLink,myThumb in stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo): wikiPage.putText("%s\n"%(wikiPage.makeExternalLink(myLink,myLabel))) cols=4 rows=(elems/3)+1 fTable=wikiPage.wikiTable(rows,cols) fTable.data[0]=["IFO,Shift","FOM1","FOM2","FOM3"] currentIndex=0 for myIFOKey in fomLinks.keys(): for label,link,thumb in fomLinks[myIFOKey]: myRow=currentIndex/int(3)+1 myCol=currentIndex%int(3)+1 fTable.data[myRow][0]=label thumbURL=thumb fTable.data[myRow][myCol]="%s"%(wikiPage.linkedRemoteImage(thumb,link)) currentIndex=currentIndex+1 wikiPage.insertTable(fTable) else: wikiPage.putText("Can not automatically fetch S5 FOM links.") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#4 Candidate Appearance") wikiPage.subsubsection("Question") wikiPage.putText("Do the Qscan figures show what we would expect for a gravitational-wave event?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() for sngl in wikiCoinc.sngls: frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'hoft') if not sngl.ifo in frametype: frametype = sngl.ifo + "_" + frametype indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*/%s/*/%s/*index.html"%(frametype,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened.png"\ %(sngl.time,channelName)) 
thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened?thumb.png"\ %(sngl.time,channelName)) # #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("GW data channel scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >= 1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >= 1 if enoughImage and enoughIndex: wikiPage.insertQscanTable(imageDict,\ thumbDict,\ indexDict) else: sys.stdout.write("Warning: Candidate appearance plot import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#5 Seismic Plots") wikiPage.subsubsection("Question") wikiPage.putText("Is the seismic activity insignificant around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") # imageDict,indexDict,thumbDict,zValueDict = dict(),dict(),dict(),dict() imageDictAQ,indexDictAQ,thumbDictAQ,zValueDictAQ = dict(),dict(),dict(),dict() filesOmega=wikiFileFinder.get_RDS_R_L1_SEIS() filesAnalyze=wikiFileFinder.get_analyzeQscan_SEIS() for sngl in wikiCoinc.sngls_in_coinc(): indexDict[sngl.ifo],imageDict[sngl.ifo],thumbDict[sngl.ifo],zValueDict[sngl.ifo]=list(),list(),list(),list() indexDictAQ[sngl.ifo],imageDictAQ[sngl.ifo],thumbDictAQ[sngl.ifo],zValueDictAQ[sngl.ifo]=list(),list(),list(),list() frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'rds') if not sngl.ifo in frametype: frametype = sngl.ifo + "_" + frametype if sngl.ifo == "V1": chankey = "Em_SE" else: chankey = "SEI" indexDict[sngl.ifo]=fnmatch.filter(filesOmega,\ "*/%s_*/%s/*index.html"%(frametype,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(filesOmega,\ "*/%s_*/%s/*%s*_512.00_spectrogram_whitened.png"%\ (frametype,sngl.time,chankey)) thumbDict[sngl.ifo]=fnmatch.filter(filesOmega,\ "*/%s_*/%s/*%s*_512.00_spectrogram_whitened?thumb.png"%\ (frametype,sngl.time,chankey)) #Search for corresponding Omega summary.txt file zValueDict[sngl.ifo]=list() for zFile in fnmatch.filter(filesOmega,\ "*/%s_*/%s/*summary.txt"%(frametype,sngl.time)): for chan in wikiFileFinder.__readSummary__(zFile): if chankey in chan[0]: zValueDict[sngl.ifo].append(chan) if len(zValueDict[sngl.ifo]) == 0: sys.stdout.write("Omega scan summary file not or empty for %s. 
...continuing...\n"%sngl.ifo) #Search for analyzeQscan files timeString=str(float(sngl.time)).replace(".","_") indexDictAQ[sngl.ifo]=fnmatch.filter(filesAnalyze,\ "*_%s_%s_*.html"%(sngl.ifo,timeString)) imageDictAQ[sngl.ifo]=fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_%s*_z_scat-unspecified-gpstime.png"\ %(sngl.ifo,timeString,chankey)) thumbDictAQ[sngl.ifo]=fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_%s*_z_scat-unspecified-gpstime_thumb.png"\ %(sngl.ifo,timeString,chankey)) #Load of analyzeQscan z file if available zValueDictAQ[sngl.ifo]=list() for zFile in fnmatch.filter(filesAnalyze,\ "*_%s_%s_*.txt"%(sngl.ifo,timeString)): for chan in wikiFileFinder.__readSummary__(zFile): if chankey in chan[0]: zValueDictAQ[sngl.ifo].append(chan) if len(zValueDictAQ[sngl.ifo]) == 0: sys.stdout.write("AnalyzeQscan summary file not or empty for %s. ...continuing...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Seismic scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: Seismic plots product import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#6 Other environmental causes") wikiPage.subsubsection("Question") wikiPage.putText("Were the environmental disturbances (other than seismic) insignificant at the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict,indexDict,thumbDict,zValueDict = dict(),dict(),dict(),dict() imageDictAQ,indexDictAQ,thumbDictAQ,zValueDictAQ = dict(),dict(),dict(),dict() #Select only PEM channels filesOmega=wikiFileFinder.get_RDS_R_L1() filesAnalyze=wikiFileFinder.get_analyzeQscan_RDS() for sngl in wikiCoinc.sngls_in_coinc(): indexDict[sngl.ifo],imageDict[sngl.ifo],thumbDict[sngl.ifo],zValueDict[sngl.ifo]=list(),list(),list(),list() indexDictAQ[sngl.ifo],imageDictAQ[sngl.ifo],thumbDictAQ[sngl.ifo],zValueDictAQ[sngl.ifo]=list(),list(),list(),list() frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'rds') if not sngl.ifo in frametype: frametype = sngl.ifo + "_" + frametype if sngl.ifo == "V1": chankeyseis = "Em_SE" chankeyenv = "Em_" else: chankeyseis = "SEI" chankeyenv = "PEM" for myFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*html"%(frametype,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*_16.00_spectrogram_whitened.png"%\ (frametype,sngl.time)): if chankeyenv in myFile and not chankeyseis in myFile: imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ 
(frametype,sngl.time)): if chankeyenv in myFile and not chankeyseis in myFile: thumbDict[sngl.ifo].append(myFile) #Search for corresponding Omega summary.txt file zValueDict[sngl.ifo]=list() for zFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*summary.txt"%(frametype,sngl.time)): for chan in wikiFileFinder.__readSummary__(zFile): if chankeyenv in chan[0] and not chankeyseis in chan[0]: zValueDict[sngl.ifo].append(chan) if len(zValueDict[sngl.ifo]) == 0: sys.stdout.write("Omega scan summary file not or empty for %s. ...continuing...\n"%sngl.ifo) #Select associated analyzeQscans timeString=str(float(sngl.time)).replace(".","_") for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if chankeyenv in myFile and not chankeyseis in myFile: imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if chankeyenv in myFile and not chankeyseis in myFile: thumbDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueDictAQ[sngl.ifo]=list() for zFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)): for chan in wikiFileFinder.__readSummary__(zFile): if chankeyenv in chan[0] and not chankeyseis in chan[0]: zValueDictAQ[sngl.ifo].append(chan) if len(zValueDictAQ[sngl.ifo]) == 0: sys.stdout.write("AnalyzeQscan summary file not or empty for %s. ...continuing...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(imageDict[sngl.ifo]) < 1: wikiPage.putText("PEM scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: PEM plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#7 Auxiliary degree of freedom") wikiPage.subsubsection("Question") wikiPage.putText("Were the auxiliary channel transients coincident with the candidate insignificant?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict,indexDict,thumbDict,zValueDict = dict(),dict(),dict(),dict() imageDictAQ,indexDictAQ,thumbDictAQ,zValueDictAQ = dict(),dict(),dict(),dict() #Select only AUX channels filesOmega=wikiFileFinder.get_RDS_R_L1() filesAnalyze=wikiFileFinder.get_analyzeQscan_RDS() for sngl in wikiCoinc.sngls: frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'rds') if not sngl.ifo in frametype: frametype = sngl.ifo + "_" + frametype if sngl.ifo == "V1": chankeyseis = "Em_SE" chankeyenv = "Em_" else: chankeyseis = "SEI" chankeyenv = "PEM" 
indexDict[sngl.ifo],imageDict[sngl.ifo],thumbDict[sngl.ifo],zValueDict[sngl.ifo]=list(),list(),list(),list() indexDictAQ[sngl.ifo],imageDictAQ[sngl.ifo],thumbDictAQ[sngl.ifo],zValueDictAQ[sngl.ifo]=list(),list(),list(),list() for myFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*html"%(frametype,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*_16.00_spectrogram_whitened.png"%\ (frametype,sngl.time)): if not chankeyenv in myFile or not chankeyseis in myFile: imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (frametype,sngl.time)): if not chankeyenv in myFile and not chankeyseis in myFile: thumbDict[sngl.ifo].append(myFile) zValueDict[sngl.ifo]=list() for zFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*summary.txt"%(frametype,sngl.time)): for chan in wikiFileFinder.__readSummary__(zFile): if not chankeyenv in chan[0] and not chankeyseis in chan[0]: zValueDict[sngl.ifo].append(chan) if len(zValueDict[sngl.ifo]) == 0: sys.stdout.write("Omega scan summary file not or empty for %s. ...continuing...\n"%sngl.ifo) #Select associated analyzeQscans timeString=str(float(sngl.time)).replace(".","_") for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if not chankeyenv in myFile or not chankeyseis in myFile: imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if not chankeyenv in myFile and not chankeyseis in myFile: thumbDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueDictAQ[sngl.ifo]=list() for zFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)): for chan in wikiFileFinder.__readSummary__(zFile): if not chankeyenv in chan[0] and not chankeyseis in chan[0]: zValueDictAQ[sngl.ifo].append(chan) if len(zValueDictAQ[sngl.ifo]) == 0: sys.stdout.write("AnalyzeQscan summary file not or empty for %s. 
...continuing...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Other scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: AUX plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#8 Electronic Log Book") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the comments posted by the sci-mons or the operators in the e-log?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiLinkLHOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"H1"), "Hanford eLog") wikiLinkLLOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"L1"), "Livingston eLog") wikiPage.putText("%s\n\n%s\n\n"%(wikiLinkLHOlog,wikiLinkLLOlog)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#9 Glitch Report") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the weekly glitch report?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") if int(wikiCoinc.time) >= endOfS5: wikiLinkGlitch=wikiPage.makeExternalLink( "https://www.lsc-group.phys.uwm.edu/twiki/bin/view/DetChar/GlitchStudies", "Glitch Reports for S6" ) else: wikiLinkGlitch=wikiPage.makeExternalLink( "http://www.lsc-group.phys.uwm.edu/glitch/investigations/s5index.html#shift", "Glitch Reports for S5" ) wikiPage.putText("%s\n"%(wikiLinkGlitch)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#10 Snr versus time") wikiPage.subsubsection("Question") wikiPage.putText("Is this trigger significant in a SNR versus time plot of all triggers in its analysis chunk?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#11 Parameters of the candidate") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate have a high likelihood of being a gravitational-wave according to its parameters?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Effective 
Distance Ratio Test\n") effDList=wikiFileFinder.get_effDRatio() if len(effDList) != 1: sys.stdout.write("Warning: Effective Distance Test import problem.\n") for myFile in effDList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#12 Snr and Chisq") wikiPage.subsubsection("Question") wikiPage.putText("Are the SNR and CHISQ time series consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") # #Put plots SNR and Chi sqr # indexList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*.html") thumbList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_snr-*thumb.png") thumbList.extend(fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_chisq-*thumb.png")) thumbList.sort() indexList=[file2URL.convert(x) for x in indexList] thumbList=[file2URL.convert(x) for x in thumbList] #Two thumb types possible "_thumb.png" or ".thumb.png" imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] ifoCount=len(wikiCoinc.sngls) rowLabel={"SNR":1,"CHISQ":2} rowCount=len(rowLabel) colCount=ifoCount if len(indexList) >= 1: snrTable=wikiPage.wikiTable(rowCount+1,colCount+1) for i,sngl in enumerate(wikiCoinc.sngls): myIndex="" for indexFile in indexList: if indexFile.__contains__("_pipe_%s_FOLLOWUP_"%sngl.ifo): myIndex=indexFile if myIndex=="": snrTable.data[0][i+1]=" %s "%sngl.ifo else: snrTable.data[0][i+1]=wikiPage.makeExternalLink(myIndex,sngl.ifo) for col,sngl in enumerate(wikiCoinc.sngls): for row,label in enumerate(rowLabel.keys()): snrTable.data[row+1][0]=label for k,image in enumerate(imageList): if (image.__contains__("_%s-"%label.lower()) \ and image.__contains__("pipe_%s_FOLLOWUP"%sngl.ifo)): snrTable.data[row+1][col+1]=" %s "%(wikiPage.linkedRemoteImage(thumbList[k],thumbList[k])) wikiPage.insertTable(snrTable) else: sys.stdout.write("Warning: SNR and CHISQ plots not found.\n") wikiPage.putText("SNR and CHISQ plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#13 Template bank veto") wikiPage.subsubsection("Question") wikiPage.putText("Is the bank veto value consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#14 Coherent studies") wikiPage.subsubsection("Question") wikiPage.putText("Are the triggers found in multiple interferometers coherent with each other?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") indexList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),"*.html") if len(indexList) >= 1: myIndex=file2URL.convert(indexList[0]) wikiPage.putText(wikiPage.makeExternalLink(myIndex,\ "%s Coherence Study Results"%(wikiCoinc.ifos))) thumbList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),\ "PLOT_CHIA_%s_snr-squared*thumb.png"%(wikiCoinc.time)) imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] 
rowCount=len(imageList) colCount=1 cohSnrTimeTable=wikiPage.wikiTable(rowCount+1,colCount) cohSnrTimeTable.data[0][0]="%s Coherent SNR Squared Times Series"%(wikiCoinc.ifos) for i,image in enumerate(imageList): cohSnrTimeTable.data[i+1][0]=wikiPage.linkedRemoteImage(image,thumbList[i]) wikiPage.insertTable(cohSnrTimeTable) else: sys.stdout.write("Warning: Coherent plotting jobs not found.\n") wikiPage.putText("Coherent Studies plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#15 Segmentation Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in segmentation?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#16 Calibration Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in calibration that are consistent with systematic uncertainties?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # | cabde339221bbc8a748cfedf8ed7be0711b1e7f9 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/cabde339221bbc8a748cfedf8ed7be0711b1e7f9/makeCheckListWiki.py |
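The prepareChecklist context in the rows above and below locates every data product by running fnmatch.filter over candidate file lists with glob patterns built from the frame type and the trigger GPS time. A minimal sketch of that discovery idiom; the file names, frame type, and time here are invented for illustration:

import fnmatch

files = [
    "scans/H1_RDS_R_L1/961176869/X_16.00_spectrogram_whitened.png",
    "scans/H1_RDS_R_L1/961176869/index.html",
    "scans/L1_RDS_R_L1/961176869/index.html",
]
frametype, gpstime = "H1_RDS_R_L1", 961176869

# same pattern shape as the checklist code: anchor on frametype and time
indexFiles = fnmatch.filter(files, "*/%s/%s/*index.html" % (frametype, gpstime))
imageFiles = fnmatch.filter(files, "*/%s/%s/*_16.00_spectrogram_whitened.png" % (frametype, gpstime))
print(indexFiles)   # ['scans/H1_RDS_R_L1/961176869/index.html']
print(imageFiles)   # ['scans/H1_RDS_R_L1/961176869/X_16.00_spectrogram_whitened.png']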
sys.stdout.write("Omega scan summary file not or empty for %s. ...continuing...\n"%sngl.ifo) | sys.stdout.write("Omega scan summary file not found or seen empty for %s. ...continuing...\n"%sngl.ifo) | def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.exists(wikiFilename) and maxCount < 15: sys.stdout.write("File %s already exists.\n"%\ os.path.split(wikiFilename)[1]) wikiFilename=wikiFilename+".wiki" maxCount=maxCount+1 sys.stdout.write("Available via browser for wiki upload at %s\n"\ %(file2URL.convert(wikiFilename))) # #Create the wikipage object etc # wikiPage=wiki(wikiFilename) # # Create top two trigger params tables # cTable=wikiPage.wikiTable(2,9) cTable.data=[ ["Trigger Type", "Rank", "FAR", "SNR", "IFOS(Coinc)", "Instruments(Active)", "Coincidence Time (s)", "Total Mass (mSol)", "Chirp Mass (mSol)" ], ["%s"%(wikiCoinc.type), "%s"%(wikiCoinc.rank), "%s"%(wikiCoinc.far), "%s"%(wikiCoinc.snr), "%s"%(wikiCoinc.ifos), "%s"%(wikiCoinc.instruments), "%s"%(wikiCoinc.time), "%s"%(wikiCoinc.mass), "%s"%(wikiCoinc.mchirp) ] ] pTable=wikiPage.wikiTable(len(wikiCoinc.sngls_in_coinc())+1,7) pTable.data[0]=[ "IFO", "GPS Time(s)", "SNR", "CHISQR", "Mass 1", "Mass 2", "Chirp Mass" ] for row,cSngl in enumerate(wikiCoinc.sngls_in_coinc()): pTable.data[row+1]=[ "%s"%(cSngl.ifo), "%s"%(cSngl.time), "%s"%(cSngl.snr), "%s"%(cSngl.chisqr), "%s"%(cSngl.mass1), "%s"%(cSngl.mass2), "%s"%(cSngl.mchirp) ] #Write the tables into the Wiki object wikiPage.putText("Coincident Trigger Event Information: %s\n"\ %(stfu_pipe.gpsTimeToReadableDate(wikiCoinc.time))) wikiPage.insertTable(cTable) wikiPage.putText("Corresponding Coincident Single IFO Trigger Information\n") wikiPage.insertTable(pTable) #Generate a table of contents to appear after candidate params table wikiPage.tableOfContents(3) #Begin including each checklist item as section with subsections wikiPage.section("Follow-up Checklist") #Put each checklist item wikiPage.subsection("Checklist Summary") wikiPage.subsubsection("Does this candidate pass this checklist?") wikiPage.subsubsection("Answer") wikiPage.subsubsection("Relevant Information and Comments") wikiPage.insertHR() # #First real checklist item wikiPage.subsection("#0 False Alarm Probability") wikiPage.subsubsection("Question") wikiPage.putText("What is the false alarm rate associated with this candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") farTable=wikiPage.wikiTable(2,1) farTable.setTableStyle("background-color: yellow; text-align center;") farTable.data[0][0]="False Alarm Rate" farTable.data[1][0]="%s"%(wikiCoinc.far) wikiPage.insertTable(farTable) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#1 Data Quality Flags") wikiPage.subsubsection("Question") wikiPage.putText("Can the data quality flags coincident with this candidate be safely disregarded?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPath=os.path.split(wikiFilename)[0] dqFileList=wikiFileFinder.get_findFlags() if len(dqFileList) != 1: sys.stdout.write("Warning: DQ flags data product import 
problem.\n") print "Found %i files."%len(dqFileList) for mf in dqFileList: print mf for myFile in dqFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#2 Veto Investigations") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate survive the veto investigations performed at its time?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") vetoFileList=wikiFileFinder.get_findVetos() if len(vetoFileList) != 1: sys.stdout.write("Warning: Veto flags data product import problem.\n") for myFile in vetoFileList:print myFile for myFile in vetoFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#3 IFO Status") wikiPage.subsubsection("Question") wikiPage.putText("Are the interferometers operating normally with a reasonable level of sensitivity around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") #Add link to Daily Stats if wikiCoinc.time <= endOfS5: statsLink=wikiPage.makeExternalLink("http://blue.ligo-wa.caltech.edu/scirun/S5/DailyStatistics/",\ "S5 Daily Stats Page") else: statsLink="This should be a link to S6 Daily Stats!\n" wikiPage.putText(statsLink) #Link figures of merit #Get link for all members of wikiCoinc wikiPage.putText("Figures of Merit\n") wikiPage.putText("UTC Time of trigger :%s"%(stfu_pipe.gpsTimeToReadableDate(wikiCoinc.time))) if wikiCoinc.time > endOfS5: fomLinks=dict() elems=0 for wikiSngl in wikiCoinc.sngls: if not(wikiSngl.ifo.upper().rstrip().lstrip() == 'V1'): fomLinks[wikiSngl.ifo]=stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo) elems=elems+len(fomLinks[wikiSngl.ifo]) else: for myLabel,myLink,myThumb in stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo): wikiPage.putText("%s\n"%(wikiPage.makeExternalLink(myLink,myLabel))) cols=4 rows=(elems/3)+1 fTable=wikiPage.wikiTable(rows,cols) fTable.data[0]=["IFO,Shift","FOM1","FOM2","FOM3"] currentIndex=0 for myIFOKey in fomLinks.keys(): for label,link,thumb in fomLinks[myIFOKey]: myRow=currentIndex/int(3)+1 myCol=currentIndex%int(3)+1 fTable.data[myRow][0]=label thumbURL=thumb fTable.data[myRow][myCol]="%s"%(wikiPage.linkedRemoteImage(thumb,link)) currentIndex=currentIndex+1 wikiPage.insertTable(fTable) else: wikiPage.putText("Can not automatically fetch S5 FOM links.") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#4 Candidate Appearance") wikiPage.subsubsection("Question") wikiPage.putText("Do the Qscan figures show what we would expect for a gravitational-wave event?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() for sngl in wikiCoinc.sngls: frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'hoft') if not sngl.ifo in frametype: frametype = sngl.ifo + "_" + frametype indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*/%s/*/%s/*index.html"%(frametype,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ 
"*%s*_%s_16.00_spectrogram_whitened.png"\ %(sngl.time,channelName)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened?thumb.png"\ %(sngl.time,channelName)) # #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("GW data channel scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >= 1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >= 1 if enoughImage and enoughIndex: wikiPage.insertQscanTable(imageDict,\ thumbDict,\ indexDict) else: sys.stdout.write("Warning: Candidate appearance plot import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#5 Seismic Plots") wikiPage.subsubsection("Question") wikiPage.putText("Is the seismic activity insignificant around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") # imageDict,indexDict,thumbDict,zValueDict = dict(),dict(),dict(),dict() imageDictAQ,indexDictAQ,thumbDictAQ,zValueDictAQ = dict(),dict(),dict(),dict() filesOmega=wikiFileFinder.get_RDS_R_L1_SEIS() filesAnalyze=wikiFileFinder.get_analyzeQscan_SEIS() for sngl in wikiCoinc.sngls_in_coinc(): indexDict[sngl.ifo],imageDict[sngl.ifo],thumbDict[sngl.ifo],zValueDict[sngl.ifo]=list(),list(),list(),list() indexDictAQ[sngl.ifo],imageDictAQ[sngl.ifo],thumbDictAQ[sngl.ifo],zValueDictAQ[sngl.ifo]=list(),list(),list(),list() frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'rds') if not sngl.ifo in frametype: frametype = sngl.ifo + "_" + frametype if sngl.ifo == "V1": chankey = "Em_SE" else: chankey = "SEI" indexDict[sngl.ifo]=fnmatch.filter(filesOmega,\ "*/%s_*/%s/*index.html"%(frametype,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(filesOmega,\ "*/%s_*/%s/*%s*_512.00_spectrogram_whitened.png"%\ (frametype,sngl.time,chankey)) thumbDict[sngl.ifo]=fnmatch.filter(filesOmega,\ "*/%s_*/%s/*%s*_512.00_spectrogram_whitened?thumb.png"%\ (frametype,sngl.time,chankey)) #Search for corresponding Omega summary.txt file zValueDict[sngl.ifo]=list() for zFile in fnmatch.filter(filesOmega,\ "*/%s_*/%s/*summary.txt"%(frametype,sngl.time)): for chan in wikiFileFinder.__readSummary__(zFile): if chankey in chan[0]: zValueDict[sngl.ifo].append(chan) if len(zValueDict[sngl.ifo]) == 0: sys.stdout.write("Omega scan summary file not or empty for %s. 
...continuing...\n"%sngl.ifo) #Search for analyzeQscan files timeString=str(float(sngl.time)).replace(".","_") indexDictAQ[sngl.ifo]=fnmatch.filter(filesAnalyze,\ "*_%s_%s_*.html"%(sngl.ifo,timeString)) imageDictAQ[sngl.ifo]=fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_%s*_z_scat-unspecified-gpstime.png"\ %(sngl.ifo,timeString,chankey)) thumbDictAQ[sngl.ifo]=fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_%s*_z_scat-unspecified-gpstime_thumb.png"\ %(sngl.ifo,timeString,chankey)) #Load of analyzeQscan z file if available zValueDictAQ[sngl.ifo]=list() for zFile in fnmatch.filter(filesAnalyze,\ "*_%s_%s_*.txt"%(sngl.ifo,timeString)): for chan in wikiFileFinder.__readSummary__(zFile): if chankey in chan[0]: zValueDictAQ[sngl.ifo].append(chan) if len(zValueDictAQ[sngl.ifo]) == 0: sys.stdout.write("AnalyzeQscan summary file not or empty for %s. ...continuing...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Seismic scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: Seismic plots product import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#6 Other environmental causes") wikiPage.subsubsection("Question") wikiPage.putText("Were the environmental disturbances (other than seismic) insignificant at the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict,indexDict,thumbDict,zValueDict = dict(),dict(),dict(),dict() imageDictAQ,indexDictAQ,thumbDictAQ,zValueDictAQ = dict(),dict(),dict(),dict() #Select only PEM channels filesOmega=wikiFileFinder.get_RDS_R_L1() filesAnalyze=wikiFileFinder.get_analyzeQscan_RDS() for sngl in wikiCoinc.sngls_in_coinc(): indexDict[sngl.ifo],imageDict[sngl.ifo],thumbDict[sngl.ifo],zValueDict[sngl.ifo]=list(),list(),list(),list() indexDictAQ[sngl.ifo],imageDictAQ[sngl.ifo],thumbDictAQ[sngl.ifo],zValueDictAQ[sngl.ifo]=list(),list(),list(),list() frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'rds') if not sngl.ifo in frametype: frametype = sngl.ifo + "_" + frametype if sngl.ifo == "V1": chankeyseis = "Em_SE" chankeyenv = "Em_" else: chankeyseis = "SEI" chankeyenv = "PEM" for myFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*html"%(frametype,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*_16.00_spectrogram_whitened.png"%\ (frametype,sngl.time)): if chankeyenv in myFile and not chankeyseis in myFile: imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ 
(frametype,sngl.time)): if chankeyenv in myFile and not chankeyseis in myFile: thumbDict[sngl.ifo].append(myFile) #Search for corresponding Omega summary.txt file zValueDict[sngl.ifo]=list() for zFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*summary.txt"%(frametype,sngl.time)): for chan in wikiFileFinder.__readSummary__(zFile): if chankeyenv in chan[0] and not chankeyseis in chan[0]: zValueDict[sngl.ifo].append(chan) if len(zValueDict[sngl.ifo]) == 0: sys.stdout.write("Omega scan summary file not or empty for %s. ...continuing...\n"%sngl.ifo) #Select associated analyzeQscans timeString=str(float(sngl.time)).replace(".","_") for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if chankeyenv in myFile and not chankeyseis in myFile: imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if chankeyenv in myFile and not chankeyseis in myFile: thumbDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueDictAQ[sngl.ifo]=list() for zFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)): for chan in wikiFileFinder.__readSummary__(zFile): if chankeyenv in chan[0] and not chankeyseis in chan[0]: zValueDictAQ[sngl.ifo].append(chan) if len(zValueDictAQ[sngl.ifo]) == 0: sys.stdout.write("AnalyzeQscan summary file not or empty for %s. ...continuing...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(imageDict[sngl.ifo]) < 1: wikiPage.putText("PEM scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: PEM plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#7 Auxiliary degree of freedom") wikiPage.subsubsection("Question") wikiPage.putText("Were the auxiliary channel transients coincident with the candidate insignificant?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict,indexDict,thumbDict,zValueDict = dict(),dict(),dict(),dict() imageDictAQ,indexDictAQ,thumbDictAQ,zValueDictAQ = dict(),dict(),dict(),dict() #Select only AUX channels filesOmega=wikiFileFinder.get_RDS_R_L1() filesAnalyze=wikiFileFinder.get_analyzeQscan_RDS() for sngl in wikiCoinc.sngls: frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'rds') if not sngl.ifo in frametype: frametype = sngl.ifo + "_" + frametype if sngl.ifo == "V1": chankeyseis = "Em_SE" chankeyenv = "Em_" else: chankeyseis = "SEI" chankeyenv = "PEM" 
indexDict[sngl.ifo],imageDict[sngl.ifo],thumbDict[sngl.ifo],zValueDict[sngl.ifo]=list(),list(),list(),list() indexDictAQ[sngl.ifo],imageDictAQ[sngl.ifo],thumbDictAQ[sngl.ifo],zValueDictAQ[sngl.ifo]=list(),list(),list(),list() for myFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*html"%(frametype,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*_16.00_spectrogram_whitened.png"%\ (frametype,sngl.time)): if not chankeyenv in myFile or not chankeyseis in myFile: imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (frametype,sngl.time)): if not chankeyenv in myFile and not chankeyseis in myFile: thumbDict[sngl.ifo].append(myFile) zValueDict[sngl.ifo]=list() for zFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*summary.txt"%(frametype,sngl.time)): for chan in wikiFileFinder.__readSummary__(zFile): if not chankeyenv in chan[0] and not chankeyseis in chan[0]: zValueDict[sngl.ifo].append(chan) if len(zValueDict[sngl.ifo]) == 0: sys.stdout.write("Omega scan summary file not or empty for %s. ...continuing...\n"%sngl.ifo) #Select associated analyzeQscans timeString=str(float(sngl.time)).replace(".","_") for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if not chankeyenv in myFile or not chankeyseis in myFile: imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if not chankeyenv in myFile and not chankeyseis in myFile: thumbDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueDictAQ[sngl.ifo]=list() for zFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)): for chan in wikiFileFinder.__readSummary__(zFile): if not chankeyenv in chan[0] and not chankeyseis in chan[0]: zValueDictAQ[sngl.ifo].append(chan) if len(zValueDictAQ[sngl.ifo]) == 0: sys.stdout.write("AnalyzeQscan summary file not or empty for %s. 
...continuing...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Other scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: AUX plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#8 Electronic Log Book") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the comments posted by the sci-mons or the operators in the e-log?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiLinkLHOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"H1"), "Hanford eLog") wikiLinkLLOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"L1"), "Livingston eLog") wikiPage.putText("%s\n\n%s\n\n"%(wikiLinkLHOlog,wikiLinkLLOlog)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#9 Glitch Report") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the weekly glitch report?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") if int(wikiCoinc.time) >= endOfS5: wikiLinkGlitch=wikiPage.makeExternalLink( "https://www.lsc-group.phys.uwm.edu/twiki/bin/view/DetChar/GlitchStudies", "Glitch Reports for S6" ) else: wikiLinkGlitch=wikiPage.makeExternalLink( "http://www.lsc-group.phys.uwm.edu/glitch/investigations/s5index.html#shift", "Glitch Reports for S5" ) wikiPage.putText("%s\n"%(wikiLinkGlitch)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#10 Snr versus time") wikiPage.subsubsection("Question") wikiPage.putText("Is this trigger significant in a SNR versus time plot of all triggers in its analysis chunk?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#11 Parameters of the candidate") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate have a high likelihood of being a gravitational-wave according to its parameters?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Effective 
Distance Ratio Test\n") effDList=wikiFileFinder.get_effDRatio() if len(effDList) != 1: sys.stdout.write("Warning: Effective Distance Test import problem.\n") for myFile in effDList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#12 Snr and Chisq") wikiPage.subsubsection("Question") wikiPage.putText("Are the SNR and CHISQ time series consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") # #Put plots SNR and Chi sqr # indexList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*.html") thumbList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_snr-*thumb.png") thumbList.extend(fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_chisq-*thumb.png")) thumbList.sort() indexList=[file2URL.convert(x) for x in indexList] thumbList=[file2URL.convert(x) for x in thumbList] #Two thumb types possible "_thumb.png" or ".thumb.png" imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] ifoCount=len(wikiCoinc.sngls) rowLabel={"SNR":1,"CHISQ":2} rowCount=len(rowLabel) colCount=ifoCount if len(indexList) >= 1: snrTable=wikiPage.wikiTable(rowCount+1,colCount+1) for i,sngl in enumerate(wikiCoinc.sngls): myIndex="" for indexFile in indexList: if indexFile.__contains__("_pipe_%s_FOLLOWUP_"%sngl.ifo): myIndex=indexFile if myIndex=="": snrTable.data[0][i+1]=" %s "%sngl.ifo else: snrTable.data[0][i+1]=wikiPage.makeExternalLink(myIndex,sngl.ifo) for col,sngl in enumerate(wikiCoinc.sngls): for row,label in enumerate(rowLabel.keys()): snrTable.data[row+1][0]=label for k,image in enumerate(imageList): if (image.__contains__("_%s-"%label.lower()) \ and image.__contains__("pipe_%s_FOLLOWUP"%sngl.ifo)): snrTable.data[row+1][col+1]=" %s "%(wikiPage.linkedRemoteImage(thumbList[k],thumbList[k])) wikiPage.insertTable(snrTable) else: sys.stdout.write("Warning: SNR and CHISQ plots not found.\n") wikiPage.putText("SNR and CHISQ plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#13 Template bank veto") wikiPage.subsubsection("Question") wikiPage.putText("Is the bank veto value consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#14 Coherent studies") wikiPage.subsubsection("Question") wikiPage.putText("Are the triggers found in multiple interferometers coherent with each other?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") indexList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),"*.html") if len(indexList) >= 1: myIndex=file2URL.convert(indexList[0]) wikiPage.putText(wikiPage.makeExternalLink(myIndex,\ "%s Coherence Study Results"%(wikiCoinc.ifos))) thumbList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),\ "PLOT_CHIA_%s_snr-squared*thumb.png"%(wikiCoinc.time)) imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] 
rowCount=len(imageList) colCount=1 cohSnrTimeTable=wikiPage.wikiTable(rowCount+1,colCount) cohSnrTimeTable.data[0][0]="%s Coherent SNR Squared Times Series"%(wikiCoinc.ifos) for i,image in enumerate(imageList): cohSnrTimeTable.data[i+1][0]=wikiPage.linkedRemoteImage(image,thumbList[i]) wikiPage.insertTable(cohSnrTimeTable) else: sys.stdout.write("Warning: Coherent plotting jobs not found.\n") wikiPage.putText("Coherent Studies plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#15 Segmentation Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in segmentation?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#16 Calibration Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in calibration that are consistent with systematic uncertainties?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # | cabde339221bbc8a748cfedf8ed7be0711b1e7f9 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/cabde339221bbc8a748cfedf8ed7be0711b1e7f9/makeCheckListWiki.py |
pud=os.path.abspath(publication_directory) minFileCount=1 for key,fileList in allSources.items(): if len(fileList) > minFileCount: commonPath=os.path.commonprefix(fileList) for singleFile in fileList: if not singleFile.__contains__(pud): myDestFile=singleFile.replace(commonPath,myDestPath+"/") if not os.path.exists(os.path.split(myDestFile)[0]): os.makedirs(os.path.split(myDestFile)[0]) shutil.copy2(singleFile,myDestFile) else: sys.stdout.write("Warning: Scanning (%s) found %s files.\n"%\ (key,len(fileList))) | def __init__(self,type=None,ifo=None,time=None,snr=None,chisqr=None,mass1=None,mass2=None,mchirp=None): """ """ self.type=str(type) self.ifo=str(ifo) self.time=float(time) self.snr=float(snr) self.chisqr=float(chisqr) self.mass1=float(mass1) self.mass2=float(mass2) self.mchirp=float(mchirp) | cabde339221bbc8a748cfedf8ed7be0711b1e7f9 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/cabde339221bbc8a748cfedf8ed7be0711b1e7f9/makeCheckListWiki.py |
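The block above publishes discovered files by stripping their common path prefix and re-rooting them under a destination directory. A self-contained sketch of that idiom with invented paths; note that os.path.commonprefix is purely character-based, so the real code relies on its inputs sharing whole directory components:

import os, shutil

# set up two hypothetical source files so the sketch runs end to end
for p in ["/tmp/run1/scan/a.png", "/tmp/run1/scan/sub/b.png"]:
    d = os.path.split(p)[0]
    if not os.path.exists(d):
        os.makedirs(d)
    open(p, "w").close()

fileList = ["/tmp/run1/scan/a.png", "/tmp/run1/scan/sub/b.png"]
myDestPath = "/tmp/publish"
commonPath = os.path.commonprefix(fileList)       # "/tmp/run1/scan/"
for singleFile in fileList:
    myDestFile = singleFile.replace(commonPath, myDestPath + "/")
    destDir = os.path.split(myDestFile)[0]
    if not os.path.exists(destDir):
        os.makedirs(destDir)
    shutil.copy2(singleFile, myDestFile)          # copy preserving metadata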
if re.search( table_name+r'[.]', table_param ) is None: | if table_param.find( table_name+'.' ) == -1: | def __init__( self, table_name, table_param, param_ranges_opt, verbose = False ): """ Parse --param-ranges option. Creates self.param which is the table_name and the table_param appended together (with a '.') and self.param_ranges, which is a list of tuples that give the lower parameter value, whether it is an open or closed boundary, and the same for the upper parameter. For example, if table_name is coinc_inspiral, table_param is mchirp and param_ranges_opt is '[2,8);[8,17]' will get: self.param = 'coinc_inspiral.mchirp' self.param_ranges = [ ( ('>=',2.0), ('<',8.0) ), ( ('>=',8.0), ('<=', 17.0) ) ] | 4c49c1fb5437a05aff3cd51768415cbec048e68a /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/4c49c1fb5437a05aff3cd51768415cbec048e68a/ligolw_sqlutils.py |
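This row swaps a regex membership test for a literal substring test when checking that table_param is qualified by table_name. A sketch contrasting the two forms, using the coinc_inspiral example from the method's docstring; both agree here, but find() avoids interpreting any regex metacharacters that might appear in a table name:

import re

table_name = "coinc_inspiral"
table_param = "coinc_inspiral.mchirp"

# removed form: regex search; r'[.]' keeps the dot literal, but any
# metacharacters inside table_name itself would still be interpreted
missing_re = re.search(table_name + r'[.]', table_param) is None

# adopted form: literal substring test, no escaping to worry about
missing_find = table_param.find(table_name + '.') == -1

assert not missing_re and not missing_find   # both tests pass here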
def get_new_snr(self, index=6.0): # the kwarg 'index' is to be assigned to the parameter chisq_index occurring in the .ini files etc # the parameter nhigh gives the asymptotic behaviour d (ln chisq) / d (ln rho) at large rho # nhigh=2 means chisq~rho^2 along contours of new_snr as expected from the behaviour of mismatched templates snr = self.get_column('snr') chisq = self.get_column('chisq') chisq_dof = self.get_column('chisq_dof') rchisq = chisq/ (2*chisq_dof - 2) nhigh = 2. if rchisq > 1.: return snr/ ((1+rchisq**(index/nhigh))/2)**(1./index) else: return snr | a54b8790e6284abfa3f4d5dd75911d7bd58ea9a8 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/a54b8790e6284abfa3f4d5dd75911d7bd58ea9a8/lsctables.py |
chisq = self.get_column('chisq') chisq_dof = self.get_column('chisq_dof') rchisq = chisq/ (2*chisq_dof - 2) | rchisq = self.get_column('chisq')/(2*self.get_column('chisq_dof') - 2) | def get_new_snr(self, index=6.0): # the kwarg 'index' is to be assigned to the parameter chisq_index occurring in the .ini files etc # the parameter nhigh gives the asymptotic behaviour d (ln chisq) / d (ln rho) at large rho # nhigh=2 means chisq~rho^2 along contours of new_snr as expected from the behaviour of mismatched templates snr = self.get_column('snr') chisq = self.get_column('chisq') chisq_dof = self.get_column('chisq_dof') rchisq = chisq/ (2*chisq_dof - 2) nhigh = 2. if rchisq > 1.: return snr/ ((1+rchisq**(index/nhigh))/2)**(1./index) else: return snr | a54b8790e6284abfa3f4d5dd75911d7bd58ea9a8 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/a54b8790e6284abfa3f4d5dd75911d7bd58ea9a8/lsctables.py |
if rchisq > 1.: return snr/ ((1+rchisq**(index/nhigh))/2)**(1./index) else: return snr | newsnr = snr/ (0.5*(1+rchisq**(index/nhigh)))**(1./index) numpy.putmask(newsnr, rchisq < 1, snr) return newsnr | def get_new_snr(self, index=6.0): # the kwarg 'index' is to be assigned to the parameter chisq_index occurring in the .ini files etc # the parameter nhigh gives the asymptotic behaviour d (ln chisq) / d (ln rho) at large rho # nhigh=2 means chisq~rho^2 along contours of new_snr as expected from the behaviour of mismatched templates snr = self.get_column('snr') chisq = self.get_column('chisq') chisq_dof = self.get_column('chisq_dof') rchisq = chisq/ (2*chisq_dof - 2) nhigh = 2. if rchisq > 1.: return snr/ ((1+rchisq**(index/nhigh))/2)**(1./index) else: return snr | a54b8790e6284abfa3f4d5dd75911d7bd58ea9a8 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/a54b8790e6284abfa3f4d5dd75911d7bd58ea9a8/lsctables.py |
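Taken together, the two rows above vectorize get_new_snr: comparing a numpy array against 1 with a bare if is ill-defined, so the fix computes the weighted SNR for every trigger and then uses numpy.putmask to reset it to the raw SNR wherever the reduced chi-squared is below 1. A self-contained numeric check of that formula with made-up trigger values (index=6, nhigh=2):

import numpy

snr = numpy.array([10.0, 10.0, 10.0])
rchisq = numpy.array([0.5, 1.0, 4.0])    # reduced chi-squared per trigger
index, nhigh = 6.0, 2.0

newsnr = snr / (0.5 * (1 + rchisq**(index/nhigh)))**(1./index)
numpy.putmask(newsnr, rchisq < 1, snr)   # below rchisq = 1, new SNR == raw SNR
print(newsnr)                            # [ 10.  10.  ~5.6 ]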
""" | """ | def skyhist_cart(skycarts,samples): """ Histogram the list of samples into bins defined by Cartesian vectors in skycarts """ dot=numpy.dot N=len(skycarts) print 'operating on %d sky points'%(N) bins=zeros(N) for sample in samples: sampcart=pol2cart(sample[RAdim],sample[decdim]) maxdx=max(xrange(0,N),key=lambda i:dot(sampcart,skycarts[i])) bins[maxdx]+=1 return (skycarts,bins) | 48d35325f3b7aee9002aa13f84bee3790743b777 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/48d35325f3b7aee9002aa13f84bee3790743b777/cbcBayesSkyRes.py |
raise RuntimeError, "failed to lock %s: %s" % (pidfile_path, e) | raise RuntimeError, "failed to lock %s: %s" % (lockfile, e) | def get_lock(lockfile): """ Tries to write a lockfile containing the current pid. Excepts if the lockfile already contains the pid of a running process. Although this should prevent a lock from being granted twice, it can theoretically deny a lock unjustly in the unlikely event that the original process is gone but another unrelated process has been assigned the same pid by the OS. """ pidfile = open(lockfile, "a+") # here we do some meta-locking by getting an exclusive lock on the # pidfile before reading it, to prevent two daemons from seeing a # stale lock at the same time, and both trying to run try: fcntl.flock(pidfile.fileno(), fcntl.LOCK_EX|fcntl.LOCK_NB) except IOError,e: raise RuntimeError, "failed to lock %s: %s" % (pidfile_path, e) # we got the file lock, so check the pid therein pidfile.seek(0) pidfile_pid = pidfile.readline().strip() if pidfile_pid.isdigit() and pid_exists(int(pidfile_pid)): raise RuntimeError, ("pidfile %s contains pid (%s) of a running " "process" % (lockfile, pidfile_pid)) else: print ("pidfile %s contains stale pid %s; writing new lock" % (lockfile, pidfile_pid)) # the pidfile didn't exist or was stale, so grab a new lock pidfile.truncate(0) pidfile.write("%d\n" % os.getpid()) pidfile.close() # should be entirely unecessary, but paranoia always served me well confirm_lock(lockfile) return True | 4ecf85274ebfa7e11e4cbe79203e3afac8585452 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/4ecf85274ebfa7e11e4cbe79203e3afac8585452/pidfile.py |
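The row above fixes a NameError in get_lock(): the exception handler referenced pidfile_path, which does not exist in that scope, instead of the lockfile argument. A minimal sketch of the non-blocking exclusive flock idiom it wraps, with a hypothetical lock path (Python 2 syntax, matching the source):

import fcntl

lockfile = "/tmp/example.pid"   # hypothetical path
pidfile = open(lockfile, "a+")
try:
    fcntl.flock(pidfile.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB)
except IOError, e:
    # report the path actually tried -- the variable the fix switches to
    raise RuntimeError, "failed to lock %s: %s" % (lockfile, e)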
if (int(seg[0]) - seg[0]) / seg[0] > 1e-15 or (int(seg[1]) - seg[1]) / seg[1] > 1e-15: | if abs((int(seg[0]) - seg[0]) / seg[0]) > 1e-14 or abs((int(seg[1]) - seg[1]) / seg[1]) > 1e-14: | def clip_segment(seg, pad, short_segment_duration): # clip segment to the length required by lalapps_StringSearch. if # # duration = segment length - padding # # then # # duration / short_segment_duration - 0.5 # # must be an odd integer, therefore # # 2 * duration + short_segment_duration # # must be divisble by (4 * short_segment_duration) duration = float(abs(seg)) - 2 * pad extra = (2 * duration + short_segment_duration) % (4 * short_segment_duration) extra /= 2 # clip segment seg = segments.segment(seg[0], seg[1] - extra) # bounds must be integers if (int(seg[0]) - seg[0]) / seg[0] > 1e-15 or (int(seg[1]) - seg[1]) / seg[1] > 1e-15: raise ValueError, "segment %s does not have integer boundaries" % str(seg) seg = segments.segment(int(seg[0]), int(seg[1])) # done return seg | 7e35b094d9f9357945414d1e531da585725c350d /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/7e35b094d9f9357945414d1e531da585725c350d/cosmicstring.py |
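The row above repairs a dead check in clip_segment: for positive GPS times int() truncates downward, so (int(x) - x)/x is never positive and the old '> 1e-15' comparison could never fire; wrapping it in abs() (and loosening the tolerance to 1e-14) makes the integer-boundary test effective. Illustrative values:

x = 971622015.3             # a non-integer segment boundary
rel = (int(x) - x) / x      # always <= 0 for positive x
print(rel > 1e-15)          # False: the old test never fires
print(abs(rel) > 1e-14)     # True: the corrected test catches it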
seps=map(lambda s: ang_dist(sample[6],sample[7],s[1],s[0]),skypoints) | seps=map(lambda s: ang_dist(sample[5],sample[6],s[1],s[0]),skypoints) | def sky_hist(skypoints,samples): N=len(skypoints) print 'operating on %d sky points' % (N) bins=zeros(N) j=0 for sample in samples: seps=map(lambda s: ang_dist(sample[6],sample[7],s[1],s[0]),skypoints) minsep=math.pi for i in range(0,N): if seps[i]<minsep: minsep=seps[i] mindx=i bins[mindx]=bins[mindx]+1 j=j+1 print 'Done %d/%d iterations, minsep=%f degrees'%(j,len(samples),minsep*(180.0/3.1415926)) return (skypoints,bins) | 73c84ed284fcb153278e3e08bd057453c3c5cdc8 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/73c84ed284fcb153278e3e08bd057453c3c5cdc8/OddsPostProc.py |
sampcart=pol2cart(sample[6],sample[7]) | sampcart=pol2cart(sample[5],sample[6]) | def skyhist_cart(skycarts,samples): N=len(skypoints) print 'operating on %d sky points'%(N) bins=zeros(N) j=0 for sample in samples: sampcart=pol2cart(sample[6],sample[7]) dots=map(lambda s: numpy.dot(sampcart,s),skycarts) maxdot=0 for i in range(0,N): if dots[i]>maxdot: maxdot=dots[i] mindx=i bins[mindx]=bins[mindx]+1 j=j+1 # print 'Done %d/%d iterations, minsep=%f degrees'%(j,len(samples),math.acos(maxdot)*(180.0/3.14159)) return (skypoints,bins) | 73c84ed284fcb153278e3e08bd057453c3c5cdc8 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/73c84ed284fcb153278e3e08bd057453c3c5cdc8/OddsPostProc.py |
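This row and the previous one correct off-by-one column indices: RA and dec live in columns 5 and 6 of the sample array, not 6 and 7. A hedged sketch of the underlying dot-product sky binning, with the inner loop over sky points collapsed to a numpy argmax; the sky grid and samples are invented:

import math, numpy

def pol2cart(ra, dec):
    # unit vector for right ascension / declination in radians
    return numpy.array([math.cos(dec)*math.cos(ra),
                        math.cos(dec)*math.sin(ra),
                        math.sin(dec)])

skycarts = numpy.array([pol2cart(ra, 0.0) for ra in (0.0, math.pi/2, math.pi)])
samples = [(0.1, 0.05), (1.6, -0.02)]   # (RA, dec) pairs, radians

bins = numpy.zeros(len(skycarts))
for ra, dec in samples:
    # each sample lands in the bin of the nearest sky point
    bins[numpy.argmax(numpy.dot(skycarts, pol2cart(ra, dec)))] += 1
print(bins)   # [ 1.  1.  0.]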
plot(getinjpar(injection,0),getinjpar(injection,1),'go') | plot(getinjpar(injection,0),getinjpar(injection,1),'go',scalex=False,scaley=False) | def plot2Dkernel(xdat,ydat,Nx,Ny): xax=linspace(min(xdat),max(xdat),Nx) yax=linspace(min(ydat),max(ydat),Ny) x,y=numpy.meshgrid(xax,yax) samp=array([xdat,ydat]) kde=stats.kde.gaussian_kde(samp) grid_coords = numpy.append(x.reshape(-1,1),y.reshape(-1,1),axis=1) z = kde(grid_coords.T) z = z.reshape(Nx,Ny) asp=xax.ptp()/yax.ptp() | 73c84ed284fcb153278e3e08bd057453c3c5cdc8 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/73c84ed284fcb153278e3e08bd057453c3c5cdc8/OddsPostProc.py |
plot(getinjpar(injection,5),getinjpar(injection,6),'go') | plot(getinjpar(injection,5),getinjpar(injection,6),'go',scalex=False,scaley=False) | def plot2Dkernel(xdat,ydat,Nx,Ny): xax=linspace(min(xdat),max(xdat),Nx) yax=linspace(min(ydat),max(ydat),Ny) x,y=numpy.meshgrid(xax,yax) samp=array([xdat,ydat]) kde=stats.kde.gaussian_kde(samp) grid_coords = numpy.append(x.reshape(-1,1),y.reshape(-1,1),axis=1) z = kde(grid_coords.T) z = z.reshape(Nx,Ny) asp=xax.ptp()/yax.ptp() | 73c84ed284fcb153278e3e08bd057453c3c5cdc8 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/73c84ed284fcb153278e3e08bd057453c3c5cdc8/OddsPostProc.py |
if injection and getinjpar(injection,7)<max(pos[:,7]) and getinjpar(injection,7)>min(pos[:,7]) and getinjpar(injection,8)<max(pos[:,8]) and getinjpar(injection,8)>min(pos[:,8]): plot(getinjpar(injection,7),getinjpar(injection,8),'go') | if injection and getinjpar(injection,7)<max(pos[:,7]) and getinjpar(injection,7)>min(pos[:,7]) and getinjpar(injection,8)<max(pos[:,8]) and getinjpar(injection,8)>min(pos[:,8]): plot(getinjpar(injection,7),getinjpar(injection,8),'go',scalex=False,scaley=False) | def plot2Dkernel(xdat,ydat,Nx,Ny): xax=linspace(min(xdat),max(xdat),Nx) yax=linspace(min(ydat),max(ydat),Ny) x,y=numpy.meshgrid(xax,yax) samp=array([xdat,ydat]) kde=stats.kde.gaussian_kde(samp) grid_coords = numpy.append(x.reshape(-1,1),y.reshape(-1,1),axis=1) z = kde(grid_coords.T) z = z.reshape(Nx,Ny) asp=xax.ptp()/yax.ptp() | 73c84ed284fcb153278e3e08bd057453c3c5cdc8 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/73c84ed284fcb153278e3e08bd057453c3c5cdc8/OddsPostProc.py |
plot(injection.mass1,injection.mass2,'go') | plot(injection.mass1,injection.mass2,'go',scalex=False,scaley=False) | def plot2Dkernel(xdat,ydat,Nx,Ny): xax=linspace(min(xdat),max(xdat),Nx) yax=linspace(min(ydat),max(ydat),Ny) x,y=numpy.meshgrid(xax,yax) samp=array([xdat,ydat]) kde=stats.kde.gaussian_kde(samp) grid_coords = numpy.append(x.reshape(-1,1),y.reshape(-1,1),axis=1) z = kde(grid_coords.T) z = z.reshape(Nx,Ny) asp=xax.ptp()/yax.ptp() | 73c84ed284fcb153278e3e08bd057453c3c5cdc8 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/73c84ed284fcb153278e3e08bd057453c3c5cdc8/OddsPostProc.py |
plot(injection.mass1,injection.distance,'go') | plot(injection.mass1,injection.distance,'go',scalex=False,scaley=False) | def plot2Dkernel(xdat,ydat,Nx,Ny): xax=linspace(min(xdat),max(xdat),Nx) yax=linspace(min(ydat),max(ydat),Ny) x,y=numpy.meshgrid(xax,yax) samp=array([xdat,ydat]) kde=stats.kde.gaussian_kde(samp) grid_coords = numpy.append(x.reshape(-1,1),y.reshape(-1,1),axis=1) z = kde(grid_coords.T) z = z.reshape(Nx,Ny) asp=xax.ptp()/yax.ptp() | 73c84ed284fcb153278e3e08bd057453c3c5cdc8 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/73c84ed284fcb153278e3e08bd057453c3c5cdc8/OddsPostProc.py |
plot(getinjpar(injection,4),getinjpar(injection,8),'go') | plot(getinjpar(injection,4),getinjpar(injection,8),'go',scalex=False,scaley=False) | def plot2Dkernel(xdat,ydat,Nx,Ny): xax=linspace(min(xdat),max(xdat),Nx) yax=linspace(min(ydat),max(ydat),Ny) x,y=numpy.meshgrid(xax,yax) samp=array([xdat,ydat]) kde=stats.kde.gaussian_kde(samp) grid_coords = numpy.append(x.reshape(-1,1),y.reshape(-1,1),axis=1) z = kde(grid_coords.T) z = z.reshape(Nx,Ny) asp=xax.ptp()/yax.ptp() | 73c84ed284fcb153278e3e08bd057453c3c5cdc8 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/73c84ed284fcb153278e3e08bd057453c3c5cdc8/OddsPostProc.py |
plot(getinjpar(injection,i),getinjpar(injection,j),'go') | plot(getinjpar(injection,i),getinjpar(injection,j),'go',scalex=False,scaley=False) | def plot2Dkernel(xdat,ydat,Nx,Ny): xax=linspace(min(xdat),max(xdat),Nx) yax=linspace(min(ydat),max(ydat),Ny) x,y=numpy.meshgrid(xax,yax) samp=array([xdat,ydat]) kde=stats.kde.gaussian_kde(samp) grid_coords = numpy.append(x.reshape(-1,1),y.reshape(-1,1),axis=1) z = kde(grid_coords.T) z = z.reshape(Nx,Ny) asp=xax.ptp()/yax.ptp() | 73c84ed284fcb153278e3e08bd057453c3c5cdc8 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/73c84ed284fcb153278e3e08bd057453c3c5cdc8/OddsPostProc.py |
plot([getinjpar(injection,i),getinjpar(injection,i)],[0,max(kdepdf)],'r-.') | plot([getinjpar(injection,i),getinjpar(injection,i)],[0,max(kdepdf)],'r-.',scalex=False,scaley=False) | def plot2Dkernel(xdat,ydat,Nx,Ny): xax=linspace(min(xdat),max(xdat),Nx) yax=linspace(min(ydat),max(ydat),Ny) x,y=numpy.meshgrid(xax,yax) samp=array([xdat,ydat]) kde=stats.kde.gaussian_kde(samp) grid_coords = numpy.append(x.reshape(-1,1),y.reshape(-1,1),axis=1) z = kde(grid_coords.T) z = z.reshape(Nx,Ny) asp=xax.ptp()/yax.ptp() | 73c84ed284fcb153278e3e08bd057453c3c5cdc8 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/73c84ed284fcb153278e3e08bd057453c3c5cdc8/OddsPostProc.py |
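Note on the run of plot fixes immediately above: each appends scalex=False, scaley=False so that overplotting an injection marker cannot retrigger matplotlib's autoscaling and move the axis limits already chosen for the posterior. A minimal sketch of the pattern with made-up data (not the original plotting code):

    import matplotlib
    matplotlib.use('Agg')  # render without a display, as batch scripts do
    from matplotlib import pyplot
    import numpy

    posterior = numpy.random.randn(1000)   # stand-in posterior samples
    injected = 0.3                         # stand-in injected parameter value

    fig = pyplot.figure()
    pyplot.hist(posterior, bins=50)
    # With scalex/scaley=False the marker is drawn without expanding the
    # axis limits that the histogram established.
    pyplot.plot([injected], [0], 'go', scalex=False, scaley=False)
    fig.savefig('param.png')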
htmlfile.write('<img src="'+paramnames[i]+'.png">') | myfig=figure(figsize=(4,3.5),dpi=80) plot(pos[:,i],'.') if injection and min(pos[:,i])<getinjpar(injection,i) and max(pos[:,i])>getinjpar(injection,i): plot([0,len(pos)],[getinjpar(injection,i),getinjpar(injection,i)],'r-.') myfig.savefig(outdir+'/'+paramnames[i]+'_samps.png') htmlfile.write('<img src="'+paramnames[i]+'.png"><img src="'+paramnames[i]+'_samps.png><br>') | def plot2Dkernel(xdat,ydat,Nx,Ny): xax=linspace(min(xdat),max(xdat),Nx) yax=linspace(min(ydat),max(ydat),Ny) x,y=numpy.meshgrid(xax,yax) samp=array([xdat,ydat]) kde=stats.kde.gaussian_kde(samp) grid_coords = numpy.append(x.reshape(-1,1),y.reshape(-1,1),axis=1) z = kde(grid_coords.T) z = z.reshape(Nx,Ny) asp=xax.ptp()/yax.ptp() | 73c84ed284fcb153278e3e08bd057453c3c5cdc8 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/73c84ed284fcb153278e3e08bd057453c3c5cdc8/OddsPostProc.py |
galpt = fbins[sbin(fbins,inj_pt,fine_res)] | galpt = fbins[skylocutils.sbin(fbins,inj_pt,fine_res)] | def get_unique_filename(name): """ use this to avoid name collisions """ counter = 1 base_name, ext = os.path.splitext(name) while os.path.isfile(name): name = base_name + '_' + str(counter) + ext counter += 1 return name | 43d91fce474394571608d7b00d83c73d87693e07 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/43d91fce474394571608d7b00d83c73d87693e07/run_skypoints.py |
git_id = check_call_out((git_path, 'log', '-1', '--pretty=%H')) | git_id = check_call_out((git_path, 'log', '-1', '--pretty=format:%H')) | def write_git_version(fileobj): """ Query git to determine current repository status and write a Python module with this information. Ex: >>> write_git_version(open("git_version.py", "w")) >>> import git_version >>> print git_version.id 1b0549019e992d0e001f3c28e8488946f825e873 """ git_path = check_call_out(('/usr/bin/which', 'git')) # determine current time and treat it as the build time build_date = time.strftime('%Y-%m-%d %H:%M:%S +0000', time.gmtime()) # determine builder git_builder_name = check_call_out((git_path, 'config', 'user.name')) git_builder_email = check_call_out((git_path, 'config', 'user.email')) git_builder = "%s <%s>" % (git_builder_name, git_builder_email) # determine git id git_id = check_call_out((git_path, 'log', '-1', '--pretty=%H')) # determine commit date, iso utc git_udate = float(check_call_out((git_path, 'log', '-1', '--pretty=%ct'))) git_date = time.strftime('%Y-%m-%d %H:%M:%S +0000', time.gmtime(git_udate)) # determine branch branch_match = check_call_out((git_path, 'rev-parse', '--symbolic-full-name', 'HEAD')) if branch_match == "HEAD": git_branch = None else: git_branch = os.path.basename(branch_match) # determine tag status, git_tag = call_out((git_path, 'describe', '--exact-match', '--tags', git_id)) if status != 0: git_tag = None # determine author and committer git_author_name = check_call_out((git_path, 'log', '-1', '--pretty=%an')) git_author_email = check_call_out((git_path, 'log', '-1', '--pretty=%ae')) git_author = '%s <%s>' % (git_author_name, git_author_email) git_committer_name = check_call_out((git_path, 'log', '-1', '--pretty=%cn')) git_committer_email = check_call_out((git_path, 'log', '-1', '--pretty=%ce')) git_committer = '%s <%s>' % (git_committer_name, git_committer_email) # refresh index check_call_out((git_path, 'update-index', '-q', '--refresh')) # check working copy for changes status_output = subprocess.call((git_path, 'diff-files', '--quiet')) if status_output != 0: git_status = 'UNCLEAN: Modified working tree' else: # check index for changes status_output = subprocess.call((git_path, 'diff-index', '--cached', '--quiet', 'HEAD')) if status_output != 0: git_status = 'UNCLEAN: Modified index' else: git_status = 'CLEAN: All modifications committed' # print details in a directly importable form print >>fileobj, 'id = "%s"' % git_id print >>fileobj, 'date = "%s"' % git_date print >>fileobj, 'branch = "%s"' % git_branch if git_tag is None: print >>fileobj, 'tag = None' else: print >>fileobj, 'tag = "%s"' % git_tag print >>fileobj, 'author = "%s"' % git_author print >>fileobj, 'author_name = "%s"' % git_author_name print >>fileobj, 'author_email = "%s"' % git_author_email print >>fileobj, 'builder = "%s"' % git_builder print >>fileobj, 'builder_name = "%s"' % git_builder_name print >>fileobj, 'builder_email = "%s"' % git_builder_email print >>fileobj, 'committer = "%s"' % git_committer print >>fileobj, 'committer_name = "%s"' % git_committer_name print >>fileobj, 'committer_email = "%s"' % git_committer_email print >>fileobj, 'status = "%s"' % git_status print >>fileobj, 'version = id' # add a verbose report for convenience print >>fileobj, 'verbose_msg = """%s"""' % \ """Branch: %s | 60b8cbbb05cab29e42eab07fcc04ec2faf96eca7 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/60b8cbbb05cab29e42eab07fcc04ec2faf96eca7/determine_git_version.py |
git_udate = float(check_call_out((git_path, 'log', '-1', '--pretty=%ct'))) | git_udate = float(check_call_out((git_path, 'log', '-1', '--pretty=format:%ct'))) | def write_git_version(fileobj): """ Query git to determine current repository status and write a Python module with this information. Ex: >>> write_git_version(open("git_version.py", "w")) >>> import git_version >>> print git_version.id 1b0549019e992d0e001f3c28e8488946f825e873 """ git_path = check_call_out(('/usr/bin/which', 'git')) # determine current time and treat it as the build time build_date = time.strftime('%Y-%m-%d %H:%M:%S +0000', time.gmtime()) # determine builder git_builder_name = check_call_out((git_path, 'config', 'user.name')) git_builder_email = check_call_out((git_path, 'config', 'user.email')) git_builder = "%s <%s>" % (git_builder_name, git_builder_email) # determine git id git_id = check_call_out((git_path, 'log', '-1', '--pretty=%H')) # determine commit date, iso utc git_udate = float(check_call_out((git_path, 'log', '-1', '--pretty=%ct'))) git_date = time.strftime('%Y-%m-%d %H:%M:%S +0000', time.gmtime(git_udate)) # determine branch branch_match = check_call_out((git_path, 'rev-parse', '--symbolic-full-name', 'HEAD')) if branch_match == "HEAD": git_branch = None else: git_branch = os.path.basename(branch_match) # determine tag status, git_tag = call_out((git_path, 'describe', '--exact-match', '--tags', git_id)) if status != 0: git_tag = None # determine author and committer git_author_name = check_call_out((git_path, 'log', '-1', '--pretty=%an')) git_author_email = check_call_out((git_path, 'log', '-1', '--pretty=%ae')) git_author = '%s <%s>' % (git_author_name, git_author_email) git_committer_name = check_call_out((git_path, 'log', '-1', '--pretty=%cn')) git_committer_email = check_call_out((git_path, 'log', '-1', '--pretty=%ce')) git_committer = '%s <%s>' % (git_committer_name, git_committer_email) # refresh index check_call_out((git_path, 'update-index', '-q', '--refresh')) # check working copy for changes status_output = subprocess.call((git_path, 'diff-files', '--quiet')) if status_output != 0: git_status = 'UNCLEAN: Modified working tree' else: # check index for changes status_output = subprocess.call((git_path, 'diff-index', '--cached', '--quiet', 'HEAD')) if status_output != 0: git_status = 'UNCLEAN: Modified index' else: git_status = 'CLEAN: All modifications committed' # print details in a directly importable form print >>fileobj, 'id = "%s"' % git_id print >>fileobj, 'date = "%s"' % git_date print >>fileobj, 'branch = "%s"' % git_branch if git_tag is None: print >>fileobj, 'tag = None' else: print >>fileobj, 'tag = "%s"' % git_tag print >>fileobj, 'author = "%s"' % git_author print >>fileobj, 'author_name = "%s"' % git_author_name print >>fileobj, 'author_email = "%s"' % git_author_email print >>fileobj, 'builder = "%s"' % git_builder print >>fileobj, 'builder_name = "%s"' % git_builder_name print >>fileobj, 'builder_email = "%s"' % git_builder_email print >>fileobj, 'committer = "%s"' % git_committer print >>fileobj, 'committer_name = "%s"' % git_committer_name print >>fileobj, 'committer_email = "%s"' % git_committer_email print >>fileobj, 'status = "%s"' % git_status print >>fileobj, 'version = id' # add a verbose report for convenience print >>fileobj, 'verbose_msg = """%s"""' % \ """Branch: %s | 60b8cbbb05cab29e42eab07fcc04ec2faf96eca7 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/60b8cbbb05cab29e42eab07fcc04ec2faf96eca7/determine_git_version.py |
git_author_name = check_call_out((git_path, 'log', '-1', '--pretty=%an')) git_author_email = check_call_out((git_path, 'log', '-1', '--pretty=%ae')) | git_author_name = check_call_out((git_path, 'log', '-1', '--pretty=format:%an')) git_author_email = check_call_out((git_path, 'log', '-1', '--pretty=format:%ae')) | def write_git_version(fileobj): """ Query git to determine current repository status and write a Python module with this information. Ex: >>> write_git_version(open("git_version.py", "w")) >>> import git_version >>> print git_version.id 1b0549019e992d0e001f3c28e8488946f825e873 """ git_path = check_call_out(('/usr/bin/which', 'git')) # determine current time and treat it as the build time build_date = time.strftime('%Y-%m-%d %H:%M:%S +0000', time.gmtime()) # determine builder git_builder_name = check_call_out((git_path, 'config', 'user.name')) git_builder_email = check_call_out((git_path, 'config', 'user.email')) git_builder = "%s <%s>" % (git_builder_name, git_builder_email) # determine git id git_id = check_call_out((git_path, 'log', '-1', '--pretty=%H')) # determine commit date, iso utc git_udate = float(check_call_out((git_path, 'log', '-1', '--pretty=%ct'))) git_date = time.strftime('%Y-%m-%d %H:%M:%S +0000', time.gmtime(git_udate)) # determine branch branch_match = check_call_out((git_path, 'rev-parse', '--symbolic-full-name', 'HEAD')) if branch_match == "HEAD": git_branch = None else: git_branch = os.path.basename(branch_match) # determine tag status, git_tag = call_out((git_path, 'describe', '--exact-match', '--tags', git_id)) if status != 0: git_tag = None # determine author and committer git_author_name = check_call_out((git_path, 'log', '-1', '--pretty=%an')) git_author_email = check_call_out((git_path, 'log', '-1', '--pretty=%ae')) git_author = '%s <%s>' % (git_author_name, git_author_email) git_committer_name = check_call_out((git_path, 'log', '-1', '--pretty=%cn')) git_committer_email = check_call_out((git_path, 'log', '-1', '--pretty=%ce')) git_committer = '%s <%s>' % (git_committer_name, git_committer_email) # refresh index check_call_out((git_path, 'update-index', '-q', '--refresh')) # check working copy for changes status_output = subprocess.call((git_path, 'diff-files', '--quiet')) if status_output != 0: git_status = 'UNCLEAN: Modified working tree' else: # check index for changes status_output = subprocess.call((git_path, 'diff-index', '--cached', '--quiet', 'HEAD')) if status_output != 0: git_status = 'UNCLEAN: Modified index' else: git_status = 'CLEAN: All modifications committed' # print details in a directly importable form print >>fileobj, 'id = "%s"' % git_id print >>fileobj, 'date = "%s"' % git_date print >>fileobj, 'branch = "%s"' % git_branch if git_tag is None: print >>fileobj, 'tag = None' else: print >>fileobj, 'tag = "%s"' % git_tag print >>fileobj, 'author = "%s"' % git_author print >>fileobj, 'author_name = "%s"' % git_author_name print >>fileobj, 'author_email = "%s"' % git_author_email print >>fileobj, 'builder = "%s"' % git_builder print >>fileobj, 'builder_name = "%s"' % git_builder_name print >>fileobj, 'builder_email = "%s"' % git_builder_email print >>fileobj, 'committer = "%s"' % git_committer print >>fileobj, 'committer_name = "%s"' % git_committer_name print >>fileobj, 'committer_email = "%s"' % git_committer_email print >>fileobj, 'status = "%s"' % git_status print >>fileobj, 'version = id' # add a verbose report for convenience print >>fileobj, 'verbose_msg = """%s"""' % \ """Branch: %s | 60b8cbbb05cab29e42eab07fcc04ec2faf96eca7 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/60b8cbbb05cab29e42eab07fcc04ec2faf96eca7/determine_git_version.py
git_committer_name = check_call_out((git_path, 'log', '-1', '--pretty=%cn')) git_committer_email = check_call_out((git_path, 'log', '-1', '--pretty=%ce')) | git_committer_name = check_call_out((git_path, 'log', '-1', '--pretty=format:%cn')) git_committer_email = check_call_out((git_path, 'log', '-1', '--pretty=format:%ce')) | def write_git_version(fileobj): """ Query git to determine current repository status and write a Python module with this information. Ex: >>> write_git_version(open("git_version.py", "w")) >>> import git_version >>> print git_version.id 1b0549019e992d0e001f3c28e8488946f825e873 """ git_path = check_call_out(('/usr/bin/which', 'git')) # determine current time and treat it as the build time build_date = time.strftime('%Y-%m-%d %H:%M:%S +0000', time.gmtime()) # determine builder git_builder_name = check_call_out((git_path, 'config', 'user.name')) git_builder_email = check_call_out((git_path, 'config', 'user.email')) git_builder = "%s <%s>" % (git_builder_name, git_builder_email) # determine git id git_id = check_call_out((git_path, 'log', '-1', '--pretty=%H')) # determine commit date, iso utc git_udate = float(check_call_out((git_path, 'log', '-1', '--pretty=%ct'))) git_date = time.strftime('%Y-%m-%d %H:%M:%S +0000', time.gmtime(git_udate)) # determine branch branch_match = check_call_out((git_path, 'rev-parse', '--symbolic-full-name', 'HEAD')) if branch_match == "HEAD": git_branch = None else: git_branch = os.path.basename(branch_match) # determine tag status, git_tag = call_out((git_path, 'describe', '--exact-match', '--tags', git_id)) if status != 0: git_tag = None # determine author and committer git_author_name = check_call_out((git_path, 'log', '-1', '--pretty=%an')) git_author_email = check_call_out((git_path, 'log', '-1', '--pretty=%ae')) git_author = '%s <%s>' % (git_author_name, git_author_email) git_committer_name = check_call_out((git_path, 'log', '-1', '--pretty=%cn')) git_committer_email = check_call_out((git_path, 'log', '-1', '--pretty=%ce')) git_committer = '%s <%s>' % (git_committer_name, git_committer_email) # refresh index check_call_out((git_path, 'update-index', '-q', '--refresh')) # check working copy for changes status_output = subprocess.call((git_path, 'diff-files', '--quiet')) if status_output != 0: git_status = 'UNCLEAN: Modified working tree' else: # check index for changes status_output = subprocess.call((git_path, 'diff-index', '--cached', '--quiet', 'HEAD')) if status_output != 0: git_status = 'UNCLEAN: Modified index' else: git_status = 'CLEAN: All modifications committed' # print details in a directly importable form print >>fileobj, 'id = "%s"' % git_id print >>fileobj, 'date = "%s"' % git_date print >>fileobj, 'branch = "%s"' % git_branch if git_tag is None: print >>fileobj, 'tag = None' else: print >>fileobj, 'tag = "%s"' % git_tag print >>fileobj, 'author = "%s"' % git_author print >>fileobj, 'author_name = "%s"' % git_author_name print >>fileobj, 'author_email = "%s"' % git_author_email print >>fileobj, 'builder = "%s"' % git_builder print >>fileobj, 'builder_name = "%s"' % git_builder_name print >>fileobj, 'builder_email = "%s"' % git_builder_email print >>fileobj, 'committer = "%s"' % git_committer print >>fileobj, 'committer_name = "%s"' % git_committer_name print >>fileobj, 'committer_email = "%s"' % git_committer_email print >>fileobj, 'status = "%s"' % git_status print >>fileobj, 'version = id' # add a verbose report for convenience print >>fileobj, 'verbose_msg = """%s"""' % \ """Branch: %s | 60b8cbbb05cab29e42eab07fcc04ec2faf96eca7 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/60b8cbbb05cab29e42eab07fcc04ec2faf96eca7/determine_git_version.py
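Note on the determine_git_version.py rows above: they all make the same change, replacing bare --pretty=%H style selectors with the explicit --pretty=format:%H form, which is interpreted uniformly across git versions. A minimal sketch of the call pattern, with check_call_out written here as a hypothetical stand-in for the script's helper:

    import subprocess

    def check_call_out(command):
        # Run command, return its stripped stdout, raise on nonzero exit;
        # a stand-in for the helper used in determine_git_version.py.
        proc = subprocess.Popen(command, stdout=subprocess.PIPE)
        out, _ = proc.communicate()
        if proc.returncode != 0:
            raise RuntimeError('command failed: %r' % (command,))
        return out.strip()

    # The format: prefix makes the placeholder string explicit.
    git_id = check_call_out(('git', 'log', '-1', '--pretty=format:%H'))
    git_author = check_call_out(('git', 'log', '-1', '--pretty=format:%an <%ae>'))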
np.savetxt('ranked_sky_pixels',np.column_stack([np.asarray(toppoints)[:,0:1],np.asarray(toppoints)[:,1],np.asarray(toppoints)[:,3]])) | np.savetxt(os.path.join(outdir,'ranked_sky_pixels.dat'),np.column_stack([np.asarray(toppoints)[:,0:1],np.asarray(toppoints)[:,1],np.asarray(toppoints)[:,3]])) | def plotSkyMap(skypos,skyres,sky_injpoint,confidence_levels,outdir): from mpl_toolkits.basemap import Basemap from pylal import skylocutils np.seterr(under='ignore') skypoints=np.array(skylocutils.gridsky(float(skyres))) skycarts=map(lambda s: pol2cart(s[1],s[0]),skypoints) skyinjectionconfidence=None shist=skyhist_cart(np.array(skycarts),skypos) #shist=skyhist_cart(skycarts,list(pos)) bins=skycarts # Find the bin of the injection if available injbin=None if sky_injpoint: injhist=skyhist_cart_slow(skycarts,np.array([sky_injpoint])) injbin=injhist.tolist().index(1) print 'Found injection in bin %d with co-ordinates %f,%f .'%(injbin,skypoints[injbin,0],skypoints[injbin,1]) (skyinjectionconfidence,toppoints,skyreses)=calculateConfidenceLevels(shist,skypoints,injbin,float(skyres),confidence_levels,len(skypos)) if injbin and skyinjectionconfidence: i=list(np.nonzero(np.asarray(toppoints)[:,2]==injbin))[0] min_sky_area_containing_injection=float(skyres)*float(skyres)*i print 'Minimum sky area containing injection point = %f square degrees'%min_sky_area_containing_injection myfig=plt.figure() plt.clf() m=Basemap(projection='moll',lon_0=180.0,lat_0=0.0) plx,ply=m(np.asarray(toppoints)[::-1,1]*57.296,np.asarray(toppoints)[::-1,0]*57.296) cnlevel=[1-tp for tp in np.asarray(toppoints)[::-1,3]] plt.scatter(plx,ply,s=5,c=cnlevel,faceted=False,cmap=mpl_cm.jet) m.drawmapboundary() m.drawparallels(np.arange(-90.,120.,45.),labels=[1,0,0,0],labelstyle='+/-') # draw parallels m.drawmeridians(np.arange(0.,360.,90.),labels=[0,0,0,1],labelstyle='+/-') # draw meridians plt.title("Skymap") # add a title plt.colorbar() myfig.savefig(os.path.join(outdir,'skymap.png')) plt.clf() #Save skypoints np.savetxt('ranked_sky_pixels',np.column_stack([np.asarray(toppoints)[:,0:1],np.asarray(toppoints)[:,1],np.asarray(toppoints)[:,3]])) return skyreses,skyinjectionconfidence | ca9a68b4480ed4f4824eab4d4d19762aba7828e7 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/ca9a68b4480ed4f4824eab4d4d19762aba7828e7/bayespputils.py |
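Note on the row above: the fix writes ranked_sky_pixels.dat under the caller's outdir (and gives the file an extension) instead of dropping an extensionless file in the current working directory. A minimal sketch with stand-in data (the column layout is illustrative):

    import os
    import numpy as np

    outdir = '.'                                   # stand-in output directory
    toppoints = np.array([[0.10, 0.20, 3, 0.90],   # stand-in ranked pixels:
                          [0.30, 0.40, 7, 0.50]])  # angles, bin index, confidence

    # column_stack accepts a mix of 2-D slices and 1-D columns.
    np.savetxt(os.path.join(outdir, 'ranked_sky_pixels.dat'),
               np.column_stack([toppoints[:, 0:1], toppoints[:, 1],
                                toppoints[:, 3]]))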
inspiral.InspiralNode.__init__(self,job) | pipeline.CondorDAGNode.__init__(self,job) pipeline.AnalysisNode.__init__(self) | def __init__(self, dag, job, cp, opts, sngl, frame_cache, chia, tag, p_nodes=[]): | 1d62e8501fe5a09856b38aebf1337aced30aab29 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5758/1d62e8501fe5a09856b38aebf1337aced30aab29/stfu_pipe.py |
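Note on the row above: the diff stops chaining through inspiral.InspiralNode and instead initialises both pipeline base classes explicitly. The old-style pipeline classes are not cooperative, so super() is not an option; each base __init__ is invoked by name. A minimal sketch of the pattern with illustrative stand-in bodies (only the base-class names come from the row):

    class CondorDAGNode(object):
        def __init__(self, job):
            self.job = job

    class AnalysisNode(object):
        def __init__(self):
            self.start_time = None

    class FollowUpChiaNode(CondorDAGNode, AnalysisNode):
        def __init__(self, job):
            # Call each base initialiser explicitly, mirroring the fix above.
            CondorDAGNode.__init__(self, job)
            AnalysisNode.__init__(self)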