Dataset columns (string length ranges):
rem: string, lengths 1 to 322k
add: string, lengths 0 to 2.05M
context: string, lengths 4 to 228k
meta: string, lengths 156 to 215
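Each record below pairs a removed snippet (rem) with its replacement (add), followed by the enclosing function (context) and a meta string holding the commit hash and source file path. As a minimal sketch of how such records could be consumed, assuming they are stored one JSON object per line in a file named changes.jsonl (the file name and JSON Lines layout are assumptions, not part of this dump):

import json

def iter_changes(path="changes.jsonl"):
    # Yield one (rem, add, context, meta) tuple per record. Only the four
    # field names come from the column header above; the JSON Lines layout
    # and the file name are assumptions made for this sketch.
    with open(path, encoding="utf-8") as handle:
        for line in handle:
            record = json.loads(line)
            yield (record.get("rem", ""), record.get("add", ""),
                   record.get("context", ""), record.get("meta", ""))

if __name__ == "__main__":
    for rem, add, context, meta in iter_changes():
        # meta is recorded as "<commit sha> <source file path>"
        sha, _, src = meta.partition(" ")
        print("%s  -%d/+%d chars  %s" % (sha[:8], len(rem), len(add), src))

Keeping the loader a generator avoids holding every context string (up to 228k characters per the header) in memory at once.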
return re.sub(r"([+-]?[.0-9]+)[Ee]?([+-]?[0-9]+)", r"\1 \\times 10^{\2}", s)
m, e = floatpattern.match(s).groups() return r"%s \\times 10^{%d}" % (m, int(e))
def latexnumber(s): """ Convert a string of the form "d.dddde-dd" to "d.dddd \times 10^{-dd}" """ return re.sub(r"([+-]?[.0-9]+)[Ee]?([+-]?[0-9]+)", r"\1 \\times 10^{\2}", s)
93839b38b5d5fee5d9941f59939032deb6c48ec7 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/3592/93839b38b5d5fee5d9941f59939032deb6c48ec7/SnglBurstUtils.py
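The latexnumber change above rewrites scientific-notation strings of the form "d.dddde-dd" for LaTeX output; a quick check of that regex on a sample value (the sample string is illustrative, not taken from the dataset):

import re

sample = "1.2345e-67"  # hypothetical input in the "d.dddde-dd" form the docstring describes
print(re.sub(r"([+-]?[.0-9]+)[Ee]?([+-]?[0-9]+)", r"\1 \\times 10^{\2}", sample))
# prints: 1.2345 \times 10^{-67}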
s = numpy.real(s)
s = numpy.real(s)
def IMRpeakAmp(m1,m2,spin1z,spin2z,d): """ IMRpeakAmp finds the peak amplitude of the waveform for a given source parameters and the source distance. usage: IMRpeakAmp(m1,m2,spin1z,spin2z,distance) e.g. spawaveApp.IMRpeakAmp(30,40,0.45,0.5,100) """ chi = spawaveform.computechi(m1, m2, spin1z, spin2z) imrfFinal = spawaveform.imrffinal(m1, m2, chi, 'fcut') fLower = 10.0 order = 7 dur = 2**numpy.ceil(numpy.log2(spawaveform.chirptime(m1,m2,order,fLower))) sr = 2**numpy.ceil(numpy.log2(imrfFinal*2)) deltaF = 1.0 / dur deltaT = 1.0 / sr s = numpy.empty(sr * dur, 'complex128') spawaveform.imrwaveform(m1, m2, deltaF, fLower, s, spin1z, spin2z) s = scipy.ifft(s) #s = numpy.abs(s) s = numpy.real(s) max = numpy.max(s)/d return max
0bdd962ae73f315890b6347b4ef9f25b152c7cb9 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/3592/0bdd962ae73f315890b6347b4ef9f25b152c7cb9/spawaveApp.py
mcmcfilelist += node.outputName'
mcmcfilelist += node.outputName
def __init__(self,job,coinc,cp,opts,dag,ifo,ifonames,p_nodes): pipeline.CondorDAGNode.__init__(self,job)
05035e645679043fd1ca6facde81b57b17155a95 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/3592/05035e645679043fd1ca6facde81b57b17155a95/stfu_pipe.py
cp.set("fu-condor","mcmc", self.which("lalapps_spinspiral"))
cp.set("fu-condor","spinmcmc", self.which("lalapps_spinspiral"))
def __init__(self, configfile=None): cp = ConfigParser.ConfigParser() self.cp = cp self.time_now = "_".join([str(i) for i in time_method.gmtime()[0:6]]) self.ini_file=self.time_now + ".ini" home_base = home_dirs() # CONDOR SECTION NEEDED BY THINGS IN INSPIRAL.PY cp.add_section("condor") cp.set("condor","datafind",self.which("ligo_data_find")) cp.set("condor","inspiral",self.which("lalapps_inspiral")) cp.set("condor","chia", self.which("lalapps_coherent_inspiral")) cp.set("condor","universe","standard") # SECTIONS TO SHUT UP WARNINGS cp.add_section("inspiral") cp.add_section("data") # DATAFIND SECTION cp.add_section("datafind")
05035e645679043fd1ca6facde81b57b17155a95 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/3592/05035e645679043fd1ca6facde81b57b17155a95/stfu_pipe.py
cp.set("makeCheckListWiki","ini-file",self.ini_file)
cp.set("makeCheckListWiki","ini-file",os.path.abspath(self.ini_file))
def __init__(self, configfile=None): cp = ConfigParser.ConfigParser() self.cp = cp self.time_now = "_".join([str(i) for i in time_method.gmtime()[0:6]]) self.ini_file=self.time_now + ".ini" home_base = home_dirs() # CONDOR SECTION NEEDED BY THINGS IN INSPIRAL.PY cp.add_section("condor") cp.set("condor","datafind",self.which("ligo_data_find")) cp.set("condor","inspiral",self.which("lalapps_inspiral")) cp.set("condor","chia", self.which("lalapps_coherent_inspiral")) cp.set("condor","universe","standard") # SECTIONS TO SHUT UP WARNINGS cp.add_section("inspiral") cp.add_section("data") # DATAFIND SECTION cp.add_section("datafind")
7af3808c8bded294da5949d5d10b9e4ea18c1bb5 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/3592/7af3808c8bded294da5949d5d10b9e4ea18c1bb5/stfu_pipe.py
if trigger.chisq_h1 < 4 or trigger.chisq_l < 4:
if trigger.chisq_h1 < 4 or trigger.chisq_v < 4:
def get_signal_vetoes(trigger,bankq=0,bankn=0,autoq=0,auton=0,chiq=0,chin=0,sigmaVals = None,fResp = None): sbvs = {} q = bankq nhigh = bankn q2 = autoq nhigh2 = auton if trigger.chisq == 0: sbvs['BestNR1'] = 0 else: if trigger.chisq < 60: sbvs['BestNR1'] = trigger.snr else: sbvs['BestNR1'] = trigger.snr/((1 + (trigger.chisq/60.)**(chiq/chin))/2.)**(1./chiq)
f5cea0acc9eb8c5f98e8d26075ddaef489f7a6c8 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/3592/f5cea0acc9eb8c5f98e8d26075ddaef489f7a6c8/coh_PTF_pyutils.py
df = float((event1.central_freq + 0.5*event1.bandwidth - event2.central_freq - 0.5*event2.bandwidth)/(event1.central_freq + 0.5*event1.bandwidth + event2.central_freq + 0.5*event2.bandwidth))
f_cut1 = event1.central_freq + event1.bandwidth / 2 f_cut2 = event2.central_freq + event2.bandwidth / 2 df = float((f_cut1 - f_cut2) / (f_cut1 + f_cut2))
def coinc_params_func(events, offsetvector): # # check for coincs that have been vetoed entirely # if len(events) < 2: return None params = {} # # zero-instrument parameters # params["nevents"] = (len(events),) # # one-instrument parameters # for event in events: prefix = "%s_" % event.ifo params["%ssnr2_chi2" % prefix] = (event.snr**2.0, event.chisq / event.chisq_dof) # # two-instrument parameters # for event1, event2 in iterutils.choices(sorted(events, key = lambda event: event.ifo), 2): assert event1.ifo != event2.ifo prefix = "%s_%s_" % (event1.ifo, event2.ifo) dt = float((event1.get_peak() + offsetvector[event1.ifo]) - (event2.get_peak() + offsetvector[event2.ifo])) params["%sdt" % prefix] = (dt,) dA = math.log10(abs(event1.amplitude / event2.amplitude)) params["%sdA" % prefix] = (dA,) df = float((event1.central_freq + 0.5*event1.bandwidth - event2.central_freq - 0.5*event2.bandwidth)/(event1.central_freq + 0.5*event1.bandwidth + event2.central_freq + 0.5*event2.bandwidth)) params["%sdf" % prefix] = (df,) # # done # return params
25a3dc1b78a704666ce9f482a744565f842bfa06 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/3592/25a3dc1b78a704666ce9f482a744565f842bfa06/stringutils.py
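The df term in the row above is the relative difference of the two events' upper cutoff frequencies; plugging in hypothetical numbers (not from the dataset) shows what the rewritten form computes:

# Hypothetical central frequencies and bandwidths, for illustration only.
central_freq1, bandwidth1 = 100.0, 20.0
central_freq2, bandwidth2 = 105.0, 30.0
f_cut1 = central_freq1 + bandwidth1 / 2   # 110.0
f_cut2 = central_freq2 + bandwidth2 / 2   # 120.0
df = (f_cut1 - f_cut2) / (f_cut1 + f_cut2)
print(df)  # approximately -0.0435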
"nevents": rate.tophat_window(1)
"nevents": rate.tophat_window(1)
def dt_binning(instrument1, instrument2): dt = 0.005 + inject.light_travel_time(instrument1, instrument2) # seconds return rate.NDBins((rate.ATanBins(-dt, +dt, 3001),))
25a3dc1b78a704666ce9f482a744565f842bfa06 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/3592/25a3dc1b78a704666ce9f482a744565f842bfa06/stringutils.py
def get_coincparamsdistributions(xmldoc):
def get_coincparamsdistributions(xmldoc, seglists = None):
def get_coincparamsdistributions(xmldoc): coincparamsdistributions, process_id = ligolw_burca_tailor.coinc_params_distributions_from_xml(xmldoc, u"string_cusp_likelihood") return coincparamsdistributions
25a3dc1b78a704666ce9f482a744565f842bfa06 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/3592/25a3dc1b78a704666ce9f482a744565f842bfa06/stringutils.py
def load_likelihood_data(filenames, verbose = False):
def load_likelihood_data(filenames, seglists = None, verbose = False):
def load_likelihood_data(filenames, verbose = False): coincparamsdistributions = None for n, filename in enumerate(filenames): if verbose: print >>sys.stderr, "%d/%d:" % (n + 1, len(filenames)), xmldoc = utils.load_filename(filename, gz = (filename or "stdin").endswith(".gz"), verbose = verbose) if coincparamsdistributions is None: coincparamsdistributions = get_coincparamsdistributions(xmldoc) else: coincparamsdistributions += get_coincparamsdistributions(xmldoc) xmldoc.unlink() return coincparamsdistributions
25a3dc1b78a704666ce9f482a744565f842bfa06 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/3592/25a3dc1b78a704666ce9f482a744565f842bfa06/stringutils.py
coincparamsdistributions = get_coincparamsdistributions(xmldoc)
coincparamsdistributions = get_coincparamsdistributions(xmldoc, seglists = seglists)
def load_likelihood_data(filenames, verbose = False): coincparamsdistributions = None for n, filename in enumerate(filenames): if verbose: print >>sys.stderr, "%d/%d:" % (n + 1, len(filenames)), xmldoc = utils.load_filename(filename, gz = (filename or "stdin").endswith(".gz"), verbose = verbose) if coincparamsdistributions is None: coincparamsdistributions = get_coincparamsdistributions(xmldoc) else: coincparamsdistributions += get_coincparamsdistributions(xmldoc) xmldoc.unlink() return coincparamsdistributions
25a3dc1b78a704666ce9f482a744565f842bfa06 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/3592/25a3dc1b78a704666ce9f482a744565f842bfa06/stringutils.py
coincparamsdistributions += get_coincparamsdistributions(xmldoc)
coincparamsdistributions += get_coincparamsdistributions(xmldoc, seglists = seglists)
def load_likelihood_data(filenames, verbose = False): coincparamsdistributions = None for n, filename in enumerate(filenames): if verbose: print >>sys.stderr, "%d/%d:" % (n + 1, len(filenames)), xmldoc = utils.load_filename(filename, gz = (filename or "stdin").endswith(".gz"), verbose = verbose) if coincparamsdistributions is None: coincparamsdistributions = get_coincparamsdistributions(xmldoc) else: coincparamsdistributions += get_coincparamsdistributions(xmldoc) xmldoc.unlink() return coincparamsdistributions
25a3dc1b78a704666ce9f482a744565f842bfa06 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/3592/25a3dc1b78a704666ce9f482a744565f842bfa06/stringutils.py
pipeline.CondorDAGJob.__init__(self,self.__universe,self.__executable) self.setupJob(name=self.name,dir=dir,cp=cp,tag_base=tag_base)
self.setupJob(name=self.name,dir=dir,cp=cp,tag_base=tag_base)
def __init__(self,opts,cp,dir='',tag_base=''): """ """ self.__executable = string.strip(cp.get('fu-condor','plotmcmc')) self.name = os.path.split(self.__executable.rstrip('/'))[1] self.__universe = "vanilla"
a627dfaff8b63ff5e2852047ce0b8646f5e6b641 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/3592/a627dfaff8b63ff5e2852047ce0b8646f5e6b641/stfu_pipe.py
os.path.join("bin", "search_volume_by_s1_s2"),
def run(self): # remove the automatically generated user env scripts for script in ["pylal-user-env.sh", "pylal-user-env.csh"]: log.info("removing " + script ) try: os.unlink(os.path.join("etc", script)) except: pass
a019c4b0522ce86d589cf0f39840014d0379a330 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/3592/a019c4b0522ce86d589cf0f39840014d0379a330/setup.py
self.FAR = -1
self.FAR = 99
def __init__(self): """ here are all the things we need """ #start with data needed for every coinc self.ifo_list = [] self.ifo_coincs = [] self.snr = {} self.gps = {} self.eff_distances = {} self.mass1 = {} self.mass2 = {} self.time = None self.FAR = -1 #this stuff is only needed for injections self.is_injection = False self.latitude_inj = None self.longitude_inj = None self.mass1_inj = None self.mass2_inj = None self.distance_inj = None self.eff_distances_inj = {}
be8cfcd82a5a5e331d1d030f7034863d70f64728 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/3592/be8cfcd82a5a5e331d1d030f7034863d70f64728/skylocutils.py
if ctab[0].false_alarm_rate is not None: coinc.set_FAR(ctab[0].false_alarm_rate)
def get_coincs_from_coinctable(self,files): """ read data from coinc tables (xml format) FIXME: currently assumes one coinc per file!!! """ for file in files: coinc = CoincData() xmldoc = utils.load_filename(file) sngltab = tab.get_table(xmldoc,lsctables.SnglInspiralTable.tableName) coinc.set_snr(dict((row.ifo, row.snr) for row in sngltab)) coinc.set_gps(dict((row.ifo, LIGOTimeGPS(row.get_end())) for row in sngltab)) coinc.set_effDs(dict((row.ifo,row.eff_distance) for row in sngltab)) coinc.set_masses(dict((row.ifo, row.mass1) for row in sngltab), \ dict((row.ifo, row.mass2) for row in sngltab)) ctab = tab.get_table(xmldoc,lsctables.CoincInspiralTable.tableName) coinc.set_ifos(list(ctab[0].get_ifos())) try: simtab = tab.get_table(xmldoc,lsctables.SimInspiralTable.tableName) row = siminsptab[0] effDs_inj = {} for ifo in coinc.ifo_list: if ifo == 'H1': effDs_inj[ifo] = row.eff_dist_h elif ifo == 'L1': effDs_inj[ifo] = row.eff_dist_l elif ifo == 'V1': effDs_inj[ifo] = row.eff_dist_v dist_inj = row.distance coinc.set_inj_params(row.latitude,row.longitude,row.mass1,row.mass2, \ dist_inj,effDs_inj) coinc.is_injection = True #FIXME: name the exception! except: pass
be8cfcd82a5a5e331d1d030f7034863d70f64728 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/3592/be8cfcd82a5a5e331d1d030f7034863d70f64728/skylocutils.py
coincTrigs = CoincInspiralUtils.coincInspiralTable(inspTrigs,'snr')
statistic = CoincInspiralUtils.coincStatistic('snr',None,None) coincTrigs = CoincInspiralUtils.coincInspiralTable(inspTrigs,statistic)
def get_coincs_from_coire(self,files): """ uses CoincInspiralUtils to get data from old-style (coire'd) coincs """ coincTrigs = CoincInspiralUtils.coincInspiralTable() inspTrigs = SnglInspiralUtils.ReadSnglInspiralFromFiles(files, \ mangle_event_id = True,verbose=None) #note that it's hardcoded to use snr as the statistic coincTrigs = CoincInspiralUtils.coincInspiralTable(inspTrigs,'snr') try: inspInj = SimInspiralUtils.ReadSimInspiralFromFiles(files) coincTrigs.add_sim_inspirals(inspInj) #FIXME: name the exception! except: pass
c5476ebb03d5348cf59eefbc3035b78574a7d926 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/3592/c5476ebb03d5348cf59eefbc3035b78574a7d926/skylocutils.py
self.FAR = -1
self.FAR = 99
def __init__(self): """ here are all the things we need """ #start with data needed for every coinc self.ifo_list = [] self.ifo_coincs = [] self.snr = {} self.gps = {} self.eff_distances = {} self.mass1 = {} self.mass2 = {} self.time = None self.FAR = -1 #this stuff is only needed for injections self.is_injection = False self.latitude_inj = None self.longitude_inj = None self.mass1_inj = None self.mass2_inj = None self.distance_inj = None self.eff_distances_inj = {}
3d55d0a3b24296f7f520395dda89285057339f7d /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/3592/3d55d0a3b24296f7f520395dda89285057339f7d/skylocutils.py
def __init__(self): """ here are all the things we need """ #start with data needed for every coinc self.ifo_list = [] self.ifo_coincs = [] self.snr = {} self.gps = {} self.eff_distances = {} self.mass1 = {} self.mass2 = {} self.time = None self.FAR = -1 #this stuff is only needed for injections self.is_injection = False self.latitude_inj = None self.longitude_inj = None self.mass1_inj = None self.mass2_inj = None self.distance_inj = None self.eff_distances_inj = {}
3d55d0a3b24296f7f520395dda89285057339f7d /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/3592/3d55d0a3b24296f7f520395dda89285057339f7d/skylocutils.py
if ctab[0].false_alarm_rate is not None: coinc.set_FAR(ctab[0].false_alarm_rate)
def get_coincs_from_coinctable(self,files): """ read data from coinc tables (xml format) FIXME: currently assumes one coinc per file!!! """ for file in files: coinc = CoincData() xmldoc = utils.load_filename(file) sngltab = tab.get_table(xmldoc,lsctables.SnglInspiralTable.tableName) coinc.set_snr(dict((row.ifo, row.snr) for row in sngltab)) coinc.set_gps(dict((row.ifo, LIGOTimeGPS(row.get_end())) for row in sngltab)) coinc.set_effDs(dict((row.ifo,row.eff_distance) for row in sngltab)) coinc.set_masses(dict((row.ifo, row.mass1) for row in sngltab), \ dict((row.ifo, row.mass2) for row in sngltab)) ctab = tab.get_table(xmldoc,lsctables.CoincInspiralTable.tableName) coinc.set_ifos(list(ctab[0].get_ifos())) try: simtab = tab.get_table(xmldoc,lsctables.SimInspiralTable.tableName) row = siminsptab[0] effDs_inj = {} for ifo in coinc.ifo_list: if ifo == 'H1': effDs_inj[ifo] = row.eff_dist_h elif ifo == 'L1': effDs_inj[ifo] = row.eff_dist_l elif ifo == 'V1': effDs_inj[ifo] = row.eff_dist_v dist_inj = row.distance coinc.set_inj_params(row.latitude,row.longitude,row.mass1,row.mass2, \ dist_inj,effDs_inj) coinc.is_injection = True #FIXME: name the exception! except: pass
3d55d0a3b24296f7f520395dda89285057339f7d /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/3592/3d55d0a3b24296f7f520395dda89285057339f7d/skylocutils.py
cacheSelected = cacheSelected.sieve(segment=segments.segment(math.floor(float(time)), math.ceil(float(time))))
if math.floor(float(time)) != math.ceil(float(time)): cacheSelected = cacheSelected.sieve(segment=segments.segment(math.floor(float(time)), math.ceil(float(time)))) else: cacheSelected = cacheSelected.sieve(segment=segments.segment(math.floor(float(time))-0.5, math.floor(float(time))+0.5))
def getParamsFromCache(fileName,type,ifo=None,time=None): qscanList = [] cacheList = lal.Cache.fromfile(open(fileName)) if not cacheList: return qscanList cacheSelected = cacheList.sieve(description=type,ifos=ifo) if time: cacheSelected = cacheSelected.sieve(segment=segments.segment(math.floor(float(time)), math.ceil(float(time)))) for cacheEntry in cacheSelected: path_output = cacheEntry.path() time_output = str(cacheEntry.segment[0]) type_output = cacheEntry.description ifo_output = cacheEntry.observatory qscanList.append([path_output,time_output,type_output,ifo_output]) return qscanList
a027ce38aaf2bc2df88aed587618ceb49cd6b1cb /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/3592/a027ce38aaf2bc2df88aed587618ceb49cd6b1cb/stfu_pipe.py
if re.search( table_name+r'[.]', table_param ) is None:
if table_param.find( table_name+'.' ) == -1:
def __init__( self, table_name, table_param, param_ranges_opt, verbose = False ): """ Parse --param-ranges option. Creates self.param which is the table_name and the table_param appended together (with a '.') and self.param_ranges, which is a list of tuples that give the lower parameter value, whether it is an open or closed boundary, and the same for the upper parameter. For example, if table_name is coinc_inspiral, table_param is mchirp and param_ranges_opt is '[2,8);[8,17]' will get: self.param = 'coinc_inspiral.mchirp' self.param_ranges = [ ( ('>=',2.0), ('<',8.0) ), ( ('>=',8.0), ('<=', 17.0) ) ]
aa613f9ca1679aa7fff3f9cfa9b4bb9989c85fd7 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/3592/aa613f9ca1679aa7fff3f9cfa9b4bb9989c85fd7/ligolw_sqlutils.py
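The docstring in that row spells out the bracket notation for --param-ranges ('[' or '(' for closed or open lower bounds, ']' or ')' for upper bounds). A toy parser for that notation, written for illustration and not taken from ligolw_sqlutils:

def parse_param_ranges(opt):
    # Interpret strings like "[2,8);[8,17]" as the docstring above describes:
    # '[' -> '>=', '(' -> '>', ']' -> '<=', ')' -> '<'.
    lower_ops = {"[": ">=", "(": ">"}
    upper_ops = {"]": "<=", ")": "<"}
    ranges = []
    for chunk in opt.split(";"):
        chunk = chunk.strip()
        low, high = chunk[1:-1].split(",")
        ranges.append(((lower_ops[chunk[0]], float(low)),
                       (upper_ops[chunk[-1]], float(high))))
    return ranges

print(parse_param_ranges("[2,8);[8,17]"))
# [(('>=', 2.0), ('<', 8.0)), (('>=', 8.0), ('<=', 17.0))]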
def __init__(self, xmldoc, b_b_def, sb_b_def, si_b_def, sb_c_e_def, sb_c_n_def, si_c_e_def, si_c_n_def, process):
def __init__(self, xmldoc, b_b_def, sb_b_def, si_b_def, sb_c_e_def, sb_c_n_def, si_c_e_def, si_c_n_def, process, livetime_program):
def __init__(self, xmldoc, b_b_def, sb_b_def, si_b_def, sb_c_e_def, sb_c_n_def, si_c_e_def, si_c_n_def, process): # # store the process row #
f23162e4f63afbd6a2032de2d75ca44643717584 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/3592/f23162e4f63afbd6a2032de2d75ca44643717584/ligolw_binjfind.py
self.burst_peak_time_window = 6.378140e6 / 299792458 * 1.25 if len(self.snglbursttable): self.burst_peak_time_window += max(self.snglbursttable.getColumnByName("duration")) self.coinc_peak_time_window = self.burst_peak_time_window + SimBurstUtils.burst_is_near_injection_window def bursts_near_peaktime(self, t):
def bursts_near_peaktime(self, t, window):
def __init__(self, xmldoc, b_b_def, sb_b_def, si_b_def, sb_c_e_def, sb_c_n_def, si_c_e_def, si_c_n_def, process): # # store the process row #
f23162e4f63afbd6a2032de2d75ca44643717584 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/3592/f23162e4f63afbd6a2032de2d75ca44643717584/ligolw_binjfind.py
within self.burst_peak_time_window of t.
within window seconds of t. This is not used to define any coincidences, only to provide a short list of burst events for use in more costly comparison tests.
def bursts_near_peaktime(self, t): """ Return a list of the burst events whose peak times are within self.burst_peak_time_window of t. """ return self.snglbursttable[bisect.bisect_left(self.snglbursttable, t - self.burst_peak_time_window):bisect.bisect_right(self.snglbursttable, t + self.burst_peak_time_window)]
f23162e4f63afbd6a2032de2d75ca44643717584 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/3592/f23162e4f63afbd6a2032de2d75ca44643717584/ligolw_binjfind.py
return self.snglbursttable[bisect.bisect_left(self.snglbursttable, t - self.burst_peak_time_window):bisect.bisect_right(self.snglbursttable, t + self.burst_peak_time_window)] def coincs_near_peaktime(self, t):
return self.snglbursttable[bisect.bisect_left(self.snglbursttable, t - window):bisect.bisect_right(self.snglbursttable, t + window)] def coincs_near_peaktime(self, t, window):
def bursts_near_peaktime(self, t): """ Return a list of the burst events whose peak times are within self.burst_peak_time_window of t. """ return self.snglbursttable[bisect.bisect_left(self.snglbursttable, t - self.burst_peak_time_window):bisect.bisect_right(self.snglbursttable, t + self.burst_peak_time_window)]
f23162e4f63afbd6a2032de2d75ca44643717584 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/3592/f23162e4f63afbd6a2032de2d75ca44643717584/ligolw_binjfind.py
which at least one burst event's peak time is within self.coinc_peak_time_window of t.
which at least one burst event's peak time is within window seconds of t. This is not used to define any coincidences, only to provide a short list of coinc events for use in more costly comparison tests.
def coincs_near_peaktime(self, t): """ Return a list of the (coinc_event_id, event list) tuples in which at least one burst event's peak time is within self.coinc_peak_time_window of t. """ # FIXME: this test does not consider the time slide # offsets that should be applied to the coinc, but for now # injections are done at zero lag so this isn't a problem # yet return [(coinc_event_id, events) for coinc_event_id, events in self.coincs if (t - self.coinc_peak_time_window <= events[-1].get_peak()) and (events[0].get_peak() <= t + self.coinc_peak_time_window)]
f23162e4f63afbd6a2032de2d75ca44643717584 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/3592/f23162e4f63afbd6a2032de2d75ca44643717584/ligolw_binjfind.py
return [(coinc_event_id, events) for coinc_event_id, events in self.coincs if (t - self.coinc_peak_time_window <= events[-1].get_peak()) and (events[0].get_peak() <= t + self.coinc_peak_time_window)]
near_events = set(self.bursts_near_peaktime(t, window)) return [(coinc_event_id, events) for coinc_event_id, events in self.coincs if set(events) & near_events]
def coincs_near_peaktime(self, t): """ Return a list of the (coinc_event_id, event list) tuples in which at least one burst event's peak time is within self.coinc_peak_time_window of t. """ # FIXME: this test does not consider the time slide # offsets that should be applied to the coinc, but for now # injections are done at zero lag so this isn't a problem # yet return [(coinc_event_id, events) for coinc_event_id, events in self.coincs if (t - self.coinc_peak_time_window <= events[-1].get_peak()) and (events[0].get_peak() <= t + self.coinc_peak_time_window)]
f23162e4f63afbd6a2032de2d75ca44643717584 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/3592/f23162e4f63afbd6a2032de2d75ca44643717584/ligolw_binjfind.py
Return False if the peak time of the injection sim lies within the time interval of burst. """ return SimBurstUtils.time_at_instrument(sim, burst.ifo) not in burst.get_period()
Return False (injection matches event) if an autocorrelation-width window centred on the injection is continuous with the time interval of the burst. """ tinj = SimBurstUtils.time_at_instrument(sim, burst.ifo) window = SimBurstUtils.stringcusp_autocorrelation_width / 2 return segments.segment(tinj - window, tinj + window).disjoint(burst.get_period())
def StringCuspSnglCompare(sim, burst): """ Return False if the peak time of the injection sim lies within the time interval of burst. """ return SimBurstUtils.time_at_instrument(sim, burst.ifo) not in burst.get_period()
f23162e4f63afbd6a2032de2d75ca44643717584 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/3592/f23162e4f63afbd6a2032de2d75ca44643717584/ligolw_binjfind.py
Return False if the peak time and centre frequency of sim lie within the time-frequency tile of burst. """ return StringCuspSnglCompare(sim, burst) or (sim.frequency not in burst.get_band())
Return False (injection matches event) if the peak time and centre frequency of sim lie within the time-frequency tile of burst. """ return (SimBurstUtils.time_at_instrument(sim, burst.ifo) not in burst.get_period()) or (sim.frequency not in burst.get_band())
def ExcessPowerSnglCompare(sim, burst): """ Return False if the peak time and centre frequency of sim lie within the time-frequency tile of burst. """ return StringCuspSnglCompare(sim, burst) or (sim.frequency not in burst.get_band())
f23162e4f63afbd6a2032de2d75ca44643717584 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/3592/f23162e4f63afbd6a2032de2d75ca44643717584/ligolw_binjfind.py
Return False if the peak time and centre frequency of sim lie within the time-frequency tile of burst.
Return False (injection matches event) if the time of the sim and the peak time of the burst event differ by less than or equal to delta_t seconds.
def OmegaSnglCompare(sim, burst, delta_t = 10.0): """ Return False if the peak time and centre frequency of sim lie within the time-frequency tile of burst. """ return abs(float(SimBurstUtils.time_at_instrument(sim, burst.ifo) - burst.get_peak())) > delta_t
f23162e4f63afbd6a2032de2d75ca44643717584 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/3592/f23162e4f63afbd6a2032de2d75ca44643717584/ligolw_binjfind.py
Return False if the peak time of the sim is "near" the burst event. """ return OmegaNearCoincCompare(sim, burst)
Return False (injection matches coinc) if the peak time of the sim is "near" the burst event. """ tinj = SimBurstUtils.time_at_instrument(sim, burst.ifo) window = SimBurstUtils.stringcusp_autocorrelation_width / 2 + SimBurstUtils.burst_is_near_injection_window return segments.segment(tinj - window, tinj + window).disjoint(burst.get_period())
def StringCuspNearCoincCompare(sim, burst): """ Return False if the peak time of the sim is "near" the burst event. """ return OmegaNearCoincCompare(sim, burst)
f23162e4f63afbd6a2032de2d75ca44643717584 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/3592/f23162e4f63afbd6a2032de2d75ca44643717584/ligolw_binjfind.py
Return False if the peak time of the sim is "near" the burst event.
Return False (injection matches coinc) if the peak time of the sim is "near" the burst event.
def ExcessPowerNearCoincCompare(sim, burst): """ Return False if the peak time of the sim is "near" the burst event. """ return not SimBurstUtils.burst_is_near_injection(sim, burst.start_time, burst.start_time_ns, burst.duration, burst.ifo)
f23162e4f63afbd6a2032de2d75ca44643717584 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/3592/f23162e4f63afbd6a2032de2d75ca44643717584/ligolw_binjfind.py
Return False if the peak time of the sim is "near" the burst event. """ start_time = burst.get_peak() - burst.duration / 2.0 return not SimBurstUtils.burst_is_near_injection(sim, start_time.seconds, start_time.nanoseconds, burst.duration, burst.ifo)
Return False (injection matches coinc) if the peak time of the sim is "near" the burst event. """ return OmegaSnglCompare(sim, burst, delta_t = 20.0 + burst.duration / 2)
def OmegaNearCoincCompare(sim, burst): """ Return False if the peak time of the sim is "near" the burst event. """ start_time = burst.get_peak() - burst.duration / 2.0 return not SimBurstUtils.burst_is_near_injection(sim, start_time.seconds, start_time.nanoseconds, burst.duration, burst.ifo)
f23162e4f63afbd6a2032de2d75ca44643717584 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/3592/f23162e4f63afbd6a2032de2d75ca44643717584/ligolw_binjfind.py
def find_sngl_burst_matches(contents, sim, comparefunc): """ Scan the burst table for triggers matching sim. """ return [burst for burst in contents.bursts_near_peaktime(sim.get_time_geocent()) if not comparefunc(sim, burst)]
def find_sngl_burst_matches(contents, sim, comparefunc, sieve_window): """ Scan the burst table for triggers matching sim. sieve_window is used in a bisection search to quickly identify burst events within that many seconds of the injection's peak time at the geocentre; it should be larger than the greatest time difference that can separate a burst event's peak time from an injection's peak time at the geocentre and the two still be considered a match. """ return [burst for burst in contents.bursts_near_peaktime(sim.get_time_geocent(), sieve_window) if not comparefunc(sim, burst)]
def find_sngl_burst_matches(contents, sim, comparefunc): """ Scan the burst table for triggers matching sim. """ return [burst for burst in contents.bursts_near_peaktime(sim.get_time_geocent()) if not comparefunc(sim, burst)]
f23162e4f63afbd6a2032de2d75ca44643717584 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/3592/f23162e4f63afbd6a2032de2d75ca44643717584/ligolw_binjfind.py
def find_exact_coinc_matches(coincs, sim, comparefunc):
def find_exact_coinc_matches(coincs, sim, comparefunc, seglists):
def find_exact_coinc_matches(coincs, sim, comparefunc): """ Return a list of the coinc_event_ids of the burst<-->burst coincs in which all burst events match sim. """ # FIXME: this test does not consider the time slide offsets that # should be applied to the coinc, but for now injections are done # at zero lag so this isn't a problem yet return [coinc_event_id for coinc_event_id, events in coincs if True not in (bool(comparefunc(sim, event)) for event in events)]
f23162e4f63afbd6a2032de2d75ca44643717584 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/3592/f23162e4f63afbd6a2032de2d75ca44643717584/ligolw_binjfind.py
in which all burst events match sim.
in which all burst events match sim and to which all instruments on at the time of the sim contributed events.
def find_exact_coinc_matches(coincs, sim, comparefunc): """ Return a list of the coinc_event_ids of the burst<-->burst coincs in which all burst events match sim. """ # FIXME: this test does not consider the time slide offsets that # should be applied to the coinc, but for now injections are done # at zero lag so this isn't a problem yet return [coinc_event_id for coinc_event_id, events in coincs if True not in (bool(comparefunc(sim, event)) for event in events)]
f23162e4f63afbd6a2032de2d75ca44643717584 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/3592/f23162e4f63afbd6a2032de2d75ca44643717584/ligolw_binjfind.py
return [coinc_event_id for coinc_event_id, events in coincs if True not in (bool(comparefunc(sim, event)) for event in events)]
on_instruments = SimBurstUtils.on_instruments(sim, seglists) return set(coinc_event_id for coinc_event_id, events in coincs if on_instruments.issubset(set(event.ifo for event in events)) and not any(comparefunc(sim, event) for event in events))
def find_exact_coinc_matches(coincs, sim, comparefunc): """ Return a list of the coinc_event_ids of the burst<-->burst coincs in which all burst events match sim. """ # FIXME: this test does not consider the time slide offsets that # should be applied to the coinc, but for now injections are done # at zero lag so this isn't a problem yet return [coinc_event_id for coinc_event_id, events in coincs if True not in (bool(comparefunc(sim, event)) for event in events)]
f23162e4f63afbd6a2032de2d75ca44643717584 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/3592/f23162e4f63afbd6a2032de2d75ca44643717584/ligolw_binjfind.py
return [coinc_event_id for coinc_event_id, events in coincs if False in (bool(comparefunc(sim, event)) for event in events)]
return set(coinc_event_id for coinc_event_id, events in coincs if not all(comparefunc(sim, event) for event in events))
def find_near_coinc_matches(coincs, sim, comparefunc): """ Return a list of the coinc_event_ids of the burst<-->burst coincs in which at least one burst event matches sim. """ # FIXME: this test does not consider the time slide offsets that # should be applied to the coinc, but for now injections are done # at zero lag so this isn't a problem yet return [coinc_event_id for coinc_event_id, events in coincs if False in (bool(comparefunc(sim, event)) for event in events)]
f23162e4f63afbd6a2032de2d75ca44643717584 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/3592/f23162e4f63afbd6a2032de2d75ca44643717584/ligolw_binjfind.py
process = process
process = process, livetime_program = { "StringCusp": "StringSearch", "excesspower": "lalapps_power", "omega": None }[search]
si_c_n_def = si_c_n_def,
f23162e4f63afbd6a2032de2d75ca44643717584 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/3592/f23162e4f63afbd6a2032de2d75ca44643717584/ligolw_binjfind.py
events = find_sngl_burst_matches(contents, sim, snglcomparefunc)
events = find_sngl_burst_matches(contents, sim, snglcomparefunc, burst_peak_time_window)
si_c_n_def = si_c_n_def,
f23162e4f63afbd6a2032de2d75ca44643717584 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/3592/f23162e4f63afbd6a2032de2d75ca44643717584/ligolw_binjfind.py
coincs = contents.coincs_near_peaktime(sim.get_time_geocent()) coinc_event_ids = find_exact_coinc_matches(coincs, sim, snglcomparefunc) if coinc_event_ids: add_sim_coinc_coinc(contents, sim, coinc_event_ids, contents.sb_c_e_coinc_def_id) coinc_event_ids = find_near_coinc_matches(coincs, sim, nearcoinccomparefunc) if coinc_event_ids: add_sim_coinc_coinc(contents, sim, coinc_event_ids, contents.sb_c_n_coinc_def_id)
coincs = contents.coincs_near_peaktime(sim.get_time_geocent(), coinc_peak_time_window) exact_coinc_event_ids = find_exact_coinc_matches(coincs, sim, snglcomparefunc, contents.seglists) near_coinc_event_ids = find_near_coinc_matches(coincs, sim, nearcoinccomparefunc) assert exact_coinc_event_ids.issubset(near_coinc_event_ids) if exact_coinc_event_ids: add_sim_coinc_coinc(contents, sim, exact_coinc_event_ids, contents.sb_c_e_coinc_def_id) if near_coinc_event_ids: add_sim_coinc_coinc(contents, sim, near_coinc_event_ids, contents.sb_c_n_coinc_def_id)
si_c_n_def = si_c_n_def,
f23162e4f63afbd6a2032de2d75ca44643717584 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/3592/f23162e4f63afbd6a2032de2d75ca44643717584/ligolw_binjfind.py
events = find_sngl_burst_matches(contents, sim, snglcomparefunc)
events = find_sngl_burst_matches(contents, sim, snglcomparefunc, burst_peak_time_window)
si_c_n_def = si_c_n_def,
f23162e4f63afbd6a2032de2d75ca44643717584 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/3592/f23162e4f63afbd6a2032de2d75ca44643717584/ligolw_binjfind.py
coincs = contents.coincs_near_peaktime(sim.get_time_geocent()) coinc_event_ids = find_exact_coinc_matches(coincs, sim, snglcomparefunc) if coinc_event_ids: add_sim_coinc_coinc(contents, sim, coinc_event_ids, contents.si_c_e_coinc_def_id) coinc_event_ids = find_near_coinc_matches(coincs, sim, nearcoinccomparefunc) if coinc_event_ids: add_sim_coinc_coinc(contents, sim, coinc_event_ids, contents.si_c_n_coinc_def_id)
coincs = contents.coincs_near_peaktime(sim.get_time_geocent(), coinc_peak_time_window) exact_coinc_event_ids = find_exact_coinc_matches(coincs, sim, snglcomparefunc, contents.seglists) near_coinc_event_ids = find_near_coinc_matches(coincs, sim, nearcoinccomparefunc) assert exact_coinc_event_ids.issubset(near_coinc_event_ids) if exact_coinc_event_ids: add_sim_coinc_coinc(contents, sim, exact_coinc_event_ids, contents.si_c_e_coinc_def_id) if near_coinc_event_ids: add_sim_coinc_coinc(contents, sim, near_coinc_event_ids, contents.si_c_n_coinc_def_id)
si_c_n_def = si_c_n_def,
f23162e4f63afbd6a2032de2d75ca44643717584 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/3592/f23162e4f63afbd6a2032de2d75ca44643717584/ligolw_binjfind.py
(self.coint.type,sngl.ifo,sngl.ifo,timeString)
(self.coinc.type,sngl.ifo,sngl.ifo,timeString)
def get_analyzeQscan_RDS(self): """ """ #analyseQscan.py_FG_RDS_full_data/H1-analyseQscan_H1_931176926_116_rds-unspecified-gpstime.cache cacheList=list() cacheFiles=list() for sngl in self.coinc.sngls: timeString=str(float(sngl.time)).replace(".","_") myCacheMask="*%s*/%s-analyseQscan_%s_%s_rds*.cache"%\ (self.coint.type,sngl.ifo,sngl.ifo,timeString) #Ignore the files with seis_rds in them for x in fnmatch.filter(self.fsys,myCacheMask): if not x.__contains__('seis_rds'): cacheList.append(x) #Read the cache file or files cacheFiles=self.__readCache__(cacheList) return cacheFiles
8f5bc08aa7c5a9ce674e08a28ead83d83999609d /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/3592/8f5bc08aa7c5a9ce674e08a28ead83d83999609d/makeCheckListWiki.py
sys.stdout.write("Found: %s\n",publication_directory)
sys.stdout.write("Found: %s\n" %(publication_directory))
def __init__(self,type=None,ifo=None,time=None,snr=None,chisqr=None,mass1=None,mass2=None,mchirp=None): """ """ self.type=str(type) self.ifo=str(ifo) self.time=float(time) self.snr=float(snr) self.chisqr=float(chisqr) self.mass1=float(mass1) self.mass2=float(mass2) self.mchirp=float(mchirp)
8f5bc08aa7c5a9ce674e08a28ead83d83999609d /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/3592/8f5bc08aa7c5a9ce674e08a28ead83d83999609d/makeCheckListWiki.py
sys.stdout.write("Found: %s\n",publication_url)
sys.stdout.write("Found: %s\n" %(publication_url))
def __init__(self,type=None,ifo=None,time=None,snr=None,chisqr=None,mass1=None,mass2=None,mchirp=None): """ """ self.type=str(type) self.ifo=str(ifo) self.time=float(time) self.snr=float(snr) self.chisqr=float(chisqr) self.mass1=float(mass1) self.mass2=float(mass2) self.mchirp=float(mchirp)
8f5bc08aa7c5a9ce674e08a28ead83d83999609d /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/3592/8f5bc08aa7c5a9ce674e08a28ead83d83999609d/makeCheckListWiki.py
coincTrigs = CoincInspiralUtils.coincInspiralTable(inspTriggers,'snr')
coincTrigs = CoincInspiralUtils.coincInspiralTable(inspTrigs,'snr')
def get_coincs_from_coire(self,files): """ uses CoincInspiralUtils to get data from old-style (coire'd) coincs """ coincTrigs = CoincInspiralUtils.coincInspiralTable() inspTrigs = SnglInspiralUtils.ReadSnglInspiralFromFiles(files, \ mangle_event_id = True,verbose=None) #note that it's hardcoded to use snr as the statistic coincTrigs = CoincInspiralUtils.coincInspiralTable(inspTriggers,'snr') try: inspInj = SimInspiralUtils.ReadSimInspiralFromFiles(files) coincTrigs.add_sim_inspirals(inspInj) #FIXME: name the exception! except: pass
dc0d302d6255c3e1457b8ee8961a5c8535491853 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/3592/dc0d302d6255c3e1457b8ee8961a5c8535491853/skylocutils.py
def now(): return datetime.now()
da66d430c0acc35d155af13f1e3e3c04f787efd7 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/1844/da66d430c0acc35d155af13f1e3e3c04f787efd7/fu.py
global JAVA_TYPE_MAP JAVA_TYPE_MAP = {
global JAVA_PRIMITIVE_TYPE_MAP JAVA_PRIMITIVE_TYPE_MAP = {
def updateTypeMaps(namespace): """ Updates the type maps with a new namespace. **Must** be executed at least once, **before** node class file generation. """ global JAVA_TYPE_MAP JAVA_TYPE_MAP = { # Base types namespace + 'boolean': 'Boolean', namespace + 'dateTime': 'String', namespace + 'string': 'String', namespace + 'integer': 'Integer', namespace + 'positiveInteger': 'Integer', namespace + 'nonNegativeInteger': 'Integer', namespace + 'float': 'Double', namespace + 'anyURI': 'String', namespace + 'hexBinary': 'String', # Hacks 'NamingConvention': 'String', 'PercentFraction': 'Double', 'MIMEtype': 'String', 'Leader': 'Experimenter', 'Contact': 'Experimenter', 'Pump': 'Laser', } global JAVA_BASE_TYPE_MAP JAVA_BASE_TYPE_MAP = { 'UniversallyUniqueIdentifier': DEFAULT_BASE_CLASS }
da66d430c0acc35d155af13f1e3e3c04f787efd7 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/1844/da66d430c0acc35d155af13f1e3e3c04f787efd7/fu.py
'NamingConvention': 'String', 'PercentFraction': 'Double', 'MIMEtype': 'String', 'Leader': 'Experimenter', 'Contact': 'Experimenter', 'Pump': 'Laser',
def updateTypeMaps(namespace): """ Updates the type maps with a new namespace. **Must** be executed at least once, **before** node class file generation. """ global JAVA_TYPE_MAP JAVA_TYPE_MAP = { # Base types namespace + 'boolean': 'Boolean', namespace + 'dateTime': 'String', namespace + 'string': 'String', namespace + 'integer': 'Integer', namespace + 'positiveInteger': 'Integer', namespace + 'nonNegativeInteger': 'Integer', namespace + 'float': 'Double', namespace + 'anyURI': 'String', namespace + 'hexBinary': 'String', # Hacks 'NamingConvention': 'String', 'PercentFraction': 'Double', 'MIMEtype': 'String', 'Leader': 'Experimenter', 'Contact': 'Experimenter', 'Pump': 'Laser', } global JAVA_BASE_TYPE_MAP JAVA_BASE_TYPE_MAP = { 'UniversallyUniqueIdentifier': DEFAULT_BASE_CLASS }
da66d430c0acc35d155af13f1e3e3c04f787efd7 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/1844/da66d430c0acc35d155af13f1e3e3c04f787efd7/fu.py
global JAVA_TYPE_MAP JAVA_TYPE_MAP = copy.deepcopy(JAVA_PRIMITIVE_TYPE_MAP) JAVA_TYPE_MAP['NamingConvention'] = 'String' JAVA_TYPE_MAP['PercentFraction'] = 'Double' JAVA_TYPE_MAP['MIMEtype'] = 'String' JAVA_TYPE_MAP['Leader'] = 'Experimenter' JAVA_TYPE_MAP['Contact'] = 'Experimenter' JAVA_TYPE_MAP['Pump'] = 'Laser'
def updateTypeMaps(namespace): """ Updates the type maps with a new namespace. **Must** be executed at least once, **before** node class file generation. """ global JAVA_TYPE_MAP JAVA_TYPE_MAP = { # Base types namespace + 'boolean': 'Boolean', namespace + 'dateTime': 'String', namespace + 'string': 'String', namespace + 'integer': 'Integer', namespace + 'positiveInteger': 'Integer', namespace + 'nonNegativeInteger': 'Integer', namespace + 'float': 'Double', namespace + 'anyURI': 'String', namespace + 'hexBinary': 'String', # Hacks 'NamingConvention': 'String', 'PercentFraction': 'Double', 'MIMEtype': 'String', 'Leader': 'Experimenter', 'Contact': 'Experimenter', 'Pump': 'Laser', } global JAVA_BASE_TYPE_MAP JAVA_BASE_TYPE_MAP = { 'UniversallyUniqueIdentifier': DEFAULT_BASE_CLASS }
da66d430c0acc35d155af13f1e3e3c04f787efd7 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/1844/da66d430c0acc35d155af13f1e3e3c04f787efd7/fu.py
return javaType
if not self.isPrimitive and not self.isEnumeration: return "String" return self.javaType
def _get_metadataStoreType(self): # FIXME: No more node #javaType = self.javaType #if javaType[-4:] == "Node": # return "String" return javaType
da66d430c0acc35d155af13f1e3e3c04f787efd7 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/1844/da66d430c0acc35d155af13f1e3e3c04f787efd7/fu.py
doc="""The property's MetadataStore type.""")
doc="""The property's MetadataStore type.""")
def _get_metadataStoreType(self): # FIXME: No more node #javaType = self.javaType #if javaType[-4:] == "Node": # return "String" return javaType
da66d430c0acc35d155af13f1e3e3c04f787efd7 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/1844/da66d430c0acc35d155af13f1e3e3c04f787efd7/fu.py
if o is not None and o.base == "Reference": return True
if o is not None: return o.isReference
def _get_isReference(self): o = self.model.getObjectByName(self.type) if o is not None and o.base == "Reference": return True return False
da66d430c0acc35d155af13f1e3e3c04f787efd7 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/1844/da66d430c0acc35d155af13f1e3e3c04f787efd7/fu.py
BACK_REFERENCE_OVERRIDE = {'Screen': ['Plate'], 'Plate': ['Screen']}
BACK_REFERENCE_OVERRIDE = {'Screen': ['Plate'], 'Plate': ['Screen'], 'Annotation': ['Annotation']}
def now(): return datetime.now()
33ca668f1e2bc211408916409f5987902b983f32 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/1844/33ca668f1e2bc211408916409f5987902b983f32/fu.py
namespace + 'positiveInteger': 'PositiveInteger', namespace + 'nonNegativeInteger': 'NonNegativeInteger',
'PositiveInt': 'PositiveInteger', 'NonNegativeInt': 'NonNegativeInteger', 'PositiveLong': 'PositiveLong', 'NonNegativeLong': 'NonNegativeLong',
def updateTypeMaps(namespace): """ Updates the type maps with a new namespace. **Must** be executed at least once, **before** node class file generation. """ global JAVA_PRIMITIVE_TYPE_MAP JAVA_PRIMITIVE_TYPE_MAP = { namespace + 'boolean': 'Boolean', namespace + 'dateTime': 'String', namespace + 'string': 'String', namespace + 'integer': 'Integer', namespace + 'long': 'Long', namespace + 'positiveInteger': 'PositiveInteger', namespace + 'nonNegativeInteger': 'NonNegativeInteger', namespace + 'float': 'Double', namespace + 'double': 'Double', namespace + 'anyURI': 'String', namespace + 'hexBinary': 'String', 'PercentFraction': 'PercentFraction', } global JAVA_TYPE_MAP JAVA_TYPE_MAP = copy.deepcopy(JAVA_PRIMITIVE_TYPE_MAP) JAVA_TYPE_MAP['MIMEtype'] = 'String' JAVA_TYPE_MAP['Leader'] = 'Experimenter' JAVA_TYPE_MAP['Contact'] = 'Experimenter' JAVA_TYPE_MAP['Pump'] = 'Laser' global JAVA_BASE_TYPE_MAP JAVA_BASE_TYPE_MAP = { 'UniversallyUniqueIdentifier': DEFAULT_BASE_CLASS }
33ca668f1e2bc211408916409f5987902b983f32 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/1844/33ca668f1e2bc211408916409f5987902b983f32/fu.py
return JAVA_TYPE_MAP[self.type]
return JAVA_TYPE_MAP[self.type.replace('OME:', '')]
def _get_javaType(self): try: # Hand back the type of enumerations if self.isEnumeration: javaType = self.name if len(self.delegate.values) == 0: # As we have no directly defined possible values we have # no reason to qualify our type explicitly. return self.type if javaType == "Type": # One of the OME XML unspecific "Type" properties which # can only be qualified by the parent. if self.type.endswith("string"): # We've been defined entirely inline, prefix our Java # type name with the parent type's name. return "%s%s" % (self.parent.name, javaType) # There's another type which describes us, use its name # as our Java type name. return self.type return javaType # Handle XML Schema types that directly map to Java types return JAVA_TYPE_MAP[self.type] except KeyError: # Hand back the type of references or complex types with the # useless OME XML 'Ref' suffix removed. if self.isBackReference or \ (not self.isAttribute and self.delegate.isComplex()): return self.REF_REGEX.sub('', self.type) # Hand back the type of complex types if not self.isAttribute and self.delegate.isComplex(): return self.type if not self.isEnumeration: # We have a property whose type was defined by a top level # simpleType. simpleTypeName = self.type return self.resolveJavaTypeFromSimpleType(simpleTypeName) logging.debug("%s dump: %s" % (self, self.__dict__)) logging.debug("%s delegate dump: %s" % (self, self.delegate.__dict__)) raise ModelProcessingError, \ "Unable to find %s Java type for %s" % (self.name, self.type)
33ca668f1e2bc211408916409f5987902b983f32 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/1844/33ca668f1e2bc211408916409f5987902b983f32/fu.py
CODEPOINTS = {\n\
def print_header(): print "\
bf2a87a44b2d474baa7044c77e6ae38d4822dc2a /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/10700/bf2a87a44b2d474baa7044c77e6ae38d4822dc2a/unihan_kconv.py
print "],\nu'x%s':["%code[0],
if firsttime: firsttime = False print "CODEPOINTS = { \nu'x%s':["%code[0], else: print "],\nu'x%s':["%code[0],
def process_readings(): oucode = 0 olcode = 0 for line in open(source,'r'): items = line[:-1].split('\t') try: r = re.match(r'kKorea', items[1]) if r is not None: code = re.sub(r'U\+([0-9A-F]{2})([0-9A-F]{2})',r'\1\t\2',items[0]).split('\t') ucode = int(code[0],16) lcode = int(code[1],16) pron = items[2].split(' ')[0].capitalize() if oucode != ucode: print "],\nu'x%s':["%code[0], oucode = ucode olcode = -1 if (lcode - olcode) > 1: for i in range(lcode-olcode-1): print '"[?]",', olcode = lcode print '"'+pron+'",', except: continue
bf2a87a44b2d474baa7044c77e6ae38d4822dc2a /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/10700/bf2a87a44b2d474baa7044c77e6ae38d4822dc2a/unihan_kconv.py
print '"[?]",',
print "'',",
def process_readings(): oucode = 0 olcode = 0 for line in open(source,'r'): items = line[:-1].split('\t') try: r = re.match(r'kKorea', items[1]) if r is not None: code = re.sub(r'U\+([0-9A-F]{2})([0-9A-F]{2})',r'\1\t\2',items[0]).split('\t') ucode = int(code[0],16) lcode = int(code[1],16) pron = items[2].split(' ')[0].capitalize() if oucode != ucode: print "],\nu'x%s':["%code[0], oucode = ucode olcode = -1 if (lcode - olcode) > 1: for i in range(lcode-olcode-1): print '"[?]",', olcode = lcode print '"'+pron+'",', except: continue
bf2a87a44b2d474baa7044c77e6ae38d4822dc2a /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/10700/bf2a87a44b2d474baa7044c77e6ae38d4822dc2a/unihan_kconv.py
print '"'+pron+'",',
print "'"+pron+"',",
def process_readings(): oucode = 0 olcode = 0 for line in open(source,'r'): items = line[:-1].split('\t') try: r = re.match(r'kKorea', items[1]) if r is not None: code = re.sub(r'U\+([0-9A-F]{2})([0-9A-F]{2})',r'\1\t\2',items[0]).split('\t') ucode = int(code[0],16) lcode = int(code[1],16) pron = items[2].split(' ')[0].capitalize() if oucode != ucode: print "],\nu'x%s':["%code[0], oucode = ucode olcode = -1 if (lcode - olcode) > 1: for i in range(lcode-olcode-1): print '"[?]",', olcode = lcode print '"'+pron+'",', except: continue
bf2a87a44b2d474baa7044c77e6ae38d4822dc2a /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/10700/bf2a87a44b2d474baa7044c77e6ae38d4822dc2a/unihan_kconv.py
self.kakasi = CDLL("libkakasi")
self.kakasi = CDLL("\\kakasi\\lib\\kakasi")
def __init__(self): self.codepoints = CODEPOINTS self.codepoints.update(JACODES)
181f9235e3fa3ea58e938b9ceb91268e1baa7126 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/10700/181f9235e3fa3ea58e938b9ceb91268e1baa7126/jadecoder.py
self.kakasi = CDLL("libkakasi.so")
self.kakasi = CDLL("libkakasi.so")
def __init__(self): self.codepoints = CODEPOINTS self.codepoints.update(JACODES)
181f9235e3fa3ea58e938b9ceb91268e1baa7126 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/10700/181f9235e3fa3ea58e938b9ceb91268e1baa7126/jadecoder.py
pmap = { ord(u'â'):'a',ord(u'à'):'a',ord(u'ắ'):'a',ord(u'ă'):'a',ord(u'ấ'):'a', ord(u'ü'):'u',ord(u'ụ'):'u',ord(u'ú'):'u',ord(u'ử'):'u',ord(u'ư'):'u', ord(u'ù'):'u', ord(u'é'):'e', ord(u'ọ'):'o',ord(u'ố'):'o',ord(u'ộ'):'o',ord(u'ơ'):'o',ord(u'ớ'):'o', ord(u'ớ'):'o', } r1 = re.compile(r'U\+([0-9A-F]{2})([0-9A-F]{2}\b)')
def process_readings(self, source, fout): oucode = 0
9145b8e82b3e4a975e637a96d80862a186515a18 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/10700/9145b8e82b3e4a975e637a96d80862a186515a18/unihan_conv.py
code = re.sub(r'U\+([0-9A-F]{2})([0-9A-F]{2}\b)',r'\1\t\2',items[0]).split('\t')
code = r1.sub(r'\1\t\2',items[0]).split('\t')
def process_readings(self, source, fout): oucode = 0
9145b8e82b3e4a975e637a96d80862a186515a18 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/10700/9145b8e82b3e4a975e637a96d80862a186515a18/unihan_conv.py
pron = items[2].split(' ')[0].capitalize() if not all(ord(c) < 128 for c in pron): pron = re.sub('[^\x00-\x7f]',lambda x: self.pmap[ord(x)], pron)
ptmp = items[2].split(' ')[0].capitalize() pron = re.sub('[^\00-\x7f]', lambda x: pmap[ord(x.group())], ptmp)
def process_readings(self, source, fout): oucode = 0
9145b8e82b3e4a975e637a96d80862a186515a18 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/10700/9145b8e82b3e4a975e637a96d80862a186515a18/unihan_conv.py
self.gen_map(fout, oucode)
self.gen_map(fout, oucode)
def process_readings(self, source, fout): oucode = 0
9145b8e82b3e4a975e637a96d80862a186515a18 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/10700/9145b8e82b3e4a975e637a96d80862a186515a18/unihan_conv.py
if obtained != valid:
if not expected_result(obtained, valid):
def expand (file, entry, type = -1):
a5b45471485c35b80cfe0668a4fec1a12a72538b /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/14180/a5b45471485c35b80cfe0668a4fec1a12a72538b/testsuite.py
if obtained != valid:
if not expected_result(obtained, valid):
def expand (file, entry):
a5b45471485c35b80cfe0668a4fec1a12a72538b /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/14180/a5b45471485c35b80cfe0668a4fec1a12a72538b/testsuite.py
from ipdb import set_trace; set_trace() debug(title)
def main_upload(arguments): """Upload video to Youtube.""" usage = """Usage: %prog [OPTIONS] EMAIL PASSWORD FILE TITLE DESCRIPTION CATEGORY KEYWORDS Upload a video to youtube spliting it if necessary (uses ffmpeg).""" parser = optparse.OptionParser(usage, version=VERSION) parser.add_option('-c', '--get-categories', dest='get_categories', action="store_true", default=False, help='Show video categories') parser.add_option('-s', '--split-only', dest='split_only', action="store_true", default=False, help='Split videos without uploading') parser.add_option('-n', '--no-split', dest='no_split', action="store_true", default=False, help='Skip video split') parser.add_option('-u', '--get-upload-form-info', dest='get_upload_form_data', action="store_true", default=False, help="Don't upload, just get the form info") parser.add_option('', '--private', dest='private', action="store_true", default=False, help='Set uploaded video as private') parser.add_option('', '--location', dest='location', type="string", default=None, metavar="COORDINATES", help='Video location (lat, lon). example: "37.0,-122.0"') options, args = parser.parse_args(arguments) if options.get_categories: print " ".join(Youtube.get_categories().keys()) return elif options.split_only: video_path, = args for path in split_youtube_video(video_path): print path return elif len(args) != 7: parser.print_usage() return 1 encoding = get_encoding() email, password0, video_path, title, description, category, skeywords = \ [unicode(s, encoding) for s in args] from ipdb import set_trace; set_trace() debug(title) password = (sys.stdin.readline().strip() if password0 == "-" else password0) videos = ([video_path] if options.no_split else list(split_youtube_video(video_path))) debug("connecting to Youtube API") yt = Youtube(DEVELOPER_KEY, email, password) keywords = filter(bool, [s.strip() for s in re.split('[,;\s]+', skeywords)]) for index, splitted_video_path in enumerate(videos): complete_title = ("%s [%d/%d]" % (title, index+1, len(videos)) if len(videos) > 1 else title) args = [splitted_video_path, complete_title, description, category, keywords] kwargs = dict(private=options.private, location=parse_location(options.location)) if options.get_upload_form_data: data = yt.get_upload_form_data(*args, **kwargs) print "|".join([splitted_video_path, data["token"], data["post_url"]]) else: debug("start upload: %s (%s)" % (splitted_video_path, complete_title)) entry = yt.upload_video(*args, **kwargs) print entry.GetHtmlLink().href.replace("&feature=youtube_gdata", "")
9a1ca1cfb5fe15c213d33feb4a0065af9208d38d /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/9246/9a1ca1cfb5fe15c213d33feb4a0065af9208d38d/youtube_upload.py
password = (sys.stdin.read().strip() if password0 == "-" else password0)
password = (sys.stdin.readline().strip() if password0 == "-" else password0)
def main_upload(arguments): """Upload video to Youtube.""" usage = """Usage: %prog [OPTIONS] EMAIL PASSWORD FILE TITLE DESCRIPTION CATEGORY KEYWORDS Upload a video to youtube spliting it if necessary (uses ffmpeg).""" parser = optparse.OptionParser(usage, version=VERSION) parser.add_option('-c', '--get-categories', dest='get_categories', action="store_true", default=False, help='Show video categories') parser.add_option('-s', '--split-only', dest='split_only', action="store_true", default=False, help='Split videos without uploading') parser.add_option('-n', '--no-split', dest='no_split', action="store_true", default=False, help='Skip video split') parser.add_option('-u', '--get-upload-form-info', dest='get_upload_form_data', action="store_true", default=False, help="Don't upload, just get the form info") options, args = parser.parse_args(arguments) if options.get_categories: print " ".join(Youtube.get_categories().keys()) return elif options.split_only: video_path, = args for path in split_youtube_video(video_path): print path return elif len(args) != 7: parser.print_usage() return 1 email, password0, video_path, title, description, category, skeywords = args password = (sys.stdin.read().strip() if password0 == "-" else password0) videos = ([video_path] if options.no_split else list(split_youtube_video(video_path))) debug("connecting to Youtube API") yt = Youtube(DEVELOPER_KEY, email, password) keywords = filter(bool, map(str.strip, re.split('[,;\s]+', skeywords))) for index, splitted_video_path in enumerate(videos): if len(videos) > 1: complete_title = "%s [%d/%d]" % (title, index+1, len(videos)) else: complete_title = title args = [splitted_video_path, complete_title, description, category, keywords] if options.get_upload_form_data: data = yt.get_upload_form_data(*args) print "|".join([splitted_video_path, data["token"], data["post_url"]]) else: debug("start upload: %s (%s)" % (splitted_video_path, complete_title)) entry = yt.upload_video(*args) print entry.GetHtmlLink().href.replace("&feature=youtube_gdata", "")
e1560097324ccb81eeddb0650e1658d6b1cc6efb /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/9246/e1560097324ccb81eeddb0650e1658d6b1cc6efb/youtube_upload.py
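The youtube_upload change recorded above replaces sys.stdin.read() with sys.stdin.readline(), so that passing "-" as the password argument consumes only the first line of standard input instead of the whole stream. A minimal standalone sketch of that pattern, with an illustrative helper name that is not part of the original script:

import sys

def read_password(password_arg):
    # "-" means: take the password from the first line of standard input,
    # e.g.  echo secret | youtube-upload me@example.com - video.avi ...
    # readline() stops at the newline, so anything piped after the password
    # is left for later reads, whereas read() would swallow it all.
    if password_arg == "-":
        return sys.stdin.readline().strip()
    return password_arg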
sys.stderr.write("--- " + str(obj) + "\n")
string = str(obj.encode(get_encoding()) if isinstance(obj, unicode) else obj)
sys.stderr.write("--- " + string + "\n")

def get_encoding():
    return sys.stdout.encoding or locale.getpreferredencoding()
def debug(obj):
    """Write obj to standard error."""
    sys.stderr.write("--- " + str(obj) + "\n")
9999034b88a09fe278589e25fccf30ffe5f276ac /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/9246/9999034b88a09fe278589e25fccf30ffe5f276ac/youtube_upload.py
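The replacement above makes debug() encoding-aware: unicode objects are encoded with the terminal encoding before being written to stderr, and a new get_encoding() helper centralises that lookup. A self-contained sketch of the same pair of functions with a usage line, assuming the Python 2 environment the source targets:

import locale
import sys

def get_encoding():
    # Prefer the attached terminal's encoding, fall back to the locale default.
    return sys.stdout.encoding or locale.getpreferredencoding()

def debug(obj):
    # Encode unicode values so non-ASCII video titles do not raise
    # UnicodeEncodeError when written to stderr.
    string = str(obj.encode(get_encoding()) if isinstance(obj, unicode) else obj)
    sys.stderr.write("--- " + string + "\n")

debug(u"My video title")  # writes "--- My video title" to stderr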
encoding = sys.stdout.encoding or locale.getpreferredencoding()
encoding = get_encoding()
def main_upload(arguments):
    """Upload video to Youtube."""
    usage = """Usage: %prog [OPTIONS] EMAIL PASSWORD FILE TITLE DESCRIPTION CATEGORY KEYWORDS
    Upload a video to youtube spliting it if necessary (uses ffmpeg)."""
    parser = optparse.OptionParser(usage, version=VERSION)
    parser.add_option('-c', '--get-categories', dest='get_categories',
        action="store_true", default=False, help='Show video categories')
    parser.add_option('-s', '--split-only', dest='split_only',
        action="store_true", default=False, help='Split videos without uploading')
    parser.add_option('-n', '--no-split', dest='no_split',
        action="store_true", default=False, help='Skip video split')
    parser.add_option('-u', '--get-upload-form-info', dest='get_upload_form_data',
        action="store_true", default=False, help="Don't upload, just get the form info")
    parser.add_option('', '--private', dest='private',
        action="store_true", default=False, help='Set uploaded video as private')
    parser.add_option('', '--location', dest='location', type="string",
        default=None, metavar="COORDINATES",
        help='Video location (lat, lon). example: "37.0,-122.0"')
    options, args = parser.parse_args(arguments)

    if options.get_categories:
        print " ".join(Youtube.get_categories().keys())
        return
    elif options.split_only:
        video_path, = args
        for path in split_youtube_video(video_path):
            print path
        return
    elif len(args) != 7:
        parser.print_usage()
        return 1

    encoding = sys.stdout.encoding or locale.getpreferredencoding()
    email, password0, video_path, title, description, category, skeywords = \
        [unicode(s, encoding) for s in args]
    password = (sys.stdin.readline().strip() if password0 == "-" else password0)
    videos = ([video_path] if options.no_split
              else list(split_youtube_video(video_path)))
    debug("connecting to Youtube API")
    yt = Youtube(DEVELOPER_KEY, email, password)
    keywords = filter(bool, [s.strip() for s in re.split('[,;\s]+', skeywords)])

    for index, splitted_video_path in enumerate(videos):
        complete_title = ("%s [%d/%d]" % (title, index+1, len(videos))
                          if len(videos) > 1 else title)
        args = [splitted_video_path, complete_title, description, category, keywords]
        kwargs = dict(private=options.private, location=parse_location(options.location))
        debug("kwargs = %s" % kwargs)
        if options.get_upload_form_data:
            data = yt.get_upload_form_data(*args, **kwargs)
            print "|".join([splitted_video_path, data["token"], data["post_url"]])
        else:
            debug("start upload: %s (%s)" % (splitted_video_path, complete_title))
            entry = yt.upload_video(*args, **kwargs)
            print entry.GetHtmlLink().href.replace("&feature=youtube_gdata", "")
9999034b88a09fe278589e25fccf30ffe5f276ac /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/9246/9999034b88a09fe278589e25fccf30ffe5f276ac/youtube_upload.py
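Both revisions above decode the positional command-line arguments into unicode using the detected terminal encoding and split the keyword string on commas, semicolons or whitespace. An isolated sketch of those two steps with made-up argument values (variable names follow the source):

import locale
import re
import sys

encoding = sys.stdout.encoding or locale.getpreferredencoding()

# Example argv tail as byte strings, decoded to unicode before use.
args = ["me@example.com", "-", "video.avi", "My title", "A description",
        "Music", "guitar, acoustic;live cover"]
email, password0, video_path, title, description, category, skeywords = \
    [unicode(s, encoding) for s in args]

# Split keywords on commas, semicolons or whitespace and drop empty items.
keywords = filter(bool, [s.strip() for s in re.split('[,;\s]+', skeywords)])
print keywords  # [u'guitar', u'acoustic', u'live', u'cover']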
debug("kwargs = %s" % kwargs)
def main_upload(arguments):
    """Upload video to Youtube."""
    usage = """Usage: %prog [OPTIONS] EMAIL PASSWORD FILE TITLE DESCRIPTION CATEGORY KEYWORDS
    Upload a video to youtube spliting it if necessary (uses ffmpeg)."""
    parser = optparse.OptionParser(usage, version=VERSION)
    parser.add_option('-c', '--get-categories', dest='get_categories',
        action="store_true", default=False, help='Show video categories')
    parser.add_option('-s', '--split-only', dest='split_only',
        action="store_true", default=False, help='Split videos without uploading')
    parser.add_option('-n', '--no-split', dest='no_split',
        action="store_true", default=False, help='Skip video split')
    parser.add_option('-u', '--get-upload-form-info', dest='get_upload_form_data',
        action="store_true", default=False, help="Don't upload, just get the form info")
    parser.add_option('', '--private', dest='private',
        action="store_true", default=False, help='Set uploaded video as private')
    parser.add_option('', '--location', dest='location', type="string",
        default=None, metavar="COORDINATES",
        help='Video location (lat, lon). example: "37.0,-122.0"')
    options, args = parser.parse_args(arguments)

    if options.get_categories:
        print " ".join(Youtube.get_categories().keys())
        return
    elif options.split_only:
        video_path, = args
        for path in split_youtube_video(video_path):
            print path
        return
    elif len(args) != 7:
        parser.print_usage()
        return 1

    encoding = sys.stdout.encoding or locale.getpreferredencoding()
    email, password0, video_path, title, description, category, skeywords = \
        [unicode(s, encoding) for s in args]
    password = (sys.stdin.readline().strip() if password0 == "-" else password0)
    videos = ([video_path] if options.no_split
              else list(split_youtube_video(video_path)))
    debug("connecting to Youtube API")
    yt = Youtube(DEVELOPER_KEY, email, password)
    keywords = filter(bool, [s.strip() for s in re.split('[,;\s]+', skeywords)])

    for index, splitted_video_path in enumerate(videos):
        complete_title = ("%s [%d/%d]" % (title, index+1, len(videos))
                          if len(videos) > 1 else title)
        args = [splitted_video_path, complete_title, description, category, keywords]
        kwargs = dict(private=options.private, location=parse_location(options.location))
        debug("kwargs = %s" % kwargs)
        if options.get_upload_form_data:
            data = yt.get_upload_form_data(*args, **kwargs)
            print "|".join([splitted_video_path, data["token"], data["post_url"]])
        else:
            debug("start upload: %s (%s)" % (splitted_video_path, complete_title))
            entry = yt.upload_video(*args, **kwargs)
            print entry.GetHtmlLink().href.replace("&feature=youtube_gdata", "")
9999034b88a09fe278589e25fccf30ffe5f276ac /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/9246/9999034b88a09fe278589e25fccf30ffe5f276ac/youtube_upload.py
if (os.path.exists(os.path.join(root_dir, ".git")) or os.path.exists(os.path.join(root_dir, ".hg"))):
  prefix = os.path.commonprefix([root_dir, project_dir])
  return fullname[len(prefix) + 1:]
if (os.path.exists(os.path.join(root_dir, ".git")) or os.path.exists(os.path.join(root_dir, ".hg"))):
  prefix = os.path.commonprefix([root_dir, project_dir])
  return fullname[len(prefix) + 1:]
def RepositoryName(self): """FullName after removing the local path to the repository.
df58397e6f4ed5d2740b06fa5118dfc9cb92f2ef /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/8431/df58397e6f4ed5d2740b06fa5118dfc9cb92f2ef/cpplint.py
def RepositoryName(self): """FullName after removing the local path to the repository.
e8777217774ca1c50e978c3fbecd3a9aa3cbb882 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/8431/e8777217774ca1c50e978c3fbecd3a9aa3cbb882/cpplint.py
not os.path.exists(os.path.join(root_dir, ".git"))):
not os.path.exists(os.path.join(root_dir, ".git")) and not os.path.exists(os.path.join(root_dir, ".hg"))):
def RepositoryName(self): """FullName after removing the local path to the repository.
e8777217774ca1c50e978c3fbecd3a9aa3cbb882 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/8431/e8777217774ca1c50e978c3fbecd3a9aa3cbb882/cpplint.py
if os.path.exists(os.path.join(root_dir, ".git")):
if (os.path.exists(os.path.join(root_dir, ".git")) or os.path.exists(os.path.join(root_dir, ".hg"))):
def RepositoryName(self): """FullName after removing the local path to the repository.
e8777217774ca1c50e978c3fbecd3a9aa3cbb882 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/8431/e8777217774ca1c50e978c3fbecd3a9aa3cbb882/cpplint.py
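The three cpplint records above extend the repository-root test from .git alone to .git or .hg. For illustration only, a hypothetical helper (not part of cpplint) that applies the same check while walking up the directory tree:

import os.path

def find_vcs_root(start_path):
    # Walk upward until a directory containing a .git or .hg marker is found;
    # return None if the filesystem root is reached without a match.
    current = os.path.abspath(start_path)
    while True:
        if (os.path.exists(os.path.join(current, ".git")) or
                os.path.exists(os.path.join(current, ".hg"))):
            return current
        parent = os.path.dirname(current)
        if parent == current:
            return None
        current = parent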
def id (about, host):
def id(about, host):
def id (about, host):
    # this might turn into a cache that gets dumped to file and
    # supports more than two fixed hosts in time.
    cache = IDS_MAIN if host == FLUIDDB_PATH else IDS_SAND
    return cache[about]
e75848841ac96982524448e932e12fd6ff9cbe0c /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/12637/e75848841ac96982524448e932e12fd6ff9cbe0c/fdb.py
http = Http (timeout=timeout)
http = Http(timeout=timeout)
def _get_http(timeout):
    try:
        http = Http (timeout=timeout)
    except TypeError:
        # The user's version of http2lib is old. Omit the timeout.
        http = Http ()
    return http
e75848841ac96982524448e932e12fd6ff9cbe0c /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/12637/e75848841ac96982524448e932e12fd6ff9cbe0c/fdb.py
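The try/except TypeError in _get_http is a feature probe: newer releases of the HTTP client accept a timeout keyword, older ones do not (the in-code "http2lib" appears to be a typo for httplib2). A standalone sketch of the same fallback, assuming Http is httplib2's class as the surrounding fdb.py code suggests:

from httplib2 import Http

def _get_http(timeout):
    # Ask for an explicit socket timeout; if the installed httplib2 is too
    # old to accept the keyword, fall back to the default constructor.
    try:
        return Http(timeout=timeout)
    except TypeError:
        return Http()

http = _get_http(300)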
http = Http ()
http = Http()
def _get_http(timeout):
    try:
        http = Http (timeout=timeout)
    except TypeError:
        # The user's version of http2lib is old. Omit the timeout.
        http = Http ()
    return http
e75848841ac96982524448e932e12fd6ff9cbe0c /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/12637/e75848841ac96982524448e932e12fd6ff9cbe0c/fdb.py
url = host + urllib.quote (path)
url = host + urllib.quote(path)
def _get_url(host, path, hash, kw):
    url = host + urllib.quote (path)
    if hash:
        url = '%s?%s' % (url, urllib.urlencode (hash))
    elif kw:
        url = '%s?%s' % (url, urllib.urlencode (kw))
    return url
e75848841ac96982524448e932e12fd6ff9cbe0c /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/12637/e75848841ac96982524448e932e12fd6ff9cbe0c/fdb.py
url = '%s?%s' % (url, urllib.urlencode (hash))
url = '%s?%s' % (url, urllib.urlencode(hash))
def _get_url(host, path, hash, kw):
    url = host + urllib.quote (path)
    if hash:
        url = '%s?%s' % (url, urllib.urlencode (hash))
    elif kw:
        url = '%s?%s' % (url, urllib.urlencode (kw))
    return url
e75848841ac96982524448e932e12fd6ff9cbe0c /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/12637/e75848841ac96982524448e932e12fd6ff9cbe0c/fdb.py
url = '%s?%s' % (url, urllib.urlencode (kw))
url = '%s?%s' % (url, urllib.urlencode(kw))
def _get_url(host, path, hash, kw):
    url = host + urllib.quote (path)
    if hash:
        url = '%s?%s' % (url, urllib.urlencode (hash))
    elif kw:
        url = '%s?%s' % (url, urllib.urlencode (kw))
    return url
e75848841ac96982524448e932e12fd6ff9cbe0c /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/12637/e75848841ac96982524448e932e12fd6ff9cbe0c/fdb.py
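The _get_url records above only remove the space before the call parentheses; the underlying pattern is percent-quoting the resource path and url-encoding whichever parameter dictionary is present. A small worked example with a made-up host and query (the hash-before-kw precedence mirrors the source):

import urllib

def _get_url(host, path, hash, kw):
    url = host + urllib.quote(path)
    if hash:
        url = '%s?%s' % (url, urllib.urlencode(hash))
    elif kw:
        url = '%s?%s' % (url, urllib.urlencode(kw))
    return url

print _get_url('http://fluiddb.fluidinfo.com', '/objects/an id',
               {'showAbout': True}, {})
# -> http://fluiddb.fluidinfo.com/objects/an%20id?showAbout=True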
def __init__ (self, hash):
def __init__(self, hash):
def __init__ (self, hash):
    for k in hash:
        self.__dict__[k] = hash[k]
e75848841ac96982524448e932e12fd6ff9cbe0c /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/12637/e75848841ac96982524448e932e12fd6ff9cbe0c/fdb.py
def __str__ (self):
    keys = self.__dict__.keys ()
    keys.sort ()
    return '\n'.join (['%20s: %s' % (key, str (self.__dict__[key]))
def __str__(self):
    keys = self.__dict__.keys()
    keys.sort()
    return '\n'.join(['%20s: %s' % (key, str(self.__dict__[key]))
def __str__ (self):
    keys = self.__dict__.keys ()
    keys.sort ()
    return '\n'.join (['%20s: %s' % (key, str (self.__dict__[key]))
                       for key in keys])
e75848841ac96982524448e932e12fd6ff9cbe0c /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/12637/e75848841ac96982524448e932e12fd6ff9cbe0c/fdb.py
def __init__ (self, name, value=None):
def __init__(self, name, value=None):
def __init__ (self, name, value=None):
    self.name = name
    self.value = value
e75848841ac96982524448e932e12fd6ff9cbe0c /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/12637/e75848841ac96982524448e932e12fd6ff9cbe0c/fdb.py
def __str__ (self):
def __str__(self):
def __str__ (self):
    return ('Tag "%s", value "%s" of type %s'
            % (self.name, str (self.value), str (type (self.value))))
e75848841ac96982524448e932e12fd6ff9cbe0c /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/12637/e75848841ac96982524448e932e12fd6ff9cbe0c/fdb.py
% (self.name, str (self.value), str (type (self.value))))
% (self.name, str(self.value), str(type(self.value))))
def __str__ (self):
    return ('Tag "%s", value "%s" of type %s'
            % (self.name, str (self.value), str (type (self.value))))
e75848841ac96982524448e932e12fd6ff9cbe0c /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/12637/e75848841ac96982524448e932e12fd6ff9cbe0c/fdb.py
def __init__ (self, username=None, password=None, id=None, filename=None):
def __init__(self, username=None, password=None, id=None, filename=None):
def __init__ (self, username=None, password=None, id=None, filename=None):
    if username:
        self.username = username
        self.password = password
    else:
        if filename == None:
            filename = get_credentials_file ()
        if os.path.exists (filename):
            try:
                f = open (filename)
                lines = f.readlines ()
                self.username = lines[0].strip ()
                self.password = lines[1].strip ()
                f.close ()
            except:
                raise ProblemReadingCredentialsFileError, ('Failed to read'
                        ' credentials from %s.' % str (filename))
        else:
            raise CredentialsFileNotFoundError, ('Couldn\'t find or '
                    'read credentials from %s.' % str (filename))
e75848841ac96982524448e932e12fd6ff9cbe0c /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/12637/e75848841ac96982524448e932e12fd6ff9cbe0c/fdb.py
            filename = get_credentials_file ()
        if os.path.exists (filename):
            filename = get_credentials_file()
        if os.path.exists(filename):
def __init__ (self, username=None, password=None, id=None, filename=None):
    if username:
        self.username = username
        self.password = password
    else:
        if filename == None:
            filename = get_credentials_file ()
        if os.path.exists (filename):
            try:
                f = open (filename)
                lines = f.readlines ()
                self.username = lines[0].strip ()
                self.password = lines[1].strip ()
                f.close ()
            except:
                raise ProblemReadingCredentialsFileError, ('Failed to read'
                        ' credentials from %s.' % str (filename))
        else:
            raise CredentialsFileNotFoundError, ('Couldn\'t find or '
                    'read credentials from %s.' % str (filename))
e75848841ac96982524448e932e12fd6ff9cbe0c /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/12637/e75848841ac96982524448e932e12fd6ff9cbe0c/fdb.py
f = open (filename)
lines = f.readlines ()
self.username = lines[0].strip ()
self.password = lines[1].strip ()
f.close ()
f = open(filename)
lines = f.readlines()
self.username = lines[0].strip()
self.password = lines[1].strip()
f.close()
def __init__ (self, username=None, password=None, id=None, filename=None):
    if username:
        self.username = username
        self.password = password
    else:
        if filename == None:
            filename = get_credentials_file ()
        if os.path.exists (filename):
            try:
                f = open (filename)
                lines = f.readlines ()
                self.username = lines[0].strip ()
                self.password = lines[1].strip ()
                f.close ()
            except:
                raise ProblemReadingCredentialsFileError, ('Failed to read'
                        ' credentials from %s.' % str (filename))
        else:
            raise CredentialsFileNotFoundError, ('Couldn\'t find or '
                    'read credentials from %s.' % str (filename))
e75848841ac96982524448e932e12fd6ff9cbe0c /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/12637/e75848841ac96982524448e932e12fd6ff9cbe0c/fdb.py
' credentials from %s.' % str (filename))
' credentials from %s.' % str(filename))
def __init__ (self, username=None, password=None, id=None, filename=None):
    if username:
        self.username = username
        self.password = password
    else:
        if filename == None:
            filename = get_credentials_file ()
        if os.path.exists (filename):
            try:
                f = open (filename)
                lines = f.readlines ()
                self.username = lines[0].strip ()
                self.password = lines[1].strip ()
                f.close ()
            except:
                raise ProblemReadingCredentialsFileError, ('Failed to read'
                        ' credentials from %s.' % str (filename))
        else:
            raise CredentialsFileNotFoundError, ('Couldn\'t find or '
                    'read credentials from %s.' % str (filename))
e75848841ac96982524448e932e12fd6ff9cbe0c /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/12637/e75848841ac96982524448e932e12fd6ff9cbe0c/fdb.py
'read credentials from %s.' % str (filename))
'read credentials from %s.' % str(filename))
def __init__ (self, username=None, password=None, id=None, filename=None):
    if username:
        self.username = username
        self.password = password
    else:
        if filename == None:
            filename = get_credentials_file ()
        if os.path.exists (filename):
            try:
                f = open (filename)
                lines = f.readlines ()
                self.username = lines[0].strip ()
                self.password = lines[1].strip ()
                f.close ()
            except:
                raise ProblemReadingCredentialsFileError, ('Failed to read'
                        ' credentials from %s.' % str (filename))
        else:
            raise CredentialsFileNotFoundError, ('Couldn\'t find or '
                    'read credentials from %s.' % str (filename))
e75848841ac96982524448e932e12fd6ff9cbe0c /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/12637/e75848841ac96982524448e932e12fd6ff9cbe0c/fdb.py
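The cluster of fdb.py records above only normalises call spacing inside Credentials.__init__; the behaviour itself is reading a two-line credentials file (username on the first line, password on the second). A reduced sketch of that format, where the exception class and function name are simplified stand-ins for the ones in fdb.py:

import os

class CredentialsError(Exception):
    # Stand-in for fdb.py's more specific credential error classes.
    pass

def read_credentials(filename):
    # Line 1: username, line 2: password; anything else is an error.
    if not os.path.exists(filename):
        raise CredentialsError('Couldn\'t find or read credentials from %s.'
                               % filename)
    try:
        f = open(filename)
        lines = f.readlines()
        username, password = lines[0].strip(), lines[1].strip()
        f.close()
    except Exception:
        raise CredentialsError('Failed to read credentials from %s.' % filename)
    return username, password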
def __init__ (self, credentials=None, host=None, debug=False):
def __init__(self, credentials=None, host=None, debug=False):
def __init__ (self, credentials=None, host=None, debug=False):
    if credentials == None:
        credentials = Credentials ()
    self.credentials = credentials
    if host is None:
        host = choose_host()
    self.host = host
    self.debug = debug
    self.timeout = choose_http_timeout()
    if not host.startswith ('http'):
        self.host = 'http://%s' % host
    # the following based on fluiddb.py
    userpass = '%s:%s' % (credentials.username, credentials.password)
    auth = 'Basic %s' % userpass.encode ('base64').strip()
    self.headers = { 'Authorization' : auth }
e75848841ac96982524448e932e12fd6ff9cbe0c /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/12637/e75848841ac96982524448e932e12fd6ff9cbe0c/fdb.py
credentials = Credentials ()
credentials = Credentials()
def __init__ (self, credentials=None, host=None, debug=False):
    if credentials == None:
        credentials = Credentials ()
    self.credentials = credentials
    if host is None:
        host = choose_host()
    self.host = host
    self.debug = debug
    self.timeout = choose_http_timeout()
    if not host.startswith ('http'):
        self.host = 'http://%s' % host
    # the following based on fluiddb.py
    userpass = '%s:%s' % (credentials.username, credentials.password)
    auth = 'Basic %s' % userpass.encode ('base64').strip()
    self.headers = { 'Authorization' : auth }
e75848841ac96982524448e932e12fd6ff9cbe0c /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/12637/e75848841ac96982524448e932e12fd6ff9cbe0c/fdb.py
if not host.startswith ('http'):
if not host.startswith('http'):
def __init__ (self, credentials=None, host=None, debug=False):
    if credentials == None:
        credentials = Credentials ()
    self.credentials = credentials
    if host is None:
        host = choose_host()
    self.host = host
    self.debug = debug
    self.timeout = choose_http_timeout()
    if not host.startswith ('http'):
        self.host = 'http://%s' % host
    # the following based on fluiddb.py
    userpass = '%s:%s' % (credentials.username, credentials.password)
    auth = 'Basic %s' % userpass.encode ('base64').strip()
    self.headers = { 'Authorization' : auth }
e75848841ac96982524448e932e12fd6ff9cbe0c /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/12637/e75848841ac96982524448e932e12fd6ff9cbe0c/fdb.py
auth = 'Basic %s' % userpass.encode ('base64').strip()
auth = 'Basic %s' % userpass.encode('base64').strip()
def __init__ (self, credentials=None, host=None, debug=False):
    if credentials == None:
        credentials = Credentials ()
    self.credentials = credentials
    if host is None:
        host = choose_host()
    self.host = host
    self.debug = debug
    self.timeout = choose_http_timeout()
    if not host.startswith ('http'):
        self.host = 'http://%s' % host
    # the following based on fluiddb.py
    userpass = '%s:%s' % (credentials.username, credentials.password)
    auth = 'Basic %s' % userpass.encode ('base64').strip()
    self.headers = { 'Authorization' : auth }
e75848841ac96982524448e932e12fd6ff9cbe0c /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/12637/e75848841ac96982524448e932e12fd6ff9cbe0c/fdb.py
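The constructor edits above leave the logic untouched: the client builds an HTTP Basic Authorization header by hand from the username and password. A minimal sketch of just that step, using the same Python 2 'base64' string codec as the source:

def basic_auth_header(username, password):
    # "user:pass" is base64-encoded; encode('base64') appends a trailing
    # newline, hence the strip().
    userpass = '%s:%s' % (username, password)
    return {'Authorization': 'Basic %s' % userpass.encode('base64').strip()}

print basic_auth_header('user', 'pass')
# -> {'Authorization': 'Basic dXNlcjpwYXNz'}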
def set_connection_from_global (self):
def set_connection_from_global(self):
def set_connection_from_global (self):
    """Sets the host on the basis of the global variable flags, if that
       exists. Used to enable the tests to run against alternate hosts."""
    self.host = choose_host ()
    self.debug = choose_debug_mode ()
    self.timeout = choose_http_timeout ()
e75848841ac96982524448e932e12fd6ff9cbe0c /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/12637/e75848841ac96982524448e932e12fd6ff9cbe0c/fdb.py
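set_connection_from_global reconfigures an existing client from a module-level flags object so the test suite can point at an alternate host. The choose_host, choose_debug_mode and choose_http_timeout helpers are not included in this excerpt; the following is a purely hypothetical sketch of what such flag-driven choosers might look like, shown only to illustrate the mechanism the docstring describes:

# Hypothetical flags holder; fdb.py's real helpers read a similar global.
class _Flags(object):
    hostname = 'sandbox.fluidinfo.com'
    debug = False
    timeout = 300

flags = _Flags()

def choose_host():
    return flags.hostname

def choose_debug_mode():
    return flags.debug

def choose_http_timeout():
    return flags.timeout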