Column summary (string lengths, min .. max):

    rem       1 .. 322k
    add       0 .. 2.05M
    context   4 .. 228k
    meta      156 .. 215
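Each record below is one mined line change from early Subversion history: rem is the removed text, add is its replacement (empty when a line was only deleted), context is the enclosing function with its newlines flattened onto one line, and meta is a commit SHA followed by the path of the changed file. As a minimal sketch of how such records might be consumed, assuming a JSON-lines export using these four field names (the serialization and file name are assumptions, not part of this dump):

import json

def load_records(path):
    # Yield one dict per record. Assumes one JSON object per line with
    # the rem/add/context/meta fields summarized above; the dump itself
    # does not specify its on-disk format, so this is illustrative only.
    with open(path, encoding='utf-8') as f:
        for line in f:
            rec = json.loads(line)
            # meta has the shape "<commit-sha> <file-path>"
            sha, _, fname = rec['meta'].partition(' ')
            rec['commit'] = sha
            rec['file'] = fname.strip()
            yield rec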
def convert(cvsroot, target=SVNROOT, log_fname_base=DATAFILE, start_pass=1, verbose=0):
def convert(pool, cvsroot, target=SVNROOT, log_fname_base=DATAFILE, start_pass=1, verbose=0):
def convert(cvsroot, target=SVNROOT, log_fname_base=DATAFILE, start_pass=1, verbose=0): "Convert a CVS repository to an SVN repository." # prepare the operation context ctx = _ctx() ctx.cvsroot = cvsroot ctx.target = target ctx.log_fname_base = log_fname_base ctx.verbose = verbose times = [ None ] * len(_passes) for i in range(start_pass - 1, len(_passes)): times[i] = time.time() if verbose: print '----- pass %d -----' % (i + 1) _passes[i](ctx) times.append(time.time()) if verbose: for i in range(start_pass, len(_passes)+1): print 'pass %d: %d seconds' % (i, int(times[i] - times[i-1])) print ' total:', int(times[len(_passes)] - times[start_pass-1]), 'seconds'
1ea553ef1f59c2ab034594d92c29d3b4b3e1eca7 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/6036/1ea553ef1f59c2ab034594d92c29d3b4b3e1eca7/cvs2svn.py
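In the record above, context is the pre-change version of convert() and contains the rem text verbatim, so the post-change function can be reconstructed by substitution. A sketch building on the hypothetical load_records above; it applies only to records where rem really does occur verbatim in context:

def apply_change(rec):
    # Splice the added text in place of the removed text. Only valid
    # when 'rem' occurs verbatim in 'context', as in the convert()
    # record above; other rows may need a fuzzier match.
    if rec['rem'] not in rec['context']:
        raise ValueError('rem not found in context for %s' % rec['commit'])
    return rec['context'].replace(rec['rem'], rec['add'], 1)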
print 'USAGE: %s [-p pass] repository-path' % sys.argv[0]
print 'USAGE: %s [-v] [-p pass] repository-path' % sys.argv[0]
def usage(): print 'USAGE: %s [-p pass] repository-path' % sys.argv[0] sys.exit(1)
1ea553ef1f59c2ab034594d92c29d3b4b3e1eca7 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/6036/1ea553ef1f59c2ab034594d92c29d3b4b3e1eca7/cvs2svn.py
convert(args[0], start_pass=start_pass, verbose=verbose)
util.run_app(convert, args[0], start_pass=start_pass, verbose=verbose)
def main(): opts, args = getopt.getopt(sys.argv[1:], 'p:v') if len(args) != 1: usage() verbose = 0 start_pass = 1 for opt, value in opts: if opt == '-p': start_pass = int(value) if start_pass < 1 or start_pass > len(_passes): print 'ERROR: illegal value (%d) for starting pass. ' \ 'must be 1 through %d.' % (start_pass, len(_passes)) sys.exit(1) elif opt == '-v': verbose = 1 convert(args[0], start_pass=start_pass, verbose=verbose)
1ea553ef1f59c2ab034594d92c29d3b4b3e1eca7 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/6036/1ea553ef1f59c2ab034594d92c29d3b4b3e1eca7/cvs2svn.py
url = os.path.join(svntest.main.test_area_url, svntest.main.general_repo_dir, sbox.name) print url
url = svntest.main.test_area_url + '/' + svntest.main.current_repo_dir
def diff_pure_repository_update_a_file(sbox): "pure repository diff update a file" if sbox.build(): return 1 wc_dir = sbox.wc_dir was_cwd = os.getcwd() os.chdir(wc_dir) update_a_file() svntest.main.run_svn(None, 'ci') os.chdir(was_cwd) url = os.path.join(svntest.main.test_area_url, svntest.main.general_repo_dir, sbox.name) print url diff_output, err_output = svntest.main.run_svn(None, 'diff', '-r1:2', url) return check_update_a_file(diff_output)
4eed35780043d4c83b11f6b98dc4acbf654b2acc /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/6036/4eed35780043d4c83b11f6b98dc4acbf654b2acc/diff_tests.py
files = [ ] for t in inst_targets: files.append(t.output)
files = _sorted_files(inst_targets)
def main(fname, oname=None, skip_depends=0): parser = ConfigParser.ConfigParser(_cfg_defaults) parser.read(fname) if oname is None: oname = os.path.splitext(os.path.basename(fname))[0] + '-outputs.mk' ofile = open(oname, 'w') ofile.write('# DO NOT EDIT -- AUTOMATICALLY GENERATED\n\n') errors = 0 targets = { } install = { } # install area name -> targets test_progs = [ ] test_deps = [ ] fs_test_progs = [ ] fs_test_deps = [ ] file_deps = [ ] target_dirs = { } manpages = [ ] infopages = [ ] target_names = _filter_targets(parser.sections()) # PASS 1: collect the targets and some basic info for target in target_names: try: target_ob = Target(target, parser.get(target, 'path'), parser.get(target, 'install'), parser.get(target, 'type')) except GenMakeError, e: print e errors = 1 continue targets[target] = target_ob itype = target_ob.install if install.has_key(itype): install[itype].append(target_ob) else: install[itype] = [ target_ob ] target_dirs[target_ob.path] = None if errors: sys.exit(1) # PASS 2: generate the outputs for target in target_names: target_ob = targets[target] path = target_ob.path bldtype = target_ob.type objext = target_ob.objext tpath = target_ob.output tfile = os.path.basename(tpath) if target_ob.install == 'test' and bldtype == 'exe': test_deps.append(tpath) if parser.get(target, 'testing') != 'skip': test_progs.append(tpath) if target_ob.install == 'fs-test' and bldtype == 'exe': fs_test_deps.append(tpath) if parser.get(target, 'testing') != 'skip': fs_test_progs.append(tpath) pats = parser.get(target, 'sources') if not pats: pats = _default_sources[bldtype] sources, s_errors = _collect_paths(pats, path) errors = errors or s_errors objects = [ ] for src in sources: if src[-2:] == '.c': objname = src[:-2] + objext objects.append(objname) file_deps.append((src, objname)) else: print 'ERROR: unknown file extension on', src errors = 1 retreat = _retreat_dots(path) libs = [ ] deps = [ ] for lib in string.split(parser.get(target, 'libs')): if lib in target_names: tlib = targets[lib] target_ob.deps.append(tlib) deps.append(tlib.output) # link in the library by simply referring to the .la file ### hmm. use join() for retreat + ... ? 
libs.append(retreat + os.path.join(tlib.path, lib + '.la')) else: # something we don't know, so just include it directly libs.append(lib) for man in string.split(parser.get(target, 'manpages')): manpages.append(man) for info in string.split(parser.get(target, 'infopages')): infopages.append(info) targ_varname = string.replace(target, '-', '_') ldflags = parser.get(target, 'link-flags') add_deps = parser.get(target, 'add-deps') objnames = string.join(map(os.path.basename, objects)) ofile.write('%s_DEPS = %s %s\n' '%s_OBJECTS = %s\n' '%s: $(%s_DEPS)\n' '\tcd %s && $(LINK) -o %s %s $(%s_OBJECTS) %s $(LIBS)\n\n' % (targ_varname, string.join(objects + deps), add_deps, targ_varname, objnames, tpath, targ_varname, path, tfile, ldflags, targ_varname, string.join(libs))) custom = parser.get(target, 'custom') if custom == 'apache-mod': # special build, needing Apache includes ofile.write('# build these special -- use APACHE_INCLUDES\n') for src in sources: if src[-2:] == '.c': ofile.write('%s%s: %s\n\t$(COMPILE_APACHE_MOD)\n' % (src[:-2], objext, src)) ofile.write('\n') elif custom == 'swig-py': ofile.write('# build this with -DSWIGPYTHON\n') for src in sources: if src[-2:] == '.c': ofile.write('%s%s: %s\n\t$(COMPILE_SWIG_PY)\n' % (src[:-2], objext, src)) ofile.write('\n') for g_name, g_targets in install.items(): target_names = [ ] for i in g_targets: target_names.append(i.output) ofile.write('%s: %s\n\n' % (g_name, string.join(target_names))) cfiles = [ ] for target in targets.values(): # .la files are handled by the standard 'clean' rule; clean all the # other targets if target.output[-3:] != '.la': cfiles.append(target.output) ofile.write('CLEAN_FILES = %s\n\n' % string.join(cfiles)) for area, inst_targets in install.items(): files = [ ] for t in inst_targets: files.append(t.output) if area == 'apache-mod': ofile.write('install-mods-shared: %s\n' % (string.join(files),)) la_tweaked = { } for file in files: # cd to dirname before install to work around libtool 1.4.2 bug. dirname, fname = os.path.split(file) base, ext = os.path.splitext(fname) name = string.replace(base, 'libmod_', '') ofile.write('\tcd %s ; $(INSTALL_MOD_SHARED) -n %s %s\n' % (dirname, name, fname)) if ext == '.la': la_tweaked[file + '-a'] = None for t in inst_targets: for dep in t.deps: bt = dep.output if bt[-3:] == '.la': la_tweaked[bt + '-a'] = None la_tweaked = la_tweaked.keys() s_files, s_errors = _collect_paths(parser.get('static-apache', 'paths')) errors = errors or s_errors # Construct a .libs directory within the Apache area and populate it # with the appropriate files. Also drop the .la file in the target dir. 
ofile.write('\ninstall-mods-static: %s\n' '\t$(MKDIR) %s\n' % (string.join(la_tweaked + s_files), os.path.join('$(APACHE_TARGET)', '.libs'))) for file in la_tweaked: dirname, fname = os.path.split(file) base = os.path.splitext(fname)[0] ofile.write('\t$(INSTALL_MOD_STATIC) %s %s\n' '\t$(INSTALL_MOD_STATIC) %s %s\n' % (os.path.join(dirname, '.libs', base + '.a'), os.path.join('$(APACHE_TARGET)', '.libs', base + '.a'), file, os.path.join('$(APACHE_TARGET)', base + '.la'))) # copy the other files to the target dir for file in s_files: ofile.write('\t$(INSTALL_MOD_STATIC) %s %s\n' % (file, os.path.join('$(APACHE_TARGET)', os.path.basename(file)))) ofile.write('\n') elif area != 'test' and area != 'fs-test': area_var = string.replace(area, '-', '_') ofile.write('install-%s: %s\n' '\t$(MKDIR) $(%sdir)\n' % (area, string.join(files), area_var)) for file in files: # cd to dirname before install to work around libtool 1.4.2 bug. dirname, fname = os.path.split(file) ofile.write('\tcd %s ; $(INSTALL_%s) %s %s\n' % (dirname, string.upper(area_var), fname, os.path.join('$(%sdir)' % area_var, fname))) ofile.write('\n') includes, i_errors = _collect_paths(parser.get('includes', 'paths')) errors = errors or i_errors ofile.write('install-include: %s\n' '\t$(MKDIR) $(includedir)\n' % (string.join(includes),)) for file in includes: ofile.write('\t$(INSTALL_INCLUDE) %s %s\n' % (os.path.join('$(top_srcdir)', file), os.path.join('$(includedir)', os.path.basename(file)))) ofile.write('\n# handy shortcut targets\n') for name, target in targets.items(): ofile.write('%s: %s\n' % (name, target.output)) ofile.write('\n') scripts, s_errors = _collect_paths(parser.get('test-scripts', 'paths')) errors = errors or s_errors script_dirs = [] for script in scripts: script_dirs.append(re.compile("[-a-z0-9A-Z_.]*$").sub("", script)) fs_scripts, fs_errors = _collect_paths(parser.get('fs-test-scripts', 'paths')) errors = errors or fs_errors ofile.write('BUILD_DIRS = %s %s\n' % (string.join(target_dirs.keys()), string.join(script_dirs))) ofile.write('FS_TEST_DEPS = %s\n\n' % string.join(fs_test_deps + fs_scripts)) ofile.write('FS_TEST_PROGRAMS = %s\n\n' % string.join(fs_test_progs + fs_scripts)) ofile.write('TEST_DEPS = %s\n\n' % string.join(test_deps + scripts)) ofile.write('TEST_PROGRAMS = %s\n\n' % string.join(test_progs + scripts)) ofile.write('MANPAGES = %s\n\n' % string.join(manpages)) ofile.write('INFOPAGES = %s\n\n' % string.join(infopages)) if not skip_depends: # # Find all the available headers and what they depend upon. the # include_deps is a dictionary mapping a short header name to a tuple # of the full path to the header and a dictionary of dependent header # names (short) mapping to None. # # Example: # { 'short.h' : ('/path/to/short.h', # { 'other.h' : None, 'foo.h' : None }) } # # Note that this structure does not allow for similarly named headers # in per-project directories. SVN doesn't have this at this time, so # this structure works quite fine. 
(the alternative would be to use # the full pathname for the key, but that is actually a bit harder to # work with since we only see short names when scanning, and keeping # a second variable around for mapping the short to long names is more # than I cared to do right now) # include_deps = _create_include_deps(includes) for d in target_dirs.keys(): hdrs = glob.glob(os.path.join(d, '*.h')) if hdrs: more_deps = _create_include_deps(hdrs, include_deps) include_deps.update(more_deps) for src, objname in file_deps: hdrs = [ ] for short in _find_includes(src, include_deps): hdrs.append(include_deps[short][0]) ofile.write('%s: %s %s\n' % (objname, src, string.join(hdrs))) if errors: sys.exit(1)
c9df17812072a715211b6806f8d091fac3872f04 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/6036/c9df17812072a715211b6806f8d091fac3872f04/gen-make.py
status_list[6][3]['wc_rev'] = '0'
def get_standard_status_list(wc_dir): "Return a status list reflecting local mods made by next routine." status_list = svntest.actions.get_virginal_status_list(wc_dir, '1') ### todo: use status-hash below instead. # `.' status_list[0][3]['status'] = '_M' # A/B/lambda, A/D status_list[5][3]['status'] = 'M ' status_list[11][3]['status'] = 'M ' # A/B/E, A/D/H/chi status_list[6][3]['status'] = 'R ' status_list[6][3]['wc_rev'] = '0' status_list[18][3]['status'] = 'R ' status_list[18][3]['wc_rev'] = '0' # A/B/E/alpha, A/B/E/beta, A/C, A/D/gamma status_list[7][3]['status'] = 'D ' status_list[8][3]['status'] = 'D ' status_list[10][3]['status'] = 'D ' status_list[12][3]['status'] = 'D ' status_list[15][3]['status'] = 'D ' # A/D/G/pi, A/D/H/omega status_list[14][3]['status'] = '_M' status_list[20][3]['status'] = 'MM' # New things status_list.append([os.path.join(wc_dir, 'Q'), None, {}, {'status' : 'A ', 'locked' : ' ', 'wc_rev' : '0', 'repos_rev' : '1'}]) status_list.append([os.path.join(wc_dir, 'Q', 'floo'), None, {}, {'status' : 'A ', 'locked' : ' ', 'wc_rev' : '0', 'repos_rev' : '1'}]) status_list.append([os.path.join(wc_dir, 'A', 'D', 'H', 'gloo'), None, {}, {'status' : 'A ', 'locked' : ' ', 'wc_rev' : '0', 'repos_rev' : '1'}]) status_list.append([os.path.join(wc_dir, 'A', 'B', 'E', 'bloo'), None, {}, {'status' : 'A ', 'locked' : ' ', 'wc_rev' : '0', 'repos_rev' : '1'}]) return status_list
641841876735f3191332bcd89502ebdfaf161367 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/6036/641841876735f3191332bcd89502ebdfaf161367/commit_tests.py
status_list[18][3]['wc_rev'] = '0'
def get_standard_status_list(wc_dir): "Return a status list reflecting local mods made by next routine." status_list = svntest.actions.get_virginal_status_list(wc_dir, '1') ### todo: use status-hash below instead. # `.' status_list[0][3]['status'] = '_M' # A/B/lambda, A/D status_list[5][3]['status'] = 'M ' status_list[11][3]['status'] = 'M ' # A/B/E, A/D/H/chi status_list[6][3]['status'] = 'R ' status_list[6][3]['wc_rev'] = '0' status_list[18][3]['status'] = 'R ' status_list[18][3]['wc_rev'] = '0' # A/B/E/alpha, A/B/E/beta, A/C, A/D/gamma status_list[7][3]['status'] = 'D ' status_list[8][3]['status'] = 'D ' status_list[10][3]['status'] = 'D ' status_list[12][3]['status'] = 'D ' status_list[15][3]['status'] = 'D ' # A/D/G/pi, A/D/H/omega status_list[14][3]['status'] = '_M' status_list[20][3]['status'] = 'MM' # New things status_list.append([os.path.join(wc_dir, 'Q'), None, {}, {'status' : 'A ', 'locked' : ' ', 'wc_rev' : '0', 'repos_rev' : '1'}]) status_list.append([os.path.join(wc_dir, 'Q', 'floo'), None, {}, {'status' : 'A ', 'locked' : ' ', 'wc_rev' : '0', 'repos_rev' : '1'}]) status_list.append([os.path.join(wc_dir, 'A', 'D', 'H', 'gloo'), None, {}, {'status' : 'A ', 'locked' : ' ', 'wc_rev' : '0', 'repos_rev' : '1'}]) status_list.append([os.path.join(wc_dir, 'A', 'B', 'E', 'bloo'), None, {}, {'status' : 'A ', 'locked' : ' ', 'wc_rev' : '0', 'repos_rev' : '1'}]) return status_list
641841876735f3191332bcd89502ebdfaf161367 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/6036/641841876735f3191332bcd89502ebdfaf161367/commit_tests.py
% (targ_varname, string.join(objects), string.join(deps),
% (targ_varname, string.join(objects + deps), add_deps,
def main(fname, oname=None, skip_depends=0): parser = ConfigParser.ConfigParser(_cfg_defaults) parser.read(fname) if oname is None: oname = os.path.splitext(os.path.basename(fname))[0] + '-outputs.mk' ofile = open(oname, 'w') ofile.write('# DO NOT EDIT -- AUTOMATICALLY GENERATED\n\n') errors = 0 targets = { } groups = { } # group name -> targets install = { } # install area name -> targets test_progs = [ ] test_deps = [ ] file_deps = [ ] target_dirs = { } target_names = _filter_targets(parser.sections()) # PASS 1: collect the targets and some basic info for target in target_names: try: target_ob = Target(target, parser.get(target, 'path'), parser.get(target, 'install'), parser.get(target, 'type')) except GenMakeError, e: print e errors = 1 continue targets[target] = target_ob group = parser.get(target, 'group') if groups.has_key(group): groups[group].append(target_ob.output) else: groups[group] = [ target_ob.output ] itype = target_ob.install if install.has_key(itype): install[itype].append(target_ob) else: install[itype] = [ target_ob ] target_dirs[target_ob.path] = None if errors: sys.exit(1) # PASS 2: generate the outputs for target in target_names: target_ob = targets[target] path = target_ob.path bldtype = target_ob.type objext = target_ob.objext tpath = target_ob.output tfile = os.path.basename(tpath) if target_ob.install == 'test' and bldtype == 'exe': test_deps.append(tpath) if parser.get(target, 'testing') != 'skip': test_progs.append(tpath) pats = parser.get(target, 'sources') if not pats: pats = _default_sources[bldtype] sources, s_errors = _collect_paths(pats, path) errors = errors or s_errors objects = [ ] for src in sources: if src[-2:] == '.c': objname = src[:-2] + objext objects.append(objname) file_deps.append((src, objname)) else: print 'ERROR: unknown file extension on', src errors = 1 retreat = _retreat_dots(path) libs = [ ] deps = [ ] for lib in string.split(parser.get(target, 'libs')): if lib in target_names: tlib = targets[lib] target_ob.deps.append(tlib) deps.append(tlib.output) dep_path = tlib.path if bldtype == 'lib': # we need to hack around a libtool problem: it cannot record a # dependency of one shared lib on another shared lib. ### fix this by upgrading to the new libtool 1.4 release... 
# strip "lib" from the front so we have -lsvn_foo if lib[:3] == 'lib': lib = lib[3:] libs.append('-L%s -l%s' % (retreat + os.path.join(dep_path, '.libs'), lib)) else: # linking executables can refer to .la files libs.append(retreat + os.path.join(dep_path, lib + '.la')) else: # something we don't know, so just include it directly libs.append(lib) targ_varname = string.replace(target, '-', '_') ldflags = parser.get(target, 'link-flags') objnames = string.join(map(os.path.basename, objects)) ofile.write('%s_DEPS = %s %s\n' '%s_OBJECTS = %s\n' '%s: $(%s_DEPS)\n' '\tcd %s && $(LINK) -o %s %s $(%s_OBJECTS) %s $(LIBS)\n\n' % (targ_varname, string.join(objects), string.join(deps), targ_varname, objnames, tpath, targ_varname, path, tfile, ldflags, targ_varname, string.join(libs))) custom = parser.get(target, 'custom') if custom == 'apache-mod': # special build, needing Apache includes ofile.write('# build these special -- use APACHE_INCLUDES\n') for src in sources: if src[-2:] == '.c': ofile.write('%s%s: %s\n\t$(COMPILE_APACHE_MOD)\n' % (src[:-2], objext, src)) ofile.write('\n') for g_name, g_targets in groups.items(): ofile.write('%s: %s\n\n' % (g_name, string.join(g_targets))) ofile.write('CLEAN_DIRS = %s\n' % string.join(target_dirs.keys())) cfiles = [ ] for target in targets.values(): # .la files are handled by the standard 'clean' rule; clean all the # other targets if target.output[-3:] != '.la': cfiles.append(target.output) ofile.write('CLEAN_FILES = %s\n\n' % string.join(cfiles)) for area, inst_targets in install.items(): files = [ ] for t in inst_targets: files.append(t.output) if area == 'apache-mod': ofile.write('install-mods-shared: %s\n' % (string.join(files),)) la_tweaked = { } for file in files: base, ext = os.path.splitext(os.path.basename(file)) name = string.replace(base, 'libmod_', '') ofile.write('\t$(INSTALL_MOD_SHARED) -n %s %s\n' % (name, file)) if ext == '.la': la_tweaked[file + '-a'] = None for t in inst_targets: for dep in t.deps: bt = dep.output if bt[-3:] == '.la': la_tweaked[bt + '-a'] = None la_tweaked = la_tweaked.keys() s_files, s_errors = _collect_paths(parser.get('static-apache', 'paths')) errors = errors or s_errors ofile.write('\ninstall-mods-static: %s\n' '\t$(mkinstalldirs) %s\n' % (string.join(la_tweaked + s_files), os.path.join('$(APACHE_TARGET)', '.libs'))) for file in la_tweaked: dirname, fname = os.path.split(file) base = os.path.splitext(fname)[0] ofile.write('\t$(INSTALL_MOD_STATIC) %s %s\n' '\t$(INSTALL_MOD_STATIC) %s %s\n' % (os.path.join(dirname, '.libs', base + '.a'), os.path.join('$(APACHE_TARGET)', '.libs', base + '.a'), file, os.path.join('$(APACHE_TARGET)', base + '.la'))) for file in s_files: ofile.write('\t$(INSTALL_MOD_STATIC) %s %s\n' % (file, os.path.join('$(APACHE_TARGET)', os.path.basename(file)))) ofile.write('\n') elif area != 'test': ofile.write('install-%s: %s\n' '\t$(mkinstalldirs) $(%sdir)\n' % (area, string.join(files), area)) for file in files: ofile.write('\t$(INSTALL_%s) %s %s\n' % (string.upper(area), file, os.path.join('$(%sdir)' % area, os.path.basename(file)))) ofile.write('\n') includes, i_errors = _collect_paths(parser.get('includes', 'paths')) errors = errors or i_errors ofile.write('install-include: %s\n' '\t$(mkinstalldirs) $(includedir)\n' % (string.join(includes),)) for file in includes: ofile.write('\t$(INSTALL_INCLUDE) %s %s\n' % (file, os.path.join('$(includedir)', os.path.basename(file)))) ofile.write('\n# handy shortcut targets\n') for name, target in targets.items(): ofile.write('%s: %s\n' % (name, 
target.output)) ofile.write('\n') scripts, s_errors = _collect_paths(parser.get('test-scripts', 'paths')) errors = errors or s_errors ofile.write('TEST_DEPS = %s\n\n' % string.join(test_deps + scripts)) ofile.write('TEST_PROGRAMS = %s\n\n' % string.join(test_progs + scripts)) if not skip_depends: # # Find all the available headers and what they depend upon. the # include_deps is a dictionary mapping a short header name to a tuple # of the full path to the header and a dictionary of dependent header # names (short) mapping to None. # # Example: # { 'short.h' : ('/path/to/short.h', # { 'other.h' : None, 'foo.h' : None }) } # # Note that this structure does not allow for similarly named headers # in per-project directories. SVN doesn't have this at this time, so # this structure works quite fine. (the alternative would be to use # the full pathname for the key, but that is actually a bit harder to # work with since we only see short names when scanning, and keeping # a second variable around for mapping the short to long names is more # than I cared to do right now) # include_deps = _create_include_deps(includes) for d in target_dirs.keys(): hdrs = glob.glob(os.path.join(d, '*.h')) if hdrs: more_deps = _create_include_deps(hdrs, include_deps) include_deps.update(more_deps) for src, objname in file_deps: hdrs = [ ] for short in _find_includes(src, include_deps): hdrs.append(include_deps[short][0]) ofile.write('%s: %s %s\n' % (objname, src, string.join(hdrs))) if errors: sys.exit(1)
5e33dfc7be98134bfb187ad0c78d8b958e93cfda /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/6036/5e33dfc7be98134bfb187ad0c78d8b958e93cfda/gen-make.py
commit_deleted_edited
commit_deleted_edited, commit_in_dir_scheduled_for_addition,
def commit_deleted_edited(): "commit files that have been deleted, but also edited" # Bootstrap: make independent repo and working copy. sbox = sandbox(commit_deleted_edited) wc_dir = os.path.join(svntest.main.general_wc_dir, sbox) if svntest.actions.make_repo_and_wc(sbox): return 1 # Make some convenient paths. iota_path = os.path.join(wc_dir, 'iota') mu_path = os.path.join(wc_dir, 'A', 'mu') # Edit the files. svntest.main.file_append(iota_path, "This file has been edited.") svntest.main.file_append(mu_path, "This file has been edited.") # Schedule the files for removal. svntest.main.run_svn(None, 'remove', iota_path) svntest.main.run_svn(None, 'remove', mu_path) # Make our output list output_list = [(iota_path, None, {}, {'verb' : 'Deleting'}), (mu_path, None, {}, {'verb' : 'Deleting'})] expected_output_tree = svntest.tree.build_generic_tree(output_list) # Items in the status list are all at rev 1, except the two things # we changed...but then, they don't exist at all. status_list = svntest.actions.get_virginal_status_list(wc_dir, '2') status_list.pop(path_index(status_list, iota_path)) status_list.pop(path_index(status_list, mu_path)) for item in status_list: item[3]['wc_rev'] = '1' expected_status_tree = svntest.tree.build_generic_tree(status_list) if svntest.actions.run_and_verify_commit (wc_dir, expected_output_tree, expected_status_tree, None, None, None, None, None, wc_dir): return 1 return 0
da164d67313e981dd3b1ba8e21d954060bce5412 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/6036/da164d67313e981dd3b1ba8e21d954060bce5412/commit_tests.py
script_dirs = [] for script in scripts: script_dirs.append(re.compile("[-a-z0-9A-Z_.]*$").sub("", script))
def write(self): errors = 0 for target in self.target_names: target_ob = self.targets[target]
7fb8f6dc51b92a34b2aa3675f29d3450f05747c5 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/6036/7fb8f6dc51b92a34b2aa3675f29d3450f05747c5/gen_base.py
self.ofile.write('BUILD_DIRS = %s %s\n' % (string.join(self.target_dirs.keys()), string.join(script_dirs)))
script_dirs = map(os.path.dirname, scripts + fs_scripts) build_dirs = self.target_dirs.copy() for d in script_dirs: build_dirs[d] = None self.ofile.write('BUILD_DIRS = %s\n' % string.join(build_dirs.keys()))
def write(self): errors = 0 for target in self.target_names: target_ob = self.targets[target]
7fb8f6dc51b92a34b2aa3675f29d3450f05747c5 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/6036/7fb8f6dc51b92a34b2aa3675f29d3450f05747c5/gen_base.py
commit_multiple_targets
commit_multiple_targets, commit_multiple_targets_2
def commit_multiple_targets(): "commit multiple targets" wc_dir = os.path.join (general_wc_dir, 'commit_multiple_targets') if make_repo_and_wc('commit_multiple_targets'): return 1 # This test will commit three targets: psi, B, and pi. In that order. # Make local mods to many files. AB_path = os.path.join(wc_dir, 'A', 'B') lambda_path = os.path.join(wc_dir, 'A', 'B', 'lambda') rho_path = os.path.join(wc_dir, 'A', 'D', 'G', 'rho') pi_path = os.path.join(wc_dir, 'A', 'D', 'G', 'pi') omega_path = os.path.join(wc_dir, 'A', 'D', 'H', 'omega') psi_path = os.path.join(wc_dir, 'A', 'D', 'H', 'psi') svn_test_main.file_append (lambda_path, 'new appended text for lambda') svn_test_main.file_append (rho_path, 'new appended text for rho') svn_test_main.file_append (pi_path, 'new appended text for pi') svn_test_main.file_append (omega_path, 'new appended text for omega') svn_test_main.file_append (psi_path, 'new appended text for psi') # Just for kicks, add a property to A/D/G as well. We'll make sure # that it *doesn't* get committed. ADG_path = os.path.join(wc_dir, 'A', 'D', 'G') svn_test_main.run_svn('propset', 'foo', 'bar', ADG_path) # Created expected output tree for 'svn ci'. We should see changes # only on these three targets, no others. output_list = [ [psi_path, None, {'verb' : 'Changing' }], [lambda_path, None, {'verb' : 'Changing' }], [pi_path, None, {'verb' : 'Changing' }] ] expected_output_tree = svn_tree.build_generic_tree(output_list) # Create expected status tree; all local revisions should be at 1, # but our three targets should be at 2. status_list = get_virginal_status_list(wc_dir, '2') for item in status_list: if ((item[0] != psi_path) and (item[0] != lambda_path) and (item[0] != pi_path)): item[2]['wc_rev'] = '1' # rho and omega should still display as locally modified: if ((item[0] == rho_path) or (item[0] == omega_path)): item[2]['status'] = 'M ' # A/D/G should still have a local property set, too. if (item[0] == ADG_path): item[2]['status'] = '_M' expected_status_tree = svn_tree.build_generic_tree(status_list) return run_and_verify_commit (wc_dir, expected_output_tree, expected_status_tree, psi_path, AB_path, pi_path)
b86c457b67280f2a1a5ad40187470b5202329795 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/6036/b86c457b67280f2a1a5ad40187470b5202329795/local_tests.py
"ensure update is not reporting additions"
"ensure update is not munging additions or replacements"
def update_ignores_added(): "ensure update is not reporting additions" sbox = sandbox(update_ignores_added) wc_dir = os.path.join (svntest.main.general_wc_dir, sbox) if svntest.actions.make_repo_and_wc(sbox): return 1 # Create a new file, 'zeta', and schedule it for addition. zeta_path = os.path.join(wc_dir, 'A', 'B', 'zeta') svntest.main.file_append(zeta_path, "This is the file 'zeta'.") svntest.main.run_svn(None, 'add', zeta_path) # Now update. "zeta at revision 0" should *not* be reported. # Create expected output tree for an update of the wc_backup. output_list = [] expected_output_tree = svntest.tree.build_generic_tree(output_list) # Create expected disk tree for the update. my_greek_tree = svntest.main.copy_greek_tree() my_greek_tree.append(['A/B/zeta', "This is the file 'zeta'.", {}, {}]) expected_disk_tree = svntest.tree.build_generic_tree(my_greek_tree) # Create expected status tree for the update. status_list = svntest.actions.get_virginal_status_list(wc_dir, '1') status_list.append([zeta_path, None, {}, {'status' : 'A ', 'wc_rev' : '0', 'repos_rev' : '1'}]) expected_status_tree = svntest.tree.build_generic_tree(status_list) # Do the update and check the results in three ways. return svntest.actions.run_and_verify_update(wc_dir, expected_output_tree, expected_disk_tree, expected_status_tree)
3512cf5ebaceba466e5a8bb341037db35dd1d776 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/6036/3512cf5ebaceba466e5a8bb341037db35dd1d776/update_tests.py
gamma_path = os.path.join(wc_dir, 'A', 'D', 'gamma') svntest.main.run_svn(None, 'delete', gamma_path) svntest.main.file_append(gamma_path, "\nThis is a new 'gamma' now.") svntest.main.run_svn(None, 'add', gamma_path)
def update_ignores_added(): "ensure update is not reporting additions" sbox = sandbox(update_ignores_added) wc_dir = os.path.join (svntest.main.general_wc_dir, sbox) if svntest.actions.make_repo_and_wc(sbox): return 1 # Create a new file, 'zeta', and schedule it for addition. zeta_path = os.path.join(wc_dir, 'A', 'B', 'zeta') svntest.main.file_append(zeta_path, "This is the file 'zeta'.") svntest.main.run_svn(None, 'add', zeta_path) # Now update. "zeta at revision 0" should *not* be reported. # Create expected output tree for an update of the wc_backup. output_list = [] expected_output_tree = svntest.tree.build_generic_tree(output_list) # Create expected disk tree for the update. my_greek_tree = svntest.main.copy_greek_tree() my_greek_tree.append(['A/B/zeta', "This is the file 'zeta'.", {}, {}]) expected_disk_tree = svntest.tree.build_generic_tree(my_greek_tree) # Create expected status tree for the update. status_list = svntest.actions.get_virginal_status_list(wc_dir, '1') status_list.append([zeta_path, None, {}, {'status' : 'A ', 'wc_rev' : '0', 'repos_rev' : '1'}]) expected_status_tree = svntest.tree.build_generic_tree(status_list) # Do the update and check the results in three ways. return svntest.actions.run_and_verify_update(wc_dir, expected_output_tree, expected_disk_tree, expected_status_tree)
3512cf5ebaceba466e5a8bb341037db35dd1d776 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/6036/3512cf5ebaceba466e5a8bb341037db35dd1d776/update_tests.py
status_list = svntest.actions.get_virginal_status_list(wc_dir, '1')
status_list = svntest.actions.get_virginal_status_list(wc_dir, '2') for item in status_list: if item[0] == gamma_path: item[3]['wc_rev'] = '1' item[3]['status'] = 'R '
def update_ignores_added(): "ensure update is not reporting additions" sbox = sandbox(update_ignores_added) wc_dir = os.path.join (svntest.main.general_wc_dir, sbox) if svntest.actions.make_repo_and_wc(sbox): return 1 # Create a new file, 'zeta', and schedule it for addition. zeta_path = os.path.join(wc_dir, 'A', 'B', 'zeta') svntest.main.file_append(zeta_path, "This is the file 'zeta'.") svntest.main.run_svn(None, 'add', zeta_path) # Now update. "zeta at revision 0" should *not* be reported. # Create expected output tree for an update of the wc_backup. output_list = [] expected_output_tree = svntest.tree.build_generic_tree(output_list) # Create expected disk tree for the update. my_greek_tree = svntest.main.copy_greek_tree() my_greek_tree.append(['A/B/zeta', "This is the file 'zeta'.", {}, {}]) expected_disk_tree = svntest.tree.build_generic_tree(my_greek_tree) # Create expected status tree for the update. status_list = svntest.actions.get_virginal_status_list(wc_dir, '1') status_list.append([zeta_path, None, {}, {'status' : 'A ', 'wc_rev' : '0', 'repos_rev' : '1'}]) expected_status_tree = svntest.tree.build_generic_tree(status_list) # Do the update and check the results in three ways. return svntest.actions.run_and_verify_update(wc_dir, expected_output_tree, expected_disk_tree, expected_status_tree)
3512cf5ebaceba466e5a8bb341037db35dd1d776 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/6036/3512cf5ebaceba466e5a8bb341037db35dd1d776/update_tests.py
'repos_rev' : '1'}])
'repos_rev' : '2'}])
def update_ignores_added(): "ensure update is not reporting additions" sbox = sandbox(update_ignores_added) wc_dir = os.path.join (svntest.main.general_wc_dir, sbox) if svntest.actions.make_repo_and_wc(sbox): return 1 # Create a new file, 'zeta', and schedule it for addition. zeta_path = os.path.join(wc_dir, 'A', 'B', 'zeta') svntest.main.file_append(zeta_path, "This is the file 'zeta'.") svntest.main.run_svn(None, 'add', zeta_path) # Now update. "zeta at revision 0" should *not* be reported. # Create expected output tree for an update of the wc_backup. output_list = [] expected_output_tree = svntest.tree.build_generic_tree(output_list) # Create expected disk tree for the update. my_greek_tree = svntest.main.copy_greek_tree() my_greek_tree.append(['A/B/zeta', "This is the file 'zeta'.", {}, {}]) expected_disk_tree = svntest.tree.build_generic_tree(my_greek_tree) # Create expected status tree for the update. status_list = svntest.actions.get_virginal_status_list(wc_dir, '1') status_list.append([zeta_path, None, {}, {'status' : 'A ', 'wc_rev' : '0', 'repos_rev' : '1'}]) expected_status_tree = svntest.tree.build_generic_tree(status_list) # Do the update and check the results in three ways. return svntest.actions.run_and_verify_update(wc_dir, expected_output_tree, expected_disk_tree, expected_status_tree)
3512cf5ebaceba466e5a8bb341037db35dd1d776 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/6036/3512cf5ebaceba466e5a8bb341037db35dd1d776/update_tests.py
out, err = svntest.main.run_svn(1, 'commit', '-m', '"logmsg"', mu_path) if len(err) == 0: return 1 return 0
if svntest.actions.run_and_verify_commit (wc_dir, None, None, "unversioned", None, None, None, None, mu_path): return 1 Q_path = os.path.join(wc_dir, 'Q') bloo_path = os.path.join(Q_path, 'bloo') os.mkdir(Q_path) svntest.main.file_append(bloo_path, "New contents.") svntest.main.run_svn(None, 'add', '--recursive', Q_path) return svntest.actions.run_and_verify_commit (wc_dir, None, None, "unversioned", None, None, None, None, bloo_path)
def commit_in_dir_scheduled_for_addition(sbox): "commit a file inside dir scheduled for addition" if sbox.build(): return 1 wc_dir = sbox.wc_dir A_path = os.path.join(wc_dir, 'A') Z_path = os.path.join(wc_dir, 'Z') mu_path = os.path.join(wc_dir, 'Z', 'mu') svntest.main.run_svn(None, 'move', A_path, Z_path) out, err = svntest.main.run_svn(1, 'commit', '-m', '"logmsg"', mu_path) ### FIXME: # # In commit 1275, sussman fixed subversion/libsvn_client/copy.c, and # said: # # This was causing commit_test #15 to fail, but this test was # written only to expect generic failure, so it still passing, so # it looked as though 'make check' was passing. If you ran # commit_tests.py by hand, though, you'd see the extra stderr # output. The moral of the story is that commit_test #15 should # be using run_and_verify_commit() to look for a *specific* # expected errorstring. Anyone wanna fix it? # # This is the test that needs to be fixed, right? if len(err) == 0: return 1 return 0
4cfd9caafc39837d000ffd27729d25fa8cd767d6 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/6036/4cfd9caafc39837d000ffd27729d25fa8cd767d6/commit_tests.py
"Return a status list reflecting the local mods made by make_standard_slew_of_changes()."
"""Return a status list reflecting the local mods made by make_standard_slew_of_changes()."""
def get_standard_status_list(wc_dir): "Return a status list reflecting the local mods made by make_standard_slew_of_changes()." status_list = svntest.actions.get_virginal_status_list(wc_dir, '1') ### todo: use status-hash below instead. # `.' status_list[0][3]['status'] = '_M' # A/B/lambda, A/D status_list[5][3]['status'] = 'M ' status_list[11][3]['status'] = 'M ' # A/B/E, A/D/H/chi status_list[6][3]['status'] = 'R ' status_list[18][3]['status'] = 'R ' # A/B/E/alpha, A/B/E/beta, A/C, A/D/gamma status_list[7][3]['status'] = 'D ' status_list[8][3]['status'] = 'D ' status_list[10][3]['status'] = 'D ' status_list[12][3]['status'] = 'D ' status_list[15][3]['status'] = 'D ' # A/D/G/pi, A/D/H/omega status_list[14][3]['status'] = '_M' status_list[20][3]['status'] = 'MM' # New things status_list.append([os.path.join(wc_dir, 'Q'), None, {}, {'status' : 'A ', 'locked' : ' ', 'wc_rev' : '0', 'repos_rev' : '1'}]) status_list.append([os.path.join(wc_dir, 'Q', 'floo'), None, {}, {'status' : 'A ', 'locked' : ' ', 'wc_rev' : '0', 'repos_rev' : '1'}]) status_list.append([os.path.join(wc_dir, 'A', 'D', 'H', 'gloo'), None, {}, {'status' : 'A ', 'locked' : ' ', 'wc_rev' : '0', 'repos_rev' : '1'}]) status_list.append([os.path.join(wc_dir, 'A', 'B', 'E', 'bloo'), None, {}, {'status' : 'A ', 'locked' : ' ', 'wc_rev' : '0', 'repos_rev' : '1'}]) return status_list
c8503ee429f4b98fefb73b44e096cc2ca166096c /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/6036/c8503ee429f4b98fefb73b44e096cc2ca166096c/commit_tests.py
objects.append(objname) file_deps.append((src, objname)) elif src[-5:] == '.texi': objname = src[:-5] + objext
def main(fname, oname=None, skip_depends=0): parser = ConfigParser.ConfigParser(_cfg_defaults) parser.read(fname) if oname is None: oname = os.path.splitext(os.path.basename(fname))[0] + '-outputs.mk' ofile = open(oname, 'w') ofile.write('# DO NOT EDIT -- AUTOMATICALLY GENERATED\n\n') errors = 0 targets = { } groups = { } # group name -> targets install = { } # install area name -> targets test_progs = [ ] test_deps = [ ] file_deps = [ ] target_dirs = { } target_names = _filter_targets(parser.sections()) # PASS 1: collect the targets and some basic info for target in target_names: try: target_ob = Target(target, parser.get(target, 'path'), parser.get(target, 'install'), parser.get(target, 'type')) except GenMakeError, e: print e errors = 1 continue targets[target] = target_ob group = parser.get(target, 'group') if groups.has_key(group): groups[group].append(target_ob.output) else: groups[group] = [ target_ob.output ] itype = target_ob.install if install.has_key(itype): install[itype].append(target_ob) else: install[itype] = [ target_ob ] target_dirs[target_ob.path] = None if errors: sys.exit(1) # PASS 2: generate the outputs for target in target_names: target_ob = targets[target] path = target_ob.path bldtype = target_ob.type objext = target_ob.objext tpath = target_ob.output tfile = os.path.basename(tpath) if target_ob.install == 'test' and bldtype == 'exe': test_deps.append(tpath) if parser.get(target, 'testing') != 'skip': test_progs.append(tpath) pats = parser.get(target, 'sources') if not pats: pats = _default_sources[bldtype] sources, s_errors = _collect_paths(pats, path) errors = errors or s_errors objects = [ ] for src in sources: if src[-2:] == '.c': objname = src[:-2] + objext objects.append(objname) file_deps.append((src, objname)) elif src[-5:] == '.texi': objname = src[:-5] + objext objects.append(objname) file_deps.append((src, objname)) else: print 'ERROR: unknown file extension on', src errors = 1 retreat = _retreat_dots(path) libs = [ ] deps = [ ] for lib in string.split(parser.get(target, 'libs')): if lib in target_names: tlib = targets[lib] target_ob.deps.append(tlib) deps.append(tlib.output) dep_path = tlib.path if bldtype == 'lib': # we need to hack around a libtool problem: it cannot record a # dependency of one shared lib on another shared lib. ### fix this by upgrading to the new libtool 1.4 release... 
# strip "lib" from the front so we have -lsvn_foo if lib[:3] == 'lib': lib = lib[3:] libs.append('-L%s -l%s' % (retreat + os.path.join(dep_path, '.libs'), lib)) else: # linking executables can refer to .la files libs.append(retreat + os.path.join(dep_path, lib + '.la')) else: # something we don't know, so just include it directly libs.append(lib) targ_varname = string.replace(target, '-', '_') ldflags = parser.get(target, 'link-flags') add_deps = parser.get(target, 'add-deps') objnames = string.join(map(os.path.basename, objects)) ofile.write('%s_DEPS = %s %s\n' '%s_OBJECTS = %s\n' '%s: $(%s_DEPS)\n' '\tcd %s && $(LINK) -o %s %s $(%s_OBJECTS) %s $(LIBS)\n\n' % (targ_varname, string.join(objects + deps), add_deps, targ_varname, objnames, tpath, targ_varname, path, tfile, ldflags, targ_varname, string.join(libs))) custom = parser.get(target, 'custom') if custom == 'apache-mod': # special build, needing Apache includes ofile.write('# build these special -- use APACHE_INCLUDES\n') for src in sources: if src[-2:] == '.c': ofile.write('%s%s: %s\n\t$(COMPILE_APACHE_MOD)\n' % (src[:-2], objext, src)) ofile.write('\n') for g_name, g_targets in groups.items(): ofile.write('%s: %s\n\n' % (g_name, string.join(g_targets))) ofile.write('BUILD_DIRS = %s\n' % string.join(target_dirs.keys())) cfiles = [ ] for target in targets.values(): # .la files are handled by the standard 'clean' rule; clean all the # other targets if target.output[-3:] != '.la': cfiles.append(target.output) ofile.write('CLEAN_FILES = %s\n\n' % string.join(cfiles)) for area, inst_targets in install.items(): files = [ ] for t in inst_targets: files.append(t.output) if area == 'apache-mod': ofile.write('install-mods-shared: %s\n' % (string.join(files),)) la_tweaked = { } for file in files: base, ext = os.path.splitext(os.path.basename(file)) name = string.replace(base, 'libmod_', '') ofile.write('\t$(INSTALL_MOD_SHARED) -n %s %s\n' % (name, file)) if ext == '.la': la_tweaked[file + '-a'] = None for t in inst_targets: for dep in t.deps: bt = dep.output if bt[-3:] == '.la': la_tweaked[bt + '-a'] = None la_tweaked = la_tweaked.keys() s_files, s_errors = _collect_paths(parser.get('static-apache', 'paths')) errors = errors or s_errors ofile.write('\ninstall-mods-static: %s\n' '\t$(MKDIR) %s\n' % (string.join(la_tweaked + s_files), os.path.join('$(APACHE_TARGET)', '.libs'))) for file in la_tweaked: dirname, fname = os.path.split(file) base = os.path.splitext(fname)[0] ofile.write('\t$(INSTALL_MOD_STATIC) %s %s\n' '\t$(INSTALL_MOD_STATIC) %s %s\n' % (os.path.join(dirname, '.libs', base + '.a'), os.path.join('$(APACHE_TARGET)', '.libs', base + '.a'), file, os.path.join('$(APACHE_TARGET)', base + '.la'))) for file in s_files: ofile.write('\t$(INSTALL_MOD_STATIC) %s %s\n' % (file, os.path.join('$(APACHE_TARGET)', os.path.basename(file)))) ofile.write('\n') elif area != 'test': ofile.write('install-%s: %s\n' '\t$(MKDIR) $(%sdir)\n' % (area, string.join(files), area)) for file in files: ofile.write('\t$(INSTALL_%s) %s %s\n' % (string.upper(area), file, os.path.join('$(%sdir)' % area, os.path.basename(file)))) ofile.write('\n') includes, i_errors = _collect_paths(parser.get('includes', 'paths')) errors = errors or i_errors ofile.write('install-include: %s\n' '\t$(MKDIR) $(includedir)\n' % (string.join(includes),)) for file in includes: ofile.write('\t$(INSTALL_INCLUDE) %s %s\n' % (os.path.join('$(top_srcdir)', file), os.path.join('$(includedir)', os.path.basename(file)))) ofile.write('\n# handy shortcut targets\n') for name, target in 
targets.items(): ofile.write('%s: %s\n' % (name, target.output)) ofile.write('\n') scripts, s_errors = _collect_paths(parser.get('test-scripts', 'paths')) errors = errors or s_errors ofile.write('TEST_DEPS = %s\n\n' % string.join(test_deps + scripts)) ofile.write('TEST_PROGRAMS = %s\n\n' % string.join(test_progs + scripts)) if not skip_depends: # # Find all the available headers and what they depend upon. the # include_deps is a dictionary mapping a short header name to a tuple # of the full path to the header and a dictionary of dependent header # names (short) mapping to None. # # Example: # { 'short.h' : ('/path/to/short.h', # { 'other.h' : None, 'foo.h' : None }) } # # Note that this structure does not allow for similarly named headers # in per-project directories. SVN doesn't have this at this time, so # this structure works quite fine. (the alternative would be to use # the full pathname for the key, but that is actually a bit harder to # work with since we only see short names when scanning, and keeping # a second variable around for mapping the short to long names is more # than I cared to do right now) # include_deps = _create_include_deps(includes) for d in target_dirs.keys(): hdrs = glob.glob(os.path.join(d, '*.h')) if hdrs: more_deps = _create_include_deps(hdrs, include_deps) include_deps.update(more_deps) for src, objname in file_deps: hdrs = [ ] for short in _find_includes(src, include_deps): hdrs.append(include_deps[short][0]) ofile.write('%s: %s %s\n' % (objname, src, string.join(hdrs))) if errors: sys.exit(1)
5ec78c59fe0169e10917051903d05b3d1117b425 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/6036/5ec78c59fe0169e10917051903d05b3d1117b425/gen-make.py
tfile = name self.objext = '.info'
def __init__(self, name, path, install, type): self.name = name self.deps = [ ] # dependencies (list of other Target objects) self.path = path self.type = type
5ec78c59fe0169e10917051903d05b3d1117b425 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/6036/5ec78c59fe0169e10917051903d05b3d1117b425/gen-make.py
def run_one_test(n, test_list): "Run the Nth client test in TEST_LIST, return the result." if (n < 1) or (n > len(test_list) - 1): print "There is no test", `n` + ".\n" return 1 # Run the test. error = test_list[n]() if error: print "FAIL:", else: print "PASS:", print sys.argv[0], n, ":", test_list[n].__doc__ return error
1124222bd601bf484d0f435d0b86cb722f9e6d09 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/6036/1124222bd601bf484d0f435d0b86cb722f9e6d09/main.py
got_error = run_one_test(n, test_list)
if run_one_test(n, test_list): got_error = 1
def run_tests(test_list): "Main routine to run all tests in TEST_LIST." testnum = 0 # Parse commandline arg, list tests or run one test if (len(sys.argv) > 1): if (sys.argv[1] == 'list'): print "Test # Test Description" print "------ ----------------" n = 1 for x in test_list[1:]: print " ", n, " ", x.__doc__ n = n+1 return 0 else: try: testnum = int(sys.argv[1]) return run_one_test(testnum, test_list) except ValueError: print "warning: ignoring bogus argument" # run all the tests. got_error = 0 for n in range(len(test_list)): if n: got_error = run_one_test(n, test_list) return got_error
1124222bd601bf484d0f435d0b86cb722f9e6d09 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/6036/1124222bd601bf484d0f435d0b86cb722f9e6d09/main.py
status_list = svntest.actions.get_virginal_status_list(wc_dir, 1)
status_list = svntest.actions.get_virginal_status_list(wc_dir, '1')
def commit_props(): "commit properties" # Bootstrap sbox = sandbox(commit_props) wc_dir = os.path.join (svntest.main.general_wc_dir, sbox) if svntest.actions.make_repo_and_wc(sbox): return 1 # Add a property to a file and a directory mu_path = os.path.join(wc_dir, 'A', 'mu') H_path = os.path.join(wc_dir, 'A', 'D', 'H') svntest.main.run_svn('propset', 'blue', 'azul', mu_path) svntest.main.run_svn('propset', 'red', 'rojo', H_path) # Create expected output tree. output_list = [ [mu_path, None, {}, {'verb' : 'Changing'}], [ H_path, None, {}, {'verb' : 'Changing'}] ] expected_output_tree = svntest.tree.build_generic_tree(output_list) # Created expected status tree. status_list = svntest.actions.get_virginal_status_list(wc_dir, 1) for item in status_list: item[3]['repos_rev'] = '2' # post-commit status if (item[0] == mu_path) or (item[0] == H_path): item[3]['wc_rev'] = '2' item[3]['status'] = '__' expected_status_tree = svntest.tree.build_generic_tree(status_list) # Commit the one file. return svntest.actions.run_and_verify_commit (wc_dir, expected_output_tree, expected_status_tree, None, None, None, None, None, wc_dir)
80a5a0aaba9ae02f632bad977b5d83a58bbfe58a /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/6036/80a5a0aaba9ae02f632bad977b5d83a58bbfe58a/prop_tests.py
status_list = svntest.actions.get_virginal_status_list(wc_dir, 1)
status_list = svntest.actions.get_virginal_status_list(wc_dir, '1')
def update_props(): "receive properties via update" # Bootstrap sbox = sandbox(update_props) wc_dir = os.path.join (svntest.main.general_wc_dir, sbox) if svntest.actions.make_repo_and_wc(sbox): return 1 # Make a backup copy of the working copy wc_backup = wc_dir + 'backup' svntest.actions.duplicate_dir(wc_dir, wc_backup) # Add a property to a file and a directory mu_path = os.path.join(wc_dir, 'A', 'mu') H_path = os.path.join(wc_dir, 'A', 'D', 'H') svntest.main.run_svn('propset', 'blue', 'azul', mu_path) svntest.main.run_svn('propset', 'red', 'rojo', H_path) # Create expected output tree. output_list = [ [mu_path, None, {}, {'verb' : 'Changing'}], [ H_path, None, {}, {'verb' : 'Changing'}] ] expected_output_tree = svntest.tree.build_generic_tree(output_list) # Created expected status tree. status_list = svntest.actions.get_virginal_status_list(wc_dir, 1) for item in status_list: item[3]['repos_rev'] = '2' # post-commit status if (item[0] == mu_path) or (item[0] == H_path): item[3]['wc_rev'] = '2' item[3]['status'] = '__' expected_status_tree = svntest.tree.build_generic_tree(status_list) # Commit the one file. if svntest.actions.run_and_verify_commit (wc_dir, expected_output_tree, expected_status_tree, None, None, None, None, None, wc_dir): return 1 # Create expected output tree for an update of the wc_backup. output_list = [ [os.path.join(wc_backup, mu_path), None, {}, {'status' : '_U'}], [os.path.join(wc_backup, H_path), None, {}, {'status' : '_U'}] ] expected_output_tree = svntest.tree.build_generic_tree(output_list) # Create expected disk tree for the update. my_greek_tree = svntest.main.copy_greek_tree() my_greek_tree[2][2]['blue'] = 'azul' # A/mu my_greek_tree[16][2]['red'] = 'rojo' # A/D/H expected_disk_tree = svntest.tree.build_generic_tree(my_greek_tree) # Create expected status tree for the update. status_list = svntest.actions.get_virginal_status_list(wc_backup, '2') expected_status_tree = svntest.tree.build_generic_tree(status_list) # Do the update and check the results in three ways. return svntest.actions.run_and_verify_update(wc_backup, expected_output_tree, expected_disk_tree, expected_status_tree)
80a5a0aaba9ae02f632bad977b5d83a58bbfe58a /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/6036/80a5a0aaba9ae02f632bad977b5d83a58bbfe58a/prop_tests.py
expected_status_tree)
expected_status_tree, None, None, None, None, 1)
def update_props(): "receive properties via update" # Bootstrap sbox = sandbox(update_props) wc_dir = os.path.join (svntest.main.general_wc_dir, sbox) if svntest.actions.make_repo_and_wc(sbox): return 1 # Make a backup copy of the working copy wc_backup = wc_dir + 'backup' svntest.actions.duplicate_dir(wc_dir, wc_backup) # Add a property to a file and a directory mu_path = os.path.join(wc_dir, 'A', 'mu') H_path = os.path.join(wc_dir, 'A', 'D', 'H') svntest.main.run_svn('propset', 'blue', 'azul', mu_path) svntest.main.run_svn('propset', 'red', 'rojo', H_path) # Create expected output tree. output_list = [ [mu_path, None, {}, {'verb' : 'Changing'}], [ H_path, None, {}, {'verb' : 'Changing'}] ] expected_output_tree = svntest.tree.build_generic_tree(output_list) # Created expected status tree. status_list = svntest.actions.get_virginal_status_list(wc_dir, 1) for item in status_list: item[3]['repos_rev'] = '2' # post-commit status if (item[0] == mu_path) or (item[0] == H_path): item[3]['wc_rev'] = '2' item[3]['status'] = '__' expected_status_tree = svntest.tree.build_generic_tree(status_list) # Commit the one file. if svntest.actions.run_and_verify_commit (wc_dir, expected_output_tree, expected_status_tree, None, None, None, None, None, wc_dir): return 1 # Create expected output tree for an update of the wc_backup. output_list = [ [os.path.join(wc_backup, mu_path), None, {}, {'status' : '_U'}], [os.path.join(wc_backup, H_path), None, {}, {'status' : '_U'}] ] expected_output_tree = svntest.tree.build_generic_tree(output_list) # Create expected disk tree for the update. my_greek_tree = svntest.main.copy_greek_tree() my_greek_tree[2][2]['blue'] = 'azul' # A/mu my_greek_tree[16][2]['red'] = 'rojo' # A/D/H expected_disk_tree = svntest.tree.build_generic_tree(my_greek_tree) # Create expected status tree for the update. status_list = svntest.actions.get_virginal_status_list(wc_backup, '2') expected_status_tree = svntest.tree.build_generic_tree(status_list) # Do the update and check the results in three ways. return svntest.actions.run_and_verify_update(wc_backup, expected_output_tree, expected_disk_tree, expected_status_tree)
80a5a0aaba9ae02f632bad977b5d83a58bbfe58a /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/6036/80a5a0aaba9ae02f632bad977b5d83a58bbfe58a/prop_tests.py
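The trailing 1 added to the run_and_verify_update call above plausibly acts as a flag that turns on property comparison; the earlier call verified only output, disk text, and status. A hypothetical skeleton of that convention (names and parameter positions are assumptions, not the real svntest signature):

def run_and_verify_update(wc_dir, output_tree, disk_tree, status_tree,
                          error_re=None, handler_a=None, baton_a=None,
                          handler_b=None, check_props=0):
    # hypothetical: with check_props true, the tree comparison would also
    # diff each node's property dictionary, not just its text
    if check_props:
        pass            # property comparison would happen here
    return 0            # 0 = success in this test suite's convention

# old style: text/status only; new style: the final 1 requests prop checks
run_and_verify_update('wc', None, None, None, None, None, None, None, 1)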
if target_ob.install == 'test' and bldtype == 'exe' \ and parser.get(target, 'testing') != 'skip': test_progs.append(tpath)
if target_ob.install == 'test' and bldtype == 'exe': test_deps.append(tpath) if parser.get(target, 'testing') != 'skip': test_progs.append(tpath)
def main(fname, oname=None, skip_depends=0): parser = ConfigParser.ConfigParser(_cfg_defaults) parser.read(fname) if oname is None: oname = os.path.splitext(os.path.basename(fname))[0] + '-outputs.mk' ofile = open(oname, 'w') ofile.write('# DO NOT EDIT -- AUTOMATICALLY GENERATED\n\n') errors = 0 targets = { } groups = { } # group name -> targets install = { } # install area name -> targets test_progs = [ ] file_deps = [ ] target_dirs = { } target_names = _filter_targets(parser.sections()) # PASS 1: collect the targets and some basic info for target in target_names: try: target_ob = Target(target, parser.get(target, 'path'), parser.get(target, 'install'), parser.get(target, 'type')) except GenMakeError, e: print e errors = 1 continue targets[target] = target_ob group = parser.get(target, 'group') if groups.has_key(group): groups[group].append(target_ob.output) else: groups[group] = [ target_ob.output ] itype = target_ob.install if install.has_key(itype): install[itype].append(target_ob) else: install[itype] = [ target_ob ] target_dirs[target_ob.path] = None if errors: sys.exit(1) # PASS 2: generate the outputs for target in target_names: target_ob = targets[target] path = target_ob.path bldtype = target_ob.type objext = target_ob.objext tpath = target_ob.output tfile = os.path.basename(tpath) if target_ob.install == 'test' and bldtype == 'exe' \ and parser.get(target, 'testing') != 'skip': test_progs.append(tpath) pats = parser.get(target, 'sources') if not pats: pats = _default_sources[bldtype] sources, s_errors = _collect_paths(pats, path) errors = errors or s_errors objects = [ ] for src in sources: if src[-2:] == '.c': objname = src[:-2] + objext objects.append(objname) file_deps.append((src, objname)) else: print 'ERROR: unknown file extension on', src errors = 1 retreat = _retreat_dots(path) libs = [ ] deps = [ ] for lib in string.split(parser.get(target, 'libs')): if lib in target_names: tlib = targets[lib] target_ob.deps.append(tlib) deps.append(tlib.output) dep_path = tlib.path if bldtype == 'lib': # we need to hack around a libtool problem: it cannot record a # dependency of one shared lib on another shared lib. ### fix this by upgrading to the new libtool 1.4 release... 
# strip "lib" from the front so we have -lsvn_foo if lib[:3] == 'lib': lib = lib[3:] libs.append('-L%s -l%s' % (retreat + os.path.join(dep_path, '.libs'), lib)) else: # linking executables can refer to .la files libs.append(retreat + os.path.join(dep_path, lib + '.la')) else: # something we don't know, so just include it directly libs.append(lib) targ_varname = string.replace(target, '-', '_') ldflags = parser.get(target, 'link-flags') objnames = string.join(map(os.path.basename, objects)) ofile.write('%s_DEPS = %s %s\n' '%s_OBJECTS = %s\n' '%s: $(%s_DEPS)\n' '\tcd %s && $(LINK) -o %s %s $(%s_OBJECTS) %s $(LIBS)\n\n' % (targ_varname, string.join(objects), string.join(deps), targ_varname, objnames, tpath, targ_varname, path, tfile, ldflags, targ_varname, string.join(libs))) custom = parser.get(target, 'custom') if custom == 'apache-mod': # special build, needing Apache includes ofile.write('# build these special -- use APACHE_INCLUDES\n') for src in sources: if src[-2:] == '.c': ofile.write('%s%s: %s\n\t$(COMPILE_APACHE_MOD)\n' % (src[:-2], objext, src)) ofile.write('\n') for g_name, g_targets in groups.items(): ofile.write('%s: %s\n\n' % (g_name, string.join(g_targets))) ofile.write('CLEAN_DIRS = %s\n' % string.join(target_dirs.keys())) cfiles = [ ] for target in targets.values(): # .la files are handled by the standard 'clean' rule; clean all the # other targets if target.output[-3:] != '.la': cfiles.append(target.output) ofile.write('CLEAN_FILES = %s\n\n' % string.join(cfiles)) for area, inst_targets in install.items(): files = [ ] for t in inst_targets: files.append(t.output) if area == 'apache-mod': ofile.write('install-mods-shared: %s\n' % (string.join(files),)) la_tweaked = { } for file in files: base, ext = os.path.splitext(os.path.basename(file)) name = string.replace(base, 'libmod_', '') ofile.write('\t$(INSTALL_MOD_SHARED) -n %s %s\n' % (name, file)) if ext == '.la': la_tweaked[file + '-a'] = None for t in inst_targets: for dep in t.deps: bt = dep.output if bt[-3:] == '.la': la_tweaked[bt + '-a'] = None la_tweaked = la_tweaked.keys() s_files, s_errors = _collect_paths(parser.get('static-apache', 'paths')) errors = errors or s_errors ofile.write('\ninstall-mods-static: %s\n' '\t$(mkinstalldirs) %s\n' % (string.join(la_tweaked + s_files), os.path.join('$(APACHE_TARGET)', '.libs'))) for file in la_tweaked: dirname, fname = os.path.split(file) base = os.path.splitext(fname)[0] ofile.write('\t$(INSTALL_MOD_STATIC) %s %s\n' '\t$(INSTALL_MOD_STATIC) %s %s\n' % (os.path.join(dirname, '.libs', base + '.a'), os.path.join('$(APACHE_TARGET)', '.libs', base + '.a'), file, os.path.join('$(APACHE_TARGET)', base + '.la'))) for file in s_files: ofile.write('\t$(INSTALL_MOD_STATIC) %s %s\n' % (file, os.path.join('$(APACHE_TARGET)', os.path.basename(file)))) ofile.write('\n') elif area != 'test': ofile.write('install-%s: %s\n' '\t$(mkinstalldirs) $(%sdir)\n' % (area, string.join(files), area)) for file in files: ofile.write('\t$(INSTALL_%s) %s %s\n' % (string.upper(area), file, os.path.join('$(%sdir)' % area, os.path.basename(file)))) ofile.write('\n') includes, i_errors = _collect_paths(parser.get('includes', 'paths')) errors = errors or i_errors ofile.write('install-include: %s\n' '\t$(mkinstalldirs) $(includedir)\n' % (string.join(includes),)) for file in includes: ofile.write('\t$(INSTALL_INCLUDE) %s %s\n' % (file, os.path.join('$(includedir)', os.path.basename(file)))) ofile.write('\n# handy shortcut targets\n') for name, target in targets.items(): ofile.write('%s: %s\n' % (name, 
target.output)) ofile.write('\n') scripts, s_errors = _collect_paths(parser.get('test-scripts', 'paths')) errors = errors or s_errors ofile.write('TEST_PROGRAMS = %s\n\n' % string.join(test_progs + scripts)) if not skip_depends: # # Find all the available headers and what they depend upon. the # include_deps is a dictionary mapping a short header name to a tuple # of the full path to the header and a dictionary of dependent header # names (short) mapping to None. # # Example: # { 'short.h' : ('/path/to/short.h', # { 'other.h' : None, 'foo.h' : None }) } # # Note that this structure does not allow for similarly named headers # in per-project directories. SVN doesn't have this at this time, so # this structure works quite fine. (the alternative would be to use # the full pathname for the key, but that is actually a bit harder to # work with since we only see short names when scanning, and keeping # a second variable around for mapping the short to long names is more # than I cared to do right now) # include_deps = _create_include_deps(includes) for d in target_dirs.keys(): hdrs = glob.glob(os.path.join(d, '*.h')) if hdrs: more_deps = _create_include_deps(hdrs, include_deps) include_deps.update(more_deps) for src, objname in file_deps: hdrs = [ ] for short in _find_includes(src, include_deps): hdrs.append(include_deps[short][0]) ofile.write('%s: %s %s\n' % (objname, src, string.join(hdrs))) if errors: sys.exit(1)
8cf81efca561548d0dfc08af0bf5a703470a0c0a /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/6036/8cf81efca561548d0dfc08af0bf5a703470a0c0a/gen-make.py
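The change above decouples building test executables from running them: every exe installed under test becomes a build dependency, but only those not marked skip join the list the harness executes. A minimal sketch of that filtering, over made-up target records:

targets = [
    {'path': 'tests/t1', 'install': 'test', 'type': 'exe', 'testing': 'run'},
    {'path': 'tests/helper', 'install': 'test', 'type': 'exe', 'testing': 'skip'},
]

test_deps = []    # everything 'make check' must build first
test_progs = []   # only what the harness actually executes
for t in targets:
    if t['install'] == 'test' and t['type'] == 'exe':
        test_deps.append(t['path'])
        if t['testing'] != 'skip':
            test_progs.append(t['path'])

print(test_deps)    # both targets
print(test_progs)   # only tests/t1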
diff_pure_repository_update_a_file ]
diff_pure_repository_update_a_file, diff_only_property_change ]
def diff_pure_repository_update_a_file(sbox): "pure repository diff update a file" if sbox.build(): return 1 wc_dir = sbox.wc_dir was_cwd = os.getcwd() os.chdir(wc_dir) # rev 2 update_a_file() svntest.main.run_svn(None, 'ci', '-m', '"log msg"') # rev 3 add_a_file_in_a_subdir() svntest.main.run_svn(None, 'ci', '-m', '"log msg"') # rev 4 add_a_file() svntest.main.run_svn(None, 'ci', '-m', '"log msg"') # rev 5 update_added_file() svntest.main.run_svn(None, 'ci', '-m', '"log msg"') svntest.main.run_svn(None, 'up', '-r2') os.chdir(was_cwd) url = svntest.main.test_area_url + '/' + svntest.main.current_repo_dir diff_output, err_output = svntest.main.run_svn(None, 'diff', '-r1:2', url) if check_update_a_file(diff_output): return 1 os.chdir(wc_dir) diff_output, err_output = svntest.main.run_svn(None, 'diff', '-r1:2') os.chdir(was_cwd) if check_update_a_file(diff_output): return 1 diff_output, err_output = svntest.main.run_svn(None, 'diff', '-r2:3', url) if check_add_a_file_in_a_subdir(diff_output): return 1 os.chdir(wc_dir) diff_output, err_output = svntest.main.run_svn(None, 'diff', '-r2:3') os.chdir(was_cwd) if check_add_a_file_in_a_subdir(diff_output): return 1 diff_output, err_output = svntest.main.run_svn(None, 'diff', '-r4:5', url) if check_update_added_file(diff_output): return 1 os.chdir(wc_dir) diff_output, err_output = svntest.main.run_svn(None, 'diff', '-r4:5') os.chdir(was_cwd) if check_update_added_file(diff_output): return 1 os.chdir(wc_dir) diff_output, err_output = svntest.main.run_svn(None, 'diff', '-rh') os.chdir(was_cwd) if check_add_a_file_in_a_subdir_reverse(diff_output): return 1 return 0
30c9d4efca9475ef89f063bcd70b8c1a4e99e28f /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/6036/30c9d4efca9475ef89f063bcd70b8c1a4e99e28f/diff_tests.py
update_binary_file
update_binary_file, update_binary_file_2
def update_binary_file_2(): "update to an old revision of a binary files" sbox = sandbox(update_binary_file_2) wc_dir = os.path.join (svntest.main.general_wc_dir, sbox) if svntest.actions.make_repo_and_wc(sbox): return 1 # Suck up contents of a test .png file. fp = open("theta.png") theta_contents = fp.read() fp.close() # 102400 is svn_txdelta_window_size. We're going to make sure we # have at least 102401 bytes of data in our second binary file (for # no reason other than we have had problems in the past with getting # svndiff data out of the repository for files > 102400 bytes). # How? Well, we'll just keep doubling the binary contents of the # original theta.png until we're big enough. zeta_contents = theta_contents while(len(zeta_contents) < 102401): zeta_contents = zeta_contents + zeta_contents # Write our two files' contents out to disk, in A/theta and A/zeta. theta_path = os.path.join(wc_dir, 'A', 'theta') fp = open(theta_path, 'w') fp.write(theta_contents) fp.close() zeta_path = os.path.join(wc_dir, 'A', 'zeta') fp = open(zeta_path, 'w') fp.write(zeta_contents) fp.close() # Now, `svn add' those two files. svntest.main.run_svn(None, 'add', theta_path, zeta_path) # Created expected output tree for 'svn ci' output_list = [ [theta_path, None, {}, {'verb' : 'Adding' }], [zeta_path, None, {}, {'verb' : 'Adding' }] ] expected_output_tree = svntest.tree.build_generic_tree(output_list) # Create expected status tree status_list = svntest.actions.get_virginal_status_list(wc_dir, '2') for item in status_list: item[3]['wc_rev'] = '1' status_list.append([theta_path, None, {}, {'status' : '__', 'locked' : ' ', 'wc_rev' : '2', 'repos_rev' : '2'}]) status_list.append([zeta_path, None, {}, {'status' : '__', 'locked' : ' ', 'wc_rev' : '2', 'repos_rev' : '2'}]) expected_status_tree = svntest.tree.build_generic_tree(status_list) # Commit the new binary filea, creating revision 2. if svntest.actions.run_and_verify_commit (wc_dir, expected_output_tree, expected_status_tree, None, None, None, None, None, wc_dir): return 1 # Make some mods to the binary files. svntest.main.file_append (theta_path, "foobar") new_theta_contents = theta_contents + "foobar" svntest.main.file_append (zeta_path, "foobar") new_zeta_contents = zeta_contents + "foobar" # Created expected output tree for 'svn ci' output_list = [ [theta_path, None, {}, {'verb' : 'Sending' }], [zeta_path, None, {}, {'verb' : 'Sending' }] ] expected_output_tree = svntest.tree.build_generic_tree(output_list) # Create expected status tree status_list = svntest.actions.get_virginal_status_list(wc_dir, '3') for item in status_list: item[3]['wc_rev'] = '1' status_list.append([theta_path, None, {}, {'status' : '__', 'locked' : ' ', 'wc_rev' : '3', 'repos_rev' : '3'}]) status_list.append([zeta_path, None, {}, {'status' : '__', 'locked' : ' ', 'wc_rev' : '3', 'repos_rev' : '3'}]) expected_status_tree = svntest.tree.build_generic_tree(status_list) # Commit original working copy again, creating revision 3. if svntest.actions.run_and_verify_commit (wc_dir, expected_output_tree, expected_status_tree, None, None, None, None, None, wc_dir): return 1 # Create expected output tree for an update of wc_backup. output_list = [ [theta_path, None, {}, {'status' : 'U '}], [zeta_path, None, {}, {'status' : 'U '}] ] expected_output_tree = svntest.tree.build_generic_tree(output_list) # Create expected disk tree for the update -- # look! binary contents, and a binary property! 
my_greek_tree = svntest.main.copy_greek_tree() my_greek_tree.append(['A/theta', theta_contents, {'svn:mime-type' : 'application/octet-stream'}, {}]) my_greek_tree.append(['A/zeta', zeta_contents, {'svn:mime-type' : 'application/octet-stream'}, {}]) expected_disk_tree = svntest.tree.build_generic_tree(my_greek_tree) # Create expected status tree for the update. status_list = svntest.actions.get_virginal_status_list(wc_dir, '3') for item in status_list: item[3]['wc_rev'] = '2' status_list.append([theta_path, None, {}, {'status' : '__', 'locked' : ' ', 'wc_rev' : '2', 'repos_rev' : '3'}]) status_list.append([zeta_path, None, {}, {'status' : '__', 'locked' : ' ', 'wc_rev' : '2', 'repos_rev' : '3'}]) expected_status_tree = svntest.tree.build_generic_tree(status_list) # Do an update from revision 2 and make sure that our binary file # gets reverted to its original contents. return svntest.actions.run_and_verify_update(wc_dir, expected_output_tree, expected_disk_tree, expected_status_tree, None, None, None, None, 1, '-r', '2')
c116938c1d32cb26892bf5c256db86cab916d741 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/6036/c116938c1d32cb26892bf5c256db86cab916d741/basic_tests.py
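The while loop in update_binary_file_2 grows the second file past the 102400-byte window cited in the test's comment by repeated doubling, which needs only O(log n) concatenations. The trick in isolation:

window_size = 102400          # threshold quoted in the test's comment
contents = 'x' * 1000         # any nonempty seed works
while len(contents) < window_size + 1:
    contents = contents + contents   # double on every pass
print(len(contents))          # 128000: first doubling past the window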
return check_update_a_file(diff_output)
if check_update_a_file(diff_output): return 1 os.chdir(wc_dir) diff_output, err_output = svntest.main.run_svn(None, 'diff', '-r1:2') os.chdir(was_cwd) if check_update_a_file(diff_output): return 1 diff_output, err_output = svntest.main.run_svn(None, 'diff', '-r2:3', url) if check_add_a_file_in_a_subdir(diff_output): return 1 os.chdir(wc_dir) diff_output, err_output = svntest.main.run_svn(None, 'diff', '-r2:3') os.chdir(was_cwd) if check_add_a_file_in_a_subdir(diff_output): return 1 diff_output, err_output = svntest.main.run_svn(None, 'diff', '-r4:5', url) if check_update_added_file(diff_output): return 1 os.chdir(wc_dir) diff_output, err_output = svntest.main.run_svn(None, 'diff', '-r4:5') os.chdir(was_cwd) if check_update_added_file(diff_output): return 1 os.chdir(wc_dir) diff_output, err_output = svntest.main.run_svn(None, 'diff', '-rh') os.chdir(was_cwd) if check_add_a_file_in_a_subdir_reverse(diff_output): return 1 return 0
def diff_pure_repository_update_a_file(sbox): "pure repository diff update a file" if sbox.build(): return 1 wc_dir = sbox.wc_dir was_cwd = os.getcwd() os.chdir(wc_dir) update_a_file() svntest.main.run_svn(None, 'ci') os.chdir(was_cwd) url = svntest.main.test_area_url + '/' + svntest.main.current_repo_dir diff_output, err_output = svntest.main.run_svn(None, 'diff', '-r1:2', url) return check_update_a_file(diff_output)
217cbb97451db931a32604ca4fd4e18e6a267887 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/6036/217cbb97451db931a32604ca4fd4e18e6a267887/diff_tests.py
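One plausible reason the tests build the repository URL with plain '/'-concatenation (visible in the context above) rather than os.path.join is that os.path.join uses the platform path separator, which would corrupt URLs on Windows. A quick illustration:

import os.path

base = 'http://localhost/repos'
url = base + '/' + 'current'    # portable for URLs
print(url)
# os.path.join targets filesystem paths: 'a/b' on POSIX but 'a\\b'
# on Windows, which is why it is the wrong tool for URLs
print(os.path.join('a', 'b'))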
rm = re.compile ('^(..)(.)(.+)(\d+)\s+(.+)')
rm = re.compile ('^(..)(.)([^0-9]+)(\d+|-)\s+(.+)')
def build_tree_from_status(lines): "Return a tree derived by parsing the output LINES from 'st'." root = SVNTreeNode(root_node_name) rm = re.compile ('^.+\:.+(\d+)') lastline = string.strip(lines.pop()) match = rm.search(lastline) if match and match.groups(): repos_rev = match.group(1) else: repos_rev = '?' rm = re.compile ('^(..)(.)(.+)(\d+)\s+(.+)') for line in lines: match = rm.search(line) if match and match.groups(): new_branch = create_from_path(match.group(5), None, {}, {'status' : match.group(1), 'locked' : match.group(2), 'wc_rev' : match.group(4), 'repos_rev' : repos_rev}) root.add_child(new_branch) return root
fad4e4f01ef392f09f2a345d9252f198035a7938 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/6036/fad4e4f01ef392f09f2a345d9252f198035a7938/tree.py
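The regex change above lets the status parser accept entries whose working revision prints as '-' (e.g. scheduled additions) and stops the path group from swallowing digits. Both patterns side by side (sample line is illustrative):

import re

old = re.compile(r'^(..)(.)(.+)(\d+)\s+(.+)')
new = re.compile(r'^(..)(.)([^0-9]+)(\d+|-)\s+(.+)')

added = 'A            -       wc/newfile'   # no revision digits yet
print(old.search(added))                    # None: old form demands \d+
m = new.search(added)
print(m.group(4), m.group(5))               # - wc/newfile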
new_branch = create_from_path(match.group(5), None, {}, {'status' : match.group(1), 'locked' : match.group(2), 'wc_rev' : match.group(4), 'repos_rev' : repos_rev})
if match.group(4) != '-': new_branch = create_from_path(match.group(5), None, {}, {'status' : match.group(1), 'locked' : match.group(2), 'wc_rev' : match.group(4), 'repos_rev' : repos_rev})
def build_tree_from_status(lines): "Return a tree derived by parsing the output LINES from 'st'." root = SVNTreeNode(root_node_name) rm = re.compile ('^.+\:.+(\d+)') lastline = string.strip(lines.pop()) match = rm.search(lastline) if match and match.groups(): repos_rev = match.group(1) else: repos_rev = '?' rm = re.compile ('^(..)(.)(.+)(\d+)\s+(.+)') for line in lines: match = rm.search(line) if match and match.groups(): new_branch = create_from_path(match.group(5), None, {}, {'status' : match.group(1), 'locked' : match.group(2), 'wc_rev' : match.group(4), 'repos_rev' : repos_rev}) root.add_child(new_branch) return root
fad4e4f01ef392f09f2a345d9252f198035a7938 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/6036/fad4e4f01ef392f09f2a345d9252f198035a7938/tree.py
output_list = [ [os.path.join(wc_backup, mu_path),
output_list = [ [mu_path,
def update_props(): "receive properties via update" # Bootstrap sbox = sandbox(update_props) wc_dir = os.path.join (svntest.main.general_wc_dir, sbox) if svntest.actions.make_repo_and_wc(sbox): return 1 # Make a backup copy of the working copy wc_backup = wc_dir + 'backup' svntest.actions.duplicate_dir(wc_dir, wc_backup) # Add a property to a file and a directory mu_path = os.path.join(wc_dir, 'A', 'mu') H_path = os.path.join(wc_dir, 'A', 'D', 'H') svntest.main.run_svn('propset', 'blue', 'azul', mu_path) svntest.main.run_svn('propset', 'red', 'rojo', H_path) # Create expected output tree. output_list = [ [mu_path, None, {}, {'verb' : 'Changing'}], [ H_path, None, {}, {'verb' : 'Changing'}] ] expected_output_tree = svntest.tree.build_generic_tree(output_list) # Created expected status tree. status_list = svntest.actions.get_virginal_status_list(wc_dir, '1') for item in status_list: item[3]['repos_rev'] = '2' # post-commit status if (item[0] == mu_path) or (item[0] == H_path): item[3]['wc_rev'] = '2' item[3]['status'] = '__' expected_status_tree = svntest.tree.build_generic_tree(status_list) # Commit the one file. if svntest.actions.run_and_verify_commit (wc_dir, expected_output_tree, expected_status_tree, None, None, None, None, None, wc_dir): return 1 # Create expected output tree for an update of the wc_backup. output_list = [ [os.path.join(wc_backup, mu_path), None, {}, {'status' : '_U'}], [os.path.join(wc_backup, H_path), None, {}, {'status' : '_U'}] ] expected_output_tree = svntest.tree.build_generic_tree(output_list) # Create expected disk tree for the update. my_greek_tree = svntest.main.copy_greek_tree() my_greek_tree[2][2]['blue'] = 'azul' # A/mu my_greek_tree[16][2]['red'] = 'rojo' # A/D/H expected_disk_tree = svntest.tree.build_generic_tree(my_greek_tree) # Create expected status tree for the update. status_list = svntest.actions.get_virginal_status_list(wc_backup, '2') for item in status_list: if (item[0] == mu_path) or (item[0] == H_path): item[3]['status'] = '__' expected_status_tree = svntest.tree.build_generic_tree(status_list) # Do the update and check the results in three ways... INCLUDING PROPS return svntest.actions.run_and_verify_update(wc_backup, expected_output_tree, expected_disk_tree, expected_status_tree, None, None, None, None, 1)
75724d356d00b24134b0a93f754ce95943cfe5cc /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/6036/75724d356d00b24134b0a93f754ce95943cfe5cc/prop_tests.py
[os.path.join(wc_backup, H_path),
[H_path,
def update_props(): "receive properties via update" # Bootstrap sbox = sandbox(update_props) wc_dir = os.path.join (svntest.main.general_wc_dir, sbox) if svntest.actions.make_repo_and_wc(sbox): return 1 # Make a backup copy of the working copy wc_backup = wc_dir + 'backup' svntest.actions.duplicate_dir(wc_dir, wc_backup) # Add a property to a file and a directory mu_path = os.path.join(wc_dir, 'A', 'mu') H_path = os.path.join(wc_dir, 'A', 'D', 'H') svntest.main.run_svn('propset', 'blue', 'azul', mu_path) svntest.main.run_svn('propset', 'red', 'rojo', H_path) # Create expected output tree. output_list = [ [mu_path, None, {}, {'verb' : 'Changing'}], [ H_path, None, {}, {'verb' : 'Changing'}] ] expected_output_tree = svntest.tree.build_generic_tree(output_list) # Created expected status tree. status_list = svntest.actions.get_virginal_status_list(wc_dir, '1') for item in status_list: item[3]['repos_rev'] = '2' # post-commit status if (item[0] == mu_path) or (item[0] == H_path): item[3]['wc_rev'] = '2' item[3]['status'] = '__' expected_status_tree = svntest.tree.build_generic_tree(status_list) # Commit the one file. if svntest.actions.run_and_verify_commit (wc_dir, expected_output_tree, expected_status_tree, None, None, None, None, None, wc_dir): return 1 # Create expected output tree for an update of the wc_backup. output_list = [ [os.path.join(wc_backup, mu_path), None, {}, {'status' : '_U'}], [os.path.join(wc_backup, H_path), None, {}, {'status' : '_U'}] ] expected_output_tree = svntest.tree.build_generic_tree(output_list) # Create expected disk tree for the update. my_greek_tree = svntest.main.copy_greek_tree() my_greek_tree[2][2]['blue'] = 'azul' # A/mu my_greek_tree[16][2]['red'] = 'rojo' # A/D/H expected_disk_tree = svntest.tree.build_generic_tree(my_greek_tree) # Create expected status tree for the update. status_list = svntest.actions.get_virginal_status_list(wc_backup, '2') for item in status_list: if (item[0] == mu_path) or (item[0] == H_path): item[3]['status'] = '__' expected_status_tree = svntest.tree.build_generic_tree(status_list) # Do the update and check the results in three ways... INCLUDING PROPS return svntest.actions.run_and_verify_update(wc_backup, expected_output_tree, expected_disk_tree, expected_status_tree, None, None, None, None, 1)
75724d356d00b24134b0a93f754ce95943cfe5cc /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/6036/75724d356d00b24134b0a93f754ce95943cfe5cc/prop_tests.py
def update_props(): "receive properties via update" # Bootstrap sbox = sandbox(update_props) wc_dir = os.path.join (svntest.main.general_wc_dir, sbox) if svntest.actions.make_repo_and_wc(sbox): return 1 # Make a backup copy of the working copy wc_backup = wc_dir + 'backup' svntest.actions.duplicate_dir(wc_dir, wc_backup) # Add a property to a file and a directory mu_path = os.path.join(wc_dir, 'A', 'mu') H_path = os.path.join(wc_dir, 'A', 'D', 'H') svntest.main.run_svn('propset', 'blue', 'azul', mu_path) svntest.main.run_svn('propset', 'red', 'rojo', H_path) # Create expected output tree. output_list = [ [mu_path, None, {}, {'verb' : 'Changing'}], [ H_path, None, {}, {'verb' : 'Changing'}] ] expected_output_tree = svntest.tree.build_generic_tree(output_list) # Created expected status tree. status_list = svntest.actions.get_virginal_status_list(wc_dir, '1') for item in status_list: item[3]['repos_rev'] = '2' # post-commit status if (item[0] == mu_path) or (item[0] == H_path): item[3]['wc_rev'] = '2' item[3]['status'] = '__' expected_status_tree = svntest.tree.build_generic_tree(status_list) # Commit the one file. if svntest.actions.run_and_verify_commit (wc_dir, expected_output_tree, expected_status_tree, None, None, None, None, None, wc_dir): return 1 # Create expected output tree for an update of the wc_backup. output_list = [ [os.path.join(wc_backup, mu_path), None, {}, {'status' : '_U'}], [os.path.join(wc_backup, H_path), None, {}, {'status' : '_U'}] ] expected_output_tree = svntest.tree.build_generic_tree(output_list) # Create expected disk tree for the update. my_greek_tree = svntest.main.copy_greek_tree() my_greek_tree[2][2]['blue'] = 'azul' # A/mu my_greek_tree[16][2]['red'] = 'rojo' # A/D/H expected_disk_tree = svntest.tree.build_generic_tree(my_greek_tree) # Create expected status tree for the update. status_list = svntest.actions.get_virginal_status_list(wc_backup, '2') for item in status_list: if (item[0] == mu_path) or (item[0] == H_path): item[3]['status'] = '__' expected_status_tree = svntest.tree.build_generic_tree(status_list) # Do the update and check the results in three ways... INCLUDING PROPS return svntest.actions.run_and_verify_update(wc_backup, expected_output_tree, expected_disk_tree, expected_status_tree, None, None, None, None, 1)
75724d356d00b24134b0a93f754ce95943cfe5cc /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/6036/75724d356d00b24134b0a93f754ce95943cfe5cc/prop_tests.py
def update_props(): "receive properties via update" # Bootstrap sbox = sandbox(update_props) wc_dir = os.path.join (svntest.main.general_wc_dir, sbox) if svntest.actions.make_repo_and_wc(sbox): return 1 # Make a backup copy of the working copy wc_backup = wc_dir + 'backup' svntest.actions.duplicate_dir(wc_dir, wc_backup) # Add a property to a file and a directory mu_path = os.path.join(wc_dir, 'A', 'mu') H_path = os.path.join(wc_dir, 'A', 'D', 'H') svntest.main.run_svn('propset', 'blue', 'azul', mu_path) svntest.main.run_svn('propset', 'red', 'rojo', H_path) # Create expected output tree. output_list = [ [mu_path, None, {}, {'verb' : 'Changing'}], [ H_path, None, {}, {'verb' : 'Changing'}] ] expected_output_tree = svntest.tree.build_generic_tree(output_list) # Created expected status tree. status_list = svntest.actions.get_virginal_status_list(wc_dir, '1') for item in status_list: item[3]['repos_rev'] = '2' # post-commit status if (item[0] == mu_path) or (item[0] == H_path): item[3]['wc_rev'] = '2' item[3]['status'] = '__' expected_status_tree = svntest.tree.build_generic_tree(status_list) # Commit the one file. if svntest.actions.run_and_verify_commit (wc_dir, expected_output_tree, expected_status_tree, None, None, None, None, None, wc_dir): return 1 # Create expected output tree for an update of the wc_backup. output_list = [ [os.path.join(wc_backup, mu_path), None, {}, {'status' : '_U'}], [os.path.join(wc_backup, H_path), None, {}, {'status' : '_U'}] ] expected_output_tree = svntest.tree.build_generic_tree(output_list) # Create expected disk tree for the update. my_greek_tree = svntest.main.copy_greek_tree() my_greek_tree[2][2]['blue'] = 'azul' # A/mu my_greek_tree[16][2]['red'] = 'rojo' # A/D/H expected_disk_tree = svntest.tree.build_generic_tree(my_greek_tree) # Create expected status tree for the update. status_list = svntest.actions.get_virginal_status_list(wc_backup, '2') for item in status_list: if (item[0] == mu_path) or (item[0] == H_path): item[3]['status'] = '__' expected_status_tree = svntest.tree.build_generic_tree(status_list) # Do the update and check the results in three ways... INCLUDING PROPS return svntest.actions.run_and_verify_update(wc_backup, expected_output_tree, expected_disk_tree, expected_status_tree, None, None, None, None, 1)
75724d356d00b24134b0a93f754ce95943cfe5cc /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/6036/75724d356d00b24134b0a93f754ce95943cfe5cc/prop_tests.py
"unversioned",
"Can't find an entry",
def commit_unversioned_thing(sbox): "committing unversioned object produces error" if sbox.build(): return 1 wc_dir = sbox.wc_dir # Create an unversioned file in the wc. svntest.main.file_append(os.path.join(wc_dir, 'blorg'), "nothing to see") # Commit a non-existent file and *expect* failure: return svntest.actions.run_and_verify_commit (wc_dir, None, None, "unversioned", None, None, None, None, os.path.join(wc_dir,'blorg'))
fd15119aa5e39eda867e6823933be1956666e0f8 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/6036/fd15119aa5e39eda867e6823933be1956666e0f8/commit_tests.py
def make_vardict(_group): vars = {} _cur = libweb100.web100_var_head(_group) while _cur != None: var = Web100Var(_cur, _group) vars[str(var)] = var _cur = libweb100.web100_var_next(_cur) return vars
def make_vardict(_group): vars = {} _cur = libweb100.web100_var_head(_group) while _cur != None: var = Web100Var(_cur, _group) vars[str(var)] = var _cur = libweb100.web100_var_next(_cur) return vars
b9ad2de15b9dcb95206f40d2cc79af53da65c426 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/10080/b9ad2de15b9dcb95206f40d2cc79af53da65c426/Web100.py
self.write_vars = make_vardict(self._tune_group) self.read_vars = make_vardict(self._read_group) for (name, var) in self.write_vars.items(): try: self.read_vars[name] except: self.read_vars[name] = var
_cur = libweb100.web100_var_head(self._read_group) while _cur != None: var = Web100Var(_cur, self._read_group) self.read_vars[str(var)] = var _cur = libweb100.web100_var_next(_cur)
def __init__(self, host=None): if (host != None): raise error("Remote agents not supported.") _agent = libweb100.web100_attach(libweb100.WEB100_AGENT_TYPE_LOCAL, None) if _agent == None: libweb100_err() self._agent = _agent self._tune_group = libweb100.web100_group_find(_agent, "tune") if self._tune_group == None: libweb100_err() self._read_group = libweb100.web100_group_find(_agent, "read") if self._read_group == None: libweb100_err() self.write_vars = make_vardict(self._tune_group) self.read_vars = make_vardict(self._read_group) for (name, var) in self.write_vars.items(): try: self.read_vars[name] except: self.read_vars[name] = var self.bufp = libweb100.new_bufp()
b9ad2de15b9dcb95206f40d2cc79af53da65c426 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/10080/b9ad2de15b9dcb95206f40d2cc79af53da65c426/Web100.py
try: libweb100.delete_bufp(self.bufp) except: pass
libweb100.delete_bufp(self.bufp)
def __del__(self): try: libweb100.delete_bufp(self.bufp) except: pass
b9ad2de15b9dcb95206f40d2cc79af53da65c426 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/10080/b9ad2de15b9dcb95206f40d2cc79af53da65c426/Web100.py
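Guarding __del__ with try/except, as the change above does, keeps cleanup from raising when the object was never fully constructed: if __init__ failed before bufp was assigned, CPython still runs __del__, and the bare attribute access would otherwise error. A generic sketch:

class Holder:
    def __init__(self, fail=False):
        if fail:
            raise ValueError('init aborted before bufp was set')
        self.bufp = object()   # stands in for a C-allocated buffer

    def __del__(self):
        try:
            del self.bufp      # stands in for libweb100.delete_bufp
        except:
            pass               # never let cleanup errors escape

h = Holder()
del h                          # normal cleanup
try:
    Holder(fail=True)          # __del__ still fires, error is swallowed
except ValueError:
    pass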
var.valtobuf(val, self.agent.bufp) if libweb100.web100_raw_write(var._var, self._connection, self.agent.bufp) != \
buf = var.valtobuf(val, self.agent.bufp) if libweb100.web100_raw_write(var._var, self._connection, buf) != \
def write(self, name, val): """Write a value to a single variable.""" try: var = self.agent.write_vars[name] except KeyError: raise error("No writable variable '%s' found."%name) var.valtobuf(val, self.agent.bufp) if libweb100.web100_raw_write(var._var, self._connection, self.agent.bufp) != \ libweb100.WEB100_ERR_SUCCESS: libweb100_err()
b9ad2de15b9dcb95206f40d2cc79af53da65c426 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/10080/b9ad2de15b9dcb95206f40d2cc79af53da65c426/Web100.py
class Web100ReadLog: def __init__(self, logname): self._log = libweb100.web100_log_open_read(logname) if self._log == None: libweb100_err() self._snap = libweb100.web100_snapshot_alloc_from_log(self._log) if self._snap == None: libweb100_err() self.vars = make_vardict(libweb100.web100_get_log_group(self._log)) self.bufp = libweb100.new_bufp() def __del__(self): libweb100.delete_bufp(self.bufp) def read(self): if libweb100.web100_snap_from_log(self._snap, self._log) != \ libweb100.WEB100_ERR_SUCCESS: return None snap = {} for (name, var) in self.vars.items(): if libweb100.web100_snap_read(var._var, self._snap, self.bufp) != \ libweb100.WEB100_ERR_SUCCESS: libweb100_err() snap[name] = var.val(self.bufp) return snap class Web100WriteLog: def __init__(self, logname, conn, _snap): self.conn = conn self._snap = _snap self._log = libweb100.web100_log_open_write(logname, conn._connection, libweb100.web100_get_snap_group(_snap)) if self._log == None: libweb100_err() def write(self): if libweb100.web100_log_write(self._log, self._snap) != \ libweb100.WEB100_ERR_SUCCESS: libweb100_err()
def write(self, name, val): """Write a value to a single variable.""" try: var = self.agent.write_vars[name] except KeyError: raise error("No writable variable '%s' found."%name) var.valtobuf(val, self.agent.bufp) if libweb100.web100_raw_write(var._var, self._connection, self.agent.bufp) != \ libweb100.WEB100_ERR_SUCCESS: libweb100_err()
b9ad2de15b9dcb95206f40d2cc79af53da65c426 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/10080/b9ad2de15b9dcb95206f40d2cc79af53da65c426/Web100.py
elif self._type == libweb100.WEB100_TYPE_INET_ADDRESS_IPV4 or \ self._type == libweb100.WEB100_TYPE_IP_ADDRESS or \ self._type == libweb100.WEB100_TYPE_INET_ADDRESS_IPV6 or \ self._type == libweb100.WEB100_TYPE_INET_ADDRESS:
else:
def val(self, bufp): if self._type == libweb100.WEB100_TYPE_INET_PORT_NUMBER or\ self._type == libweb100.WEB100_TYPE_UNSIGNED16: return libweb100.u16p_value(libweb100.bufp_to_u16p(bufp)) elif self._type == libweb100.WEB100_TYPE_INTEGER or \ self._type == libweb100.WEB100_TYPE_INTEGER32: return libweb100.s32p_value(libweb100.bufp_to_s32p(bufp)) elif self._type == libweb100.WEB100_TYPE_COUNTER32 or \ self._type == libweb100.WEB100_TYPE_GAUGE32 or \ self._type == libweb100.WEB100_TYPE_UNSIGNED32 or \ self._type == libweb100.WEB100_TYPE_TIME_TICKS: return libweb100.u32p_value(libweb100.bufp_to_u32p(bufp)) elif self._type == libweb100.WEB100_TYPE_COUNTER64: return libweb100.u64p_value(libweb100.bufp_to_u64p(bufp)) elif self._type == libweb100.WEB100_TYPE_INET_ADDRESS_IPV4 or \ self._type == libweb100.WEB100_TYPE_IP_ADDRESS or \ self._type == libweb100.WEB100_TYPE_INET_ADDRESS_IPV6 or \ self._type == libweb100.WEB100_TYPE_INET_ADDRESS: return libweb100.web100_value_to_text(self._type, bufp) else: raise error("Unknown Web100 type: %d"%self._type)
b9ad2de15b9dcb95206f40d2cc79af53da65c426 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/10080/b9ad2de15b9dcb95206f40d2cc79af53da65c426/Web100.py
else: raise error("Unknown Web100 type: %d"%self._type)
def val(self, bufp): if self._type == libweb100.WEB100_TYPE_INET_PORT_NUMBER or\ self._type == libweb100.WEB100_TYPE_UNSIGNED16: return libweb100.u16p_value(libweb100.bufp_to_u16p(bufp)) elif self._type == libweb100.WEB100_TYPE_INTEGER or \ self._type == libweb100.WEB100_TYPE_INTEGER32: return libweb100.s32p_value(libweb100.bufp_to_s32p(bufp)) elif self._type == libweb100.WEB100_TYPE_COUNTER32 or \ self._type == libweb100.WEB100_TYPE_GAUGE32 or \ self._type == libweb100.WEB100_TYPE_UNSIGNED32 or \ self._type == libweb100.WEB100_TYPE_TIME_TICKS: return libweb100.u32p_value(libweb100.bufp_to_u32p(bufp)) elif self._type == libweb100.WEB100_TYPE_COUNTER64: return libweb100.u64p_value(libweb100.bufp_to_u64p(bufp)) elif self._type == libweb100.WEB100_TYPE_INET_ADDRESS_IPV4 or \ self._type == libweb100.WEB100_TYPE_IP_ADDRESS or \ self._type == libweb100.WEB100_TYPE_INET_ADDRESS_IPV6 or \ self._type == libweb100.WEB100_TYPE_INET_ADDRESS: return libweb100.web100_value_to_text(self._type, bufp) else: raise error("Unknown Web100 type: %d"%self._type)
b9ad2de15b9dcb95206f40d2cc79af53da65c426 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/10080/b9ad2de15b9dcb95206f40d2cc79af53da65c426/Web100.py
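A dictionary-based dispatch is a common alternative to val()'s if/elif ladder over type codes; the fix above effectively makes the address types the default branch. A toy version with invented type codes (not the libweb100 constants):

U16, S32, U32, U64, ADDR = range(5)    # invented stand-ins

readers = {
    U16: lambda buf: buf['u16'],
    S32: lambda buf: buf['s32'],
    U32: lambda buf: buf['u32'],
    U64: lambda buf: buf['u64'],
}

def val(vtype, buf):
    # anything without a numeric reader falls back to a text rendering,
    # mirroring the new catch-all 'else' branch for the address types
    return readers.get(vtype, lambda b: b['text'])(buf)

print(val(U32, {'u32': 42}))
print(val(ADDR, {'text': '10.0.0.1'}))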
print cfgtuple
def main(): from twisted.internet import reactor from ConfigParser import SafeConfigParser # Read the configuration file configFile = '/etc/pybal/pybal.conf' config = SafeConfigParser() config.read(configFile) services = {} for section in config.sections(): cfgtuple = ( config.get(section, 'protocol'), config.get(section, 'ip'), config.getint(section, 'port'), config.get(section, 'scheduler')) print cfgtuple services[section] = ipvs.LVSService(section, cfgtuple) crd = Coordinator(services[section], configURL=config.get(section, 'config')) print "Created LVS service '%s'" % section reactor.run()
c8fdbba4466a4bea392035c163db0fafe299cd09 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/11222/c8fdbba4466a4bea392035c163db0fafe299cd09/pybal.py
s = hmac.HMAC(secret, digestmod = MD5)
s = hmac.HMAC(secret, digestmod = SHA256)
def gen_hmac(secret, ip): epoch_mins = (long)(time()/60) s = hmac.HMAC(secret, digestmod = MD5) s.update(socket.inet_aton(socket.gethostbyname(ip))) s.update(struct.pack("i", epoch_mins)) # "i" is for integer print s.hexdigest()
948f2b6209e51a4f00f367173a0e1c9592bba7cb /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/3161/948f2b6209e51a4f00f367173a0e1c9592bba7cb/gen_hmac.py
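A self-contained version of the fixed gen_hmac: an HMAC keyed with the shared secret, fed the resolved IP and the current minute so the digest rotates, now using SHA-256 (spelled hashlib.sha256 here) instead of MD5:

import hmac, hashlib, socket, struct
from time import time

def gen_hmac(secret, ip):
    epoch_mins = int(time() / 60)
    s = hmac.new(secret, digestmod=hashlib.sha256)
    s.update(socket.inet_aton(socket.gethostbyname(ip)))
    s.update(struct.pack("i", epoch_mins))   # "i" packs a native int
    return s.hexdigest()

print(gen_hmac(b'shared-secret', '127.0.0.1'))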
r" *use +(?P<module>[a-zA-Z_][a-zA-Z_0-9]*)(?P<only> *, *only:)? *(?P<imports>.*)$",
r" *use +(?P<module>[a-zA-Z_][a-zA-Z_0-9]*)(?P<only> *, *only *:)? *(?P<imports>.*)$",
def parseUse(inFile): """Parses the use statements in inFile The parsing stops at the first non use statement. Returns something like: ([{'module':'module1','only':['el1',el2=>el3']},...], '! comment1\\n!comment2...\\n', 'last line (the line that stopped the parsing)') """ useStartRe=re.compile( r" *(?P<use>use[^&!]*)(?P<continue>&?) *(?P<comment>!.*)?$", flags=re.IGNORECASE) commentRe=re.compile(r" *!.*$") contLineRe=re.compile( r"(?P<contLine>[^&!]*)(?P<continue>&?) *(?P<comment>!.*)?$") useParseRe=re.compile( r" *use +(?P<module>[a-zA-Z_][a-zA-Z_0-9]*)(?P<only> *, *only:)? *(?P<imports>.*)$", flags=re.IGNORECASE) lineNr=0 comments="" modules=[] line="" while 1: line=inFile.readline() lineNr=lineNr+1 if not line: break m=useStartRe.match(line) if m: # read whole use compactedUse=m.group('use') useComments="" if m.group('comment'): useComments=m.group('comment')+'\n' while m.group('continue'): lineNr=lineNr+1 m=contLineRe.match(inFile.readline()) compactedUse=compactedUse+m.group('contLine') if m.group('comment'): useComments=useComments+m.group('comment')+'\n' # parse use m=useParseRe.match(compactedUse) if not m: raise SyntaxError("could not parse use ending at line "+ str(lineNr)+" (compactedUse="+compactedUse+ ")") useAtt={'module':m.group('module')} if m.group('only'): useAtt['only']=map(string.strip, string.split(m.group('imports'),',')) else: useAtt['renames']=map(string.strip, string.split(m.group('imports'),',')) if useComments : useAtt['comments']=useComments # add use to modules modules.append(useAtt) elif commentRe.match(line): comments=comments+line elif line and not line.isspace(): break return (modules,comments,line)
7d204ee385d3314605c55eb04b0437db277fe8b6 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/2834/7d204ee385d3314605c55eb04b0437db277fe8b6/normalizeUse.py
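The extra ` *` in the only group above accepts Fortran sources that write a space before the colon. Both spellings now parse:

import re

useParseRe = re.compile(
    r" *use +(?P<module>[a-zA-Z_][a-zA-Z_0-9]*)"
    r"(?P<only> *, *only *:)? *(?P<imports>.*)$",
    flags=re.IGNORECASE)

for line in ("USE kinds, ONLY: dp", "USE kinds, ONLY : dp"):
    m = useParseRe.match(line)
    print(m.group('module'), bool(m.group('only')), m.group('imports'))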
elif m.has_key('renames'): outFile.write(" USE "+m['module']+","+ string.ljust("",38)) if m['renames']: outFile.write(m['renames'][0]) for i in range(1,len(m['renames'])): write(",&\n"+string.ljust("",45)+m['renames'][i])
else: outFile.write(" USE "+m['module']) if m.has_key('renames') and m['renames']: outFile.write(","+string.ljust("",38)+ m['renames'][0]) for i in range(1,len(m['renames'])): outFile.write(",&\n"+string.ljust("",45)+m['renames'][i])
def writeUseLong(modules,outFile): for m in modules: if m.has_key('only'): outFile.write(" USE "+m['module']+","+ string.rjust('ONLY: ',38-len(m['module']))) if m['only']: outFile.write(m['only'][0]) for i in range(1,len(m['only'])): outFile.write(",&\n"+string.ljust("",45)+m['only'][i]) elif m.has_key('renames'): outFile.write(" USE "+m['module']+","+ string.ljust("",38)) if m['renames']: outFile.write(m['renames'][0]) for i in range(1,len(m['renames'])): write(",&\n"+string.ljust("",45)+m['renames'][i]) if m.has_key('comments'): for commt in m['comments']: outfile.write("&\n"+m['comments'][i]) outfile.write("\n")
7d204ee385d3314605c55eb04b0437db277fe8b6 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/2834/7d204ee385d3314605c55eb04b0437db277fe8b6/normalizeUse.py
outFile.write("\n") outFile.write('\n'.join(m['comments']))
file.write("\n") file.write('\n'.join(m['comments']))
def writeUseShort(modules,file): """Writes a declaration in a compact way""" for m in modules: uLine=[] if m.has_key('only'): uLine.append(" USE "+m['module']+", ONLY: ") for k in m['only'][:-1]: uLine.append(k+", ") uLine.append(m['only'][-1]) elif m.has_key('renames') and m['renames']: uLine.append(" USE "+m['module']+", ") for k in m['renames'][:-1]: uLine.append(k+", ") uLine.append(m['renames'][-1]) else: uLine.append(" USE "+m['module']) writeInCols(uLine,7,79,0,file) if m['comments']: outFile.write("\n") outFile.write('\n'.join(m['comments'])) file.write("\n")
1215423c4325b019e41e99a69e365640a29242e1 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/2834/1215423c4325b019e41e99a69e365640a29242e1/normalizeFortranFile.py
nonWordRe=re.compile(r"(\W)")
nonWordRe=re.compile(r"([^a-zA-Z0-9_.])")
def writeInCols(dLine,indentCol,maxCol,indentAtt,file): """writes out the strings (trying not to cut them) in dLine up to maxCol indenting each newline with indentCol. The '&' of the continuation line is at maxCol. indentAtt is the actual intent, and the new indent is returned""" strRe=re.compile(r"('[^'\n]*'|\"[^\"\n]*\")") nonWordRe=re.compile(r"(\W)") maxSize=maxCol-indentCol-1 tol=min(maxSize/6,6)+indentCol for fragment in dLine: if indentAtt+len(fragment)<maxCol: file.write(fragment) indentAtt+=len(fragment) elif len(fragment.lstrip())<=maxSize: file.write("&\n"+(" "*indentCol)) file.write(fragment.lstrip()) indentAtt=indentCol+len(fragment.lstrip()) else: sPieces=strRe.split(fragment) for sPiece in sPieces: if sPiece and (not (sPiece[0]=='"' or sPiece[0]=="'")): subPieces=nonWordRe.split(sPiece) else: subPieces=[sPiece] for subPiece in subPieces: if indentAtt==indentCol: file.write(subPiece.lstrip()) indentAtt+=len(subPiece.lstrip()) elif indentAtt<tol or indentAtt+len(subPiece)<maxCol: file.write(subPiece) indentAtt+=len(subPiece) else: file.write("&\n"+(" "*indentCol)) file.write(subPiece.lstrip()) indentAtt=indentCol+len(subPiece.lstrip()) return indentAtt
306bf65acffe3e1cf1230009a83273c8d9720fce /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/2834/306bf65acffe3e1cf1230009a83273c8d9720fce/normalizeFortranFile.py
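Replacing \W with the explicit class [^a-zA-Z0-9_.] keeps '.' inside a token, so the line wrapper no longer breaks numeric literals at the decimal point. Compare the splits:

import re

old = re.compile(r"(\W)")
new = re.compile(r"([^a-zA-Z0-9_.])")

expr = "x=1.0e-3*my_var"
print(old.split(expr))   # ['x', '=', '1', '.', '0e', '-', '3', '*', 'my_var']
print(new.split(expr))   # ['x', '=', '1.0e', '-', '3', '*', 'my_var']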
try: prettfyInplace(fileName,bkDir, normalize_use=defaultsDict['normalize-use'], upcase_keywords=defaultsDict['upcase'], interfaces_dir=defaultsDict['interface-dir'], replace=defaultsDict['replace']) except: import traceback sys.stdout.write('-'*60+"\n") traceback.print_exc(file=sys.stdout) sys.stdout.write('-'*60+"\n") sys.stdout.write("Processing file '"+fileName+"'\n")
prettfyInplace(fileName,bkDir, normalize_use=defaultsDict['normalize-use'], upcase_keywords=defaultsDict['upcase'], interfaces_dir=defaultsDict['interface-dir'], replace=defaultsDict['replace'])
def prettfyInplace(fileName,bkDir="preprettify",normalize_use=1, upcase_keywords=1, interfaces_dir=None, replace=None,logFile=sys.stdout): """Same as prettify, but inplace, replaces only if needed""" if not os.path.exists(bkDir): os.mkdir(bkDir) if not os.path.isdir(bkDir): raise Error("bk-dir must be a directory, was "+bkDir) infile=open(fileName,'r') outfile=prettifyFile(infile, normalize_use, upcase_keywords, interfaces_dir, replace) if (infile==outfile): return infile.seek(0) outfile.seek(0) same=1 while 1: l1=outfile.readline() l2=infile.readline() if (l1!=l2): same=0 break if not l1: break if (not same): bkName=os.path.join(bkDir,os.path.basename(fileName)) bName=bkName i=0 while os.path.exists(bkName): i+=1 bkName=bName+"."+str(i) infile.seek(0) bkFile=file(bkName,"w") while 1: l1=infile.readline() if not l1: break bkFile.write(l1) bkFile.close() outfile.seek(0) newFile=file(fileName,'w') while 1: l1=outfile.readline() if not l1: break newFile.write(l1) newFile.close() infile.close() outfile.close()
d1fef2183aced069fa880442943e9128e08634fd /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/2834/d1fef2183aced069fa880442943e9128e08634fd/prettify.py
nullifys="".join(nullifyRe.findall(rest))
nullifys=",".join(nullifyRe.findall(rest))
def cleanDeclarations(routine,logFile=sys.stdout): """cleans up the declaration part of the given parsed routine removes unused variables""" global rVar commentToRemoveRe=re.compile(r" *! *(?:interface|arguments|parameters|locals?|\** *local +variables *\**|\** *local +parameters *\**) *$",re.IGNORECASE) nullifyRe=re.compile(r" *nullify *\(([^()]+)\) *\n?",re.IGNORECASE|re.MULTILINE) if not routine['kind']: return if (routine['core'] and re.match(" *type *[a-zA-Z_]+ *$",routine['core'][0],re.IGNORECASE)): logFile.write("*** routine %s contains local types, not fully cleaned ***\n"% (routine['name'])) if re.search("^#","".join(routine['declarations']),re.MULTILINE): logFile.write("*** routine %s declarations contain preprocessor directives ***\n*** declarations not cleaned ***\n"%( routine['name'])) return try: rest="".join(routine['strippedCore']).lower() nullifys="".join(nullifyRe.findall(rest)) rest=nullifyRe.sub("",rest) paramDecl=[] decls=[] for d in routine['parsedDeclarations']: d['normalizedType']=d['type'] if d['parameters']: d['normalizedType']+=d['parameters'] if (d["attributes"]): d['attributes'].sort(lambda x,y:cmp(x.lower(),y.lower())) d['normalizedType']+=', ' d['normalizedType']+=', '.join(d['attributes']) if "parameter" in map(str.lower,d['attributes']): paramDecl.append(d) else: decls.append(d) sortDeclarations(paramDecl) sortDeclarations(decls) has_routinen=0 pos_routinep=-1 for d in paramDecl: for i in xrange(len(d['vars'])): v=d['vars'][i] m=varRe.match(v) lowerV=m.group("var").lower() if lowerV=="routinen": has_routinen=1 d['vars'][i]="routineN = '"+routine['name']+"'" elif lowerV=="routinep": pos_routinep=i d['vars'][i]="routineP = moduleN//':'//routineN" if not has_routinen and pos_routinep>=0: d['vars'].insert(pos_routinep,"routineN = '"+routine['name']+"'") if routine['arguments']: routine['lowercaseArguments']=map(lambda x:x.lower(),routine['arguments']) else: routine['lowercaseArguments']=[] if routine['result']: routine['lowercaseArguments'].append(routine['result'].lower()) argDeclDict={} localDecl=[] for d in decls: localD={} localD.update(d) localD['vars']=[] argD=None for v in d['vars']: m=varRe.match(v) lowerV=m.group("var").lower() if lowerV in routine['lowercaseArguments']: argD={} argD.update(d) argD['vars']=[v] if argDeclDict.has_key(lowerV): raise SyntaxError( "multiple declarations not supported. 
var="+v+ " declaration="+str(d)+"routine="+routine['name']) argDeclDict[lowerV]=argD else: pos=findWord(lowerV,rest) if (pos!=-1): localD['vars'].append(v) else: if findWord(lowerV,nullifys)!=-1: if not rmNullify(lowerV,routine['core']): raise SyntaxError( "could not remove nullify of "+lowerV+ " as expected, routine="+routine['name']) logFile.write("removed var %s in routine %s\n" % (lowerV,routine['name'])) rVar+=1 if (len(localD['vars'])): localDecl.append(localD) argDecl=[] for arg in routine['lowercaseArguments']: argDecl.append(argDeclDict[arg]) if routine['kind'].lower()=='function': aDecl=argDecl[:-1] else: aDecl=argDecl isOptional=0 for arg in aDecl: attIsOptional= ("optional" in map(lambda x:x.lower(), arg['attributes'])) if isOptional and not attIsOptional: logFile.write("*** warning non optional args %s after optional in routine %s\n" %( repr(arg['vars']),routine['name'])) if attIsOptional: isOptional=1 enforceDeclDependecies(argDecl) newDecl=StringIO() for comment in routine['preDeclComments']: if not commentToRemoveRe.match(comment): newDecl.write(comment) newDecl.writelines(routine['use']) writeDeclarations(argDecl,newDecl) if argDecl and paramDecl: newDecl.write("\n") writeDeclarations(paramDecl,newDecl) if (argDecl or paramDecl) and localDecl: newDecl.write("\n") writeDeclarations(localDecl,newDecl) if argDecl or paramDecl or localDecl: newDecl.write("\n") wrote=0 for comment in routine['declComments']: if not commentToRemoveRe.match(comment): newDecl.write(comment) newDecl.write("\n") wrote=1 if wrote: newDecl.write("\n") routine['declarations']=[newDecl.getvalue()] except: if routine.has_key('name'): logFile.write("**** exception cleaning routine "+routine['name']+" ****") logFile.write("parsedDeclartions="+str(routine['parsedDeclarations'])) raise
2b9adf191673224355edbc64a1944889557dca98 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/2834/2b9adf191673224355edbc64a1944889557dca98/normalizeFortranFile.py
'g4x6':{'atoms':[3,4,5,6],'distances':[7,8,9,10,11],'molecule':[2]},
'g4x6':{'atoms':[3,4,5,6],'distances':[7,8,9,10,11,12],'molecule':[2]},
def cons_conv(oldInput,oldSect,newInput,new_sect,conv): del new_sect.auto_g new_sect.pre_comments=oldSect.pre_comments new_sect.post_comments=oldSect.post_comments consTypes={'g3x3':{'atoms':[3,4,5],'distances':[6,7,8],'molecule':[2]}, 'g4x6':{'atoms':[3,4,5,6],'distances':[7,8,9,10,11],'molecule':[2]}, 'dist':{'atoms':[3,4],'distance':[5],'molecule':[2]}} for line in oldSect.raw_lines: if line.isspace():continue ll=line.split() sname=ll[0].lower() if sname.lower()=='dist': s=guaranteePath(new_sect,conv.upcase("internals")) sAtt=Section(conv.upcase('distance')) s[-1].add_subsection(sAtt) else: sAtt=Section(conv.upcase(sname)) new_sect.add_subsection(sAtt) convAtt=consTypes[sname] for (k,v) in convAtt.iteritems(): kw=Keyword(conv.upcase(k),map(lambda x:ll[x],v)) sAtt.add_keyword(kw)
e0c4b5ab23ad2adb0c363b9e52cb0f9ff781518d /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/2834/e0c4b5ab23ad2adb0c363b9e52cb0f9ff781518d/input_converter.py
if line.split()[0].lower()=="end":break
ll=line.split() if ll[0].lower()=="end":break
def ff_conv(oldInput,oldSect,newInput,new_sect,conv): del new_sect.auto_g new_sect.pre_comments=oldSect.pre_comments new_sect.post_comments=oldSect.post_comments l_nr=0 nl=len(oldSect.raw_lines) while 1: if l_nr>=nl: break line=oldSect.raw_lines[l_nr] ll=line.strip().lower() if ll=="charges": while 1: l_nr+=1 if l_nr>=nl: break line=oldSect.raw_lines[l_nr] if line.split()[0].lower()=="end":break ch=Section(conv.upcase("charge")) ch.add_keyword(Keyword(conv.upcase("atom"), values=[line.split()[0]])) ch.add_keyword(Keyword(conv.upcase("charge"), values=[line.split()[1]])) new_sect.add_subsection(ch) elif ll=="bends": while 1: l_nr+=1 if l_nr>=nl: break line=oldSect.raw_lines[l_nr] if line.split()[0].lower()=="end":break ch=Section(conv.upcase("bend")) ch.add_keyword(Keyword(conv.upcase("atoms"),values=line.split()[1:4])) ch.add_keyword(Keyword(conv.upcase("k"),values=[line.split()[4]])) ch.add_keyword(Keyword(conv.upcase("theta0"),values=[line.split()[5]])) new_sect.add_subsection(ch) elif ll=="bonds": while 1: l_nr+=1 if l_nr>=nl: break line=oldSect.raw_lines[l_nr] if line.split()[0].lower()=="end":break ch=Section(conv.upcase("bond")) ch.add_keyword(Keyword(conv.upcase("atoms"),values=line.split()[1:3])) ch.add_keyword(Keyword(conv.upcase("k"),values=[line.split()[3]])) ch.add_keyword(Keyword(conv.upcase("r0"),values=[line.split()[4]])) new_sect.add_subsection(ch) elif ll.split()[0]=="parmfile": new_sect.add_keyword(Keyword("parmfile",[line.split()[2]])) new_sect.add_keyword(Keyword("parmtype",[line.split()[1]])) elif ll.split()[0]=="ei_scale": new_sect.add_keyword(Keyword("ei_scale14",[line.split()[1]])) elif ll.split()[0]=="vdw_scale": new_sect.add_keyword(Keyword("vdw_scale14",[line.split()[1]])) elif ll.split()[0]=="rcut_nb": new_sect.add_keyword(Keyword("rcut_nb",[line.split()[1]])) elif ll.split()[0]=="nonbonded": ss=Section(conv.upcase("nonbonded")) new_sect.add_subsection(ss) f_data={'lennard-jones':{3:'epsilon',4:'sigma',5:'rcut'}, 'bmhft':{3:'rcut'},'eam':{3:'parmfile'},'ipbv':{3:'rcut'}, 'williams':{3:'a',4:'b',5:'c',6:'rcut'}} while 1: l_nr+=1 if l_nr>=nl: break line=oldSect.raw_lines[l_nr] if line.split()[0].lower()=="end":break l=line.split() sname=l[0].lower() ch=Section(conv.upcase(l[0])) ch.add_keyword(Keyword("atom",l[1:3])) for idx in f_data[sname]: kname=f_data[sname][idx] ch.add_keyword(Keyword(conv.upcase(kname),values=[l[idx]])) ss.add_subsection(ch) else: print "WARNING ignoring line ",repr(line),"in forcefield section" l_nr+=1
e0c4b5ab23ad2adb0c363b9e52cb0f9ff781518d /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/2834/e0c4b5ab23ad2adb0c363b9e52cb0f9ff781518d/input_converter.py
ch.add_keyword(Keyword(conv.upcase("atoms"),values=line.split()[1:3])) ch.add_keyword(Keyword(conv.upcase("k"),values=[line.split()[3]])) ch.add_keyword(Keyword(conv.upcase("r0"),values=[line.split()[4]]))
ch.add_keyword(Keyword(conv.upcase("atoms"),values=ll[1:3])) if ll[0].lower()=="harmonic": ch.add_keyword(Keyword(conv.upcase("k"),values=[ll[3]])) ch.add_keyword(Keyword(conv.upcase("r0"),values=[line.split()[4]])) elif ll[0].lower()=="quartic": ch.add_keyword(Keyword(conv.upcase("k"),values=ll[3:6])) ch.add_keyword(Keyword(conv.upcase("r0"),values=[line.split()[6]])) else: print "WARNING unknown bond type in forcefield section:",ll[0]
def ff_conv(oldInput,oldSect,newInput,new_sect,conv): del new_sect.auto_g new_sect.pre_comments=oldSect.pre_comments new_sect.post_comments=oldSect.post_comments l_nr=0 nl=len(oldSect.raw_lines) while 1: if l_nr>=nl: break line=oldSect.raw_lines[l_nr] ll=line.strip().lower() if ll=="charges": while 1: l_nr+=1 if l_nr>=nl: break line=oldSect.raw_lines[l_nr] if line.split()[0].lower()=="end":break ch=Section(conv.upcase("charge")) ch.add_keyword(Keyword(conv.upcase("atom"), values=[line.split()[0]])) ch.add_keyword(Keyword(conv.upcase("charge"), values=[line.split()[1]])) new_sect.add_subsection(ch) elif ll=="bends": while 1: l_nr+=1 if l_nr>=nl: break line=oldSect.raw_lines[l_nr] if line.split()[0].lower()=="end":break ch=Section(conv.upcase("bend")) ch.add_keyword(Keyword(conv.upcase("atoms"),values=line.split()[1:4])) ch.add_keyword(Keyword(conv.upcase("k"),values=[line.split()[4]])) ch.add_keyword(Keyword(conv.upcase("theta0"),values=[line.split()[5]])) new_sect.add_subsection(ch) elif ll=="bonds": while 1: l_nr+=1 if l_nr>=nl: break line=oldSect.raw_lines[l_nr] if line.split()[0].lower()=="end":break ch=Section(conv.upcase("bond")) ch.add_keyword(Keyword(conv.upcase("atoms"),values=line.split()[1:3])) ch.add_keyword(Keyword(conv.upcase("k"),values=[line.split()[3]])) ch.add_keyword(Keyword(conv.upcase("r0"),values=[line.split()[4]])) new_sect.add_subsection(ch) elif ll.split()[0]=="parmfile": new_sect.add_keyword(Keyword("parmfile",[line.split()[2]])) new_sect.add_keyword(Keyword("parmtype",[line.split()[1]])) elif ll.split()[0]=="ei_scale": new_sect.add_keyword(Keyword("ei_scale14",[line.split()[1]])) elif ll.split()[0]=="vdw_scale": new_sect.add_keyword(Keyword("vdw_scale14",[line.split()[1]])) elif ll.split()[0]=="rcut_nb": new_sect.add_keyword(Keyword("rcut_nb",[line.split()[1]])) elif ll.split()[0]=="nonbonded": ss=Section(conv.upcase("nonbonded")) new_sect.add_subsection(ss) f_data={'lennard-jones':{3:'epsilon',4:'sigma',5:'rcut'}, 'bmhft':{3:'rcut'},'eam':{3:'parmfile'},'ipbv':{3:'rcut'}, 'williams':{3:'a',4:'b',5:'c',6:'rcut'}} while 1: l_nr+=1 if l_nr>=nl: break line=oldSect.raw_lines[l_nr] if line.split()[0].lower()=="end":break l=line.split() sname=l[0].lower() ch=Section(conv.upcase(l[0])) ch.add_keyword(Keyword("atom",l[1:3])) for idx in f_data[sname]: kname=f_data[sname][idx] ch.add_keyword(Keyword(conv.upcase(kname),values=[l[idx]])) ss.add_subsection(ch) else: print "WARNING ignoring line ",repr(line),"in forcefield section" l_nr+=1
e0c4b5ab23ad2adb0c363b9e52cb0f9ff781518d /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/2834/e0c4b5ab23ad2adb0c363b9e52cb0f9ff781518d/input_converter.py
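The bond fix above dispatches on the potential type because a harmonic bond carries one force constant while a quartic bond carries three. The same logic reduced to plain dictionaries (field layout inferred from the diff, not from a spec):

def parse_bond(ll):
    # ll is a split 'bonds' line: [type, atom1, atom2, k..., r0]
    kind = ll[0].lower()
    bond = {'atoms': ll[1:3]}
    if kind == 'harmonic':
        bond['k'] = [ll[3]]
        bond['r0'] = ll[4]
    elif kind == 'quartic':
        bond['k'] = ll[3:6]
        bond['r0'] = ll[6]
    else:
        raise ValueError('unknown bond type: ' + kind)
    return bond

print(parse_bond('HARMONIC H O 450.0 0.96'.split()))
print(parse_bond('QUARTIC H O 450.0 -1.0 2.0 0.96'.split()))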
ch.add_keyword(Keyword("atom",l[1:3]))
ch.add_keyword(Keyword("atoms",l[1:3]))
def ff_conv(oldInput,oldSect,newInput,new_sect,conv): del new_sect.auto_g new_sect.pre_comments=oldSect.pre_comments new_sect.post_comments=oldSect.post_comments l_nr=0 nl=len(oldSect.raw_lines) while 1: if l_nr>=nl: break line=oldSect.raw_lines[l_nr] ll=line.strip().lower() if ll=="charges": while 1: l_nr+=1 if l_nr>=nl: break line=oldSect.raw_lines[l_nr] if line.split()[0].lower()=="end":break ch=Section(conv.upcase("charge")) ch.add_keyword(Keyword(conv.upcase("atom"), values=[line.split()[0]])) ch.add_keyword(Keyword(conv.upcase("charge"), values=[line.split()[1]])) new_sect.add_subsection(ch) elif ll=="bends": while 1: l_nr+=1 if l_nr>=nl: break line=oldSect.raw_lines[l_nr] if line.split()[0].lower()=="end":break ch=Section(conv.upcase("bend")) ch.add_keyword(Keyword(conv.upcase("atoms"),values=line.split()[1:4])) ch.add_keyword(Keyword(conv.upcase("k"),values=[line.split()[4]])) ch.add_keyword(Keyword(conv.upcase("theta0"),values=[line.split()[5]])) new_sect.add_subsection(ch) elif ll=="bonds": while 1: l_nr+=1 if l_nr>=nl: break line=oldSect.raw_lines[l_nr] if line.split()[0].lower()=="end":break ch=Section(conv.upcase("bond")) ch.add_keyword(Keyword(conv.upcase("atoms"),values=line.split()[1:3])) ch.add_keyword(Keyword(conv.upcase("k"),values=[line.split()[3]])) ch.add_keyword(Keyword(conv.upcase("r0"),values=[line.split()[4]])) new_sect.add_subsection(ch) elif ll.split()[0]=="parmfile": new_sect.add_keyword(Keyword("parmfile",[line.split()[2]])) new_sect.add_keyword(Keyword("parmtype",[line.split()[1]])) elif ll.split()[0]=="ei_scale": new_sect.add_keyword(Keyword("ei_scale14",[line.split()[1]])) elif ll.split()[0]=="vdw_scale": new_sect.add_keyword(Keyword("vdw_scale14",[line.split()[1]])) elif ll.split()[0]=="rcut_nb": new_sect.add_keyword(Keyword("rcut_nb",[line.split()[1]])) elif ll.split()[0]=="nonbonded": ss=Section(conv.upcase("nonbonded")) new_sect.add_subsection(ss) f_data={'lennard-jones':{3:'epsilon',4:'sigma',5:'rcut'}, 'bmhft':{3:'rcut'},'eam':{3:'parmfile'},'ipbv':{3:'rcut'}, 'williams':{3:'a',4:'b',5:'c',6:'rcut'}} while 1: l_nr+=1 if l_nr>=nl: break line=oldSect.raw_lines[l_nr] if line.split()[0].lower()=="end":break l=line.split() sname=l[0].lower() ch=Section(conv.upcase(l[0])) ch.add_keyword(Keyword("atom",l[1:3])) for idx in f_data[sname]: kname=f_data[sname][idx] ch.add_keyword(Keyword(conv.upcase(kname),values=[l[idx]])) ss.add_subsection(ch) else: print "WARNING ignoring line ",repr(line),"in forcefield section" l_nr+=1
e0c4b5ab23ad2adb0c363b9e52cb0f9ff781518d /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/2834/e0c4b5ab23ad2adb0c363b9e52cb0f9ff781518d/input_converter.py
nrRe=re.compile("[-+]?[0-9]*\\.?[0-9]+([eEdD][-+]?[0-9]+)?")
nrRe=re.compile("[-+]?[0-9]*\\.?[0-9]+([eEdD][-+]?[0-9]+)?$")
def diffEpsilon(str1, str2, incomparable_val=1):
    """Returns the difference between two strings, parsing numbers and
    comparing them numerically."""
    import re
    nrRe = re.compile("[-+]?[0-9]*\\.?[0-9]+([eEdD][-+]?[0-9]+)?")
    tokens1 = str1.split()
    tokens2 = str2.split()
    distance = 0.0
    if len(tokens1) != len(tokens2):
        return incomparable_val
    i = 0
    for t1 in tokens1:
        t2 = tokens2[i]
        i = i + 1
        if (t1 != t2):
            if nrRe.match(t1) and nrRe.match(t2):
                (f1, f2) = (float(t1), float(t2))
                distance = max(distance, compareNr(f1, f2))
            else:
                return incomparable_val
    return distance
bd571a17048ee98a8c7aab8391ad0192de16d969 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/2834/bd571a17048ee98a8c7aab8391ad0192de16d969/diffEpsilon.py
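Note: the hunk above appends "$" to the pattern. re.match() only anchors at the start of a string, so without the "$" a token with trailing junk still counts as a number. A minimal standalone sketch of the difference (not part of the source):

import re
unanchored = re.compile(r"[-+]?[0-9]*\.?[0-9]+([eEdD][-+]?[0-9]+)?")
anchored = re.compile(r"[-+]?[0-9]*\.?[0-9]+([eEdD][-+]?[0-9]+)?$")
assert unanchored.match("1.5abc")      # the prefix "1.5" is enough to match
assert not anchored.match("1.5abc")    # "$" rejects the trailing junk
assert anchored.match("1.5e-3")        # a clean number still matches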
alive = Alive()
def eekMexLogging():
    # ------------------------------------------------------------
    # Base Logging Setup
    # ------------------------------------------------------------
    logging.basicConfig(level=logging.INFO,
                        format='%(asctime)s %(name)-2s %(module)-10s %(levelname)-4s %(message)s',
                        filename='eekmex.log',
                        filemode='a')

    # ------------------------------------------------------------
    # Logging Handlers
    # ------------------------------------------------------------
    loggerConsole = logging.StreamHandler()
    loggerConsole.setLevel(logging.INFO)

    loggerFile = logging.FileHandler('/media/sdcard/eekmex.log', 'a')
    loggerFile.setLevel(logging.INFO)

    loggerFileGoogleEarth = logging.FileHandler('/media/sdcard/eekmexprekml.log', 'a')
    loggerFileGoogleEarth.setLevel(logging.WARNING)

    # ------------------------------------------------------------
    # Logging Formatters
    # ------------------------------------------------------------
    loggerConsoleFormatter = logging.Formatter('%(name)-2s: %(module)-10s %(levelname)-4s %(message)s')
    loggerConsole.setFormatter(loggerConsoleFormatter)

    loggerFileFormatter = logging.Formatter('%(asctime)s %(name)-2s %(module)-10s %(levelname)-4s %(message)s')
    loggerFile.setFormatter(loggerFileFormatter)

    loggerFileGoogleEarthFormatter = logging.Formatter('%(process)d %(asctime)s %(message)s',
                                                       datefmt="%d %m %Y %H %M %S ")
    loggerFileGoogleEarth.setFormatter(loggerFileGoogleEarthFormatter)

    # ------------------------------------------------------------
    # Logging Handlers
    # ------------------------------------------------------------
    logging.getLogger('').addHandler(loggerConsole)
    logging.getLogger('').addHandler(loggerFile)
    logging.getLogger('').addHandler(loggerFileGoogleEarth)
80d63c3fca9b86c3b4865925328fd419a7050887 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/4019/80d63c3fca9b86c3b4865925328fd419a7050887/eekmex.py
alive.data()
80d63c3fca9b86c3b4865925328fd419a7050887 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/4019/80d63c3fca9b86c3b4865925328fd419a7050887/eekmex.py
self.backend = Backend(self.uri[1:].split('/')[0], self.factory ("http://" + self.uri[1:].split('/')[0]))
self.backend = Backend(self.uri[1:].split('/')[0], self.factory, ("http://" + self.uri[1:].split('/')[0],))
def process(self):
    """ Each new request begins processing here """
    log.debug("Request: " + self.method + " " + self.uri)
    # Clean up URL
    self.uri = self.simplify_path(self.uri)
ef36f2d6f1135a2dfe7542ab8cd99b9eac31034c /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/7531/ef36f2d6f1135a2dfe7542ab8cd99b9eac31034c/apt_proxy.py
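Note: the fix above stops constructing the helper at call time and instead passes the factory callable together with an argument tuple, letting the receiver decide when to instantiate. A sketch of the idiom with hypothetical names:

class LazyBackend:
    def __init__(self, name, factory, factory_args):
        self.name = name
        self._factory = factory            # a callable, e.g. a class
        self._factory_args = factory_args  # its positional arguments
        self._instance = None

    def instance(self):
        # Build the wrapped object on first use only.
        if self._instance is None:
            self._instance = self._factory(*self._factory_args)
        return self._instance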
if log.isEnabled('apt'):
    self.cache = apt_pkg.GetCache()
else:
    self.__fake_stdout()
    self.cache = apt_pkg.GetCache()
    self.__restore_stdout()
self.cache = apt_pkg.GetCache()
def load(self):
    """ Regenerates the fake configuration and loads the packages server. """
    if not self.loaded:
        shutil.rmtree(self.status_dir+'/apt/lists/')
        os.makedirs(self.status_dir+'/apt/lists/partial')
        sources = open(self.status_dir+'/'+'apt/etc/sources.list', 'w')
        for file in self.packages.keys():
            # we should probably clear old entries from self.packages and
            # take into account the recorded mtime as optimization
            fake_uri = 'http://apt-proxy:'+file
            source_line = 'deb '+dirname(fake_uri)+'/ /'
            listpath = (self.status_dir+'/apt/lists/'
                        + apt_pkg.URItoFileName(fake_uri))
            sources.write(source_line+'\n')
d3184cc6441737648cb1117d63dfd496902add05 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/7531/d3184cc6441737648cb1117d63dfd496902add05/packages.py
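Note: the removed branch silenced apt_pkg.GetCache(), which prints progress output, by swapping sys.stdout. A standalone sketch of that trick (it silences Python-level writes only; a C extension writing straight to file descriptor 1 would need os.dup2 instead):

import os, sys

def call_silently(func, *args):
    saved = sys.stdout
    sys.stdout = open(os.devnull, "w")
    try:
        return func(*args)
    finally:
        sys.stdout.close()
        sys.stdout = saved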
def import_directory(factory, dir, recursive=0):
    """
    Import all files in a given directory into the cache.
    This is used by apt-proxy-import to import new files into the cache.
    """
    if not os.path.exists(dir):
        log.err('Directory ' + dir + ' does not exist', 'import')
        return

    if recursive:
        log.debug("Importing packages from directory tree: " + dir, 'import')
        for root, dirs, files in os.walk(dir):
            for file in files:
                import_file(factory, root, file)
    else:
        log.debug("Importing packages from directory: " + dir, 'import')
        for file in os.listdir(dir):
            mode = os.stat(dir + '/' + file)[stat.ST_MODE]
            if not stat.S_ISDIR(mode):
                import_file(factory, dir, file)

    for backend in factory.backends:
        backend.packages.unload()
def compare(a, b): return apt_pkg.VersionCompare(a[0], b[0])
d3184cc6441737648cb1117d63dfd496902add05 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/7531/d3184cc6441737648cb1117d63dfd496902add05/packages.py
def import_debs(factory, dir):
    if not os.path.exists(dir):
        os.makedirs(dir)
    for file in os.listdir(dir):
        if file[-4:] != '.deb':
            log.msg("IGNORING:" + file, 'import')
            continue
        log.msg("considering:" + dir+'/'+file, 'import')
        paths = get_mirror_path(factory, dir+'/'+file)
        if paths:
            if len(paths) != 1:
                log.msg("WARNING: multiple ocurrences", 'import')
                log.msg(str(paths), 'import')
            path = paths[0]
def import_file(factory, dir, file):
    """ Import a .deb into cache from given filename """
    if file[-4:] != '.deb':
        log.msg("Ignoring (unknown file type):" + file, 'import')
        return

    log.debug("considering: " + dir + '/' + file, 'import')
    paths = get_mirror_path(factory, dir+'/'+file)

    if paths:
        if len(paths) != 1:
            log.debug("WARNING: multiple ocurrences", 'import')
            log.debug(str(paths), 'import')
        cache_path = paths[0]
    else:
        log.debug("Not found, trying to guess", 'import')
        cache_path = closest_match(AptDpkgInfo(dir+'/'+file),
                                   get_mirror_versions(factory, dir+'/'+file))

    if cache_path:
        log.debug("MIRROR_PATH:" + cache_path, 'import')
        src_path = dir+'/'+file
        dest_path = factory.cache_dir+cache_path

        if not os.path.exists(dest_path):
            log.debug("IMPORTING:" + src_path, 'import')
            dest_path = re.sub(r'/\./', '/', dest_path)
            if not os.path.exists(dirname(dest_path)):
                os.makedirs(dirname(dest_path))
            f = open(dest_path, 'w')
            fcntl.lockf(f.fileno(), fcntl.LOCK_EX)
            f.truncate(0)
            shutil.copy2(src_path, dest_path)
            f.close()
            if hasattr(factory, 'access_times'):
                atime = os.stat(src_path)[stat.ST_ATIME]
                factory.access_times[cache_path] = atime
            log.msg(file + ' imported', 'import')
def import_debs(factory, dir):
    if not os.path.exists(dir):
        os.makedirs(dir)
    for file in os.listdir(dir):
        if file[-4:] != '.deb':
            log.msg("IGNORING:" + file, 'import')
            continue
        log.msg("considering:" + dir+'/'+file, 'import')
        paths = get_mirror_path(factory, dir+'/'+file)
        if paths:
            if len(paths) != 1:
                log.msg("WARNING: multiple ocurrences", 'import')
                log.msg(str(paths), 'import')
            path = paths[0]
        else:
            log.msg("Not found, trying to guess", 'import')
            path = closest_match(AptDpkgInfo(dir+'/'+file),
                                 get_mirror_versions(factory, dir+'/'+file))
        if path:
            log.msg("MIRROR_PATH:" + path, 'import')
            spath = dir+'/'+file
            dpath = factory.cache_dir+path
            if not os.path.exists(dpath):
                log.msg("IMPORTING:" + spath, 'import')
                dpath = re.sub(r'/\./', '/', dpath)
                if not os.path.exists(dirname(dpath)):
                    os.makedirs(dirname(dpath))
                f = open(dpath, 'w')
                fcntl.lockf(f.fileno(), fcntl.LOCK_EX)
                f.truncate(0)
                shutil.copy2(spath, dpath)
                f.close()
                if hasattr(factory, 'access_times'):
                    atime = os.stat(spath)[stat.ST_ATIME]
                    factory.access_times[path] = atime
    for backend in factory.backends:
        backend.packages.unload()
d3184cc6441737648cb1117d63dfd496902add05 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/7531/d3184cc6441737648cb1117d63dfd496902add05/packages.py
log.msg("Not found, trying to guess", 'import') path = closest_match(AptDpkgInfo(dir+'/'+file), get_mirror_versions(factory, dir+'/'+file)) if path: log.msg("MIRROR_PATH:"+ path, 'import') spath = dir+'/'+file dpath = factory.cache_dir+path if not os.path.exists(dpath): log.msg("IMPORTING:"+spath, 'import') dpath = re.sub(r'/\./', '/', dpath) if not os.path.exists(dirname(dpath)): os.makedirs(dirname(dpath)) f = open(dpath, 'w') fcntl.lockf(f.fileno(), fcntl.LOCK_EX) f.truncate(0) shutil.copy2(spath, dpath) f.close() if hasattr(factory, 'access_times'): atime = os.stat(spath)[stat.ST_ATIME] factory.access_times[path] = atime for backend in factory.backends: backend.packages.unload()
    log.msg(file + ' skipped - already in cache', 'import')
else:
    log.msg(file + ' skipped - no suitable backend found', 'import')
d3184cc6441737648cb1117d63dfd496902add05 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/7531/d3184cc6441737648cb1117d63dfd496902add05/packages.py
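Note: both versions of this import routine rely on the same copy-under-lock idiom when writing a package into the cache. A condensed standalone sketch (paths hypothetical): take an exclusive advisory lock on the destination, truncate stale contents, then copy data plus timestamps.

import fcntl, shutil

def locked_copy(src_path, dest_path):
    f = open(dest_path, 'w')
    fcntl.lockf(f.fileno(), fcntl.LOCK_EX)  # exclusive advisory lock
    f.truncate(0)                           # drop any previous contents
    shutil.copy2(src_path, dest_path)       # copy data and mtime/atime
    f.close()                               # closing releases the lock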
log.debug('Opening database ' + filename)
def open_shelve(filename):
    from bsddb3 import db, dbshelve
    log.debug('Opening database ' + filename)
515e74c733626d74dfe8d685b4c41efe359006c5 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/7531/515e74c733626d74dfe8d685b4c41efe359006c5/apt_proxy.py
try:
    shelve.verify(filename)
except:
    os.rename(filename, filename+'.error')
    log.msg(filename+' could not be opened, moved to '+filename+'.error', 'db', 1)
    log.msg('Recreating '+ filename, 'db', 1)
try:
    log.debug('Verifying database: ' + filename)
    shelve.verify(filename)
except:
    os.rename(filename, filename+'.error')
    log.msg(filename+' could not be opened, moved to '+filename+'.error', 'db', 1)
    log.msg('Recreating '+ filename, 'db', 1)
515e74c733626d74dfe8d685b4c41efe359006c5 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/7531/515e74c733626d74dfe8d685b4c41efe359006c5/apt_proxy.py
shelve = dbshelve.open(filename)
log.debug('Opening database ' + filename)
shelve = dbshelve.open(filename)
515e74c733626d74dfe8d685b4c41efe359006c5 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/7531/515e74c733626d74dfe8d685b4c41efe359006c5/apt_proxy.py
log.debug('Upgrading from previous database format: %s' % filename + '.previous')
from bsddb import dbshelve as old_dbshelve
log.msg('Upgrading from previous database format: %s' % filename + '.previous')
import bsddb.dbshelve
515e74c733626d74dfe8d685b4c41efe359006c5 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/7531/515e74c733626d74dfe8d685b4c41efe359006c5/apt_proxy.py
previous_shelve = old_dbshelve.open(filename + '.previous')
previous_shelve = bsddb.dbshelve.open(filename + '.previous')
515e74c733626d74dfe8d685b4c41efe359006c5 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/7531/515e74c733626d74dfe8d685b4c41efe359006c5/apt_proxy.py
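Note: the hunks above add logging around a verify-or-recreate pattern for Berkeley DB shelves. A condensed sketch of that pattern, assuming bsddb3's db.DB().verify() call (which raises on a corrupt file); the exact verify entry point in the source is not shown here:

import os
from bsddb3 import db, dbshelve

def open_shelve_safely(filename):
    if os.path.exists(filename):
        try:
            db.DB().verify(filename)  # raises db.DBError on corruption
        except db.DBError:
            os.rename(filename, filename + '.error')  # keep the bad file around
    return dbshelve.open(filename)  # creates a fresh shelf if none exists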
log.debug("abort - not implemented")
log.debug("abort - method not implemented")
f79e904ebe4a7e0b95797022142c5f44ead127c1 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/7531/f79e904ebe4a7e0b95797022142c5f44ead127c1/apt_proxy.py
from bsddb3 import db,dbshelve,DBInvalidArgError
from bsddb3 import db,dbshelve
def open_shelve(filename):
    from bsddb3 import db, dbshelve, DBInvalidArgError
    log.debug('Opening database ' + filename)
6c7d87ef8f67d23bcd0237379d380c1297ee590f /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/7531/6c7d87ef8f67d23bcd0237379d380c1297ee590f/apt_proxy.py
except DBInvalidArgError:
except db.DBInvalidArgError:
6c7d87ef8f67d23bcd0237379d380c1297ee590f /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/7531/6c7d87ef8f67d23bcd0237379d380c1297ee590f/apt_proxy.py
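Note: the fix above qualifies the exception with its module; bsddb3 exposes its exception classes as attributes of the db submodule rather than as top-level names. Tiny standalone sketch (path hypothetical):

from bsddb3 import db, dbshelve
try:
    shelf = dbshelve.open('/tmp/example.db')
except db.DBInvalidArgError:
    shelf = None  # e.g. the file exists but is not a valid Berkeley DB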
del self.factory.runningClients[self.request.uri]
try:
    del self.factory.runningClients[self.request.uri]
except exceptions.KeyError:
    self.factory.debug("We are not on runningClients!!!")
    self.factory.debug(str(self.factory.runningClients))
    raise exceptions.KeyError
def aptEnd(self):
    """
    Called by subclasses when aptDataEnd does too much things.
e4cee446379b83045b5566adba087e587256fe70 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/7531/e4cee446379b83045b5566adba087e587256fe70/apt_proxy.py
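Note: the added try/except logs diagnostics and re-raises instead of failing silently. When the goal is simply "remove if present", dict.pop() with a default is an equivalent guard with no try/except. Standalone sketch (the URI keys are illustrative):

running_clients = {"/dists/stable/Release": object()}
running_clients.pop("/dists/stable/Release", None)  # removes the entry
running_clients.pop("/no/such/uri", None)           # silently does nothing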
self.process = reactor.spawnProcess(self, exe, args)
self.nullhandle = open("/dev/null", "w")
self.process = reactor.spawnProcess(self, exe, args,
                                    childFDs={0: "w",
                                              1: self.nullhandle.fileno(),
                                              2: "r"})
def __init__(self, request):
    self.factory = request.factory
    self.deferred = defer.Deferred()  # Deferred that passes status back
    self.path = request.local_file
5c9bae059cd7d11136ad9fba35419cb5fb58a393 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/7531/5c9bae059cd7d11136ad9fba35419cb5fb58a393/apt_proxy.py
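Note: the childFDs mapping above wires the parent to the child's stdin (0: "w"), dumps the child's stdout into /dev/null via a real file descriptor, and lets the parent read the child's stderr (2: "r"). A minimal standalone sketch of the same mapping (reactor.run() drives it):

from twisted.internet import protocol, reactor

class StderrLogger(protocol.ProcessProtocol):
    def errReceived(self, data):
        print("child stderr: %r" % (data,))
    def processEnded(self, reason):
        reactor.stop()

devnull = open("/dev/null", "w")
reactor.spawnProcess(StderrLogger(), "/bin/sh",
                     ["/bin/sh", "-c", "echo hi >&2"],
                     childFDs={0: "w", 1: devnull.fileno(), 2: "r"})
reactor.run()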
log.debug("Last request removed",'client')
log.debug("Last request removed",'Fetcher')
def remove_request(self, request):
    """
    Request should NOT be served through this Fetcher, the client
    probably closed the connection.
    If this is our last request, we may also close the connection with
    the server depending on the configuration.
5c9bae059cd7d11136ad9fba35419cb5fb58a393 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/7531/5c9bae059cd7d11136ad9fba35419cb5fb58a393/apt_proxy.py
"telling the transport to loseConnection",'client')
"telling the transport to loseConnection",'Fetcher')
5c9bae059cd7d11136ad9fba35419cb5fb58a393 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/7531/5c9bae059cd7d11136ad9fba35419cb5fb58a393/apt_proxy.py
log.debug(str(request.backend) + request.uri, 'fetcher_activate')
log.debug(str(request.backend) + request.uri, 'Fetcher.activate')
def activate(self, request):
    log.debug(str(request.backend) + request.uri, 'fetcher_activate')
    self.local_file = request.local_file
    self.local_mtime = request.local_mtime
    self.factory = request.factory
    self.request = request
    request.content.read()
5c9bae059cd7d11136ad9fba35419cb5fb58a393 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/7531/5c9bae059cd7d11136ad9fba35419cb5fb58a393/apt_proxy.py
log.debug("Finished receiving data, status:%d saveData:%d" %(self.status_code, saveData));
log.debug("Finished receiving data, status:%d saveData:%d" %(self.status_code, saveData), 'Fetcher');
def apDataEnd(self, data, saveData=True):
    """
    Called by subclasses when the data transfer is over.
5c9bae059cd7d11136ad9fba35419cb5fb58a393 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/7531/5c9bae059cd7d11136ad9fba35419cb5fb58a393/apt_proxy.py
log.debug("no local time: "+self.local_file,'client')
log.debug("no local time: "+self.local_file,'Fetcher')
5c9bae059cd7d11136ad9fba35419cb5fb58a393 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/7531/5c9bae059cd7d11136ad9fba35419cb5fb58a393/apt_proxy.py
"connection already closed")
"connection already closed", 'Fetcher')
5c9bae059cd7d11136ad9fba35419cb5fb58a393 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/7531/5c9bae059cd7d11136ad9fba35419cb5fb58a393/apt_proxy.py
log.debug("We are not on runningFetchers!!!",'client')
log.debug("We are not on runningFetchers!!!",'Fetcher')
def apEnd(self):
    """
    Called by subclasses when apDataEnd does too many things.
5c9bae059cd7d11136ad9fba35419cb5fb58a393 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/7531/5c9bae059cd7d11136ad9fba35419cb5fb58a393/apt_proxy.py
'client')
'Fetcher')
5c9bae059cd7d11136ad9fba35419cb5fb58a393 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/7531/5c9bae059cd7d11136ad9fba35419cb5fb58a393/apt_proxy.py
log.debug(' URI:' + self.request.uri, 'fetcher_activate')
log.debug(' URI:' + self.request.uri, 'Fetcher')
5c9bae059cd7d11136ad9fba35419cb5fb58a393 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/7531/5c9bae059cd7d11136ad9fba35419cb5fb58a393/apt_proxy.py
+str(self.factory.runningFetchers),'client')
+str(self.factory.runningFetchers),'Fetcher')
5c9bae059cd7d11136ad9fba35419cb5fb58a393 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/7531/5c9bae059cd7d11136ad9fba35419cb5fb58a393/apt_proxy.py
log.debug("have active Fetcher",'file_client')
log.debug("have active Fetcher",'Fetcher')
def apEndTransfer(self, fetcher_class):
    """
    Remove this Fetcher and transfer all its requests to a new
    instance of 'fetcher_class'.
    """
    # Consider something like this:
    # req = dummyFetcher.fix_ref_request()
    # fetcher = fetcher_class()
    # dummyFetcher.transfer_requests(fetcher)
    # dummyFetcher.apEnd()
    # fetcher.activate(req)
5c9bae059cd7d11136ad9fba35419cb5fb58a393 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/7531/5c9bae059cd7d11136ad9fba35419cb5fb58a393/apt_proxy.py
log.debug("Connection Failed: "+str(reason))
log.debug("Connection Failed: "+str(reason), 'Fetcher')
def connectionFailed(self, reason=None):
    """
    Tell our requests that the connection with the server failed.
    """
    msg = '[%s] Connection Failed: %s/%s' % (
        self.request.backend.base,
        self.request.backendServer.path,
        self.request.backend_uri)
5c9bae059cd7d11136ad9fba35419cb5fb58a393 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/7531/5c9bae059cd7d11136ad9fba35419cb5fb58a393/apt_proxy.py
'client','9')
'Fetcher','9')
5c9bae059cd7d11136ad9fba35419cb5fb58a393 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/7531/5c9bae059cd7d11136ad9fba35419cb5fb58a393/apt_proxy.py
request.transport.loseConnection
request.finish()
def start_transfer(self, request):
    self.if_modified(request)
    if len(self.requests) == 0:
        # we had a single request and didn't have to send it
        self.apEnd()
        return
5c9bae059cd7d11136ad9fba35419cb5fb58a393 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/7531/5c9bae059cd7d11136ad9fba35419cb5fb58a393/apt_proxy.py
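Note: the removed line names the bound method but never calls it, so nothing happens; the fix calls request.finish(), which completes the request properly. The bug in miniature (standalone illustration):

class Transport:
    def loseConnection(self):
        print("closed")

t = Transport()
t.loseConnection    # no-op: evaluates to the method object, never runs
t.loseConnection()  # actually executes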
def open_shelve(filename):
def open_shelve(dbname):
def open_shelve(filename):
    from bsddb3 import db, dbshelve
5c9bae059cd7d11136ad9fba35419cb5fb58a393 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/7531/5c9bae059cd7d11136ad9fba35419cb5fb58a393/apt_proxy.py
db_dir = self.cache_dir+'/'+status_dir+'/db'
if not os.path.exists(db_dir):
    os.makedirs(db_dir)
5c9bae059cd7d11136ad9fba35419cb5fb58a393 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/7531/5c9bae059cd7d11136ad9fba35419cb5fb58a393/apt_proxy.py
self.update_times = open_shelve(db_dir+'/update.db')
self.update_times = open_shelve('update')
5c9bae059cd7d11136ad9fba35419cb5fb58a393 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/7531/5c9bae059cd7d11136ad9fba35419cb5fb58a393/apt_proxy.py
self.access_times = open_shelve(db_dir+'/access.db')
self.access_times = open_shelve('access')
5c9bae059cd7d11136ad9fba35419cb5fb58a393 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/7531/5c9bae059cd7d11136ad9fba35419cb5fb58a393/apt_proxy.py
self.packages = open_shelve(db_dir+'/packages.db')
self.packages = open_shelve('packages')
5c9bae059cd7d11136ad9fba35419cb5fb58a393 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/7531/5c9bae059cd7d11136ad9fba35419cb5fb58a393/apt_proxy.py
self.local_mtime = os.stat(self.host_file)[stat.ST_MTIME]
if os.path.exists(self.host_file):
    self.local_mtime = os.stat(self.host_file)[stat.ST_MTIME]
def host_transfer_done(self):
    """
    Called by our LoopbackRequest when the real Fetcher calls
    finish() on it.
a5f4c913a5496fe56c9c10b36e609ffd01ea44d0 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/7531/a5f4c913a5496fe56c9c10b36e609ffd01ea44d0/apt_proxy.py
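Note: the guard avoids an OSError when the cached file has vanished. An EAFP variant (standalone sketch) also covers the race where the file disappears between the existence check and the stat:

import os, stat

def mtime_or_none(path):
    try:
        return os.stat(path)[stat.ST_MTIME]
    except OSError:
        return None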
basic.FileSender().beginFileTransfer(file, request).addCallback(self.apEnd).addCallback(lambda r: file.close())
basic.FileSender().beginFileTransfer(file, request).addCallback(self.apEnd).addCallback(lambda r: file.close()).addCallback(lambda r: request.transport.loseConnection())
def insert_request(self, request):
    if not request.serve_if_cached:
        request.finish()
        return
    Fetcher.insert_request(self, request)
    self.if_modified(request)
    file = open(self.local_file, 'rb')
    fcntl.lockf(file.fileno(), fcntl.LOCK_SH)
08af97e0aa56d91bbc67c990da996b12e7c77b78 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/7531/08af97e0aa56d91bbc67c990da996b12e7c77b78/apt_proxy.py
basic.FileSender().beginFileTransfer(file, self.request).addCallback(self.apEnd).addCallback(lambda r: file.close())
basic.FileSender().beginFileTransfer(file, self.request).addCallback(self.apEnd).addCallback(lambda r: file.close()).addCallback(lambda r: request.transport.loseConnection())
def activate(self, request):
    Fetcher.activate(self, request)
    if not request.apFetcher:
        return
    self.factory.file_served(request.uri)
08af97e0aa56d91bbc67c990da996b12e7c77b78 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/7531/08af97e0aa56d91bbc67c990da996b12e7c77b78/apt_proxy.py
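Note: both hunks extend the same Deferred chain; callbacks added with addCallback() fire in order once the transfer finishes, so cleanup steps can simply be appended. The shape of the chain, with placeholder names:

from twisted.protocols import basic

def send_and_close(path, request, on_done):
    f = open(path, 'rb')
    d = basic.FileSender().beginFileTransfer(f, request)
    d.addCallback(on_done)                          # e.g. self.apEnd
    d.addCallback(lambda _: f.close())              # then release the file
    d.addCallback(lambda _: request.transport.loseConnection())
    return d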
FileType( re.compile(r"/(Packages|Release|Sources|Contents-.*)(\.(gz|bz2))?$"), "text/plain", 1),
FileType(re.compile(r"/(Packages|Release(\.gpg)?|Sources|Contents-.*)" r"(\.(gz|bz2))?$"), "text/plain", 1),
def check(self, name):
    "Returns true if name is of this filetype"
    if self.regex.search(name):
        return 1
    else:
        return 0
411c83ae56d01172293195bdf787bd6c9364ee06 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/7531/411c83ae56d01172293195bdf787bd6c9364ee06/apt_proxy.py
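Note: the widened pattern above additionally recognizes Release.gpg as a plain-text index file. A quick standalone check of the new behavior:

import re
index_re = re.compile(r"/(Packages|Release(\.gpg)?|Sources|Contents-.*)"
                      r"(\.(gz|bz2))?$")
assert index_re.search("/dists/stable/Release.gpg")
assert index_re.search("/dists/stable/main/binary-i386/Packages.gz")
assert not index_re.search("/pool/main/a/apt/apt_0.5_i386.deb")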
import bsddb
from bsddb import dbshelve as old_dbshelve
8a619583902822990a865e72dd837af6970244c5 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/7531/8a619583902822990a865e72dd837af6970244c5/apt_proxy.py