rem: string (lengths 1 to 322k)
add: string (lengths 0 to 2.05M)
context: string (lengths 4 to 228k)
meta: string (lengths 156 to 215)
CGC = options.CGC
check_cgc(CGC)

cgc_path = options.cgc_path
check_cgc(cgc_path)
def main(cg_shader, CGC):
  matrixloadorder = get_matrixloadorder(cg_shader)
  glsl_vertex, glsl_fragment, log = cg_to_glsl(cg_shader, CGC)
  print log
  print fix_glsl(glsl_vertex)
  print
  print '// #o3d SplitMarker'
  print get_matrixloadorder(cg_shader).strip()
  print
  print fix_glsl(glsl_fragment)
b4063f4f2c6fc90bbda47a2fb5e91c437341938f /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/b4063f4f2c6fc90bbda47a2fb5e91c437341938f/convert.py
main(input, CGC)
main(input, cgc_path)
b4063f4f2c6fc90bbda47a2fb5e91c437341938f /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/b4063f4f2c6fc90bbda47a2fb5e91c437341938f/convert.py
def percentile(N, percent, key=lambda x:x):
  """
  Find the percentile of a list of values.

def __init__(self):
  self.site='UNDEFINED'
  self.times=[]
392f9de03d426db554c52ab8afd06fd4ea6d9436 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/392f9de03d426db554c52ab8afd06fd4ea6d9436/page_cycler_results_parser.py
@parameter N - is a list of values. Note N MUST BE already sorted.
@parameter percent - a float value from 0.0 to 1.0.
@parameter key - optional key function to compute value from each element of N.

class desktopui_PageCyclerTests(test.test):
  version = 1
  results = {}
def percentile(N, percent, key=lambda x:x):
  """
  Find the percentile of a list of values.

  @parameter N - is a list of values. Note N MUST BE already sorted.
  @parameter percent - a float value from 0.0 to 1.0.
  @parameter key - optional key function to compute value from each element of N.
  @return - the percentile of the values
  """
  if not N:
    return None
  k = (len(N)-1) * percent
  f = math.floor(k)
  c = math.ceil(k)
  if f == c:
    return key(N[int(k)])
  d0 = key(N[int(f)]) * (k-f)
  d1 = key(N[int(c)]) * (c-k)
  return d0+d1
392f9de03d426db554c52ab8afd06fd4ea6d9436 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/392f9de03d426db554c52ab8afd06fd4ea6d9436/page_cycler_results_parser.py
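A quick, hedged illustration of the percentile() helper above (it assumes the function and its math import are in scope; the input lists are made up):

import functools

# percent=0.5 gives the median; values between ranks are linearly interpolated.
median = functools.partial(percentile, percent=0.5)
print percentile([1, 2, 3, 4], 0.5)   # 2.5 (interpolated between 2 and 3)
print median([10.0, 20.0, 40.0])      # 20.0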
@return - the percentile of the values
"""
if not N:
  return None
k = (len(N)-1) * percent
f = math.floor(k)
c = math.ceil(k)
if f == c:
  return key(N[int(k)])
d0 = key(N[int(f)]) * (k-f)
d1 = key(N[int(c)]) * (c-k)
return d0+d1

def run_page_cycler(self, gtest_filter = ''):
  assert(gtest_filter != ''), gtest_filter+' cannot be empty!'
  cmd = ('CR_SOURCE_ROOT=/home/chronos/chromium/src /home/chronos/'
         'chromium/src/x86-generic_out/Release/page_cycler_tests'
         ' --gtest_filter=')+gtest_filter
  xcmd = site_ui.xcommand(cmd)
  logging.debug('Running: '+gtest_filter)
  output = utils.system_output(xcmd)
  pcrp = PageCyclerResultsParser()
  result = pcrp.parse_results(output)
  logging.debug(result)
  self.results[gtest_filter] = result
392f9de03d426db554c52ab8afd06fd4ea6d9436 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/392f9de03d426db554c52ab8afd06fd4ea6d9436/page_cycler_results_parser.py
def mean(numbers):
  assert(len(numbers) != 0), 'list should not be empty!'
  return sum(numbers)/len(numbers)

class PageCyclerResultsParser:
  def parse_file(self, outfile = 'out.txt'):
    output = open(outfile).read()
    return self.parse_results(output)

  def parse_results(self, output = ''):
    median = functools.partial(percentile, percent=0.5)
    assert(output != ''), 'Output cannot be empty!'
    lines = output.split('\n')
    found = False
    token = '*RESULT times:'
    for index, line in enumerate(lines):
      if(line.startswith(token)):
        found = True
        break
    assert(found==True), token+' not found!?'
    timesline = lines[index]
    sitesline = lines[index-1]
    m = re.search('\[(.*?)\]', sitesline)
    sites = m.group(1).split(',')
    m = re.search('\[(.*?)\]', timesline)
    times = m.group(1).split(',')
    assert(len(times) % len(sites) == 0), 'Times not divisible by sites!'
    iterations = len(times)/len(sites)
    stList = []
    for ii, site in enumerate(sites):
      st = SiteTimes()
      st.site = site
      for jj in range(0, iterations):
        mytime = float(times[jj*len(sites)+ii])
        st.times.append(mytime)
      stList.append(st)
    medianList = []
    totalTime = 0
    for ii, st in enumerate(stList):
      sortedTimes=sorted(st.times)
      sortedTimes.pop()
      totalTime += mean(sortedTimes)
    return totalTime/len(stList)
def run_once(self):
  testNames=['PageCyclerTest.Alexa_usFile', 'PageCyclerTest.MozFile',
             'PageCyclerTest.Intl1File', 'PageCyclerTest.Intl2File',
             'PageCyclerTest.DhtmlFile', 'PageCyclerTest.Moz2File',
             'PageCyclerTest.BloatFile', 'PageCyclerTest.DomFile',
             'PageCyclerTest.MorejsFile', 'PageCyclerTest.MorejsnpFile']
  for testName in testNames:
    self.run_page_cycler(testName)
  self.write_perf_keyval(self.results)
def mean(numbers):
  assert(len(numbers) != 0), 'list should not be empty!'
  return sum(numbers)/len(numbers)
392f9de03d426db554c52ab8afd06fd4ea6d9436 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/392f9de03d426db554c52ab8afd06fd4ea6d9436/page_cycler_results_parser.py
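A hedged sketch of how the parser above consumes page-cycler output; it assumes PageCyclerResultsParser and its helpers (SiteTimes, percentile, mean) are in scope, and the sites and timings are invented:

sample_output = '\n'.join([
    'sites: [www.a.com,www.b.com]',
    '*RESULT times: t= [100.0,200.0,110.0,190.0,120.0,210.0] ms',
])
pcrp = PageCyclerResultsParser()
# Per site the slowest run is dropped and the rest averaged, then the
# per-site means are averaged: ((100+110)/2 + (190+200)/2) / 2 = 150.0
print pcrp.parse_results(sample_output)   # 150.0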
code = """ %(type)s %(name)s = c.%(name)s; """ file.Write(code % {'type': self.type, 'name': self.name})
file.Write(" %s %s = c.%s;\n" % (self.type, self.name, self.name)) file.Write(" if (%s != 0 && !id_manager()->GetServiceId(%s, &%s)) {\n" % (self.name, self.name, self.name)) file.Write(" SetGLError(GL_INVALID_VALUE);\n") file.Write(" return error::kNoError;\n") file.Write(" }\n")
def WriteGetCode(self, file): """Overridden from Argument.""" code = """ %(type)s %(name)s = c.%(name)s;
e44e099d2f29cafefdeb28f7a0e6982e38881fd9 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/e44e099d2f29cafefdeb28f7a0e6982e38881fd9/build_gles2_cmd_buffer.py
def GetInvalidArg(self, offset, index):
  """returns an invalid value by index."""
  if self.resource_type == "Texture":
    return ("client_buffer_id_", "kNoError", "GL_INVALID_OPERATION")
  return ("client_texture_id_", "kNoError", "GL_INVALID_OPERATION")
e44e099d2f29cafefdeb28f7a0e6982e38881fd9 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/e44e099d2f29cafefdeb28f7a0e6982e38881fd9/build_gles2_cmd_buffer.py
self._chrome_zip_name = 'chrome-%s' % { 'linux': 'linux',
self._chrome_zip_name = 'chrome-%s' % { 'linux64': 'linux64bit',
                                        'linux32': 'linux32bit',
def _ParseArgs(self):
  parser = optparse.OptionParser()
  parser.add_option(
      '-d', '--outdir', type='string', default=None,
      help='Directory in which to setup. This is typically the directory '
           'where the binaries would go when compiled from source.')
  parser.add_option(
      '-p', '--platform', type='string',
      default=pyauto_utils.GetCurrentPlatform(),
      help='Platform. Valid options: win, mac, linux32, linux64. '
           'Default: current platform (%s)' % pyauto_utils.GetCurrentPlatform())
  self._options, self._args = parser.parse_args()
  if not self._options.outdir:
    print >>sys.stderr, 'Need output directory: -d/--outdir'
    sys.exit(1)
  if not self._args:
    print >>sys.stderr, 'Need download url'
    sys.exit(2)
  self._outdir = self._options.outdir
  self._url = self._args[0]
ec474c840e64c2dee642ff7b33f512558e062f99 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/ec474c840e64c2dee642ff7b33f512558e062f99/fetch_prebuilt_pyauto.py
{ 'linux': 'lib.target/_pyautolib.so',
{ 'linux64': 'lib.target/_pyautolib.so',
  'linux32': 'lib.target/_pyautolib.so',
ec474c840e64c2dee642ff7b33f512558e062f99 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/ec474c840e64c2dee642ff7b33f512558e062f99/fetch_prebuilt_pyauto.py
{ 'linux': '_pyautolib.so',
{ 'linux64': '_pyautolib.so',
  'linux32': '_pyautolib.so',
def Run(self):
  self._ParseArgs()
  if not os.path.isdir(self._outdir):
    os.makedirs(self._outdir)
ec474c840e64c2dee642ff7b33f512558e062f99 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/ec474c840e64c2dee642ff7b33f512558e062f99/fetch_prebuilt_pyauto.py
if 'delta' not in perf_data[key] or 'var' not in perf_data[key]:
  bad_keys.append(key)
if (not isinstance(perf_data[key]['delta'], int) and
    not isinstance(perf_data[key]['delta'], float)):
  bad_keys.append(key)
if (not isinstance(perf_data[key]['var'], int) and
    not isinstance(perf_data[key]['var'], float)):
  bad_keys.append(key)

if 'regress' in perf_data[key]:
  if 'improve' not in perf_data[key]:
    bad_keys.append(key)
  if (not isinstance(perf_data[key]['regress'], int) and
      not isinstance(perf_data[key]['regress'], float)):
    bad_keys.append(key)
  if (not isinstance(perf_data[key]['improve'], int) and
      not isinstance(perf_data[key]['improve'], float)):
    bad_keys.append(key)
else:
  if 'delta' not in perf_data[key] or 'var' not in perf_data[key]:
    bad_keys.append(key)
  if (not isinstance(perf_data[key]['delta'], int) and
      not isinstance(perf_data[key]['delta'], float)):
    bad_keys.append(key)
  if (not isinstance(perf_data[key]['var'], int) and
      not isinstance(perf_data[key]['var'], float)):
    bad_keys.append(key)

def testPerfExpectations(self):
  perf_data = LoadData()
774e88514f974563600cf45d30b1aa17c00d09e2 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/774e88514f974563600cf45d30b1aa17c00d09e2/perf_expectations_unittest.py
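For illustration only, hedged entries that would satisfy the added validation above (graph and trace names are invented): an entry either carries numeric 'regress'/'improve' bounds or numeric 'delta'/'var' values.

perf_data = {
    'some-graph/some-trace': {'regress': 2.5, 'improve': 1.5},
    'other-graph/other-trace': {'delta': 1.2, 'var': 0.4},
}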
" .WillOnce(SetArgumentPointee<2>(strlen(kInfo)));") % (
" .WillOnce(SetArgumentPointee<2>(strlen(kInfo) + 1));") % (
def WriteServiceUnitTest(self, func, file):
  """Overrriden from TypeHandler."""
  valid_test = """
9908a3c6d44dd8e9690c390f2572cbc212871dc7 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/9908a3c6d44dd8e9690c390f2572cbc212871dc7/build_gles2_cmd_buffer.py
if re.search(TsanAnalyzer.TSAN_WARNING_DESCRIPTION, self.line_):
if (re.search(TsanAnalyzer.TSAN_WARNING_DESCRIPTION, self.line_) and not common.IsWindows()):
def ParseReportFile(self, filename):
  self.cur_fd_ = open(filename, 'r')
0f2bc5194e8d9630dcc466d363c181c11d4768e9 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/0f2bc5194e8d9630dcc466d363c181c11d4768e9/tsan_analyze.py
for dir in ['BackMenu_', 'ForwardMenu_']:
for dir in ('BackMenu_', 'ForwardMenu_'):
def AddComputedActions(actions):
  """Add computed actions to the actions list.

  Arguments:
    actions: set of actions to add to.
  """
  # Actions for back_forward_menu_model.cc.
  for dir in ['BackMenu_', 'ForwardMenu_']:
    actions.add(dir + 'ShowFullHistory')
    actions.add(dir + 'Popup')
    for i in range(1, 20):
      actions.add(dir + 'HistoryClick' + str(i))
      actions.add(dir + 'ChapterClick' + str(i))
  # Actions for new_tab_ui.cc.
  for i in range(1, 10):
    actions.add('MostVisited%d' % i)
1e495067895655d6adbd90afffe1df026dace1d8 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/1e495067895655d6adbd90afffe1df026dace1d8/extract_actions.py
startup_pipe.write(struct.pack('@H', listen_port))
startup_pipe.write(struct.pack('@H', server_data['port']))
def main(options, args):
  logfile = open('testserver.log', 'w')
  sys.stdout = FileMultiplexer(sys.stdout, logfile)
  sys.stderr = FileMultiplexer(sys.stderr, logfile)
  port = options.port
  if options.server_type == SERVER_HTTP:
    if options.cert:
      # let's make sure the cert file exists.
      if not os.path.isfile(options.cert):
        print 'specified server cert file not found: ' + options.cert + \
              ' exiting...'
        return
      for ca_cert in options.ssl_client_ca:
        if not os.path.isfile(ca_cert):
          print 'specified trusted client CA file not found: ' + ca_cert + \
                ' exiting...'
          return
      server = HTTPSServer(('127.0.0.1', port), TestPageHandler, options.cert,
                           options.ssl_client_auth, options.ssl_client_ca,
                           options.ssl_bulk_cipher)
      print 'HTTPS server started on port %d...' % server.server_port
    else:
      server = StoppableHTTPServer(('127.0.0.1', port), TestPageHandler)
      print 'HTTP server started on port %d...' % server.server_port
    server.data_dir = MakeDataDir()
    server.file_root_url = options.file_root_url
    listen_port = server.server_port
    server._device_management_handler = None
  elif options.server_type == SERVER_SYNC:
    server = SyncHTTPServer(('127.0.0.1', port), SyncPageHandler)
    print 'Sync HTTP server started on port %d...' % server.server_port
    listen_port = server.server_port
  # means FTP Server
  else:
    my_data_dir = MakeDataDir()
    # Instantiate a dummy authorizer for managing 'virtual' users
    authorizer = pyftpdlib.ftpserver.DummyAuthorizer()
    # Define a new user having full r/w permissions and a read-only
    # anonymous user
    authorizer.add_user('chrome', 'chrome', my_data_dir, perm='elradfmw')
    authorizer.add_anonymous(my_data_dir)
    # Instantiate FTP handler class
    ftp_handler = pyftpdlib.ftpserver.FTPHandler
    ftp_handler.authorizer = authorizer
    # Define a customized banner (string returned when client connects)
    ftp_handler.banner = ("pyftpdlib %s based ftpd ready." %
                          pyftpdlib.ftpserver.__ver__)
    # Instantiate FTP server class and listen to 127.0.0.1:port
    address = ('127.0.0.1', port)
    server = pyftpdlib.ftpserver.FTPServer(address, ftp_handler)
    listen_port = server.socket.getsockname()[1]
    print 'FTP server started on port %d...' % listen_port
  # Notify the parent that we've started. (BaseServer subclasses
  # bind their sockets on construction.)
  if options.startup_pipe is not None:
    if sys.platform == 'win32':
      fd = msvcrt.open_osfhandle(options.startup_pipe, 0)
    else:
      fd = options.startup_pipe
    startup_pipe = os.fdopen(fd, "w")
    # Write the listening port as a 2 byte value. This is _not_ using
    # network byte ordering since the other end of the pipe is on the same
    # machine.
    startup_pipe.write(struct.pack('@H', listen_port))
    startup_pipe.close()
  try:
    server.serve_forever()
  except KeyboardInterrupt:
    print 'shutting down server'
    server.stop = True
52adb01bcfe2c7df52cfcb7c1cb6051ea95340f7 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/52adb01bcfe2c7df52cfcb7c1cb6051ea95340f7/testserver.py
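On the parent side of that startup pipe, the port can be read back as the same two-byte value; a minimal sketch, assuming a hypothetical read_fd for the read end of the pipe:

import os
import struct

def read_server_port(read_fd):
  # The child wrote struct.pack('@H', port): a native-endian unsigned
  # short, so read exactly two bytes and unpack them.
  pipe = os.fdopen(read_fd, 'rb')
  data = pipe.read(2)
  pipe.close()
  return struct.unpack('@H', data)[0]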
logging.debug('thread %s starting' % (self.getName()))
logging.debug('%s starting' % (self.getName()))
def run(self):
  """Delegate main work to a helper method and watch for uncaught
  exceptions."""
  self._start_time = time.time()
  self._num_tests = 0
  try:
    logging.debug('thread %s starting' % (self.getName()))
    self._Run(test_runner=None, result_summary=None)
    logging.debug('thread %s done (%d tests)' % (self.getName(),
                  self.GetNumTests()))
  except:
    # Save the exception for our caller to see.
    self._exception_info = sys.exc_info()
    self._stop_time = time.time()
    # Re-raise it and die.
    logging.error('thread %s dying: %s' % (self.getName(),
                  self._exception_info))
    raise
  self._stop_time = time.time()
e84e0af1bb8bce9694f71c06db04a4ee04ce4fa7 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/e84e0af1bb8bce9694f71c06db04a4ee04ce4fa7/test_shell_thread.py
logging.debug('thread %s done (%d tests)' % (self.getName(),
logging.debug('%s done (%d tests)' % (self.getName(),
e84e0af1bb8bce9694f71c06db04a4ee04ce4fa7 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/e84e0af1bb8bce9694f71c06db04a4ee04ce4fa7/test_shell_thread.py
logging.error('thread %s dying: %s' % (self.getName(),
logging.error('%s dying: %s' % (self.getName(),
e84e0af1bb8bce9694f71c06db04a4ee04ce4fa7 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/e84e0af1bb8bce9694f71c06db04a4ee04ce4fa7/test_shell_thread.py
logging.debug("%s failed:\n%s" % (path_utils.RelativeTestFilename(filename), error_str))
logging.debug("%s %s failed:\n%s" % (self.getName(), path_utils.RelativeTestFilename(filename), error_str))
def _Run(self, test_runner, result_summary):
  """Main work entry point of the thread. Basically we pull urls from the
  filename queue and run the tests until we run out of urls.
e84e0af1bb8bce9694f71c06db04a4ee04ce4fa7 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/e84e0af1bb8bce9694f71c06db04a4ee04ce4fa7/test_shell_thread.py
logging.debug(path_utils.RelativeTestFilename(filename) + " passed")
logging.debug("%s %s passed" % (self.getName(), path_utils.RelativeTestFilename(filename)))
e84e0af1bb8bce9694f71c06db04a4ee04ce4fa7 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/e84e0af1bb8bce9694f71c06db04a4ee04ce4fa7/test_shell_thread.py
while 1:
  new_basename = os.path.splitext(basename)[0]
  if basename == new_basename:
    break
  else:
    basename = new_basename
return basename
return os.path.splitext(basename)[0]
def ExtractModuleName(infile_path):
  """Infers the module name from the input file path.

  The input filename is supposed to be in the form "ModuleName.sigs".
  This function splits the filename from the extention on that basename
  of the path and returns that as the module name.

  Args:
    infile_path: String holding the path to the input file.

  Returns:
    The module name as a string.
  """
  basename = os.path.basename(infile_path)
  # This loop continously removes suffixes of the filename separated by a "."
  # character.
  while 1:
    new_basename = os.path.splitext(basename)[0]
    if basename == new_basename:
      break
    else:
      basename = new_basename
  return basename
25d64ba1613990508557b9e3aa4d6a6e7adbb0af /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/25d64ba1613990508557b9e3aa4d6a6e7adbb0af/generate_stubs.py
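A hedged illustration of how the removed loop and the added one-liner above differ for a made-up filename containing extra dots:

import os

basename = 'Module.Name.sigs'

# Added code: strip only the final extension.
print os.path.splitext(basename)[0]   # Module.Name

# Removed loop: keep stripping until nothing changes.
while 1:
  new_basename = os.path.splitext(basename)[0]
  if basename == new_basename:
    break
  basename = new_basename
print basename                        # Module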
def RewritePath(path, sysroot):
  """Rewrites a path by prefixing it with the sysroot if it is absolute."""

def RewritePath(path, opts):
  """Rewrites a path by stripping the prefix and prepending the sysroot."""
  sysroot = opts.sysroot
  prefix = opts.strip_prefix

def RewritePath(path, sysroot):
  """Rewrites a path by prefixing it with the sysroot if it is absolute."""
  if os.path.isabs(path) and not path.startswith(sysroot):
    path = path.lstrip('/')
    return os.path.join(sysroot, path)
  else:
    return path
090e84f6c261bf1557fa3074a65c518eb7fc03c0 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/090e84f6c261bf1557fa3074a65c518eb7fc03c0/rewrite_dirs.py
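A hedged example of the pre-change RewritePath behaviour shown above (the paths are invented):

# Absolute paths outside the sysroot are re-rooted under it.
print RewritePath('/usr/include/glib-2.0', '/build/sysroot')
# -> /build/sysroot/usr/include/glib-2.0

# Relative paths come back unchanged.
print RewritePath('relative/include', '/build/sysroot')
# -> relative/include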
def RewriteLine(line, sysroot):
def RewriteLine(line, opts):
def RewriteLine(line, sysroot):
  """Rewrites all the paths in recognized options."""
  args = line.split()
  count = len(args)
  i = 0
  while i < count:
    for prefix in REWRITE_PREFIX:
      # The option can be either in the form "-I /path/to/dir" or
      # "-I/path/to/dir" so handle both.
      if args[i] == prefix:
        i += 1
        try:
          args[i] = RewritePath(args[i], sysroot)
        except IndexError:
          sys.stderr.write('Missing argument following %s\n' % prefix)
          break
      elif args[i].startswith(prefix):
        args[i] = prefix + RewritePath(args[i][len(prefix):], sysroot)
    i += 1
  return ' '.join(args)
090e84f6c261bf1557fa3074a65c518eb7fc03c0 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/090e84f6c261bf1557fa3074a65c518eb7fc03c0/rewrite_dirs.py
args[i] = RewritePath(args[i], sysroot)
args[i] = RewritePath(args[i], opts)
090e84f6c261bf1557fa3074a65c518eb7fc03c0 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/090e84f6c261bf1557fa3074a65c518eb7fc03c0/rewrite_dirs.py
args[i] = prefix + RewritePath(args[i][len(prefix):], sysroot)
args[i] = prefix + RewritePath(args[i][len(prefix):], opts)
090e84f6c261bf1557fa3074a65c518eb7fc03c0 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/090e84f6c261bf1557fa3074a65c518eb7fc03c0/rewrite_dirs.py
try:
  sysroot = argv[1]
except IndexError:
  sys.stderr.write('usage: %s /path/to/sysroot\n' % argv[0])
  return 1

parser = optparse.OptionParser()
parser.add_option('-s', '--sysroot', default='/', help='sysroot to prepend')
parser.add_option('-p', '--strip-prefix', default='', help='prefix to strip')
opts, args = parser.parse_args(argv[1:])

def main(argv):
  try:
    sysroot = argv[1]
  except IndexError:
    sys.stderr.write('usage: %s /path/to/sysroot\n' % argv[0])
    return 1
  for line in sys.stdin.readlines():
    line = RewriteLine(line.strip(), sysroot)
    print line
  return 0
090e84f6c261bf1557fa3074a65c518eb7fc03c0 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/090e84f6c261bf1557fa3074a65c518eb7fc03c0/rewrite_dirs.py
line = RewriteLine(line.strip(), sysroot)
line = RewriteLine(line.strip(), opts)
090e84f6c261bf1557fa3074a65c518eb7fc03c0 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/090e84f6c261bf1557fa3074a65c518eb7fc03c0/rewrite_dirs.py
input_api, output_api, sources))
input_api, output_api, source_file_filter=sources))
def _CommonChecks(input_api, output_api):
  results = []
  # What does this code do?
  # It loads the default black list (e.g. third_party, experimental, etc) and
  # add our black list (breakpad, skia and v8 are still not following
  # google style and are not really living this repository).
  # See presubmit_support.py InputApi.FilterSourceFile for the (simple) usage.
  black_list = input_api.DEFAULT_BLACK_LIST + _EXCLUDED_PATHS
  white_list = input_api.DEFAULT_WHITE_LIST + _TEXT_FILES
  sources = lambda x: input_api.FilterSourceFile(x, black_list=black_list)
  text_files = lambda x: input_api.FilterSourceFile(x,
                                                    black_list=black_list,
                                                    white_list=white_list)
  results.extend(input_api.canned_checks.CheckLongLines(
      input_api, output_api, sources))
  results.extend(input_api.canned_checks.CheckChangeHasNoTabs(
      input_api, output_api, sources))
  results.extend(input_api.canned_checks.CheckChangeHasNoStrayWhitespace(
      input_api, output_api, sources))
  results.extend(input_api.canned_checks.CheckChangeHasBugField(
      input_api, output_api))
  results.extend(input_api.canned_checks.CheckChangeHasTestField(
      input_api, output_api))
  results.extend(input_api.canned_checks.CheckChangeSvnEolStyle(
      input_api, output_api, text_files))
  results.extend(input_api.canned_checks.CheckSvnForCommonMimeTypes(
      input_api, output_api))
  results.extend(input_api.canned_checks.CheckLicense(
      input_api, output_api, _LICENSE_HEADER, sources))
  return results
6c76dfe417e06961aaf92bb1ec42ffbe50c8d198 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/6c76dfe417e06961aaf92bb1ec42ffbe50c8d198/PRESUBMIT.py
input_api, output_api, sources))
input_api, output_api, source_file_filter=sources))
6c76dfe417e06961aaf92bb1ec42ffbe50c8d198 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/6c76dfe417e06961aaf92bb1ec42ffbe50c8d198/PRESUBMIT.py
input_api, output_api, sources))
input_api, output_api, source_file_filter=sources))
6c76dfe417e06961aaf92bb1ec42ffbe50c8d198 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/6c76dfe417e06961aaf92bb1ec42ffbe50c8d198/PRESUBMIT.py
input_api, output_api, text_files))
input_api, output_api, source_file_filter=text_files))
6c76dfe417e06961aaf92bb1ec42ffbe50c8d198 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/6c76dfe417e06961aaf92bb1ec42ffbe50c8d198/PRESUBMIT.py
input_api, output_api, _LICENSE_HEADER, sources))
input_api, output_api, _LICENSE_HEADER, source_file_filter=sources))
6c76dfe417e06961aaf92bb1ec42ffbe50c8d198 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/6c76dfe417e06961aaf92bb1ec42ffbe50c8d198/PRESUBMIT.py
f.write(command)
f.write(command.replace("%p", "$$.%p"))
def CreateBrowserWrapper(self, command, logfiles):
  """The program being run invokes Python or something else that can't stand
  to be valgrinded, and also invokes the Chrome browser. Set an environment
  variable to tell the program to prefix the Chrome commandline with a magic
  wrapper. Build the magic wrapper here.
  """
  # We'll be storing the analyzed results of individual tests
  # in a temporary text file
  self._indirect_analyze_results_file = tempfile.mkstemp(
      dir=self.TMP_DIR, prefix="valgrind_analyze.", text=True)[1]
32f13524408684115e887b83166090e18418b679 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/32f13524408684115e887b83166090e18418b679/valgrind_test.py
logs_list = logfiles.replace("%p", "*")
logs_list = logfiles.replace("%p", "$$.*")
32f13524408684115e887b83166090e18418b679 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/32f13524408684115e887b83166090e18418b679/valgrind_test.py
def __init__(self, methodName='runTest'):
  pyautolib.PyUITestSuite.__init__(self, sys.argv)

def __init__(self, methodName='runTest', extra_chrome_flags=None):
  """Initialize PyUITest.

  When redefining __init__ in a derived class, make sure that:
    o you make a call this __init__
    o __init__ takes methodName as a arg. this is mandated by unittest module

  Args:
    methodName: the default method name. Internal use by unittest module
    extra_chrome_flags: additional flags to pass when launching chrome
  """
  args = sys.argv
  if extra_chrome_flags is not None:
    args.append('--extra-chrome-flags=%s' % extra_chrome_flags)
  pyautolib.PyUITestSuite.__init__(self, args)

def __init__(self, methodName='runTest'):
  pyautolib.PyUITestSuite.__init__(self, sys.argv)
  # Figure out path to chromium binaries
  browser_dir = os.path.normpath(os.path.dirname(pyautolib.__file__))
  os.environ['PATH'] = browser_dir + os.pathsep + os.environ['PATH']
  self.Initialize(pyautolib.FilePath(browser_dir))
  unittest.TestCase.__init__(self, methodName)
e792c9442b58da0f0abe42dae4a7abc75ea17127 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/e792c9442b58da0f0abe42dae4a7abc75ea17127/pyauto.py
if not os.path.exists('/usr/share/ca-certificates/root_ca_cert.crt'):
  logging.warning('WARNING: SSL certificate missing! SSL tests will fail.')
  logging.warning('You need to run:')
  logging.warning('sudo cp src/net/data/ssl/certificates/root_ca_cert.crt '
                  '/usr/share/ca-certificates/')
  logging.warning('sudo vi /etc/ca-certificates.conf')
  logging.warning(' (and add the line root_ca_cert.crt)')
  logging.warning('sudo update-ca-certificates')

def PrepareForTestWine(self):
  """Set up the Wine environment.
99331e5fad115265ffe27791e12a8daf356a3d4a /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/99331e5fad115265ffe27791e12a8daf356a3d4a/valgrind_test.py
code_file = open(path)
code_file = open(path, "r")
def _parse_api_calls(self, api_methods):
  """ Returns a list of Chrome extension API calls the sample makes.
935a2bac042bac9239b2df8286c90a4b3f3d55ff /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/935a2bac042bac9239b2df8286c90a4b3f3d55ff/directory.py
code_contents = code_file.read()
code_contents = unicode(code_file.read(), errors="replace")
935a2bac042bac9239b2df8286c90a4b3f3d55ff /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/935a2bac042bac9239b2df8286c90a4b3f3d55ff/directory.py
print "ERROR: third_party/WebKit appears to not be under git control."
if os.path.exists('third_party/WebKit'):
  print "ERROR: third_party/WebKit appears to not be under git control."
else:
  print "ERROR: third_party/WebKit could not be found."
  print "Did you run this script from the right directory?"

def main():
  if not os.path.exists('third_party/WebKit/.git'):
    print "ERROR: third_party/WebKit appears to not be under git control."
    print "See http://code.google.com/p/chromium/wiki/UsingWebKitGit for"
    print "setup instructions."
    return
  webkit_rev = GetWebKitRev()
  print 'Desired revision: r%s.' % webkit_rev
  os.chdir('third_party/WebKit')
  changed = UpdateGClientBranch(webkit_rev)
  if changed:
    UpdateCurrentCheckoutIfAppropriate()
  else:
    print "Already on correct revision."
3ca97b949d1b71a4ea4046f03ced74a9b3081caf /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/3ca97b949d1b71a4ea4046f03ced74a9b3081caf/sync-webkit-git.py
pdf_files_path = os.path.join(self.DataDir(), 'plugin', 'pdf')
pdf_files_path = os.path.join(self.DataDir(), 'pyauto_private', 'pdf')
def testPDFRunner(self):
  """Navigate to pdf files and verify that browser doesn't crash"""
  # bail out if not a branded build
  properties = self.GetBrowserInfo()['properties']
  if properties['branding'] != 'Google Chrome':
    return
  pdf_files_path = os.path.join(self.DataDir(), 'plugin', 'pdf')
  pdf_files = glob.glob(os.path.join(pdf_files_path, '*.pdf'))
  for pdf_file in pdf_files:
    # Some pdfs cause known crashes. Exclude them. crbug.com/63549
    if os.path.basename(pdf_file) in ('nullip.pdf', 'sample.pdf'):
      continue
    url = self.GetFileURLForPath(os.path.join(pdf_files_path, pdf_file))
    self.AppendTab(pyauto.GURL(url))
  for tab_index in range(1, len(pdf_files) + 1):
    self.ActivateTab(tab_index)
    self._PerformPDFAction('fitToHeight', tab_index=tab_index)
    self._PerformPDFAction('fitToWidth', tab_index=tab_index)
  # Assert that there is at least 1 browser window.
  self.assertTrue(self.GetBrowserWindowCount(),
                  'Browser crashed, no window is open')
f7d05ad61e86c784bc050ee9cfef9e2e4d5aed77 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/f7d05ad61e86c784bc050ee9cfef9e2e4d5aed77/pdf.py
self._source_dir = layout_package.path_utils.GetAbsolutePath(
self._source_dir = layout_package.path_utils.get_absolute_path(
def __init__(self, options, args, test):
  # The known list of tests.
  # Recognise the original abbreviations as well as full executable names.
  self._test_list = {
    "base": self.TestBase,            "base_unittests": self.TestBase,
    "browser": self.TestBrowser,      "browser_tests": self.TestBrowser,
    "googleurl": self.TestGURL,       "googleurl_unittests": self.TestGURL,
    "ipc": self.TestIpc,              "ipc_tests": self.TestIpc,
    "layout": self.TestLayout,        "layout_tests": self.TestLayout,
    "media": self.TestMedia,          "media_unittests": self.TestMedia,
    "net": self.TestNet,              "net_unittests": self.TestNet,
    "printing": self.TestPrinting,    "printing_unittests": self.TestPrinting,
    "startup": self.TestStartup,      "startup_tests": self.TestStartup,
    "test_shell": self.TestTestShell, "test_shell_tests": self.TestTestShell,
    "ui": self.TestUI,                "ui_tests": self.TestUI,
    "unit": self.TestUnit,            "unit_tests": self.TestUnit,
    "app": self.TestApp,              "app_unittests": self.TestApp,
  }
4f5402498c3474a3bcee8cf7c0d48fbeb6cbb8b4 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/4f5402498c3474a3bcee8cf7c0d48fbeb6cbb8b4/chrome_tests.py
def SetupHtmlDirectory(html_directory, clean_html_directory):
def SetupHtmlDirectory(html_directory):
def SetupHtmlDirectory(html_directory, clean_html_directory):
  """Setup the directory to store html results.

  All html related files are stored in the "rebaseline_html" subdirectory.

  Args:
    html_directory: parent directory that stores the rebaselining results.
      If None, a temp directory is created.
    clean_html_directory: if True, all existing files in the html directory
      are removed before rebaselining.

  Returns:
    the directory that stores the html related rebaselining results.
  """
  if not html_directory:
    html_directory = tempfile.mkdtemp()
  elif not os.path.exists(html_directory):
    os.mkdir(html_directory)
  html_directory = os.path.join(html_directory, 'rebaseline_html')
  logging.info('Html directory: "%s"', html_directory)
  if clean_html_directory and os.path.exists(html_directory):
    shutil.rmtree(html_directory, True)
    logging.info('Deleted file at html directory: "%s"', html_directory)
  if not os.path.exists(html_directory):
    os.mkdir(html_directory)
  return html_directory
a8cca2e7f89f106d5b2cdf9f818191a14c4afb8b /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/a8cca2e7f89f106d5b2cdf9f818191a14c4afb8b/rebaseline.py
clean_html_directory: if True, all existing files in the html directory are removed before rebaselining.
a8cca2e7f89f106d5b2cdf9f818191a14c4afb8b /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/a8cca2e7f89f106d5b2cdf9f818191a14c4afb8b/rebaseline.py
if clean_html_directory and os.path.exists(html_directory):
if os.path.exists(html_directory):
a8cca2e7f89f106d5b2cdf9f818191a14c4afb8b /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/a8cca2e7f89f106d5b2cdf9f818191a14c4afb8b/rebaseline.py
archive_url = ('%s%s/%s.zip' % (url_base, latest_revision, self._options.archive_name))
archive_url = ('%s%s/layout-test-results.zip' % (url_base, latest_revision))
def _GetArchiveUrl(self):
  """Generate the url to download latest layout test archive.
a8cca2e7f89f106d5b2cdf9f818191a14c4afb8b /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/a8cca2e7f89f106d5b2cdf9f818191a14c4afb8b/rebaseline.py
archive_test_name = '%s/%s-actual%s' % (self._options.archive_name, test_basename, suffix)
archive_test_name = 'layout-test-results/%s-actual%s' % (test_basename, suffix)
def _ExtractAndAddNewBaselines(self, archive_file):
  """Extract new baselines from archive and add them to SVN repository.
a8cca2e7f89f106d5b2cdf9f818191a14c4afb8b /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/a8cca2e7f89f106d5b2cdf9f818191a14c4afb8b/rebaseline.py
if self._options.no_html_results:
  return

def _CreateHtmlBaselineFiles(self, baseline_fullpath):
  """Create baseline files (old, new and diff) in html directory.
a8cca2e7f89f106d5b2cdf9f818191a14c4afb8b /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/a8cca2e7f89f106d5b2cdf9f818191a14c4afb8b/rebaseline.py
self._browser_path = options.browser_path
def __init__(self, options, platforms, rebaselining_tests):
  self._html_directory = options.html_directory
  self._browser_path = options.browser_path
  self._platforms = platforms
  self._rebaselining_tests = rebaselining_tests
  self._html_file = os.path.join(options.html_directory, 'rebaseline.html')
a8cca2e7f89f106d5b2cdf9f818191a14c4afb8b /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/a8cca2e7f89f106d5b2cdf9f818191a14c4afb8b/rebaseline.py
if self._browser_path:
  RunShell([self._browser_path, html_uri], False)
else:
  webbrowser.open(html_uri, 1)
webbrowser.open(html_uri, 1)
def ShowHtml(self):
  """Launch the rebaselining html in brwoser."""
a8cca2e7f89f106d5b2cdf9f818191a14c4afb8b /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/a8cca2e7f89f106d5b2cdf9f818191a14c4afb8b/rebaseline.py
option_parser.add_option('-t', '--archive_name',
                         default='layout-test-results',
                         help='Layout test result archive name.')
def main(): """Main function to produce new baselines.""" option_parser = optparse.OptionParser() option_parser.add_option('-v', '--verbose', action='store_true', default=False, help='include debug-level logging.') option_parser.add_option('-p', '--platforms', default='mac,win,win-xp,win-vista,linux', help=('Comma delimited list of platforms that need ' 'rebaselining.')) option_parser.add_option('-u', '--archive_url', default=('http://build.chromium.org/buildbot/' 'layout_test_results'), help=('Url to find the layout test result archive ' 'file.')) option_parser.add_option('-t', '--archive_name', default='layout-test-results', help='Layout test result archive name.') option_parser.add_option('-w', '--webkit_canary', action='store_true', default=False, help=('If True, pull baselines from webkit.org ' 'canary bot.')) option_parser.add_option('-b', '--backup', action='store_true', default=False, help=('Whether or not to backup the original test ' 'expectations file after rebaseline.')) option_parser.add_option('-o', '--no_html_results', action='store_true', default=False, help=('If specified, do not generate html that ' 'compares the rebaselining results.')) option_parser.add_option('-d', '--html_directory', default='', help=('The directory that stores the results for ' 'rebaselining comparison.')) option_parser.add_option('-c', '--clean_html_directory', action='store_true', default=False, help=('If specified, delete all existing files in ' 'the html directory before rebaselining.')) option_parser.add_option('-e', '--browser_path', default='', help=('The browser path that you would like to ' 'use to launch the rebaselining result ' 'comparison html')) options = option_parser.parse_args()[0] # Set up our logging format. log_level = logging.INFO if options.verbose: log_level = logging.DEBUG logging.basicConfig(level=log_level, format=('%(asctime)s %(filename)s:%(lineno)-3d ' '%(levelname)s %(message)s'), datefmt='%y%m%d %H:%M:%S') # Verify 'platforms' option is valid if not options.platforms: logging.error('Invalid "platforms" option. --platforms must be specified ' 'in order to rebaseline.') sys.exit(1) platforms = [p.strip().lower() for p in options.platforms.split(',')] for platform in platforms: if not platform in REBASELINE_PLATFORM_ORDER: logging.error('Invalid platform: "%s"' % (platform)) sys.exit(1) # Adjust the platform order so rebaseline tool is running at the order of # 'mac', 'win' and 'linux'. This is in same order with layout test baseline # search paths. It simplifies how the rebaseline tool detects duplicate # baselines. Check _IsDupBaseline method for details. rebaseline_platforms = [] for platform in REBASELINE_PLATFORM_ORDER: if platform in platforms: rebaseline_platforms.append(platform) if not options.no_html_results: options.html_directory = SetupHtmlDirectory(options.html_directory, options.clean_html_directory) rebaselining_tests = set() backup = options.backup for platform in rebaseline_platforms: rebaseliner = Rebaseliner(platform, options) logging.info('') LogDashedString('Rebaseline started', platform) if rebaseliner.Run(backup): # Only need to backup one original copy of test expectation file. 
backup = False LogDashedString('Rebaseline done', platform) else: LogDashedString('Rebaseline failed', platform, logging.ERROR) rebaselining_tests |= set(rebaseliner.GetRebaseliningTests()) if not options.no_html_results: logging.info('') LogDashedString('Rebaselining result comparison started', None) html_generator = HtmlGenerator(options, rebaseline_platforms, rebaselining_tests) html_generator.GenerateHtml() html_generator.ShowHtml() LogDashedString('Rebaselining result comparison done', None) sys.exit(0)
a8cca2e7f89f106d5b2cdf9f818191a14c4afb8b /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/a8cca2e7f89f106d5b2cdf9f818191a14c4afb8b/rebaseline.py
option_parser.add_option('-o', '--no_html_results',
                         action='store_true',
                         default=False,
                         help=('If specified, do not generate html that '
                               'compares the rebaselining results.'))
def main(): """Main function to produce new baselines.""" option_parser = optparse.OptionParser() option_parser.add_option('-v', '--verbose', action='store_true', default=False, help='include debug-level logging.') option_parser.add_option('-p', '--platforms', default='mac,win,win-xp,win-vista,linux', help=('Comma delimited list of platforms that need ' 'rebaselining.')) option_parser.add_option('-u', '--archive_url', default=('http://build.chromium.org/buildbot/' 'layout_test_results'), help=('Url to find the layout test result archive ' 'file.')) option_parser.add_option('-t', '--archive_name', default='layout-test-results', help='Layout test result archive name.') option_parser.add_option('-w', '--webkit_canary', action='store_true', default=False, help=('If True, pull baselines from webkit.org ' 'canary bot.')) option_parser.add_option('-b', '--backup', action='store_true', default=False, help=('Whether or not to backup the original test ' 'expectations file after rebaseline.')) option_parser.add_option('-o', '--no_html_results', action='store_true', default=False, help=('If specified, do not generate html that ' 'compares the rebaselining results.')) option_parser.add_option('-d', '--html_directory', default='', help=('The directory that stores the results for ' 'rebaselining comparison.')) option_parser.add_option('-c', '--clean_html_directory', action='store_true', default=False, help=('If specified, delete all existing files in ' 'the html directory before rebaselining.')) option_parser.add_option('-e', '--browser_path', default='', help=('The browser path that you would like to ' 'use to launch the rebaselining result ' 'comparison html')) options = option_parser.parse_args()[0] # Set up our logging format. log_level = logging.INFO if options.verbose: log_level = logging.DEBUG logging.basicConfig(level=log_level, format=('%(asctime)s %(filename)s:%(lineno)-3d ' '%(levelname)s %(message)s'), datefmt='%y%m%d %H:%M:%S') # Verify 'platforms' option is valid if not options.platforms: logging.error('Invalid "platforms" option. --platforms must be specified ' 'in order to rebaseline.') sys.exit(1) platforms = [p.strip().lower() for p in options.platforms.split(',')] for platform in platforms: if not platform in REBASELINE_PLATFORM_ORDER: logging.error('Invalid platform: "%s"' % (platform)) sys.exit(1) # Adjust the platform order so rebaseline tool is running at the order of # 'mac', 'win' and 'linux'. This is in same order with layout test baseline # search paths. It simplifies how the rebaseline tool detects duplicate # baselines. Check _IsDupBaseline method for details. rebaseline_platforms = [] for platform in REBASELINE_PLATFORM_ORDER: if platform in platforms: rebaseline_platforms.append(platform) if not options.no_html_results: options.html_directory = SetupHtmlDirectory(options.html_directory, options.clean_html_directory) rebaselining_tests = set() backup = options.backup for platform in rebaseline_platforms: rebaseliner = Rebaseliner(platform, options) logging.info('') LogDashedString('Rebaseline started', platform) if rebaseliner.Run(backup): # Only need to backup one original copy of test expectation file. 
backup = False LogDashedString('Rebaseline done', platform) else: LogDashedString('Rebaseline failed', platform, logging.ERROR) rebaselining_tests |= set(rebaseliner.GetRebaseliningTests()) if not options.no_html_results: logging.info('') LogDashedString('Rebaselining result comparison started', None) html_generator = HtmlGenerator(options, rebaseline_platforms, rebaselining_tests) html_generator.GenerateHtml() html_generator.ShowHtml() LogDashedString('Rebaselining result comparison done', None) sys.exit(0)
a8cca2e7f89f106d5b2cdf9f818191a14c4afb8b /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/a8cca2e7f89f106d5b2cdf9f818191a14c4afb8b/rebaseline.py
option_parser.add_option('-c', '--clean_html_directory',
                         action='store_true',
                         default=False,
                         help=('If specified, delete all existing files in '
                               'the html directory before rebaselining.'))
option_parser.add_option('-e', '--browser_path',
                         default='',
                         help=('The browser path that you would like to '
                               'use to launch the rebaselining result '
                               'comparison html'))
def main(): """Main function to produce new baselines.""" option_parser = optparse.OptionParser() option_parser.add_option('-v', '--verbose', action='store_true', default=False, help='include debug-level logging.') option_parser.add_option('-p', '--platforms', default='mac,win,win-xp,win-vista,linux', help=('Comma delimited list of platforms that need ' 'rebaselining.')) option_parser.add_option('-u', '--archive_url', default=('http://build.chromium.org/buildbot/' 'layout_test_results'), help=('Url to find the layout test result archive ' 'file.')) option_parser.add_option('-t', '--archive_name', default='layout-test-results', help='Layout test result archive name.') option_parser.add_option('-w', '--webkit_canary', action='store_true', default=False, help=('If True, pull baselines from webkit.org ' 'canary bot.')) option_parser.add_option('-b', '--backup', action='store_true', default=False, help=('Whether or not to backup the original test ' 'expectations file after rebaseline.')) option_parser.add_option('-o', '--no_html_results', action='store_true', default=False, help=('If specified, do not generate html that ' 'compares the rebaselining results.')) option_parser.add_option('-d', '--html_directory', default='', help=('The directory that stores the results for ' 'rebaselining comparison.')) option_parser.add_option('-c', '--clean_html_directory', action='store_true', default=False, help=('If specified, delete all existing files in ' 'the html directory before rebaselining.')) option_parser.add_option('-e', '--browser_path', default='', help=('The browser path that you would like to ' 'use to launch the rebaselining result ' 'comparison html')) options = option_parser.parse_args()[0] # Set up our logging format. log_level = logging.INFO if options.verbose: log_level = logging.DEBUG logging.basicConfig(level=log_level, format=('%(asctime)s %(filename)s:%(lineno)-3d ' '%(levelname)s %(message)s'), datefmt='%y%m%d %H:%M:%S') # Verify 'platforms' option is valid if not options.platforms: logging.error('Invalid "platforms" option. --platforms must be specified ' 'in order to rebaseline.') sys.exit(1) platforms = [p.strip().lower() for p in options.platforms.split(',')] for platform in platforms: if not platform in REBASELINE_PLATFORM_ORDER: logging.error('Invalid platform: "%s"' % (platform)) sys.exit(1) # Adjust the platform order so rebaseline tool is running at the order of # 'mac', 'win' and 'linux'. This is in same order with layout test baseline # search paths. It simplifies how the rebaseline tool detects duplicate # baselines. Check _IsDupBaseline method for details. rebaseline_platforms = [] for platform in REBASELINE_PLATFORM_ORDER: if platform in platforms: rebaseline_platforms.append(platform) if not options.no_html_results: options.html_directory = SetupHtmlDirectory(options.html_directory, options.clean_html_directory) rebaselining_tests = set() backup = options.backup for platform in rebaseline_platforms: rebaseliner = Rebaseliner(platform, options) logging.info('') LogDashedString('Rebaseline started', platform) if rebaseliner.Run(backup): # Only need to backup one original copy of test expectation file. 
backup = False LogDashedString('Rebaseline done', platform) else: LogDashedString('Rebaseline failed', platform, logging.ERROR) rebaselining_tests |= set(rebaseliner.GetRebaseliningTests()) if not options.no_html_results: logging.info('') LogDashedString('Rebaselining result comparison started', None) html_generator = HtmlGenerator(options, rebaseline_platforms, rebaselining_tests) html_generator.GenerateHtml() html_generator.ShowHtml() LogDashedString('Rebaselining result comparison done', None) sys.exit(0)
a8cca2e7f89f106d5b2cdf9f818191a14c4afb8b /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/a8cca2e7f89f106d5b2cdf9f818191a14c4afb8b/rebaseline.py
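The rebaseline.py rows here revolve around a boolean --clean_html_directory flag and a --browser_path string option that get threaded into SetupHtmlDirectory. Below is a minimal optparse sketch of that flag-plus-helper pattern; the SetupHtmlDirectory body is a guess at its behavior for illustration only, not the real tool.

import optparse
import os
import shutil

def SetupHtmlDirectory(html_directory, clean_html_directory):
    # Hypothetical stand-in for the real helper: optionally wipe the
    # directory first, then make sure it exists for the comparison html.
    if clean_html_directory and os.path.isdir(html_directory):
        shutil.rmtree(html_directory)
    if not os.path.isdir(html_directory):
        os.makedirs(html_directory)
    return html_directory

def main():
    option_parser = optparse.OptionParser()
    option_parser.add_option('-c', '--clean_html_directory',
                             action='store_true', default=False,
                             help='Delete existing files in the html '
                                  'directory before rebaselining.')
    option_parser.add_option('-e', '--browser_path', default='',
                             help='Browser used to open the comparison html.')
    option_parser.add_option('-d', '--html_directory',
                             default='rebaseline-html',
                             help='Directory for the comparison results.')
    options = option_parser.parse_args()[0]
    options.html_directory = SetupHtmlDirectory(options.html_directory,
                                                options.clean_html_directory)

if __name__ == '__main__':
    main()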
if not options.no_html_results: options.html_directory = SetupHtmlDirectory(options.html_directory, options.clean_html_directory)
options.html_directory = SetupHtmlDirectory(options.html_directory)
def main(): """Main function to produce new baselines.""" option_parser = optparse.OptionParser() option_parser.add_option('-v', '--verbose', action='store_true', default=False, help='include debug-level logging.') option_parser.add_option('-p', '--platforms', default='mac,win,win-xp,win-vista,linux', help=('Comma delimited list of platforms that need ' 'rebaselining.')) option_parser.add_option('-u', '--archive_url', default=('http://build.chromium.org/buildbot/' 'layout_test_results'), help=('Url to find the layout test result archive ' 'file.')) option_parser.add_option('-t', '--archive_name', default='layout-test-results', help='Layout test result archive name.') option_parser.add_option('-w', '--webkit_canary', action='store_true', default=False, help=('If True, pull baselines from webkit.org ' 'canary bot.')) option_parser.add_option('-b', '--backup', action='store_true', default=False, help=('Whether or not to backup the original test ' 'expectations file after rebaseline.')) option_parser.add_option('-o', '--no_html_results', action='store_true', default=False, help=('If specified, do not generate html that ' 'compares the rebaselining results.')) option_parser.add_option('-d', '--html_directory', default='', help=('The directory that stores the results for ' 'rebaselining comparison.')) option_parser.add_option('-c', '--clean_html_directory', action='store_true', default=False, help=('If specified, delete all existing files in ' 'the html directory before rebaselining.')) option_parser.add_option('-e', '--browser_path', default='', help=('The browser path that you would like to ' 'use to launch the rebaselining result ' 'comparison html')) options = option_parser.parse_args()[0] # Set up our logging format. log_level = logging.INFO if options.verbose: log_level = logging.DEBUG logging.basicConfig(level=log_level, format=('%(asctime)s %(filename)s:%(lineno)-3d ' '%(levelname)s %(message)s'), datefmt='%y%m%d %H:%M:%S') # Verify 'platforms' option is valid if not options.platforms: logging.error('Invalid "platforms" option. --platforms must be specified ' 'in order to rebaseline.') sys.exit(1) platforms = [p.strip().lower() for p in options.platforms.split(',')] for platform in platforms: if not platform in REBASELINE_PLATFORM_ORDER: logging.error('Invalid platform: "%s"' % (platform)) sys.exit(1) # Adjust the platform order so rebaseline tool is running at the order of # 'mac', 'win' and 'linux'. This is in same order with layout test baseline # search paths. It simplifies how the rebaseline tool detects duplicate # baselines. Check _IsDupBaseline method for details. rebaseline_platforms = [] for platform in REBASELINE_PLATFORM_ORDER: if platform in platforms: rebaseline_platforms.append(platform) if not options.no_html_results: options.html_directory = SetupHtmlDirectory(options.html_directory, options.clean_html_directory) rebaselining_tests = set() backup = options.backup for platform in rebaseline_platforms: rebaseliner = Rebaseliner(platform, options) logging.info('') LogDashedString('Rebaseline started', platform) if rebaseliner.Run(backup): # Only need to backup one original copy of test expectation file. 
backup = False LogDashedString('Rebaseline done', platform) else: LogDashedString('Rebaseline failed', platform, logging.ERROR) rebaselining_tests |= set(rebaseliner.GetRebaseliningTests()) if not options.no_html_results: logging.info('') LogDashedString('Rebaselining result comparison started', None) html_generator = HtmlGenerator(options, rebaseline_platforms, rebaselining_tests) html_generator.GenerateHtml() html_generator.ShowHtml() LogDashedString('Rebaselining result comparison done', None) sys.exit(0)
a8cca2e7f89f106d5b2cdf9f818191a14c4afb8b /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/a8cca2e7f89f106d5b2cdf9f818191a14c4afb8b/rebaseline.py
if not options.no_html_results: logging.info('') LogDashedString('Rebaselining result comparison started', None) html_generator = HtmlGenerator(options, rebaseline_platforms, rebaselining_tests) html_generator.GenerateHtml() html_generator.ShowHtml() LogDashedString('Rebaselining result comparison done', None)
logging.info('') LogDashedString('Rebaselining result comparison started', None) html_generator = HtmlGenerator(options, rebaseline_platforms, rebaselining_tests) html_generator.GenerateHtml() html_generator.ShowHtml() LogDashedString('Rebaselining result comparison done', None)
def main(): """Main function to produce new baselines.""" option_parser = optparse.OptionParser() option_parser.add_option('-v', '--verbose', action='store_true', default=False, help='include debug-level logging.') option_parser.add_option('-p', '--platforms', default='mac,win,win-xp,win-vista,linux', help=('Comma delimited list of platforms that need ' 'rebaselining.')) option_parser.add_option('-u', '--archive_url', default=('http://build.chromium.org/buildbot/' 'layout_test_results'), help=('Url to find the layout test result archive ' 'file.')) option_parser.add_option('-t', '--archive_name', default='layout-test-results', help='Layout test result archive name.') option_parser.add_option('-w', '--webkit_canary', action='store_true', default=False, help=('If True, pull baselines from webkit.org ' 'canary bot.')) option_parser.add_option('-b', '--backup', action='store_true', default=False, help=('Whether or not to backup the original test ' 'expectations file after rebaseline.')) option_parser.add_option('-o', '--no_html_results', action='store_true', default=False, help=('If specified, do not generate html that ' 'compares the rebaselining results.')) option_parser.add_option('-d', '--html_directory', default='', help=('The directory that stores the results for ' 'rebaselining comparison.')) option_parser.add_option('-c', '--clean_html_directory', action='store_true', default=False, help=('If specified, delete all existing files in ' 'the html directory before rebaselining.')) option_parser.add_option('-e', '--browser_path', default='', help=('The browser path that you would like to ' 'use to launch the rebaselining result ' 'comparison html')) options = option_parser.parse_args()[0] # Set up our logging format. log_level = logging.INFO if options.verbose: log_level = logging.DEBUG logging.basicConfig(level=log_level, format=('%(asctime)s %(filename)s:%(lineno)-3d ' '%(levelname)s %(message)s'), datefmt='%y%m%d %H:%M:%S') # Verify 'platforms' option is valid if not options.platforms: logging.error('Invalid "platforms" option. --platforms must be specified ' 'in order to rebaseline.') sys.exit(1) platforms = [p.strip().lower() for p in options.platforms.split(',')] for platform in platforms: if not platform in REBASELINE_PLATFORM_ORDER: logging.error('Invalid platform: "%s"' % (platform)) sys.exit(1) # Adjust the platform order so rebaseline tool is running at the order of # 'mac', 'win' and 'linux'. This is in same order with layout test baseline # search paths. It simplifies how the rebaseline tool detects duplicate # baselines. Check _IsDupBaseline method for details. rebaseline_platforms = [] for platform in REBASELINE_PLATFORM_ORDER: if platform in platforms: rebaseline_platforms.append(platform) if not options.no_html_results: options.html_directory = SetupHtmlDirectory(options.html_directory, options.clean_html_directory) rebaselining_tests = set() backup = options.backup for platform in rebaseline_platforms: rebaseliner = Rebaseliner(platform, options) logging.info('') LogDashedString('Rebaseline started', platform) if rebaseliner.Run(backup): # Only need to backup one original copy of test expectation file. 
backup = False LogDashedString('Rebaseline done', platform) else: LogDashedString('Rebaseline failed', platform, logging.ERROR) rebaselining_tests |= set(rebaseliner.GetRebaseliningTests()) if not options.no_html_results: logging.info('') LogDashedString('Rebaselining result comparison started', None) html_generator = HtmlGenerator(options, rebaseline_platforms, rebaselining_tests) html_generator.GenerateHtml() html_generator.ShowHtml() LogDashedString('Rebaselining result comparison done', None) sys.exit(0)
a8cca2e7f89f106d5b2cdf9f818191a14c4afb8b /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/a8cca2e7f89f106d5b2cdf9f818191a14c4afb8b/rebaseline.py
if name == 'BufferData':
if name == 'BufferData' or name == 'BufferSubData':
def WriteGetDataSizeCode(self, func, file): """Overrriden from TypeHandler.""" # TODO(gman): Move this data to _FUNCTION_INFO? name = func.name if name.endswith("Immediate"): name = name[0:-9] if name == 'BufferData': file.Write(" uint32 data_size = size;\n") elif name == 'BufferSubData': file.Write(" uint32 data_size = size;\n") elif name == 'CompressedTexImage2D': file.Write(" uint32 data_size = imageSize;\n") elif name == 'CompressedTexSubImage2D': file.Write(" uint32 data_size = imageSize;\n") elif name == 'TexImage2D': file.Write(" uint32 data_size = GLES2Util::ComputeImageDataSize(\n") file.Write(" width, height, format, type, unpack_alignment_);\n") elif name == 'TexSubImage2D': file.Write(" uint32 data_size = GLES2Util::ComputeImageDataSize(\n") file.Write(" width, height, format, type, unpack_alignment_);\n") else: file.Write("// uint32 data_size = 0; // TODO(gman): get correct size!\n")
cc699ea9fb6cc5a83d4937f29c319ecab849997f /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/cc699ea9fb6cc5a83d4937f29c319ecab849997f/build_gles2_cmd_buffer.py
elif name == 'BufferSubData': file.Write(" uint32 data_size = size;\n") elif name == 'CompressedTexImage2D':
elif (name == 'CompressedTexImage2D' or name == 'CompressedTexSubImage2D'):
def WriteGetDataSizeCode(self, func, file): """Overrriden from TypeHandler.""" # TODO(gman): Move this data to _FUNCTION_INFO? name = func.name if name.endswith("Immediate"): name = name[0:-9] if name == 'BufferData': file.Write(" uint32 data_size = size;\n") elif name == 'BufferSubData': file.Write(" uint32 data_size = size;\n") elif name == 'CompressedTexImage2D': file.Write(" uint32 data_size = imageSize;\n") elif name == 'CompressedTexSubImage2D': file.Write(" uint32 data_size = imageSize;\n") elif name == 'TexImage2D': file.Write(" uint32 data_size = GLES2Util::ComputeImageDataSize(\n") file.Write(" width, height, format, type, unpack_alignment_);\n") elif name == 'TexSubImage2D': file.Write(" uint32 data_size = GLES2Util::ComputeImageDataSize(\n") file.Write(" width, height, format, type, unpack_alignment_);\n") else: file.Write("// uint32 data_size = 0; // TODO(gman): get correct size!\n")
cc699ea9fb6cc5a83d4937f29c319ecab849997f /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/cc699ea9fb6cc5a83d4937f29c319ecab849997f/build_gles2_cmd_buffer.py
elif name == 'CompressedTexSubImage2D': file.Write(" uint32 data_size = imageSize;\n") elif name == 'TexImage2D': file.Write(" uint32 data_size = GLES2Util::ComputeImageDataSize(\n") file.Write(" width, height, format, type, unpack_alignment_);\n") elif name == 'TexSubImage2D': file.Write(" uint32 data_size = GLES2Util::ComputeImageDataSize(\n") file.Write(" width, height, format, type, unpack_alignment_);\n")
elif name == 'TexImage2D' or name == 'TexSubImage2D': code = """ uint32 data_size; if (!GLES2Util::ComputeImageDataSize( width, height, format, type, unpack_alignment_, &data_size)) { return error::kOutOfBounds; } """ file.Write(code)
def WriteGetDataSizeCode(self, func, file): """Overrriden from TypeHandler.""" # TODO(gman): Move this data to _FUNCTION_INFO? name = func.name if name.endswith("Immediate"): name = name[0:-9] if name == 'BufferData': file.Write(" uint32 data_size = size;\n") elif name == 'BufferSubData': file.Write(" uint32 data_size = size;\n") elif name == 'CompressedTexImage2D': file.Write(" uint32 data_size = imageSize;\n") elif name == 'CompressedTexSubImage2D': file.Write(" uint32 data_size = imageSize;\n") elif name == 'TexImage2D': file.Write(" uint32 data_size = GLES2Util::ComputeImageDataSize(\n") file.Write(" width, height, format, type, unpack_alignment_);\n") elif name == 'TexSubImage2D': file.Write(" uint32 data_size = GLES2Util::ComputeImageDataSize(\n") file.Write(" width, height, format, type, unpack_alignment_);\n") else: file.Write("// uint32 data_size = 0; // TODO(gman): get correct size!\n")
cc699ea9fb6cc5a83d4937f29c319ecab849997f /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/cc699ea9fb6cc5a83d4937f29c319ecab849997f/build_gles2_cmd_buffer.py
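The TexImage2D/TexSubImage2D rows above pair GLES2Util::ComputeImageDataSize with a bool result and an out parameter, so the generated handler can return error::kOutOfBounds instead of trusting a possibly wrapped size. A rough Python sketch of such a checked computation follows; the row-padding formula and the 32-bit limit are assumptions of this sketch, not the real GLES2Util arithmetic.

UINT32_MAX = 0xFFFFFFFF

def compute_image_data_size(width, height, bytes_per_pixel, unpack_alignment):
    # Pad each row up to the unpack alignment, multiply by the row count,
    # and refuse any result that would not fit in an unsigned 32-bit value,
    # mirroring the kOutOfBounds early return in the generated C++.
    row_size = width * bytes_per_pixel
    padded_row = ((row_size + unpack_alignment - 1) // unpack_alignment
                  * unpack_alignment)
    data_size = padded_row * height
    if data_size > UINT32_MAX:
        return False, 0
    return True, data_size

ok, size = compute_image_data_size(16, 16, 4, 4)
assert ok and size == 16 * 16 * 4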
file.Write(" uint32 data_size = n * sizeof(GLuint);\n")
code = """ uint32 data_size; if (!SafeMultiplyUint32(n, sizeof(GLuint), &data_size)) { return error::kOutOfBounds; } """ file.Write(code)
def WriteGetDataSizeCode(self, func, file): """Overrriden from TypeHandler.""" file.Write(" uint32 data_size = n * sizeof(GLuint);\n")
cc699ea9fb6cc5a83d4937f29c319ecab849997f /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/cc699ea9fb6cc5a83d4937f29c319ecab849997f/build_gles2_cmd_buffer.py
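The SafeMultiplyUint32 guard in the generated C++ here stands in for a raw n * sizeof(GLuint), so a hostile n cannot silently wrap the buffer size. A minimal Python analogue of that guard; the 32-bit limit and the (ok, value) return shape are choices of this sketch.

UINT32_MAX = 0xFFFFFFFF

def safe_multiply_uint32(a, b):
    # Returns (ok, product); ok is False whenever a * b would overflow a
    # uint32, which is the condition the generated handler turns into
    # error::kOutOfBounds.
    product = a * b
    if a < 0 or b < 0 or product > UINT32_MAX:
        return False, 0
    return True, product

ok, data_size = safe_multiply_uint32(10, 4)      # 10 ids, 4 bytes per GLuint
assert ok and data_size == 40
ok, _ = safe_multiply_uint32(0x40000000, 4)      # would wrap a uint32
assert not ok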
file.Write(" uint32 data_size = n * sizeof(GLuint);\n")
code = """ uint32 data_size; if (!SafeMultiplyUint32(n, sizeof(GLuint), &data_size)) { return error::kOutOfBounds; } """ file.Write(code)
def WriteGetDataSizeCode(self, func, file): """Overrriden from TypeHandler.""" file.Write(" uint32 data_size = n * sizeof(GLuint);\n")
cc699ea9fb6cc5a83d4937f29c319ecab849997f /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/cc699ea9fb6cc5a83d4937f29c319ecab849997f/build_gles2_cmd_buffer.py
file.Write(" %s params;\n" % last_arg.type) file.Write(" GLsizei num_values = util_.GLGetNumValuesReturned(pname);\n") file.Write(" uint32 params_size = num_values * sizeof(*params);\n") file.Write(" params = GetSharedMemoryAs<%s>(\n" % last_arg.type) file.Write(" c.params_shm_id, c.params_shm_offset, params_size);\n")
code = """ %(last_arg_type)s params; GLsizei num_values = util_.GLGetNumValuesReturned(pname); uint32 params_size; if (!SafeMultiplyUint32(num_values, sizeof(*params), &params_size)) { return error::kOutOfBounds; } params = GetSharedMemoryAs<%(last_arg_type)s>( c.params_shm_id, c.params_shm_offset, params_size); """ file.Write(code % {'last_arg_type': last_arg.type})
def WriteServiceImplementation(self, func, file): """Overrriden from TypeHandler.""" file.Write( "error::Error GLES2DecoderImpl::Handle%s(\n" % func.name) file.Write( " uint32 immediate_data_size, const gles2::%s& c) {\n" % func.name) last_arg = func.GetLastOriginalArg()
cc699ea9fb6cc5a83d4937f29c319ecab849997f /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/cc699ea9fb6cc5a83d4937f29c319ecab849997f/build_gles2_cmd_buffer.py
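One side of the handler-writer row above emits a single triple-quoted template filled in with %-formatting against a dict ('last_arg_type' being the only substitution) instead of a run of file.Write calls. A self-contained sketch of that emission style; io.StringIO stands in for the generated .cc file and 'GLint*' is an arbitrary example type.

import io

GET_PARAMS_TEMPLATE = """
  %(last_arg_type)s params;
  GLsizei num_values = util_.GLGetNumValuesReturned(pname);
  uint32 params_size;
  if (!SafeMultiplyUint32(num_values, sizeof(*params), &params_size)) {
    return error::kOutOfBounds;
  }
"""

def write_get_params(out, last_arg_type):
    # 'out' is anything with write(), standing in for the generated C++ file.
    out.write(GET_PARAMS_TEMPLATE % {'last_arg_type': last_arg_type})

out = io.StringIO()
write_get_params(out, 'GLint*')
assert 'GLint* params;' in out.getvalue()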
file.Write(" uint32 data_size = ComputeImmediateDataSize(" "immediate_data_size, 1, sizeof(%s), %d);\n" % (func.info.data_type, func.info.count))
code = """ uint32 data_size; if (!ComputeDataSize(1, sizeof(%s), %d, &data_size)) { return error::kOutOfBounds; } """ file.Write(code % (func.info.data_type, func.info.count)) if func.is_immediate: file.Write(" if (data_size > immediate_data_size) {\n") file.Write(" return error::kOutOfBounds;\n") file.Write(" }\n")
def WriteGetDataSizeCode(self, func, file): """Overrriden from TypeHandler.""" file.Write(" uint32 data_size = ComputeImmediateDataSize(" "immediate_data_size, 1, sizeof(%s), %d);\n" % (func.info.data_type, func.info.count))
cc699ea9fb6cc5a83d4937f29c319ecab849997f /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/cc699ea9fb6cc5a83d4937f29c319ecab849997f/build_gles2_cmd_buffer.py
file.Write(" uint32 data_size = ComputeImmediateDataSize(" "immediate_data_size, 1, sizeof(%s), %d);\n" % (func.info.data_type, func.info.count))
code = """ uint32 data_size; if (!ComputeDataSize(1, sizeof(%s), %d, &data_size)) { return error::kOutOfBounds; } """ file.Write(code % (func.info.data_type, func.info.count)) if func.is_immediate: file.Write(" if (data_size > immediate_data_size) {\n") file.Write(" return error::kOutOfBounds;\n") file.Write(" }\n")
def WriteGetDataSizeCode(self, func, file): """Overrriden from TypeHandler.""" file.Write(" uint32 data_size = ComputeImmediateDataSize(" "immediate_data_size, 1, sizeof(%s), %d);\n" % (func.info.data_type, func.info.count))
cc699ea9fb6cc5a83d4937f29c319ecab849997f /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/cc699ea9fb6cc5a83d4937f29c319ecab849997f/build_gles2_cmd_buffer.py
std::string str; if (GetBucketAsString(kResultBucketId, &str)) { GLsizei max_size = std::min(static_cast<size_t>(%(bufsize_name)s) - 1, str.size()); if (%(length_name)s != NULL) { *%(length_name)s = max_size;
if (bufsize > 0) { std::string str; if (GetBucketAsString(kResultBucketId, &str)) { GLsizei max_size = std::min(static_cast<size_t>(%(bufsize_name)s) - 1, str.size()); if (%(length_name)s != NULL) { *%(length_name)s = max_size; } memcpy(%(dest_name)s, str.c_str(), max_size); %(dest_name)s[max_size] = '\\0';
def WriteGLES2ImplementationHeader(self, func, file): """Overrriden from TypeHandler.""" code = """%(return_type)s %(func_name)s(%(args)s) {
cc699ea9fb6cc5a83d4937f29c319ecab849997f /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/cc699ea9fb6cc5a83d4937f29c319ecab849997f/build_gles2_cmd_buffer.py
memcpy(%(dest_name)s, str.c_str(), max_size); %(dest_name)s[max_size] = '\\0';
def WriteGLES2ImplementationHeader(self, func, file): """Overrriden from TypeHandler.""" code = """%(return_type)s %(func_name)s(%(args)s) {
cc699ea9fb6cc5a83d4937f29c319ecab849997f /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/cc699ea9fb6cc5a83d4937f29c319ecab849997f/build_gles2_cmd_buffer.py
return tsan_analyze.TsanAnalyzer(self._source_dir)
return tsan_analyze.TsanAnalyzer(self._source_dir, use_gdb)
def CreateAnalyzer(self): use_gdb = common.IsMac() return tsan_analyze.TsanAnalyzer(self._source_dir)
8aee1ec38391e0dbdac9b0aa372898f341fba5ff /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/8aee1ec38391e0dbdac9b0aa372898f341fba5ff/valgrind_test.py
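The valgrind_test.py row above computes a platform flag once (use_gdb from common.IsMac()) and forwards it into the analyzer constructor. A tiny hedged sketch of the same shape; TsanAnalyzerSketch is a stand-in for tsan_analyze.TsanAnalyzer, and the sys.platform check substitutes for common.IsMac().

import sys

class TsanAnalyzerSketch(object):
    # Stand-in for tsan_analyze.TsanAnalyzer: just records whether stack
    # symbolization should be routed through gdb.
    def __init__(self, source_dir, use_gdb=False):
        self.source_dir = source_dir
        self.use_gdb = use_gdb

def create_analyzer(source_dir):
    use_gdb = sys.platform == 'darwin'   # substitutes for common.IsMac()
    return TsanAnalyzerSketch(source_dir, use_gdb)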
class _GTestTextTestResult(unittest._TextTestResult): """A test result class that can print formatted text results to a stream. Results printed in conformance with gtest output format, like: [ RUN ] autofill.AutoFillTest.testAutofillInvalid: "test desc." [ OK ] autofill.AutoFillTest.testAutofillInvalid [ RUN ] autofill.AutoFillTest.testFillProfile: "test desc." [ OK ] autofill.AutoFillTest.testFillProfile [ RUN ] autofill.AutoFillTest.testFillProfileCrazyCharacters: "Test." [ OK ] autofill.AutoFillTest.testFillProfileCrazyCharacters """ def __init__(self, stream, descriptions, verbosity): unittest._TextTestResult.__init__(self, stream, descriptions, verbosity) def _GetTestURI(self, test): return '%s.%s' % (unittest._strclass(test.__class__), test._testMethodName) def getDescription(self, test): return '%s: "%s"' % (self._GetTestURI(test), test.shortDescription()) def startTest(self, test): unittest.TestResult.startTest(self, test) self.stream.writeln('[ RUN ] %s' % self.getDescription(test)) def addSuccess(self, test): unittest.TestResult.addSuccess(self, test) self.stream.writeln('[ OK ] %s' % self._GetTestURI(test)) def addError(self, test, err): unittest.TestResult.addError(self, test, err) self.stream.writeln('[ ERROR ] %s' % self._GetTestURI(test)) def addFailure(self, test, err): unittest.TestResult.addFailure(self, test, err) self.stream.writeln('[ FAILED ] %s' % self._GetTestURI(test)) class PyAutoTextTestRuner(unittest.TextTestRunner): """Test Runner for PyAuto tests that displays results in textual format. Results are displayed in conformance with gtest output. """ def __init__(self, verbosity=1): unittest.TextTestRunner.__init__(self, stream=sys.stderr, verbosity=verbosity) def _makeResult(self): return _GTestTextTestResult(self.stream, self.descriptions, self.verbosity)
def __del__(self): # python unittest module is setup such that the suite gets deleted before # the test cases, which is odd because our test cases depend on # initializtions like exitmanager, autorelease pool provided by the # suite. Forcibly delete the test cases before the suite. del self._tests pyautolib.PyUITestSuiteBase.__del__(self)
bfd181c1d069c9312211a07462b7d9a3f7f79f53 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/bfd181c1d069c9312211a07462b7d9a3f7f79f53/pyauto.py
result = PyAutoTextTestRuner(verbosity=verbosity).run(pyauto_suite)
result = unittest.TextTestRunner(verbosity=verbosity).run(pyauto_suite)
def _Run(self): """Run the tests.""" if self._options.wait_for_debugger: raw_input('Attach debugger to process %s and hit <enter> ' % os.getpid())
bfd181c1d069c9312211a07462b7d9a3f7f79f53 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/bfd181c1d069c9312211a07462b7d9a3f7f79f53/pyauto.py
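The pyauto.py rows above swap between the stock unittest.TextTestRunner and a custom runner whose results print gtest-style [ RUN ]/[ OK ]/[ FAILED ] lines. A compact sketch of such a runner, built only on unittest's public TextTestResult and resultclass hooks rather than the private _TextTestResult base the quoted class uses; SampleTest is a made-up test case for the usage example.

import unittest

class GTestStyleResult(unittest.TextTestResult):
    # Emits one gtest-style status line per test instead of the default dots.
    def startTest(self, test):
        super(GTestStyleResult, self).startTest(test)
        self.stream.write('[ RUN      ] %s\n' % test.id())

    def addSuccess(self, test):
        super(GTestStyleResult, self).addSuccess(test)
        self.stream.write('[       OK ] %s\n' % test.id())

    def addFailure(self, test, err):
        super(GTestStyleResult, self).addFailure(test, err)
        self.stream.write('[  FAILED  ] %s\n' % test.id())

    def addError(self, test, err):
        super(GTestStyleResult, self).addError(test, err)
        self.stream.write('[  ERROR   ] %s\n' % test.id())

class SampleTest(unittest.TestCase):
    def test_passes(self):
        self.assertEqual(1 + 1, 2)

if __name__ == '__main__':
    suite = unittest.defaultTestLoader.loadTestsFromTestCase(SampleTest)
    runner = unittest.TextTestRunner(resultclass=GTestStyleResult, verbosity=0)
    runner.run(suite)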
self.file = open(filename, "w")
self.file = open(filename, "wb")
def __init__(self, filename): self.filename = filename self.file = open(filename, "w")
4a08049025da305e4e247c60384e0e78c7f5a894 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/4a08049025da305e4e247c60384e0e78c7f5a894/build_gles2_cmd_buffer.py
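The one-character difference above is text versus binary mode when opening the generated file; binary mode keeps the emitted bytes identical across platforms because Windows does not get a chance to translate '\n' into '\r\n'. A small hedged sketch of a writer that makes the choice explicit; the class name is illustrative, not the real generator wrapper.

class GeneratedFileWriter(object):
    # Hypothetical stand-in for the generator's file wrapper: binary mode
    # writes '\n' bytes as-is instead of letting Windows translate them to
    # '\r\n', so generated sources stay byte-identical across platforms.
    def __init__(self, filename):
        self.filename = filename
        self.file = open(filename, "wb")

    def Write(self, string):
        self.file.write(string.encode("utf-8"))

    def Close(self):
        self.file.close()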
start = time.time()
global AnalyzeStartTime if AnalyzeStartTime == None: AnalyzeStartTime = time.time()
def __init__(self, source_dir, files, show_all_leaks=False, use_gdb=False): '''Reads in a set of files.
5295117dfc5fb8e7cea83ab8fff1e2847cc6a863 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/5295117dfc5fb8e7cea83ab8fff1e2847cc6a863/memcheck_analyze.py
(firstrun or ((time.time() - start) < 180.0))):
(firstrun or ((time.time() - AnalyzeStartTime) < LOG_COMPLETION_TIMEOUT))):
def __init__(self, source_dir, files, show_all_leaks=False, use_gdb=False): '''Reads in a set of files.
5295117dfc5fb8e7cea83ab8fff1e2847cc6a863 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/5295117dfc5fb8e7cea83ab8fff1e2847cc6a863/memcheck_analyze.py
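The two memcheck_analyze.py rows above trade a per-call start = time.time() and a hard-coded 180.0 for a module-level AnalyzeStartTime and a named LOG_COMPLETION_TIMEOUT, so repeated analyzer runs share one deadline. A small sketch of that shape; the polling callback and the sleep interval are placeholders, not the real log-completion check.

import time

LOG_COMPLETION_TIMEOUT = 180.0   # seconds; a named constant instead of 180.0
AnalyzeStartTime = None          # shared across analyzer instances

def wait_for_log_completion(log_is_complete, firstrun=False):
    # Keep polling while it is the first run or the shared deadline,
    # measured from the first analysis ever started, has not passed.
    global AnalyzeStartTime
    if AnalyzeStartTime is None:
        AnalyzeStartTime = time.time()
    while (not log_is_complete() and
           (firstrun or
            time.time() - AnalyzeStartTime < LOG_COMPLETION_TIMEOUT)):
        time.sleep(1)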
black_list = input_api.DEFAULT_BLACK_LIST + EXCLUDED_PATHS
black_list = input_api.DEFAULT_BLACK_LIST + _EXCLUDED_PATHS white_list = input_api.DEFAULT_WHITE_LIST + _TEXT_FILES
def _CommonChecks(input_api, output_api): results = [] # What does this code do? # It loads the default black list (e.g. third_party, experimental, etc) and # add our black list (breakpad, skia and v8 are still not following # google style and are not really living this repository). # See presubmit_support.py InputApi.FilterSourceFile for the (simple) usage. black_list = input_api.DEFAULT_BLACK_LIST + EXCLUDED_PATHS sources = lambda x: input_api.FilterSourceFile(x, black_list=black_list) results.extend(input_api.canned_checks.CheckLongLines( input_api, output_api, sources)) results.extend(input_api.canned_checks.CheckChangeHasNoTabs( input_api, output_api, sources)) results.extend(input_api.canned_checks.CheckChangeHasNoStrayWhitespace( input_api, output_api, sources)) results.extend(input_api.canned_checks.CheckChangeHasBugField( input_api, output_api)) results.extend(input_api.canned_checks.CheckChangeHasTestField( input_api, output_api)) results.extend(input_api.canned_checks.CheckChangeSvnEolStyle( input_api, output_api, sources)) results.extend(input_api.canned_checks.CheckSvnForCommonMimeTypes( input_api, output_api)) results.extend(input_api.canned_checks.CheckLicense( input_api, output_api, _LICENSE_HEADER, sources)) return results
20eb0e5f7f780be5842a7c20f895d27feeb444bb /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/20eb0e5f7f780be5842a7c20f895d27feeb444bb/PRESUBMIT.py
input_api, output_api, sources))
input_api, output_api, text_files))
def _CommonChecks(input_api, output_api): results = [] # What does this code do? # It loads the default black list (e.g. third_party, experimental, etc) and # add our black list (breakpad, skia and v8 are still not following # google style and are not really living this repository). # See presubmit_support.py InputApi.FilterSourceFile for the (simple) usage. black_list = input_api.DEFAULT_BLACK_LIST + EXCLUDED_PATHS sources = lambda x: input_api.FilterSourceFile(x, black_list=black_list) results.extend(input_api.canned_checks.CheckLongLines( input_api, output_api, sources)) results.extend(input_api.canned_checks.CheckChangeHasNoTabs( input_api, output_api, sources)) results.extend(input_api.canned_checks.CheckChangeHasNoStrayWhitespace( input_api, output_api, sources)) results.extend(input_api.canned_checks.CheckChangeHasBugField( input_api, output_api)) results.extend(input_api.canned_checks.CheckChangeHasTestField( input_api, output_api)) results.extend(input_api.canned_checks.CheckChangeSvnEolStyle( input_api, output_api, sources)) results.extend(input_api.canned_checks.CheckSvnForCommonMimeTypes( input_api, output_api)) results.extend(input_api.canned_checks.CheckLicense( input_api, output_api, _LICENSE_HEADER, sources)) return results
20eb0e5f7f780be5842a7c20f895d27feeb444bb /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/20eb0e5f7f780be5842a7c20f895d27feeb444bb/PRESUBMIT.py
server_data = { 'port': listen_port } server_data_json = simplejson.dumps(server_data) debug('sending server_data: %s' % server_data_json) server_data_len = len(server_data_json)
def main(options, args): logfile = open('testserver.log', 'w') sys.stdout = FileMultiplexer(sys.stdout, logfile) sys.stderr = FileMultiplexer(sys.stderr, logfile) port = options.port if options.server_type == SERVER_HTTP: if options.cert: # let's make sure the cert file exists. if not os.path.isfile(options.cert): print 'specified server cert file not found: ' + options.cert + \ ' exiting...' return for ca_cert in options.ssl_client_ca: if not os.path.isfile(ca_cert): print 'specified trusted client CA file not found: ' + ca_cert + \ ' exiting...' return server = HTTPSServer(('127.0.0.1', port), TestPageHandler, options.cert, options.ssl_client_auth, options.ssl_client_ca, options.ssl_bulk_cipher) print 'HTTPS server started on port %d...' % server.server_port else: server = StoppableHTTPServer(('127.0.0.1', port), TestPageHandler) print 'HTTP server started on port %d...' % server.server_port server.data_dir = MakeDataDir() server.file_root_url = options.file_root_url listen_port = server.server_port server._device_management_handler = None elif options.server_type == SERVER_SYNC: server = SyncHTTPServer(('127.0.0.1', port), SyncPageHandler) print 'Sync HTTP server started on port %d...' % server.server_port listen_port = server.server_port # means FTP Server else: my_data_dir = MakeDataDir() # Instantiate a dummy authorizer for managing 'virtual' users authorizer = pyftpdlib.ftpserver.DummyAuthorizer() # Define a new user having full r/w permissions and a read-only # anonymous user authorizer.add_user('chrome', 'chrome', my_data_dir, perm='elradfmw') authorizer.add_anonymous(my_data_dir) # Instantiate FTP handler class ftp_handler = pyftpdlib.ftpserver.FTPHandler ftp_handler.authorizer = authorizer # Define a customized banner (string returned when client connects) ftp_handler.banner = ("pyftpdlib %s based ftpd ready." % pyftpdlib.ftpserver.__ver__) # Instantiate FTP server class and listen to 127.0.0.1:port address = ('127.0.0.1', port) server = pyftpdlib.ftpserver.FTPServer(address, ftp_handler) listen_port = server.socket.getsockname()[1] print 'FTP server started on port %d...' % listen_port # Notify the parent that we've started. (BaseServer subclasses # bind their sockets on construction.) if options.startup_pipe is not None: server_data = { 'port': listen_port } server_data_json = simplejson.dumps(server_data) debug('sending server_data: %s' % server_data_json) server_data_len = len(server_data_json) if sys.platform == 'win32': fd = msvcrt.open_osfhandle(options.startup_pipe, 0) else: fd = options.startup_pipe startup_pipe = os.fdopen(fd, "w") # Write the listening port as a 2 byte value. This is _not_ using # network byte ordering since the other end of the pipe is on the same # machine. startup_pipe.write(struct.pack('@H', server_data['port'])) startup_pipe.close() try: server.serve_forever() except KeyboardInterrupt: print 'shutting down server' server.stop = True
74efa69e81feb5d7e49a4e54ad390e0e4ea2c0ab /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/74efa69e81feb5d7e49a4e54ad390e0e4ea2c0ab/testserver.py
startup_pipe.write(struct.pack('@H', server_data['port']))
startup_pipe.write(struct.pack('@H', listen_port))
def main(options, args): logfile = open('testserver.log', 'w') sys.stdout = FileMultiplexer(sys.stdout, logfile) sys.stderr = FileMultiplexer(sys.stderr, logfile) port = options.port if options.server_type == SERVER_HTTP: if options.cert: # let's make sure the cert file exists. if not os.path.isfile(options.cert): print 'specified server cert file not found: ' + options.cert + \ ' exiting...' return for ca_cert in options.ssl_client_ca: if not os.path.isfile(ca_cert): print 'specified trusted client CA file not found: ' + ca_cert + \ ' exiting...' return server = HTTPSServer(('127.0.0.1', port), TestPageHandler, options.cert, options.ssl_client_auth, options.ssl_client_ca, options.ssl_bulk_cipher) print 'HTTPS server started on port %d...' % server.server_port else: server = StoppableHTTPServer(('127.0.0.1', port), TestPageHandler) print 'HTTP server started on port %d...' % server.server_port server.data_dir = MakeDataDir() server.file_root_url = options.file_root_url listen_port = server.server_port server._device_management_handler = None elif options.server_type == SERVER_SYNC: server = SyncHTTPServer(('127.0.0.1', port), SyncPageHandler) print 'Sync HTTP server started on port %d...' % server.server_port listen_port = server.server_port # means FTP Server else: my_data_dir = MakeDataDir() # Instantiate a dummy authorizer for managing 'virtual' users authorizer = pyftpdlib.ftpserver.DummyAuthorizer() # Define a new user having full r/w permissions and a read-only # anonymous user authorizer.add_user('chrome', 'chrome', my_data_dir, perm='elradfmw') authorizer.add_anonymous(my_data_dir) # Instantiate FTP handler class ftp_handler = pyftpdlib.ftpserver.FTPHandler ftp_handler.authorizer = authorizer # Define a customized banner (string returned when client connects) ftp_handler.banner = ("pyftpdlib %s based ftpd ready." % pyftpdlib.ftpserver.__ver__) # Instantiate FTP server class and listen to 127.0.0.1:port address = ('127.0.0.1', port) server = pyftpdlib.ftpserver.FTPServer(address, ftp_handler) listen_port = server.socket.getsockname()[1] print 'FTP server started on port %d...' % listen_port # Notify the parent that we've started. (BaseServer subclasses # bind their sockets on construction.) if options.startup_pipe is not None: server_data = { 'port': listen_port } server_data_json = simplejson.dumps(server_data) debug('sending server_data: %s' % server_data_json) server_data_len = len(server_data_json) if sys.platform == 'win32': fd = msvcrt.open_osfhandle(options.startup_pipe, 0) else: fd = options.startup_pipe startup_pipe = os.fdopen(fd, "w") # Write the listening port as a 2 byte value. This is _not_ using # network byte ordering since the other end of the pipe is on the same # machine. startup_pipe.write(struct.pack('@H', server_data['port'])) startup_pipe.close() try: server.serve_forever() except KeyboardInterrupt: print 'shutting down server' server.stop = True
74efa69e81feb5d7e49a4e54ad390e0e4ea2c0ab /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/74efa69e81feb5d7e49a4e54ad390e0e4ea2c0ab/testserver.py
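Both versions of the testserver.py row above hand the listening port to the parent process as a 2-byte native-endian value via struct.pack('@H', ...); the change is only whether listen_port is read directly or through the server_data dict. A minimal sketch of that handshake write; the pipe is simulated with os.pipe() here rather than the startup-pipe descriptor the real server receives.

import os
import struct

def notify_parent(startup_fd, listen_port):
    # Write the port as 2 bytes in native byte order ('@H'); both ends of
    # the pipe are on the same machine, so no network ordering is needed.
    startup_pipe = os.fdopen(startup_fd, "wb")
    startup_pipe.write(struct.pack('@H', listen_port))
    startup_pipe.close()

read_fd, write_fd = os.pipe()           # simulated startup pipe
notify_parent(write_fd, 8080)
(port,) = struct.unpack('@H', os.read(read_fd, 2))
assert port == 8080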
EXPECT_EQ(GetServiceId(kNewClientId), kNewServiceId); EXPECT_TRUE(Get%(resource_type)sInfo(kNewServiceId) != NULL);
EXPECT_TRUE(Get%(resource_type)sInfo(kNewClientId) != NULL);
def WriteServiceUnitTest(self, func, file): """Overrriden from TypeHandler.""" valid_test = """
9fa7a2b02d77ec300c60a11cdfaf23fc9c50a619 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/9fa7a2b02d77ec300c60a11cdfaf23fc9c50a619/build_gles2_cmd_buffer.py
code = """ if (IsReservedId(%(id)s)) {
code = """ if (Is%(type)sReservedId(%(id)s)) {
def WriteGLES2ImplementationHeader(self, func, file): """Writes the GLES2 Implemention.""" impl_func = func.GetInfo('impl_func') impl_decl = func.GetInfo('impl_decl') if (func.can_auto_generate and (impl_func == None or impl_func == True) and (impl_decl == None or impl_decl == True)): file.Write("%s %s(%s) {\n" % (func.return_type, func.original_name, func.MakeTypedOriginalArgString(""))) for arg in func.GetOriginalArgs(): arg.WriteClientSideValidationCode(file) code = """ if (IsReservedId(%(id)s)) { SetGLError(GL_INVALID_OPERATION); return;
9fa7a2b02d77ec300c60a11cdfaf23fc9c50a619 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/9fa7a2b02d77ec300c60a11cdfaf23fc9c50a619/build_gles2_cmd_buffer.py
id_allocator_.MarkAsUsed(%(id)s);
%(lc_type)s_id_allocator_.MarkAsUsed(%(id)s);
def WriteGLES2ImplementationHeader(self, func, file): """Writes the GLES2 Implemention.""" impl_func = func.GetInfo('impl_func') impl_decl = func.GetInfo('impl_decl') if (func.can_auto_generate and (impl_func == None or impl_func == True) and (impl_decl == None or impl_decl == True)): file.Write("%s %s(%s) {\n" % (func.return_type, func.original_name, func.MakeTypedOriginalArgString(""))) for arg in func.GetOriginalArgs(): arg.WriteClientSideValidationCode(file) code = """ if (IsReservedId(%(id)s)) { SetGLError(GL_INVALID_OPERATION); return;
9fa7a2b02d77ec300c60a11cdfaf23fc9c50a619 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/9fa7a2b02d77ec300c60a11cdfaf23fc9c50a619/build_gles2_cmd_buffer.py
file.Write(" if (!GenGLObjects<GL%sHelper>(n, %s)) {\n"
file.Write(" if (!%sHelper(n, %s)) {\n"
def WriteHandlerImplementation (self, func, file): """Overrriden from TypeHandler.""" file.Write(" if (!GenGLObjects<GL%sHelper>(n, %s)) {\n" " return error::kInvalidArguments;\n" " }\n" % (func.name, func.GetLastOriginalArg().name))
9fa7a2b02d77ec300c60a11cdfaf23fc9c50a619 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/9fa7a2b02d77ec300c60a11cdfaf23fc9c50a619/build_gles2_cmd_buffer.py
file.Write(" if (!GenGLObjects<GL%sHelper>(n, %s)) {\n"
file.Write(" if (!%sHelper(n, %s)) {\n"
def WriteImmediateHandlerImplementation(self, func, file): """Overrriden from TypeHandler.""" file.Write(" if (!GenGLObjects<GL%sHelper>(n, %s)) {\n" " return error::kInvalidArguments;\n" " }\n" % (func.original_name, func.GetLastOriginalArg().name))
9fa7a2b02d77ec300c60a11cdfaf23fc9c50a619 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/9fa7a2b02d77ec300c60a11cdfaf23fc9c50a619/build_gles2_cmd_buffer.py
file.Write("%s %s(%s) {\n" % (func.return_type, func.original_name, func.MakeTypedOriginalArgString(""))) file.Write(" MakeIds(%s);\n" % func.MakeOriginalArgString("")) file.Write(" helper_->%sImmediate(%s);\n" % (func.name, func.MakeOriginalArgString(""))) file.Write("}\n") file.Write("\n")
code = """%(return_type)s %(name)s(%(typed_args)s) { MakeIds(&%(resource_type)s_id_allocator_, %(args)s); helper_->%(name)sImmediate(%(args)s); } """ file.Write(code % { 'return_type': func.return_type, 'name': func.original_name, 'typed_args': func.MakeTypedOriginalArgString(""), 'args': func.MakeOriginalArgString(""), 'resource_type': func.name[3:-1].lower() })
def WriteGLES2ImplementationHeader(self, func, file): """Overrriden from TypeHandler.""" file.Write("%s %s(%s) {\n" % (func.return_type, func.original_name, func.MakeTypedOriginalArgString(""))) file.Write(" MakeIds(%s);\n" % func.MakeOriginalArgString("")) file.Write(" helper_->%sImmediate(%s);\n" % (func.name, func.MakeOriginalArgString(""))) file.Write("}\n") file.Write("\n")
9fa7a2b02d77ec300c60a11cdfaf23fc9c50a619 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/9fa7a2b02d77ec300c60a11cdfaf23fc9c50a619/build_gles2_cmd_buffer.py
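Several of these build_gles2_cmd_buffer.py rows derive a per-resource id-allocator name by slicing the GL function name, e.g. func.name[3:-1].lower() turning 'GenBuffers' into 'buffer', and feed it into a dict-formatted template. A small sketch of that naming plus the template it fills; write_gen_function and its arguments are stand-ins for the real generator objects.

GEN_TEMPLATE = """%(return_type)s %(name)s(%(typed_args)s) {
  MakeIds(&%(resource_type)s_id_allocator_, %(args)s);
  helper_->%(name)sImmediate(%(args)s);
}
"""

def resource_type_from_gen_name(name):
    # 'GenBuffers' -> 'buffer', 'GenTextures' -> 'texture':
    # drop the leading 'Gen' and the trailing plural 's', then lowercase.
    return name[3:-1].lower()

def write_gen_function(out, name, typed_args, args):
    # 'out' is anything with write(), standing in for the generated client
    # implementation header.
    out.write(GEN_TEMPLATE % {
        'return_type': 'void',
        'name': name,
        'typed_args': typed_args,
        'args': args,
        'resource_type': resource_type_from_gen_name(name),
    })

assert resource_type_from_gen_name('GenBuffers') == 'buffer'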
EXPECT_EQ(GetServiceId(kNewClientId), kNewServiceId);
EXPECT_TRUE(Get%(resource_name)sInfo(kNewClientId) != NULL);
def WriteServiceUnitTest(self, func, file): """Overrriden from TypeHandler.""" valid_test = """
9fa7a2b02d77ec300c60a11cdfaf23fc9c50a619 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/9fa7a2b02d77ec300c60a11cdfaf23fc9c50a619/build_gles2_cmd_buffer.py
self.WriteValidUnitTest(func, file, valid_test)
self.WriteValidUnitTest(func, file, valid_test, { 'resource_name': func.name[3:-1], })
def WriteServiceUnitTest(self, func, file): """Overrriden from TypeHandler.""" valid_test = """
9fa7a2b02d77ec300c60a11cdfaf23fc9c50a619 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/9fa7a2b02d77ec300c60a11cdfaf23fc9c50a619/build_gles2_cmd_buffer.py
EXPECT_EQ(GetServiceId(kNewClientId), kNewServiceId);
EXPECT_TRUE(Get%(resource_name)sInfo(kNewClientId) != NULL);
def WriteImmediateServiceUnitTest(self, func, file): """Overrriden from TypeHandler.""" valid_test = """
9fa7a2b02d77ec300c60a11cdfaf23fc9c50a619 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/9fa7a2b02d77ec300c60a11cdfaf23fc9c50a619/build_gles2_cmd_buffer.py
self.WriteValidUnitTest(func, file, valid_test)
self.WriteValidUnitTest(func, file, valid_test, { 'resource_name': func.original_name[3:-1], })
def WriteImmediateServiceUnitTest(self, func, file): """Overrriden from TypeHandler.""" valid_test = """
9fa7a2b02d77ec300c60a11cdfaf23fc9c50a619 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/9fa7a2b02d77ec300c60a11cdfaf23fc9c50a619/build_gles2_cmd_buffer.py
EXPECT_EQ(GetServiceId(kNewClientId), kNewServiceId);
EXPECT_TRUE(Get%(resource_type)sInfo(kNewClientId) != NULL);
def WriteServiceUnitTest(self, func, file): """Overrriden from TypeHandler.""" valid_test = """
9fa7a2b02d77ec300c60a11cdfaf23fc9c50a619 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/9fa7a2b02d77ec300c60a11cdfaf23fc9c50a619/build_gles2_cmd_buffer.py
file.Write(" %sHelper(%s);\n" %
file.Write(" if (!%sHelper(%s)) {\n" %
def WriteHandlerImplementation (self, func, file): """Overrriden from TypeHandler.""" file.Write(" uint32 client_id = c.client_id;\n") file.Write(" %sHelper(%s);\n" % (func.name, func.MakeCmdArgString("")))
9fa7a2b02d77ec300c60a11cdfaf23fc9c50a619 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/9fa7a2b02d77ec300c60a11cdfaf23fc9c50a619/build_gles2_cmd_buffer.py
file.Write(" MakeIds(1, &client_id);\n")
file.Write(" MakeIds(&program_and_shader_id_allocator_, 1, &client_id);\n")
def WriteGLES2ImplementationHeader(self, func, file): """Overrriden from TypeHandler.""" file.Write("%s %s(%s) {\n" % (func.return_type, func.original_name, func.MakeTypedOriginalArgString(""))) file.Write(" GLuint client_id;\n") file.Write(" MakeIds(1, &client_id);\n") file.Write(" helper_->%s(%s);\n" % (func.name, func.MakeCmdArgString(""))) file.Write(" return client_id;\n") file.Write("}\n") file.Write("\n")
9fa7a2b02d77ec300c60a11cdfaf23fc9c50a619 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/9fa7a2b02d77ec300c60a11cdfaf23fc9c50a619/build_gles2_cmd_buffer.py
EXPECT_EQ(GetServiceId(kNewClientId), 0u);
EXPECT_TRUE( Get%(upper_resource_name)sInfo(client_%(resource_name)s_id_) == NULL);
def WriteServiceUnitTest(self, func, file): """Overrriden from TypeHandler.""" valid_test = """
9fa7a2b02d77ec300c60a11cdfaf23fc9c50a619 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/9fa7a2b02d77ec300c60a11cdfaf23fc9c50a619/build_gles2_cmd_buffer.py
EXPECT_CALL(*gl_, %(gl_func_name)s(1, Pointee(0))) .Times(1);
def WriteServiceUnitTest(self, func, file): """Overrriden from TypeHandler.""" valid_test = """
9fa7a2b02d77ec300c60a11cdfaf23fc9c50a619 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/9fa7a2b02d77ec300c60a11cdfaf23fc9c50a619/build_gles2_cmd_buffer.py
EXPECT_EQ(GetServiceId(kNewClientId), 0u);
EXPECT_TRUE( Get%(upper_resource_name)sInfo(client_%(resource_name)s_id_) == NULL);
def WriteImmediateServiceUnitTest(self, func, file): """Overrriden from TypeHandler.""" valid_test = """
9fa7a2b02d77ec300c60a11cdfaf23fc9c50a619 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/9fa7a2b02d77ec300c60a11cdfaf23fc9c50a619/build_gles2_cmd_buffer.py
EXPECT_CALL(*gl_, %(gl_func_name)s(1, Pointee(0))) .Times(1);
def WriteImmediateServiceUnitTest(self, func, file): """Overrriden from TypeHandler.""" valid_test = """
9fa7a2b02d77ec300c60a11cdfaf23fc9c50a619 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/9fa7a2b02d77ec300c60a11cdfaf23fc9c50a619/build_gles2_cmd_buffer.py
file.Write(" DeleteGLObjects<GL%sHelper>(n, %s);\n" %
file.Write(" %sHelper(n, %s);\n" %
def WriteHandlerImplementation (self, func, file): """Overrriden from TypeHandler.""" file.Write(" DeleteGLObjects<GL%sHelper>(n, %s);\n" % (func.name, func.GetLastOriginalArg().name))
9fa7a2b02d77ec300c60a11cdfaf23fc9c50a619 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/9fa7a2b02d77ec300c60a11cdfaf23fc9c50a619/build_gles2_cmd_buffer.py
file.Write(" DeleteGLObjects<GL%sHelper>(n, %s);\n" %
file.Write(" %sHelper(n, %s);\n" %
def WriteImmediateHandlerImplementation (self, func, file): """Overrriden from TypeHandler.""" file.Write(" DeleteGLObjects<GL%sHelper>(n, %s);\n" % (func.original_name, func.GetLastOriginalArg().name))
9fa7a2b02d77ec300c60a11cdfaf23fc9c50a619 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/9fa7a2b02d77ec300c60a11cdfaf23fc9c50a619/build_gles2_cmd_buffer.py
file.Write(" FreeIds(%s);\n" % func.MakeOriginalArgString(""))
file.Write(" FreeIds(&%s_id_allocator_, %s);\n" % (func.name[6:-1].lower(), func.MakeOriginalArgString("")))
def WriteGLES2ImplementationHeader(self, func, file): """Overrriden from TypeHandler.""" impl_decl = func.GetInfo('impl_decl') if impl_decl == None or impl_decl == True: file.Write("%s %s(%s) {\n" % (func.return_type, func.original_name, func.MakeTypedOriginalArgString(""))) file.Write(" FreeIds(%s);\n" % func.MakeOriginalArgString("")) file.Write(" helper_->%sImmediate(%s);\n" % (func.name, func.MakeOriginalArgString(""))) file.Write("}\n") file.Write("\n")
9fa7a2b02d77ec300c60a11cdfaf23fc9c50a619 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/9fa7a2b02d77ec300c60a11cdfaf23fc9c50a619/build_gles2_cmd_buffer.py
file.Write( "error::Error GLES2DecoderImpl::Handle%s(\n" % func.name) file.Write( " uint32 immediate_data_size, const gles2::%s& c) {\n" % func.name) args = func.GetCmdArgs() id_arg = args[0] bucket_arg = args[1] id_arg.WriteGetCode(file) bucket_arg.WriteGetCode(file) id_arg.WriteValidationCode(file) file.Write(" GLint len = 0;\n") file.Write(" %s(%s, %s, &len);\n" % ( func.GetInfo('get_len_func'), id_arg.name, func.GetInfo('get_len_enum'))) file.Write(" Bucket* bucket = CreateBucket(%s);\n" % bucket_arg.name) file.Write(" bucket->SetSize(len + 1);\n"); file.Write( " %s(%s, len + 1, &len, bucket->GetDataAs<GLchar*>(0, len + 1));\n" % (func.GetGLFunctionName(), id_arg.name)) file.Write(" return error::kNoError;\n") file.Write("}\n") file.Write("\n")
pass
def WriteServiceImplementation(self, func, file): """Overrriden from TypeHandler.""" file.Write( "error::Error GLES2DecoderImpl::Handle%s(\n" % func.name) file.Write( " uint32 immediate_data_size, const gles2::%s& c) {\n" % func.name) args = func.GetCmdArgs() id_arg = args[0] bucket_arg = args[1] id_arg.WriteGetCode(file) bucket_arg.WriteGetCode(file) id_arg.WriteValidationCode(file) file.Write(" GLint len = 0;\n") file.Write(" %s(%s, %s, &len);\n" % ( func.GetInfo('get_len_func'), id_arg.name, func.GetInfo('get_len_enum'))) file.Write(" Bucket* bucket = CreateBucket(%s);\n" % bucket_arg.name) file.Write(" bucket->SetSize(len + 1);\n"); file.Write( " %s(%s, len + 1, &len, bucket->GetDataAs<GLchar*>(0, len + 1));\n" % (func.GetGLFunctionName(), id_arg.name)) file.Write(" return error::kNoError;\n") file.Write("}\n") file.Write("\n")
9fa7a2b02d77ec300c60a11cdfaf23fc9c50a619 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/9fa7a2b02d77ec300c60a11cdfaf23fc9c50a619/build_gles2_cmd_buffer.py
file.Write(" %s %s;\n" % (self.type, self.name)) file.Write(" if (!id_manager()->GetServiceId(c.%s, &%s)) {\n" % (self.name, self.name)) file.Write(" SetGLError(GL_INVALID_VALUE);\n") file.Write(" return error::kNoError;\n") file.Write(" }\n")
file.Write(" %s %s = c.%s;\n" % (self.type, self.name, self.name))
def WriteGetCode(self, file): """Overridden from Argument.""" file.Write(" %s %s;\n" % (self.type, self.name)) file.Write(" if (!id_manager()->GetServiceId(c.%s, &%s)) {\n" % (self.name, self.name)) file.Write(" SetGLError(GL_INVALID_VALUE);\n") file.Write(" return error::kNoError;\n") file.Write(" }\n")
9fa7a2b02d77ec300c60a11cdfaf23fc9c50a619 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/9fa7a2b02d77ec300c60a11cdfaf23fc9c50a619/build_gles2_cmd_buffer.py
def GetNumInvalidValues(self, func): """returns the number of invalid values to be tested.""" return 1 def GetInvalidArg(self, offset, index): """returns an invalid value by index.""" if self.resource_type == "Texture": return ("client_buffer_id_", "kNoError", "GL_INVALID_OPERATION") return ("client_texture_id_", "kNoError", "GL_INVALID_OPERATION")
def GetNumInvalidValues(self, func): """returns the number of invalid values to be tested.""" return 1
9fa7a2b02d77ec300c60a11cdfaf23fc9c50a619 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/9fa7a2b02d77ec300c60a11cdfaf23fc9c50a619/build_gles2_cmd_buffer.py
file.Write(" if (%s != 0 && !id_manager()->GetServiceId(%s, &%s)) {\n" % (self.name, self.name, self.name)) file.Write(" SetGLError(GL_INVALID_VALUE);\n") file.Write(" return error::kNoError;\n") file.Write(" }\n")
def WriteGetCode(self, file): """Overridden from Argument.""" file.Write(" %s %s = c.%s;\n" % (self.type, self.name, self.name)) file.Write(" if (%s != 0 && !id_manager()->GetServiceId(%s, &%s)) {\n" % (self.name, self.name, self.name)) file.Write(" SetGLError(GL_INVALID_VALUE);\n") file.Write(" return error::kNoError;\n") file.Write(" }\n")
9fa7a2b02d77ec300c60a11cdfaf23fc9c50a619 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/9fa7a2b02d77ec300c60a11cdfaf23fc9c50a619/build_gles2_cmd_buffer.py
if self.resource_type == "Texture": return ("client_buffer_id_", "kNoError", "GL_INVALID_OPERATION") return ("client_texture_id_", "kNoError", "GL_INVALID_OPERATION")
return ("kInvalidClientId", "kNoError", "GL_INVALID_VALUE")
def GetInvalidArg(self, offset, index): """returns an invalid value by index.""" if self.resource_type == "Texture": return ("client_buffer_id_", "kNoError", "GL_INVALID_OPERATION") return ("client_texture_id_", "kNoError", "GL_INVALID_OPERATION")
9fa7a2b02d77ec300c60a11cdfaf23fc9c50a619 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/5060/9fa7a2b02d77ec300c60a11cdfaf23fc9c50a619/build_gles2_cmd_buffer.py