rem (stringlengths 1-322k) | add (stringlengths 0-2.05M) | context (stringlengths 4-228k) | meta (stringlengths 156-215) |
---|---|---|---|
try: str(u'Hello \u00E1') except Exception, e: sampleUnicodeEncodeError = e try: unicode('\xff') except Exception, e: sampleUnicodeDecodeError = e | def testAttributes(self): # test that exception attributes are happy try: str(u'Hello \u00E1') except Exception, e: sampleUnicodeEncodeError = e | 0ab94afc7a8f9a3238729fdb056a83090166af3f /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/0ab94afc7a8f9a3238729fdb056a83090166af3f/test_exceptions.py |
(sampleUnicodeEncodeError, {'message' : '', 'args' : ('ascii', u'Hello \xe1', 6, 7, 'ordinal not in range(128)'), 'encoding' : 'ascii', 'object' : u'Hello \xe1', 'start' : 6, 'reason' : 'ordinal not in range(128)'}), (sampleUnicodeDecodeError, | (UnicodeEncodeError, ('ascii', u'a', 0, 1, 'ordinal not in range'), {'message' : '', 'args' : ('ascii', u'a', 0, 1, 'ordinal not in range'), 'encoding' : 'ascii', 'object' : u'a', 'start' : 0, 'reason' : 'ordinal not in range'}), (UnicodeDecodeError, ('ascii', '\xff', 0, 1, 'ordinal not in range'), | def testAttributes(self): # test that exception attributes are happy try: str(u'Hello \u00E1') except Exception, e: sampleUnicodeEncodeError = e | 0ab94afc7a8f9a3238729fdb056a83090166af3f /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/0ab94afc7a8f9a3238729fdb056a83090166af3f/test_exceptions.py |
'ordinal not in range(128)'), | 'ordinal not in range'), | def testAttributes(self): # test that exception attributes are happy try: str(u'Hello \u00E1') except Exception, e: sampleUnicodeEncodeError = e | 0ab94afc7a8f9a3238729fdb056a83090166af3f /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/0ab94afc7a8f9a3238729fdb056a83090166af3f/test_exceptions.py |
'start' : 0, 'reason' : 'ordinal not in range(128)'}), | 'start' : 0, 'reason' : 'ordinal not in range'}), | def testAttributes(self): # test that exception attributes are happy try: str(u'Hello \u00E1') except Exception, e: sampleUnicodeEncodeError = e | 0ab94afc7a8f9a3238729fdb056a83090166af3f /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/0ab94afc7a8f9a3238729fdb056a83090166af3f/test_exceptions.py |
for args in exceptionList: expected = args[-1] try: exc = args[0] if len(args) == 2: raise exc else: raise exc(*args[1]) | for exc, args, expected in exceptionList: try: raise exc(*args) | def testAttributes(self): # test that exception attributes are happy try: str(u'Hello \u00E1') except Exception, e: sampleUnicodeEncodeError = e | 0ab94afc7a8f9a3238729fdb056a83090166af3f /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/0ab94afc7a8f9a3238729fdb056a83090166af3f/test_exceptions.py |
if (e is not exc and type(e) is not exc): | if type(e) is not exc: | def testAttributes(self): # test that exception attributes are happy try: str(u'Hello \u00E1') except Exception, e: sampleUnicodeEncodeError = e | 0ab94afc7a8f9a3238729fdb056a83090166af3f /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/0ab94afc7a8f9a3238729fdb056a83090166af3f/test_exceptions.py |
if timer is None: if os.name == 'mac': | if not timer: if _has_res: self.timer = resgetrusage self.dispatcher = self.trace_dispatch self.get_time = _get_time_resource elif os.name == 'mac': | def __init__(self, timer=None, bias=None): self.timings = {} self.cur = None self.cmd = "" self.c_func_name = "" | 31fcde5b2c5e001a0082c7c3281ca8b1a7fcc545 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/31fcde5b2c5e001a0082c7c3281ca8b1a7fcc545/profile.py |
self._ssnd_chunk.setpos(pos + 8) | self._ssnd_chunk.seek(pos + 8) | def readframes(self, nframes): if self._ssnd_seek_needed: self._ssnd_chunk.seek(0) dummy = self._ssnd_chunk.read(8) pos = self._soundpos * self._framesize if pos: self._ssnd_chunk.setpos(pos + 8) self._ssnd_seek_needed = 0 if nframes == 0: return '' data = self._ssnd_chunk.read(nframes * self._framesize) if self._convert and data: data = self._convert(data) self._soundpos = self._soundpos + len(data) / (self._nchannels * self._sampwidth) return data | 2b7c6c6aba980bcafd52088c7232b07929cae35a /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/2b7c6c6aba980bcafd52088c7232b07929cae35a/aifc.py |
start = min(start, len(lines) - context) | start = max(0, min(start, len(lines) - context)) | def getframeinfo(frame, context=1): """Get information about a frame or traceback object. A tuple of five things is returned: the filename, the line number of the current line, the function name, a list of lines of context from the source code, and the index of the current line within that list. The optional second argument specifies the number of lines of context to return, which are centered around the current line.""" if istraceback(frame): lineno = frame.tb_lineno frame = frame.tb_frame else: lineno = frame.f_lineno if not isframe(frame): raise TypeError('arg is not a frame or traceback object') filename = getsourcefile(frame) or getfile(frame) if context > 0: start = lineno - 1 - context//2 try: lines, lnum = findsource(frame) except IOError: lines = index = None else: start = max(start, 1) start = min(start, len(lines) - context) lines = lines[start:start+context] index = lineno - 1 - start else: lines = index = None return (filename, lineno, frame.f_code.co_name, lines, index) | e5411c6def5dc1a466ad62449156bb020aabbab5 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/e5411c6def5dc1a466ad62449156bb020aabbab5/inspect.py |
print "db.h: found", db_ver, "in", d | if db_setup_debug: print "db.h: found", db_ver, "in", d | def detect_modules(self): # Ensure that /usr/local is always used add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib') add_dir_to_list(self.compiler.include_dirs, '/usr/local/include') | 41fa2fc8450be53624a99dd73f70788ff44dca9e /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/41fa2fc8450be53624a99dd73f70788ff44dca9e/setup.py |
print "db lib: using", db_ver, dblib if db_setup_debug: print "db: lib dir", dblib_dir, "inc dir", db_incdir | if db_setup_debug: print "db lib: using", db_ver, dblib print "db: lib dir", dblib_dir, "inc dir", db_incdir | def detect_modules(self): # Ensure that /usr/local is always used add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib') add_dir_to_list(self.compiler.include_dirs, '/usr/local/include') | 41fa2fc8450be53624a99dd73f70788ff44dca9e /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/41fa2fc8450be53624a99dd73f70788ff44dca9e/setup.py |
sqlite_setup_debug = True | sqlite_setup_debug = False | def detect_modules(self): # Ensure that /usr/local is always used add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib') add_dir_to_list(self.compiler.include_dirs, '/usr/local/include') | 41fa2fc8450be53624a99dd73f70788ff44dca9e /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/41fa2fc8450be53624a99dd73f70788ff44dca9e/setup.py |
print "%s/sqlite3.h: version %s"%(d, sqlite_version) | if sqlite_setup_debug: print "%s/sqlite3.h: version %s"%(d, sqlite_version) | def detect_modules(self): # Ensure that /usr/local is always used add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib') add_dir_to_list(self.compiler.include_dirs, '/usr/local/include') | 41fa2fc8450be53624a99dd73f70788ff44dca9e /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/41fa2fc8450be53624a99dd73f70788ff44dca9e/setup.py |
if self.license: self.licence = 1 | def parse_command_line (self, args): """Parse the setup script's command line: set any Distribution attributes tied to command-line options, create all command objects, and set their options from the command-line. 'args' must be a list of command-line arguments, most likely 'sys.argv[1:]' (see the 'setup()' function). This list is first processed for "global options" -- options that set attributes of the Distribution instance. Then, it is alternately scanned for Distutils command and options for that command. Each new command terminates the options for the previous command. The allowed options for a command are determined by the 'options' attribute of the command object -- thus, we instantiate (and cache) every command object here, in order to access its 'options' attribute. Any error in that 'options' attribute raises DistutilsGetoptError; any error on the command-line raises DistutilsArgError. If no Distutils commands were found on the command line, raises DistutilsArgError. Return true if command-line successfully parsed and we should carry on with executing commands; false if no errors but we shouldn't execute commands (currently, this only happens if user asks for help).""" | b5cf2663039d3428dbbcf860e4999fbe3561b2ab /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/b5cf2663039d3428dbbcf860e4999fbe3561b2ab/dist.py |
self.write(STOP) def dump_special(self, callable, args, state = None): if type(args) is not TupleType and args is not None: raise PicklingError, "Second argument to dump_special " \ "must be a tuple" self.save_reduce(callable, args, state) | def dump(self, object): self.save(object) self.write(STOP) | b71c45dff2998a741337d10d17805f110aef88be /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/b71c45dff2998a741337d10d17805f110aef88be/pickle.py |
def save_float(self, object): self.write(FLOAT + `object` + '\n') | def save_float(self, object, pack=struct.pack): if self.bin: self.write(BINFLOAT + pack('>d', object)) else: self.write(FLOAT + `object` + '\n') | def save_float(self, object): self.write(FLOAT + `object` + '\n') | b71c45dff2998a741337d10d17805f110aef88be /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/b71c45dff2998a741337d10d17805f110aef88be/pickle.py |
return self.reader.next() | data = self.reader.next() data, bytesencoded = self.encode(data, self.errors) return data | def next(self): | c9230f90ba16cbac6ae85ac9a6fc2d7245d03482 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/c9230f90ba16cbac6ae85ac9a6fc2d7245d03482/codecs.py |
msg['Content-Transfer-Encoding'] = '8bit' | charset = msg.get_charset() output_cset = charset and charset.output_charset if output_cset and output_cset.lower().startswith('iso-2202-'): msg['Content-Transfer-Encoding'] = '7bit' else: msg['Content-Transfer-Encoding'] = '8bit' | def encode_7or8bit(msg): """Set the Content-Transfer-Encoding header to 7bit or 8bit.""" orig = msg.get_payload() if orig is None: # There's no payload. For backwards compatibility we use 7bit msg['Content-Transfer-Encoding'] = '7bit' return # We play a trick to make this go fast. If encoding to ASCII succeeds, we # know the data must be 7bit, otherwise treat it as 8bit. try: orig.encode('ascii') except UnicodeError: msg['Content-Transfer-Encoding'] = '8bit' else: msg['Content-Transfer-Encoding'] = '7bit' | eeaee2e300e46ee84343b5514c2ecab3c6138995 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/eeaee2e300e46ee84343b5514c2ecab3c6138995/Encoders.py |
print '-%20.20s %20.20 %-30.30s'%(f, d[5:], s) | print '-%20.20s %20.20s %-30.30s'%(f, d[5:], s) | def _test(): import time import sys import string import os args = sys.argv[1:] if not args: for key in 'MAILDIR', 'MAIL', 'LOGNAME', 'USER': if os.environ.has_key(key): mbox = os.environ[key] break else: print "$MAIL, $LOGNAME nor $USER set -- who are you?" return else: mbox = args[0] if mbox[:1] == '+': mbox = os.environ['HOME'] + '/Mail/' + mbox[1:] elif not '/' in mbox: mbox = '/usr/mail/' + mbox if os.path.isdir(mbox): if os.path.isdir(os.path.join(mbox, 'cur')): mb = Maildir(mbox) else: mb = MHMailbox(mbox) else: fp = open(mbox, 'r') mb = UnixMailbox(fp) msgs = [] while 1: msg = mb.next() if msg is None: break msgs.append(msg) if len(args) <= 1: msg.fp = None if len(args) > 1: num = string.atoi(args[1]) print 'Message %d body:'%num msg = msgs[num-1] msg.rewindbody() sys.stdout.write(msg.fp.read()) else: print 'Mailbox',mbox,'has',len(msgs),'messages:' for msg in msgs: f = msg.getheader('from') or "" s = msg.getheader('subject') or "" d = msg.getheader('date') or "" print '-%20.20s %20.20 %-30.30s'%(f, d[5:], s) | 779c338bfa7e134db9e4ce8d79173cd80f4f652e /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/779c338bfa7e134db9e4ce8d79173cd80f4f652e/mailbox.py |
self.pimpinstaller = pimp.PimpInstaller(self.pimpdb) | def setuppimp(self, url): self.pimpprefs = pimp.PimpPreferences() self.pimpdb = pimp.PimpDatabase(self.pimpprefs) self.pimpinstaller = pimp.PimpInstaller(self.pimpdb) if not url: url = self.pimpprefs.pimpDatabase try: self.pimpdb.appendURL(url) except IOError, arg: rv = "Cannot open %s: %s\n" % (url, arg) rv += "\nSee MacPython Package Manager help page." return rv except: rv = "Unspecified error while parsing database: %s\n" % url rv += "Usually, this means the database is not correctly formatted.\n" rv += "\nSee MacPython Package Manager help page." return rv # Check whether we can write the installation directory. # If not, set to the per-user directory, possibly # creating it, if needed. installDir = self.pimpprefs.installDir if not os.access(installDir, os.R_OK|os.W_OK|os.X_OK): rv = self.setuserinstall(1) if rv: return rv return self.pimpprefs.check() | 8a7c1c518eafd321426cd09e70c7c4c0ea5256ab /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/8a7c1c518eafd321426cd09e70c7c4c0ea5256ab/PackageManager.py |
self.pimpinstaller = None | def closepimp(self): self.pimpdb.close() self.pimpprefs = None self.pimpdb = None self.pimpinstaller = None self.packages = [] | 8a7c1c518eafd321426cd09e70c7c4c0ea5256ab /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/8a7c1c518eafd321426cd09e70c7c4c0ea5256ab/PackageManager.py |
list, messages = self.pimpinstaller.prepareInstall(pkg, force, recursive) | pimpinstaller = pimp.PimpInstaller(self.pimpdb) list, messages = pimpinstaller.prepareInstall(pkg, force, recursive) | def installpackage(self, sel, output, recursive, force): pkg = self.packages[sel] list, messages = self.pimpinstaller.prepareInstall(pkg, force, recursive) if messages: return messages messages = self.pimpinstaller.install(list, output) return messages | 8a7c1c518eafd321426cd09e70c7c4c0ea5256ab /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/8a7c1c518eafd321426cd09e70c7c4c0ea5256ab/PackageManager.py |
messages = self.pimpinstaller.install(list, output) | messages = pimpinstaller.install(list, output) | def installpackage(self, sel, output, recursive, force): pkg = self.packages[sel] list, messages = self.pimpinstaller.prepareInstall(pkg, force, recursive) if messages: return messages messages = self.pimpinstaller.install(list, output) return messages | 8a7c1c518eafd321426cd09e70c7c4c0ea5256ab /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/8a7c1c518eafd321426cd09e70c7c4c0ea5256ab/PackageManager.py |
self.libs = None | self.libraries = None | self.undef = None | b88ce739cc9c3e3ea5ab098a7c55dd9a8e7bd501 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/b88ce739cc9c3e3ea5ab098a7c55dd9a8e7bd501/build_ext.py |
if type (self.libs) is StringType: self.libs = [self.libs] | if type (self.libraries) is StringType: self.libraries = [self.libraries] | def finalize_options (self): from distutils import sysconfig | b88ce739cc9c3e3ea5ab098a7c55dd9a8e7bd501 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/b88ce739cc9c3e3ea5ab098a7c55dd9a8e7bd501/build_ext.py |
if self.libs is not None: self.compiler.set_libraries (self.libs) | if self.libraries is not None: self.compiler.set_libraries (self.libraries) | if self.undef is not None: for macro in self.undef: self.compiler.undefine_macro (macro) | b88ce739cc9c3e3ea5ab098a7c55dd9a8e7bd501 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/b88ce739cc9c3e3ea5ab098a7c55dd9a8e7bd501/build_ext.py |
if self.distribution.libraries: build_clib = self.find_peer ('build_clib') self.libraries = build_clib.get_library_names () or [] self.library_dirs = [build_clib.build_clib] else: self.libraries = [] self.library_dirs = [] | if self.undef is not None: for macro in self.undef: self.compiler.undefine_macro (macro) | b88ce739cc9c3e3ea5ab098a7c55dd9a8e7bd501 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/b88ce739cc9c3e3ea5ab098a7c55dd9a8e7bd501/build_ext.py |
libraries = (self.libraries + (build_info.get ('libraries') or [])) library_dirs = (self.library_dirs + (build_info.get ('library_dirs') or [])) | libraries = build_info.get ('libraries') library_dirs = build_info.get ('library_dirs') rpath = build_info.get ('rpath') | def build_extensions (self, extensions): | b88ce739cc9c3e3ea5ab098a7c55dd9a8e7bd501 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/b88ce739cc9c3e3ea5ab098a7c55dd9a8e7bd501/build_ext.py |
"math.floor(huge)", "math.floor(mhuge)", "float(shuge) == int(shuge)"]: | "math.floor(huge)", "math.floor(mhuge)"]: | def test_float_overflow(): import math if verbose: print "long->float overflow" for x in -2.0, -1.0, 0.0, 1.0, 2.0: verify(float(long(x)) == x) shuge = '12345' * 120 huge = 1L << 30000 mhuge = -huge namespace = {'huge': huge, 'mhuge': mhuge, 'shuge': shuge, 'math': math} for test in ["float(huge)", "float(mhuge)", "complex(huge)", "complex(mhuge)", "complex(huge, 1)", "complex(mhuge, 1)", "complex(1, huge)", "complex(1, mhuge)", "1. + huge", "huge + 1.", "1. + mhuge", "mhuge + 1.", "1. - huge", "huge - 1.", "1. - mhuge", "mhuge - 1.", "1. * huge", "huge * 1.", "1. * mhuge", "mhuge * 1.", "1. // huge", "huge // 1.", "1. // mhuge", "mhuge // 1.", "1. / huge", "huge / 1.", "1. / mhuge", "mhuge / 1.", "1. ** huge", "huge ** 1.", "1. ** mhuge", "mhuge ** 1.", "math.sin(huge)", "math.sin(mhuge)", "math.sqrt(huge)", "math.sqrt(mhuge)", # should do better "math.floor(huge)", "math.floor(mhuge)", "float(shuge) == int(shuge)"]: try: eval(test, namespace) except OverflowError: pass else: raise TestFailed("expected OverflowError from %s" % test) | 0ce20fe76112d4e6ed624643ec1eb01c6c456cb0 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/0ce20fe76112d4e6ed624643ec1eb01c6c456cb0/test_long.py |
print __doc__ % globals() | def usage(status, msg=''): if msg: print msg print __doc__ % globals() sys.exit(status) | 1ee02ac8d14d271ef84b6fc6d1242e8698834b8f /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/1ee02ac8d14d271ef84b6fc6d1242e8698834b8f/Main.py |
break | if colordb: break | def main(): try: opts, args = getopt.getopt( sys.argv[1:], 'hd:', ['database=', 'help']) except getopt.error, msg: usage(1, msg) if len(args) == 0: initialcolor = 'grey50' elif len(args) == 1: initialcolor = args[0] else: usage(1) for opt, arg in opts: if opt in ('-h', '--help'): usage(0) elif opt in ('-d', '--database'): RGB_TXT.insert(0, arg) # create the windows and go for f in RGB_TXT: try: colordb = ColorDB.get_colordb(f) break except IOError: pass else: raise IOError('No color database file found') # get triplet for initial color try: red, green, blue = colordb.find_byname(initialcolor) except ColorDB.BadColor: # must be a #rrggbb style color try: red, green, blue = ColorDB.rrggbb_to_triplet(initialcolor) except ColorDB.BadColor: print 'Bad initial color, using default: %s' % initialcolor initialcolor = 'grey50' try: red, green, blue = ColorDB.rrggbb_to_triplet(initialcolor) except ColorDB.BadColor: usage(1, 'Cannot find an initial color to use') # create all output widgets s = Switchboard(colordb) # create the application window decorations app = PyncheWidget(__version__, s) parent = app.parent() s.add_view(StripViewer(s, parent)) s.add_view(ChipViewer(s, parent)) s.add_view(TypeinViewer(s, parent)) s.update_views(red, green, blue) try: app.start() except KeyboardInterrupt: pass | 1ee02ac8d14d271ef84b6fc6d1242e8698834b8f /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/1ee02ac8d14d271ef84b6fc6d1242e8698834b8f/Main.py |
raise IOError('No color database file found') | usage(1, 'No color database file found, see the -d option.') | def main(): try: opts, args = getopt.getopt( sys.argv[1:], 'hd:', ['database=', 'help']) except getopt.error, msg: usage(1, msg) if len(args) == 0: initialcolor = 'grey50' elif len(args) == 1: initialcolor = args[0] else: usage(1) for opt, arg in opts: if opt in ('-h', '--help'): usage(0) elif opt in ('-d', '--database'): RGB_TXT.insert(0, arg) # create the windows and go for f in RGB_TXT: try: colordb = ColorDB.get_colordb(f) break except IOError: pass else: raise IOError('No color database file found') # get triplet for initial color try: red, green, blue = colordb.find_byname(initialcolor) except ColorDB.BadColor: # must be a #rrggbb style color try: red, green, blue = ColorDB.rrggbb_to_triplet(initialcolor) except ColorDB.BadColor: print 'Bad initial color, using default: %s' % initialcolor initialcolor = 'grey50' try: red, green, blue = ColorDB.rrggbb_to_triplet(initialcolor) except ColorDB.BadColor: usage(1, 'Cannot find an initial color to use') # create all output widgets s = Switchboard(colordb) # create the application window decorations app = PyncheWidget(__version__, s) parent = app.parent() s.add_view(StripViewer(s, parent)) s.add_view(ChipViewer(s, parent)) s.add_view(TypeinViewer(s, parent)) s.update_views(red, green, blue) try: app.start() except KeyboardInterrupt: pass | 1ee02ac8d14d271ef84b6fc6d1242e8698834b8f /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/1ee02ac8d14d271ef84b6fc6d1242e8698834b8f/Main.py |
if msilib.msi_type=="Intel64;1033": sqlite_arch = "/ia64" elif msilib.msi_type=="x64;1033": sqlite_arch = "/amd64" else: sqlite_arch = "" lib.add_file(srcdir+"/"+sqlite_dir+sqlite_arch+"/sqlite3.dll") | def add_files(db): cab = CAB("python") tmpfiles = [] # Add all executables, icons, text files into the TARGETDIR component root = PyDirectory(db, cab, None, srcdir, "TARGETDIR", "SourceDir") default_feature.set_current() if not msilib.Win64: root.add_file("PCBuild/w9xpopen.exe") root.add_file("README.txt", src="README") root.add_file("NEWS.txt", src="Misc/NEWS") root.add_file("LICENSE.txt", src="LICENSE") root.start_component("python.exe", keyfile="python.exe") root.add_file("PCBuild/python.exe") root.start_component("pythonw.exe", keyfile="pythonw.exe") root.add_file("PCBuild/pythonw.exe") # msidbComponentAttributesSharedDllRefCount = 8, see "Component Table" dlldir = PyDirectory(db, cab, root, srcdir, "DLLDIR", ".") pydll = "python%s%s.dll" % (major, minor) pydllsrc = srcdir + "/PCBuild/" + pydll dlldir.start_component("DLLDIR", flags = 8, keyfile = pydll, uuid = pythondll_uuid) installer = msilib.MakeInstaller() pyversion = installer.FileVersion(pydllsrc, 0) if not snapshot: # For releases, the Python DLL has the same version as the # installer package. assert pyversion.split(".")[:3] == current_version.split(".") dlldir.add_file("PCBuild/python%s%s.dll" % (major, minor), version=pyversion, language=installer.FileVersion(pydllsrc, 1)) # XXX determine dependencies version, lang = extract_msvcr71() dlldir.start_component("msvcr71", flags=8, keyfile="msvcr71.dll", uuid=msvcr71_uuid) dlldir.add_file("msvcr71.dll", src=os.path.abspath("msvcr71.dll"), version=version, language=lang) tmpfiles.append("msvcr71.dll") # Add all .py files in Lib, except lib-tk, test dirs={} pydirs = [(root,"Lib")] while pydirs: parent, dir = pydirs.pop() if dir == ".svn" or dir.startswith("plat-"): continue elif dir in ["lib-tk", "idlelib", "Icons"]: if not have_tcl: continue tcltk.set_current() elif dir in ['test', 'tests', 'data', 'output']: # test: Lib, Lib/email, Lib/bsddb, Lib/ctypes, Lib/sqlite3 # tests: Lib/distutils # data: Lib/email/test # output: Lib/test testsuite.set_current() else: default_feature.set_current() lib = PyDirectory(db, cab, parent, dir, dir, "%s|%s" % (parent.make_short(dir), dir)) # Add additional files dirs[dir]=lib lib.glob("*.txt") if dir=='site-packages': lib.add_file("README.txt", src="README") continue files = lib.glob("*.py") files += lib.glob("*.pyw") if files: # Add an entry to the RemoveFile table to remove bytecode files. 
lib.remove_pyc() if dir.endswith('.egg-info'): lib.add_file('entry_points.txt') lib.add_file('PKG-INFO') lib.add_file('top_level.txt') lib.add_file('zip-safe') continue if dir=='test' and parent.physical=='Lib': lib.add_file("185test.db") lib.add_file("audiotest.au") lib.add_file("cfgparser.1") lib.add_file("test.xml") lib.add_file("test.xml.out") lib.add_file("testtar.tar") lib.add_file("test_difflib_expect.html") lib.add_file("check_soundcard.vbs") lib.add_file("empty.vbs") lib.glob("*.uue") lib.add_file("readme.txt", src="README") if dir=='decimaltestdata': lib.glob("*.decTest") if dir=='output': lib.glob("test_*") if dir=='idlelib': lib.glob("*.def") lib.add_file("idle.bat") if dir=="Icons": lib.glob("*.gif") lib.add_file("idle.icns") if dir=="command" and parent.physical=="distutils": lib.add_file("wininst-6.exe") lib.add_file("wininst-7.1.exe") if dir=="setuptools": lib.add_file("cli.exe") lib.add_file("gui.exe") if dir=="data" and parent.physical=="test" and parent.basedir.physical=="email": # This should contain all non-.svn files listed in subversion for f in os.listdir(lib.absolute): if f.endswith(".txt") or f==".svn":continue if f.endswith(".au") or f.endswith(".gif"): lib.add_file(f) else: print "WARNING: New file %s in email/test/data" % f for f in os.listdir(lib.absolute): if os.path.isdir(os.path.join(lib.absolute, f)): pydirs.append((lib, f)) # Add DLLs default_feature.set_current() lib = PyDirectory(db, cab, root, srcdir+"/PCBuild", "DLLs", "DLLS|DLLs") lib.add_file("py.ico", src="../PC/py.ico") lib.add_file("pyc.ico", src="../PC/pyc.ico") dlls = [] tclfiles = [] for f in extensions: if f=="_tkinter.pyd": continue if not os.path.exists(srcdir+"/PCBuild/"+f): print "WARNING: Missing extension", f continue dlls.append(f) lib.add_file(f) if have_tcl: if not os.path.exists(srcdir+"/PCBuild/_tkinter.pyd"): print "WARNING: Missing _tkinter.pyd" else: lib.start_component("TkDLLs", tcltk) lib.add_file("_tkinter.pyd") dlls.append("_tkinter.pyd") tcldir = os.path.normpath(srcdir+"/../tcltk/bin") for f in glob.glob1(tcldir, "*.dll"): lib.add_file(f, src=os.path.join(tcldir, f)) # Add sqlite if msilib.msi_type=="Intel64;1033": sqlite_arch = "/ia64" elif msilib.msi_type=="x64;1033": sqlite_arch = "/amd64" else: sqlite_arch = "" lib.add_file(srcdir+"/"+sqlite_dir+sqlite_arch+"/sqlite3.dll") # check whether there are any unknown extensions for f in glob.glob1(srcdir+"/PCBuild", "*.pyd"): if f.endswith("_d.pyd"): continue # debug version if f in dlls: continue print "WARNING: Unknown extension", f # Add headers default_feature.set_current() lib = PyDirectory(db, cab, root, "include", "include", "INCLUDE|include") lib.glob("*.h") lib.add_file("pyconfig.h", src="../PC/pyconfig.h") # Add import libraries lib = PyDirectory(db, cab, root, "PCBuild", "libs", "LIBS|libs") for f in dlls: lib.add_file(f.replace('pyd','lib')) lib.add_file('python%s%s.lib' % (major, minor)) # Add the mingw-format library if have_mingw: lib.add_file('libpython%s%s.a' % (major, minor)) if have_tcl: # Add Tcl/Tk tcldirs = [(root, '../tcltk/lib', 'tcl')] tcltk.set_current() while tcldirs: parent, phys, dir = tcldirs.pop() lib = PyDirectory(db, cab, parent, phys, dir, "%s|%s" % (parent.make_short(dir), dir)) if not os.path.exists(lib.absolute): continue for f in os.listdir(lib.absolute): if os.path.isdir(os.path.join(lib.absolute, f)): tcldirs.append((lib, f, f)) else: lib.add_file(f) # Add tools tools.set_current() tooldir = PyDirectory(db, cab, root, "Tools", "Tools", "TOOLS|Tools") for f in ['i18n', 'pynche', 
'Scripts', 'versioncheck', 'webchecker']: lib = PyDirectory(db, cab, tooldir, f, f, "%s|%s" % (tooldir.make_short(f), f)) lib.glob("*.py") lib.glob("*.pyw", exclude=['pydocgui.pyw']) lib.remove_pyc() lib.glob("*.txt") if f == "pynche": x = PyDirectory(db, cab, lib, "X", "X", "X|X") x.glob("*.txt") if os.path.exists(os.path.join(lib.absolute, "README")): lib.add_file("README.txt", src="README") if f == 'Scripts': if have_tcl: lib.start_component("pydocgui.pyw", tcltk, keyfile="pydocgui.pyw") lib.add_file("pydocgui.pyw") # Add documentation htmlfiles.set_current() lib = PyDirectory(db, cab, root, "Doc", "Doc", "DOC|Doc") lib.start_component("documentation", keyfile="Python%s%s.chm" % (major,minor)) lib.add_file("Python%s%s.chm" % (major, minor)) cab.commit(db) for f in tmpfiles: os.unlink(f) | d0fd5857e7d882bd6ce5e11c648cffa89a2e1684 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/d0fd5857e7d882bd6ce5e11c648cffa89a2e1684/msi.py |
pardir_fsr = Carbon.File.FSRef(fss) | pardir_fsr = Carbon.File.FSRef(pardir_fss) | def AskFileForSave(**args): default_flags = 0x07 args, tpwanted = _process_Nav_args(args, _ALLOWED_KEYS, default_flags) try: rr = Nav.NavPutFile(args) good = 1 except Nav.error, arg: if arg[0] != -128: # userCancelledErr raise Nav.error, arg return None if not rr.validRecord or not rr.selection: return None if issubclass(tpwanted, Carbon.File.FSRef): raise TypeError, "Cannot pass wanted=FSRef to AskFileForSave" if issubclass(tpwanted, Carbon.File.FSSpec): return tpwanted(rr.selection[0]) if issubclass(tpwanted, (str, unicode)): # This is gross, and probably incorrect too vrefnum, dirid, name = rr.selection[0].as_tuple() pardir_fss = Carbon.File.FSSpec((vrefnum, dirid, '')) pardir_fsr = Carbon.File.FSRef(fss) pardir_path = pardir_fsr.FSRefMakePath() # This is utf-8 name_utf8 = unicode(name, 'macroman').encode('utf8') fullpath = os.path.join(pardir_path, name_utf8) if issubclass(tpwanted, unicode): return unicode(fullpath, 'utf8') return tpwanted(fullpath) raise TypeError, "Unknown value for argument 'wanted': %s" % repr(tpwanted) | 00340bc71b641d03738eb31d4a9d4567682de3c7 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/00340bc71b641d03738eb31d4a9d4567682de3c7/EasyDialogs.py |
return sys.modules[fqname] | module = sys.modules[fqname] module.__name__ = fqname return module | def _process_result(self, (ispkg, code, values), fqname): # did get_code() return an actual module? (rather than a code object) is_module = isinstance(code, _ModuleType) | 1db845d44183885cb9b6d2470e8007e63688c44b /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/1db845d44183885cb9b6d2470e8007e63688c44b/imputil.py |
result = interact(handler.load(), 'System-wide preferences') | options = handler.load() if options['noargs']: EasyDialogs.Message('Warning: system-wide sys.argv processing is off.\nIf you dropped an applet I have not seen it.') result = interact(options, 'System-wide preferences') | def edit_preferences(): handler = pythonprefs.PythonOptions() result = interact(handler.load(), 'System-wide preferences') if result: handler.save(result) | df34f5a8e0a42e0d6825e64214c2eaf4dc105f7f /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/df34f5a8e0a42e0d6825e64214c2eaf4dc105f7f/EditPythonPrefs.py |
import pprint pprint.pprint(self.__dict__) | pass | def report(self): # XXX something decent import pprint pprint.pprint(self.__dict__) | 1d9481c0312908f2dc99ed25ccd52498b1818355 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/1d9481c0312908f2dc99ed25ccd52498b1818355/bundlebuilder.py |
SITE_CO = compile(SITE_PY, "<-bundlebuilder->", "exec") | SITE_CO = compile(SITE_PY, "<-bundlebuilder.py->", "exec") | def report(self): # XXX something decent import pprint pprint.pprint(self.__dict__) | 1d9481c0312908f2dc99ed25ccd52498b1818355 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/1d9481c0312908f2dc99ed25ccd52498b1818355/bundlebuilder.py |
EXECVE_WRAPPER = """\ import os from sys import argv, executable resources = os.path.join(os.path.dirname(os.path.dirname(argv[0])), "Resources") mainprogram = os.path.join(resources, "%(mainprogram)s") assert os.path.exists(mainprogram) argv.insert(1, mainprogram) os.environ["PYTHONPATH"] = resources %(setexecutable)s os.execve(executable, argv, os.environ) | BOOTSTRAP_SCRIPT = """\ execdir=$(dirname ${0}) executable=${execdir}/%(executable)s resdir=$(dirname ${execdir})/Resources main=${resdir}/%(mainprogram)s PYTHONPATH=$resdir export PYTHONPATH exec ${executable} ${main} ${1} | def report(self): # XXX something decent import pprint pprint.pprint(self.__dict__) | 1d9481c0312908f2dc99ed25ccd52498b1818355 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/1d9481c0312908f2dc99ed25ccd52498b1818355/bundlebuilder.py |
setExecutableTemplate = """executable = os.path.join(resources, "%s")""" pythonhomeSnippet = """os.environ["home"] = resources""" | def report(self): # XXX something decent import pprint pprint.pprint(self.__dict__) | 1d9481c0312908f2dc99ed25ccd52498b1818355 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/1d9481c0312908f2dc99ed25ccd52498b1818355/bundlebuilder.py |
if self.executable is None: self.executable = sys.executable | def setup(self): if self.standalone and self.mainprogram is None: raise BundleBuilderError, ("must specify 'mainprogram' when " "building a standalone application.") if self.mainprogram is None and self.executable is None: raise BundleBuilderError, ("must specify either or both of " "'executable' and 'mainprogram'") | 1d9481c0312908f2dc99ed25ccd52498b1818355 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/1d9481c0312908f2dc99ed25ccd52498b1818355/bundlebuilder.py |
execpath = pathjoin(self.execdir, self.name) | execname = self.name | def preProcess(self): resdir = "Contents/Resources" if self.executable is not None: if self.mainprogram is None: execpath = pathjoin(self.execdir, self.name) else: execpath = pathjoin(resdir, os.path.basename(self.executable)) if not self.symlink_exec: self.files.append((self.executable, execpath)) self.binaries.append(execpath) self.execpath = execpath # For execve wrapper setexecutable = setExecutableTemplate % os.path.basename(self.executable) else: setexecutable = "" # XXX for locals() call | 1d9481c0312908f2dc99ed25ccd52498b1818355 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/1d9481c0312908f2dc99ed25ccd52498b1818355/bundlebuilder.py |
execpath = pathjoin(resdir, os.path.basename(self.executable)) | execname = os.path.basename(self.executable) execpath = pathjoin(self.execdir, execname) | def preProcess(self): resdir = "Contents/Resources" if self.executable is not None: if self.mainprogram is None: execpath = pathjoin(self.execdir, self.name) else: execpath = pathjoin(resdir, os.path.basename(self.executable)) if not self.symlink_exec: self.files.append((self.executable, execpath)) self.binaries.append(execpath) self.execpath = execpath # For execve wrapper setexecutable = setExecutableTemplate % os.path.basename(self.executable) else: setexecutable = "" # XXX for locals() call | 1d9481c0312908f2dc99ed25ccd52498b1818355 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/1d9481c0312908f2dc99ed25ccd52498b1818355/bundlebuilder.py |
setexecutable = setExecutableTemplate % os.path.basename(self.executable) else: setexecutable = "" | def preProcess(self): resdir = "Contents/Resources" if self.executable is not None: if self.mainprogram is None: execpath = pathjoin(self.execdir, self.name) else: execpath = pathjoin(resdir, os.path.basename(self.executable)) if not self.symlink_exec: self.files.append((self.executable, execpath)) self.binaries.append(execpath) self.execpath = execpath # For execve wrapper setexecutable = setExecutableTemplate % os.path.basename(self.executable) else: setexecutable = "" # XXX for locals() call | 1d9481c0312908f2dc99ed25ccd52498b1818355 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/1d9481c0312908f2dc99ed25ccd52498b1818355/bundlebuilder.py |
open(mainwrapperpath, "w").write(EXECVE_WRAPPER % locals()) | open(mainwrapperpath, "w").write(BOOTSTRAP_SCRIPT % locals()) | def preProcess(self): resdir = "Contents/Resources" if self.executable is not None: if self.mainprogram is None: execpath = pathjoin(self.execdir, self.name) else: execpath = pathjoin(resdir, os.path.basename(self.executable)) if not self.symlink_exec: self.files.append((self.executable, execpath)) self.binaries.append(execpath) self.execpath = execpath # For execve wrapper setexecutable = setExecutableTemplate % os.path.basename(self.executable) else: setexecutable = "" # XXX for locals() call | 1d9481c0312908f2dc99ed25ccd52498b1818355 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/1d9481c0312908f2dc99ed25ccd52498b1818355/bundlebuilder.py |
if self.missingModules: | if self.missingModules or self.maybeMissingModules: | def postProcess(self): self.addPythonModules() if self.strip and not self.symlink: self.stripBinaries() | 1d9481c0312908f2dc99ed25ccd52498b1818355 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/1d9481c0312908f2dc99ed25ccd52498b1818355/bundlebuilder.py |
self.missingModules.extend(mf.any_missing()) | if hasattr(mf, "any_missing_maybe"): missing, maybe = mf.any_missing_maybe() else: missing = mf.any_missing() maybe = [] self.missingModules.extend(missing) self.maybeMissingModules.extend(maybe) | def findDependencies(self): self.message("Finding module dependencies", 1) import modulefinder mf = modulefinder.ModuleFinder(excludes=self.excludeModules) # manually add our own site.py site = mf.add_module("site") site.__code__ = SITE_CO mf.scan_code(SITE_CO, site) | 1d9481c0312908f2dc99ed25ccd52498b1818355 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/1d9481c0312908f2dc99ed25ccd52498b1818355/bundlebuilder.py |
missingsub = [name for name in missing if "." in name] missing = [name for name in missing if "." not in name] | if self.maybeMissingModules: maybe = self.maybeMissingModules else: maybe = [name for name in missing if "." in name] missing = [name for name in missing if "." not in name] | def reportMissing(self): missing = [name for name in self.missingModules if name not in MAYMISS_MODULES] missingsub = [name for name in missing if "." in name] missing = [name for name in missing if "." not in name] missing.sort() missingsub.sort() if missing: self.message("Warning: couldn't find the following modules:", 1) self.message(" " + ", ".join(missing)) if missingsub: self.message("Warning: couldn't find the following submodules " "(but it's probably OK since modulefinder can't distinguish " "between from \"module import submodule\" and " "\"from module import name\"):", 1) self.message(" " + ", ".join(missingsub)) | 1d9481c0312908f2dc99ed25ccd52498b1818355 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/1d9481c0312908f2dc99ed25ccd52498b1818355/bundlebuilder.py |
missingsub.sort() | maybe.sort() if maybe: self.message("Warning: couldn't find the following submodules:", 1) self.message(" (Note that these could be false alarms -- " "it's not always", 1) self.message(" possible to distinguish between from \"package import submodule\" ", 1) self.message(" and \"from package import name\")", 1) for name in maybe: self.message(" ? " + name, 1) | def reportMissing(self): missing = [name for name in self.missingModules if name not in MAYMISS_MODULES] missingsub = [name for name in missing if "." in name] missing = [name for name in missing if "." not in name] missing.sort() missingsub.sort() if missing: self.message("Warning: couldn't find the following modules:", 1) self.message(" " + ", ".join(missing)) if missingsub: self.message("Warning: couldn't find the following submodules " "(but it's probably OK since modulefinder can't distinguish " "between from \"module import submodule\" and " "\"from module import name\"):", 1) self.message(" " + ", ".join(missingsub)) | 1d9481c0312908f2dc99ed25ccd52498b1818355 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/1d9481c0312908f2dc99ed25ccd52498b1818355/bundlebuilder.py |
self.message(" " + ", ".join(missing)) if missingsub: self.message("Warning: couldn't find the following submodules " "(but it's probably OK since modulefinder can't distinguish " "between from \"module import submodule\" and " "\"from module import name\"):", 1) self.message(" " + ", ".join(missingsub)) | for name in missing: self.message(" ? " + name, 1) def report(self): import pprint pprint.pprint(self.__dict__) if self.standalone: self.reportMissing() | def reportMissing(self): missing = [name for name in self.missingModules if name not in MAYMISS_MODULES] missingsub = [name for name in missing if "." in name] missing = [name for name in missing if "." not in name] missing.sort() missingsub.sort() if missing: self.message("Warning: couldn't find the following modules:", 1) self.message(" " + ", ".join(missing)) if missingsub: self.message("Warning: couldn't find the following submodules " "(but it's probably OK since modulefinder can't distinguish " "between from \"module import submodule\" and " "\"from module import name\"):", 1) self.message(" " + ", ".join(missingsub)) | 1d9481c0312908f2dc99ed25ccd52498b1818355 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/1d9481c0312908f2dc99ed25ccd52498b1818355/bundlebuilder.py |
return self._qsize - self.getfilled() | return (self._qsize / self._nchannels / self._sampwidth) - self.getfilled() | def getfillable(self): return self._qsize - self.getfilled() | be72e118c6c4e6621615492b3a2f596d79a91168 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/be72e118c6c4e6621615492b3a2f596d79a91168/Audio_mac.py |
if op in (LITERAL, NOT_LITERAL): | if op in LITERAL_CODES: | def _compile(code, pattern, flags): # internal: compile a (sub)pattern emit = code.append for op, av in pattern: if op in (LITERAL, NOT_LITERAL): if flags & SRE_FLAG_IGNORECASE: emit(OPCODES[OP_IGNORE[op]]) emit(_sre.getlower(av, flags)) else: emit(OPCODES[op]) emit(av) elif op is IN: if flags & SRE_FLAG_IGNORECASE: emit(OPCODES[OP_IGNORE[op]]) def fixup(literal, flags=flags): return _sre.getlower(literal, flags) else: emit(OPCODES[op]) fixup = lambda x: x skip = len(code); emit(0) _compile_charset(av, flags, code, fixup) code[skip] = len(code) - skip elif op is ANY: if flags & SRE_FLAG_DOTALL: emit(OPCODES[ANY_ALL]) else: emit(OPCODES[ANY]) elif op in (REPEAT, MIN_REPEAT, MAX_REPEAT): if flags & SRE_FLAG_TEMPLATE: raise error, "internal: unsupported template operator" emit(OPCODES[REPEAT]) skip = len(code); emit(0) emit(av[0]) emit(av[1]) _compile(code, av[2], flags) emit(OPCODES[SUCCESS]) code[skip] = len(code) - skip elif _simple(av) and op != REPEAT: if op == MAX_REPEAT: emit(OPCODES[REPEAT_ONE]) else: emit(OPCODES[MIN_REPEAT_ONE]) skip = len(code); emit(0) emit(av[0]) emit(av[1]) _compile(code, av[2], flags) emit(OPCODES[SUCCESS]) code[skip] = len(code) - skip else: emit(OPCODES[REPEAT]) skip = len(code); emit(0) emit(av[0]) emit(av[1]) _compile(code, av[2], flags) code[skip] = len(code) - skip if op == MAX_REPEAT: emit(OPCODES[MAX_UNTIL]) else: emit(OPCODES[MIN_UNTIL]) elif op is SUBPATTERN: if av[0]: emit(OPCODES[MARK]) emit((av[0]-1)*2) # _compile_info(code, av[1], flags) _compile(code, av[1], flags) if av[0]: emit(OPCODES[MARK]) emit((av[0]-1)*2+1) elif op in (SUCCESS, FAILURE): emit(OPCODES[op]) elif op in (ASSERT, ASSERT_NOT): emit(OPCODES[op]) skip = len(code); emit(0) if av[0] >= 0: emit(0) # look ahead else: lo, hi = av[1].getwidth() if lo != hi: raise error, "look-behind requires fixed-width pattern" emit(lo) # look behind _compile(code, av[1], flags) emit(OPCODES[SUCCESS]) code[skip] = len(code) - skip elif op is CALL: emit(OPCODES[op]) skip = len(code); emit(0) _compile(code, av, flags) emit(OPCODES[SUCCESS]) code[skip] = len(code) - skip elif op is AT: emit(OPCODES[op]) if flags & SRE_FLAG_MULTILINE: av = AT_MULTILINE.get(av, av) if flags & SRE_FLAG_LOCALE: av = AT_LOCALE.get(av, av) elif flags & SRE_FLAG_UNICODE: av = AT_UNICODE.get(av, av) emit(ATCODES[av]) elif op is BRANCH: emit(OPCODES[op]) tail = [] for av in av[1]: skip = len(code); emit(0) # _compile_info(code, av, flags) _compile(code, av, flags) emit(OPCODES[JUMP]) tail.append(len(code)); emit(0) code[skip] = len(code) - skip emit(0) # end of branch for tail in tail: code[tail] = len(code) - tail elif op is CATEGORY: emit(OPCODES[op]) if flags & SRE_FLAG_LOCALE: av = CH_LOCALE[av] elif flags & SRE_FLAG_UNICODE: av = CH_UNICODE[av] emit(CHCODES[av]) elif op is GROUPREF: if flags & SRE_FLAG_IGNORECASE: emit(OPCODES[OP_IGNORE[op]]) else: emit(OPCODES[op]) emit(av-1) elif op is GROUPREF_EXISTS: emit(OPCODES[op]) emit((av[0]-1)*2) skipyes = len(code); emit(0) _compile(code, av[1], flags) if av[2]: emit(OPCODES[JUMP]) skipno = len(code); emit(0) code[skipyes] = len(code) - skipyes + 1 _compile(code, av[2], flags) code[skipno] = len(code) - skipno else: code[skipyes] = len(code) - skipyes + 1 else: raise ValueError, ("unsupported operand type", op) | 2177055b902a7e649c7988bb4499ab5115b4ea1d /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/2177055b902a7e649c7988bb4499ab5115b4ea1d/sre_compile.py |
fixup = lambda x: x skip = len(code); emit(0) | fixup = _identityfunction skip = _len(code); emit(0) | def fixup(literal, flags=flags): return _sre.getlower(literal, flags) | 2177055b902a7e649c7988bb4499ab5115b4ea1d /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/2177055b902a7e649c7988bb4499ab5115b4ea1d/sre_compile.py |
code[skip] = len(code) - skip | code[skip] = _len(code) - skip | def fixup(literal, flags=flags): return _sre.getlower(literal, flags) | 2177055b902a7e649c7988bb4499ab5115b4ea1d /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/2177055b902a7e649c7988bb4499ab5115b4ea1d/sre_compile.py |
elif op in (REPEAT, MIN_REPEAT, MAX_REPEAT): | elif op in REPEATING_CODES: | def fixup(literal, flags=flags): return _sre.getlower(literal, flags) | 2177055b902a7e649c7988bb4499ab5115b4ea1d /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/2177055b902a7e649c7988bb4499ab5115b4ea1d/sre_compile.py |
skip = len(code); emit(0) | skip = _len(code); emit(0) | def fixup(literal, flags=flags): return _sre.getlower(literal, flags) | 2177055b902a7e649c7988bb4499ab5115b4ea1d /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/2177055b902a7e649c7988bb4499ab5115b4ea1d/sre_compile.py |
code[skip] = len(code) - skip elif _simple(av) and op != REPEAT: if op == MAX_REPEAT: | code[skip] = _len(code) - skip elif _simple(av) and op is not REPEAT: if op is MAX_REPEAT: | def fixup(literal, flags=flags): return _sre.getlower(literal, flags) | 2177055b902a7e649c7988bb4499ab5115b4ea1d /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/2177055b902a7e649c7988bb4499ab5115b4ea1d/sre_compile.py |
skip = len(code); emit(0) | skip = _len(code); emit(0) | def fixup(literal, flags=flags): return _sre.getlower(literal, flags) | 2177055b902a7e649c7988bb4499ab5115b4ea1d /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/2177055b902a7e649c7988bb4499ab5115b4ea1d/sre_compile.py |
code[skip] = len(code) - skip | code[skip] = _len(code) - skip | def fixup(literal, flags=flags): return _sre.getlower(literal, flags) | 2177055b902a7e649c7988bb4499ab5115b4ea1d /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/2177055b902a7e649c7988bb4499ab5115b4ea1d/sre_compile.py |
skip = len(code); emit(0) | skip = _len(code); emit(0) | def fixup(literal, flags=flags): return _sre.getlower(literal, flags) | 2177055b902a7e649c7988bb4499ab5115b4ea1d /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/2177055b902a7e649c7988bb4499ab5115b4ea1d/sre_compile.py |
code[skip] = len(code) - skip if op == MAX_REPEAT: | code[skip] = _len(code) - skip if op is MAX_REPEAT: | def fixup(literal, flags=flags): return _sre.getlower(literal, flags) | 2177055b902a7e649c7988bb4499ab5115b4ea1d /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/2177055b902a7e649c7988bb4499ab5115b4ea1d/sre_compile.py |
elif op in (SUCCESS, FAILURE): emit(OPCODES[op]) elif op in (ASSERT, ASSERT_NOT): emit(OPCODES[op]) skip = len(code); emit(0) | elif op in SUCCESS_CODES: emit(OPCODES[op]) elif op in ASSERT_CODES: emit(OPCODES[op]) skip = _len(code); emit(0) | def fixup(literal, flags=flags): return _sre.getlower(literal, flags) | 2177055b902a7e649c7988bb4499ab5115b4ea1d /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/2177055b902a7e649c7988bb4499ab5115b4ea1d/sre_compile.py |
code[skip] = len(code) - skip | code[skip] = _len(code) - skip | def fixup(literal, flags=flags): return _sre.getlower(literal, flags) | 2177055b902a7e649c7988bb4499ab5115b4ea1d /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/2177055b902a7e649c7988bb4499ab5115b4ea1d/sre_compile.py |
skip = len(code); emit(0) | skip = _len(code); emit(0) | def fixup(literal, flags=flags): return _sre.getlower(literal, flags) | 2177055b902a7e649c7988bb4499ab5115b4ea1d /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/2177055b902a7e649c7988bb4499ab5115b4ea1d/sre_compile.py |
code[skip] = len(code) - skip | code[skip] = _len(code) - skip | def fixup(literal, flags=flags): return _sre.getlower(literal, flags) | 2177055b902a7e649c7988bb4499ab5115b4ea1d /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/2177055b902a7e649c7988bb4499ab5115b4ea1d/sre_compile.py |
skip = len(code); emit(0) | skip = _len(code); emit(0) | def fixup(literal, flags=flags): return _sre.getlower(literal, flags) | 2177055b902a7e649c7988bb4499ab5115b4ea1d /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/2177055b902a7e649c7988bb4499ab5115b4ea1d/sre_compile.py |
tail.append(len(code)); emit(0) code[skip] = len(code) - skip | tailappend(_len(code)); emit(0) code[skip] = _len(code) - skip | def fixup(literal, flags=flags): return _sre.getlower(literal, flags) | 2177055b902a7e649c7988bb4499ab5115b4ea1d /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/2177055b902a7e649c7988bb4499ab5115b4ea1d/sre_compile.py |
code[tail] = len(code) - tail | code[tail] = _len(code) - tail | def fixup(literal, flags=flags): return _sre.getlower(literal, flags) | 2177055b902a7e649c7988bb4499ab5115b4ea1d /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/2177055b902a7e649c7988bb4499ab5115b4ea1d/sre_compile.py |
skipyes = len(code); emit(0) | skipyes = _len(code); emit(0) | def fixup(literal, flags=flags): return _sre.getlower(literal, flags) | 2177055b902a7e649c7988bb4499ab5115b4ea1d /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/2177055b902a7e649c7988bb4499ab5115b4ea1d/sre_compile.py |
skipno = len(code); emit(0) code[skipyes] = len(code) - skipyes + 1 | skipno = _len(code); emit(0) code[skipyes] = _len(code) - skipyes + 1 | def fixup(literal, flags=flags): return _sre.getlower(literal, flags) | 2177055b902a7e649c7988bb4499ab5115b4ea1d /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/2177055b902a7e649c7988bb4499ab5115b4ea1d/sre_compile.py |
code[skipno] = len(code) - skipno else: code[skipyes] = len(code) - skipyes + 1 | code[skipno] = _len(code) - skipno else: code[skipyes] = _len(code) - skipyes + 1 | def fixup(literal, flags=flags): return _sre.getlower(literal, flags) | 2177055b902a7e649c7988bb4499ab5115b4ea1d /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/2177055b902a7e649c7988bb4499ab5115b4ea1d/sre_compile.py |
fixup = lambda x: x | fixup = _identityfunction | def _compile_charset(charset, flags, code, fixup=None): # compile charset subprogram emit = code.append if fixup is None: fixup = lambda x: x for op, av in _optimize_charset(charset, fixup): emit(OPCODES[op]) if op is NEGATE: pass elif op is LITERAL: emit(fixup(av)) elif op is RANGE: emit(fixup(av[0])) emit(fixup(av[1])) elif op is CHARSET: code.extend(av) elif op is BIGCHARSET: code.extend(av) elif op is CATEGORY: if flags & SRE_FLAG_LOCALE: emit(CHCODES[CH_LOCALE[av]]) elif flags & SRE_FLAG_UNICODE: emit(CHCODES[CH_UNICODE[av]]) else: emit(CHCODES[av]) else: raise error, "internal: unsupported set operator" emit(OPCODES[FAILURE]) | 2177055b902a7e649c7988bb4499ab5115b4ea1d /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/2177055b902a7e649c7988bb4499ab5115b4ea1d/sre_compile.py |
out.append((op, av)) | outappend((op, av)) | def _optimize_charset(charset, fixup): # internal: optimize character set out = [] charmap = [False]*256 try: for op, av in charset: if op is NEGATE: out.append((op, av)) elif op is LITERAL: charmap[fixup(av)] = True elif op is RANGE: for i in range(fixup(av[0]), fixup(av[1])+1): charmap[i] = True elif op is CATEGORY: # XXX: could append to charmap tail return charset # cannot compress except IndexError: # character set contains unicode characters return _optimize_unicode(charset, fixup) # compress character map i = p = n = 0 runs = [] for c in charmap: if c: if n == 0: p = i n = n + 1 elif n: runs.append((p, n)) n = 0 i = i + 1 if n: runs.append((p, n)) if len(runs) <= 2: # use literal/range for p, n in runs: if n == 1: out.append((LITERAL, p)) else: out.append((RANGE, (p, p+n-1))) if len(out) < len(charset): return out else: # use bitmap data = _mk_bitmap(charmap) out.append((CHARSET, data)) return out return charset | 2177055b902a7e649c7988bb4499ab5115b4ea1d /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/2177055b902a7e649c7988bb4499ab5115b4ea1d/sre_compile.py |
runs.append((p, n)) | runsappend((p, n)) | def _optimize_charset(charset, fixup): # internal: optimize character set out = [] charmap = [False]*256 try: for op, av in charset: if op is NEGATE: out.append((op, av)) elif op is LITERAL: charmap[fixup(av)] = True elif op is RANGE: for i in range(fixup(av[0]), fixup(av[1])+1): charmap[i] = True elif op is CATEGORY: # XXX: could append to charmap tail return charset # cannot compress except IndexError: # character set contains unicode characters return _optimize_unicode(charset, fixup) # compress character map i = p = n = 0 runs = [] for c in charmap: if c: if n == 0: p = i n = n + 1 elif n: runs.append((p, n)) n = 0 i = i + 1 if n: runs.append((p, n)) if len(runs) <= 2: # use literal/range for p, n in runs: if n == 1: out.append((LITERAL, p)) else: out.append((RANGE, (p, p+n-1))) if len(out) < len(charset): return out else: # use bitmap data = _mk_bitmap(charmap) out.append((CHARSET, data)) return out return charset | 2177055b902a7e649c7988bb4499ab5115b4ea1d /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/2177055b902a7e649c7988bb4499ab5115b4ea1d/sre_compile.py |
runs.append((p, n)) | runsappend((p, n)) | def _optimize_charset(charset, fixup): # internal: optimize character set out = [] charmap = [False]*256 try: for op, av in charset: if op is NEGATE: out.append((op, av)) elif op is LITERAL: charmap[fixup(av)] = True elif op is RANGE: for i in range(fixup(av[0]), fixup(av[1])+1): charmap[i] = True elif op is CATEGORY: # XXX: could append to charmap tail return charset # cannot compress except IndexError: # character set contains unicode characters return _optimize_unicode(charset, fixup) # compress character map i = p = n = 0 runs = [] for c in charmap: if c: if n == 0: p = i n = n + 1 elif n: runs.append((p, n)) n = 0 i = i + 1 if n: runs.append((p, n)) if len(runs) <= 2: # use literal/range for p, n in runs: if n == 1: out.append((LITERAL, p)) else: out.append((RANGE, (p, p+n-1))) if len(out) < len(charset): return out else: # use bitmap data = _mk_bitmap(charmap) out.append((CHARSET, data)) return out return charset | 2177055b902a7e649c7988bb4499ab5115b4ea1d /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/2177055b902a7e649c7988bb4499ab5115b4ea1d/sre_compile.py |
out.append((LITERAL, p)) else: out.append((RANGE, (p, p+n-1))) | outappend((LITERAL, p)) else: outappend((RANGE, (p, p+n-1))) | def _optimize_charset(charset, fixup): # internal: optimize character set out = [] charmap = [False]*256 try: for op, av in charset: if op is NEGATE: out.append((op, av)) elif op is LITERAL: charmap[fixup(av)] = True elif op is RANGE: for i in range(fixup(av[0]), fixup(av[1])+1): charmap[i] = True elif op is CATEGORY: # XXX: could append to charmap tail return charset # cannot compress except IndexError: # character set contains unicode characters return _optimize_unicode(charset, fixup) # compress character map i = p = n = 0 runs = [] for c in charmap: if c: if n == 0: p = i n = n + 1 elif n: runs.append((p, n)) n = 0 i = i + 1 if n: runs.append((p, n)) if len(runs) <= 2: # use literal/range for p, n in runs: if n == 1: out.append((LITERAL, p)) else: out.append((RANGE, (p, p+n-1))) if len(out) < len(charset): return out else: # use bitmap data = _mk_bitmap(charmap) out.append((CHARSET, data)) return out return charset | 2177055b902a7e649c7988bb4499ab5115b4ea1d /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/2177055b902a7e649c7988bb4499ab5115b4ea1d/sre_compile.py |
out.append((CHARSET, data)) | outappend((CHARSET, data)) | def _optimize_charset(charset, fixup): # internal: optimize character set out = [] charmap = [False]*256 try: for op, av in charset: if op is NEGATE: out.append((op, av)) elif op is LITERAL: charmap[fixup(av)] = True elif op is RANGE: for i in range(fixup(av[0]), fixup(av[1])+1): charmap[i] = True elif op is CATEGORY: # XXX: could append to charmap tail return charset # cannot compress except IndexError: # character set contains unicode characters return _optimize_unicode(charset, fixup) # compress character map i = p = n = 0 runs = [] for c in charmap: if c: if n == 0: p = i n = n + 1 elif n: runs.append((p, n)) n = 0 i = i + 1 if n: runs.append((p, n)) if len(runs) <= 2: # use literal/range for p, n in runs: if n == 1: out.append((LITERAL, p)) else: out.append((RANGE, (p, p+n-1))) if len(out) < len(charset): return out else: # use bitmap data = _mk_bitmap(charmap) out.append((CHARSET, data)) return out return charset | 2177055b902a7e649c7988bb4499ab5115b4ea1d /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/2177055b902a7e649c7988bb4499ab5115b4ea1d/sre_compile.py |
m = m << 1 | m = m + m | def _mk_bitmap(bits): data = [] if _sre.CODESIZE == 2: start = (1, 0) else: start = (1L, 0L) m, v = start for c in bits: if c: v = v + m m = m << 1 if m > MAXCODE: data.append(v) m, v = start return data | 2177055b902a7e649c7988bb4499ab5115b4ea1d /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/2177055b902a7e649c7988bb4499ab5115b4ea1d/sre_compile.py |
data.append(v) | dataappend(v) | def _mk_bitmap(bits): data = [] if _sre.CODESIZE == 2: start = (1, 0) else: start = (1L, 0L) m, v = start for c in bits: if c: v = v + m m = m << 1 if m > MAXCODE: data.append(v) m, v = start return data | 2177055b902a7e649c7988bb4499ab5115b4ea1d /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/2177055b902a7e649c7988bb4499ab5115b4ea1d/sre_compile.py |
for i in range(fixup(av[0]), fixup(av[1])+1): | for i in xrange(fixup(av[0]), fixup(av[1])+1): | def _optimize_unicode(charset, fixup): try: import array except ImportError: return charset charmap = [False]*65536 negate = 0 try: for op, av in charset: if op is NEGATE: negate = 1 elif op is LITERAL: charmap[fixup(av)] = True elif op is RANGE: for i in range(fixup(av[0]), fixup(av[1])+1): charmap[i] = True elif op is CATEGORY: # XXX: could expand category return charset # cannot compress except IndexError: # non-BMP characters return charset if negate: if sys.maxunicode != 65535: # XXX: negation does not work with big charsets return charset for i in range(65536): charmap[i] = not charmap[i] comps = {} mapping = [0]*256 block = 0 data = [] for i in range(256): chunk = tuple(charmap[i*256:(i+1)*256]) new = comps.setdefault(chunk, block) mapping[i] = new if new == block: block = block + 1 data = data + _mk_bitmap(chunk) header = [block] if MAXCODE == 65535: code = 'H' else: code = 'L' # Convert block indices to byte array of 256 bytes mapping = array.array('b', mapping).tostring() # Convert byte array to word array header = header + array.array(code, mapping).tolist() data[0:0] = header return [(BIGCHARSET, data)] | 2177055b902a7e649c7988bb4499ab5115b4ea1d /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/2177055b902a7e649c7988bb4499ab5115b4ea1d/sre_compile.py |
for i in range(65536): | for i in xrange(65536): | def _optimize_unicode(charset, fixup): try: import array except ImportError: return charset charmap = [False]*65536 negate = 0 try: for op, av in charset: if op is NEGATE: negate = 1 elif op is LITERAL: charmap[fixup(av)] = True elif op is RANGE: for i in range(fixup(av[0]), fixup(av[1])+1): charmap[i] = True elif op is CATEGORY: # XXX: could expand category return charset # cannot compress except IndexError: # non-BMP characters return charset if negate: if sys.maxunicode != 65535: # XXX: negation does not work with big charsets return charset for i in range(65536): charmap[i] = not charmap[i] comps = {} mapping = [0]*256 block = 0 data = [] for i in range(256): chunk = tuple(charmap[i*256:(i+1)*256]) new = comps.setdefault(chunk, block) mapping[i] = new if new == block: block = block + 1 data = data + _mk_bitmap(chunk) header = [block] if MAXCODE == 65535: code = 'H' else: code = 'L' # Convert block indices to byte array of 256 bytes mapping = array.array('b', mapping).tostring() # Convert byte array to word array header = header + array.array(code, mapping).tolist() data[0:0] = header return [(BIGCHARSET, data)] | 2177055b902a7e649c7988bb4499ab5115b4ea1d /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/2177055b902a7e649c7988bb4499ab5115b4ea1d/sre_compile.py |
for i in range(256): | for i in xrange(256): | def _optimize_unicode(charset, fixup): try: import array except ImportError: return charset charmap = [False]*65536 negate = 0 try: for op, av in charset: if op is NEGATE: negate = 1 elif op is LITERAL: charmap[fixup(av)] = True elif op is RANGE: for i in range(fixup(av[0]), fixup(av[1])+1): charmap[i] = True elif op is CATEGORY: # XXX: could expand category return charset # cannot compress except IndexError: # non-BMP characters return charset if negate: if sys.maxunicode != 65535: # XXX: negation does not work with big charsets return charset for i in range(65536): charmap[i] = not charmap[i] comps = {} mapping = [0]*256 block = 0 data = [] for i in range(256): chunk = tuple(charmap[i*256:(i+1)*256]) new = comps.setdefault(chunk, block) mapping[i] = new if new == block: block = block + 1 data = data + _mk_bitmap(chunk) header = [block] if MAXCODE == 65535: code = 'H' else: code = 'L' # Convert block indices to byte array of 256 bytes mapping = array.array('b', mapping).tostring() # Convert byte array to word array header = header + array.array(code, mapping).tolist() data[0:0] = header return [(BIGCHARSET, data)] | 2177055b902a7e649c7988bb4499ab5115b4ea1d /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/2177055b902a7e649c7988bb4499ab5115b4ea1d/sre_compile.py |
prefix.append(av) | prefixappend(av) | def _compile_info(code, pattern, flags): # internal: compile an info block. in the current version, # this contains min/max pattern width, and an optional literal # prefix or a character map lo, hi = pattern.getwidth() if lo == 0: return # not worth it # look for a literal prefix prefix = [] prefix_skip = 0 charset = [] # not used if not (flags & SRE_FLAG_IGNORECASE): # look for literal prefix for op, av in pattern.data: if op is LITERAL: if len(prefix) == prefix_skip: prefix_skip = prefix_skip + 1 prefix.append(av) elif op is SUBPATTERN and len(av[1]) == 1: op, av = av[1][0] if op is LITERAL: prefix.append(av) else: break else: break # if no prefix, look for charset prefix if not prefix and pattern.data: op, av = pattern.data[0] if op is SUBPATTERN and av[1]: op, av = av[1][0] if op is LITERAL: charset.append((op, av)) elif op is BRANCH: c = [] for p in av[1]: if not p: break op, av = p[0] if op is LITERAL: c.append((op, av)) else: break else: charset = c elif op is BRANCH: c = [] for p in av[1]: if not p: break op, av = p[0] if op is LITERAL: c.append((op, av)) else: break else: charset = c elif op is IN: charset = av | 2177055b902a7e649c7988bb4499ab5115b4ea1d /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/2177055b902a7e649c7988bb4499ab5115b4ea1d/sre_compile.py |
prefix.append(av) | prefixappend(av) | def _compile_info(code, pattern, flags): # internal: compile an info block. in the current version, # this contains min/max pattern width, and an optional literal # prefix or a character map lo, hi = pattern.getwidth() if lo == 0: return # not worth it # look for a literal prefix prefix = [] prefix_skip = 0 charset = [] # not used if not (flags & SRE_FLAG_IGNORECASE): # look for literal prefix for op, av in pattern.data: if op is LITERAL: if len(prefix) == prefix_skip: prefix_skip = prefix_skip + 1 prefix.append(av) elif op is SUBPATTERN and len(av[1]) == 1: op, av = av[1][0] if op is LITERAL: prefix.append(av) else: break else: break # if no prefix, look for charset prefix if not prefix and pattern.data: op, av = pattern.data[0] if op is SUBPATTERN and av[1]: op, av = av[1][0] if op is LITERAL: charset.append((op, av)) elif op is BRANCH: c = [] for p in av[1]: if not p: break op, av = p[0] if op is LITERAL: c.append((op, av)) else: break else: charset = c elif op is BRANCH: c = [] for p in av[1]: if not p: break op, av = p[0] if op is LITERAL: c.append((op, av)) else: break else: charset = c elif op is IN: charset = av | 2177055b902a7e649c7988bb4499ab5115b4ea1d /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/2177055b902a7e649c7988bb4499ab5115b4ea1d/sre_compile.py |
charset.append((op, av)) | charsetappend((op, av)) | def _compile_info(code, pattern, flags): # internal: compile an info block. in the current version, # this contains min/max pattern width, and an optional literal # prefix or a character map lo, hi = pattern.getwidth() if lo == 0: return # not worth it # look for a literal prefix prefix = [] prefix_skip = 0 charset = [] # not used if not (flags & SRE_FLAG_IGNORECASE): # look for literal prefix for op, av in pattern.data: if op is LITERAL: if len(prefix) == prefix_skip: prefix_skip = prefix_skip + 1 prefix.append(av) elif op is SUBPATTERN and len(av[1]) == 1: op, av = av[1][0] if op is LITERAL: prefix.append(av) else: break else: break # if no prefix, look for charset prefix if not prefix and pattern.data: op, av = pattern.data[0] if op is SUBPATTERN and av[1]: op, av = av[1][0] if op is LITERAL: charset.append((op, av)) elif op is BRANCH: c = [] for p in av[1]: if not p: break op, av = p[0] if op is LITERAL: c.append((op, av)) else: break else: charset = c elif op is BRANCH: c = [] for p in av[1]: if not p: break op, av = p[0] if op is LITERAL: c.append((op, av)) else: break else: charset = c elif op is IN: charset = av | 2177055b902a7e649c7988bb4499ab5115b4ea1d /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/2177055b902a7e649c7988bb4499ab5115b4ea1d/sre_compile.py |
c.append((op, av)) | cappend((op, av)) | def _compile_info(code, pattern, flags): # internal: compile an info block. in the current version, # this contains min/max pattern width, and an optional literal # prefix or a character map lo, hi = pattern.getwidth() if lo == 0: return # not worth it # look for a literal prefix prefix = [] prefix_skip = 0 charset = [] # not used if not (flags & SRE_FLAG_IGNORECASE): # look for literal prefix for op, av in pattern.data: if op is LITERAL: if len(prefix) == prefix_skip: prefix_skip = prefix_skip + 1 prefix.append(av) elif op is SUBPATTERN and len(av[1]) == 1: op, av = av[1][0] if op is LITERAL: prefix.append(av) else: break else: break # if no prefix, look for charset prefix if not prefix and pattern.data: op, av = pattern.data[0] if op is SUBPATTERN and av[1]: op, av = av[1][0] if op is LITERAL: charset.append((op, av)) elif op is BRANCH: c = [] for p in av[1]: if not p: break op, av = p[0] if op is LITERAL: c.append((op, av)) else: break else: charset = c elif op is BRANCH: c = [] for p in av[1]: if not p: break op, av = p[0] if op is LITERAL: c.append((op, av)) else: break else: charset = c elif op is IN: charset = av | 2177055b902a7e649c7988bb4499ab5115b4ea1d /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/2177055b902a7e649c7988bb4499ab5115b4ea1d/sre_compile.py |
c.append((op, av)) | cappend((op, av)) | def _compile_info(code, pattern, flags): # internal: compile an info block. in the current version, # this contains min/max pattern width, and an optional literal # prefix or a character map lo, hi = pattern.getwidth() if lo == 0: return # not worth it # look for a literal prefix prefix = [] prefix_skip = 0 charset = [] # not used if not (flags & SRE_FLAG_IGNORECASE): # look for literal prefix for op, av in pattern.data: if op is LITERAL: if len(prefix) == prefix_skip: prefix_skip = prefix_skip + 1 prefix.append(av) elif op is SUBPATTERN and len(av[1]) == 1: op, av = av[1][0] if op is LITERAL: prefix.append(av) else: break else: break # if no prefix, look for charset prefix if not prefix and pattern.data: op, av = pattern.data[0] if op is SUBPATTERN and av[1]: op, av = av[1][0] if op is LITERAL: charset.append((op, av)) elif op is BRANCH: c = [] for p in av[1]: if not p: break op, av = p[0] if op is LITERAL: c.append((op, av)) else: break else: charset = c elif op is BRANCH: c = [] for p in av[1]: if not p: break op, av = p[0] if op is LITERAL: c.append((op, av)) else: break else: charset = c elif op is IN: charset = av | 2177055b902a7e649c7988bb4499ab5115b4ea1d /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/2177055b902a7e649c7988bb4499ab5115b4ea1d/sre_compile.py |
for i in range(len(prefix)): | for i in xrange(len(prefix)): | def _compile_info(code, pattern, flags): # internal: compile an info block. in the current version, # this contains min/max pattern width, and an optional literal # prefix or a character map lo, hi = pattern.getwidth() if lo == 0: return # not worth it # look for a literal prefix prefix = [] prefix_skip = 0 charset = [] # not used if not (flags & SRE_FLAG_IGNORECASE): # look for literal prefix for op, av in pattern.data: if op is LITERAL: if len(prefix) == prefix_skip: prefix_skip = prefix_skip + 1 prefix.append(av) elif op is SUBPATTERN and len(av[1]) == 1: op, av = av[1][0] if op is LITERAL: prefix.append(av) else: break else: break # if no prefix, look for charset prefix if not prefix and pattern.data: op, av = pattern.data[0] if op is SUBPATTERN and av[1]: op, av = av[1][0] if op is LITERAL: charset.append((op, av)) elif op is BRANCH: c = [] for p in av[1]: if not p: break op, av = p[0] if op is LITERAL: c.append((op, av)) else: break else: charset = c elif op is BRANCH: c = [] for p in av[1]: if not p: break op, av = p[0] if op is LITERAL: c.append((op, av)) else: break else: charset = c elif op is IN: charset = av | 2177055b902a7e649c7988bb4499ab5115b4ea1d /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/2177055b902a7e649c7988bb4499ab5115b4ea1d/sre_compile.py |
exit_status = not main() | exit_status = int(not main()) | def main(): """Script main program.""" import getopt try: opts, args = getopt.getopt(sys.argv[1:], 'lfqd:x:') except getopt.error, msg: print msg print "usage: python compileall.py [-l] [-f] [-q] [-d destdir] " \ "[-x regexp] [directory ...]" print "-l: don't recurse down" print "-f: force rebuild even if timestamps are up-to-date" print "-q: quiet operation" print "-d destdir: purported directory name for error messages" print " if no directory arguments, -l sys.path is assumed" print "-x regexp: skip files matching the regular expression regexp" print " the regexp is search for in the full path of the file" sys.exit(2) maxlevels = 10 ddir = None force = 0 quiet = 0 rx = None for o, a in opts: if o == '-l': maxlevels = 0 if o == '-d': ddir = a if o == '-f': force = 1 if o == '-q': quiet = 1 if o == '-x': import re rx = re.compile(a) if ddir: if len(args) != 1: print "-d destdir require exactly one directory argument" sys.exit(2) success = 1 try: if args: for dir in args: if not compile_dir(dir, maxlevels, ddir, force, rx, quiet): success = 0 else: success = compile_path() except KeyboardInterrupt: print "\n[interrupt]" success = 0 return success | 25fd4f489e81b42bda2a7efeb5071747987c67a5 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/25fd4f489e81b42bda2a7efeb5071747987c67a5/compileall.py |
initial_slash = (path[0] == '/') | initial_slashes = path.startswith('/') if (initial_slashes and path.startswith('//') and not path.startswith('///')): initial_slashes = 2 | def normpath(path): """Normalize path, eliminating double slashes, etc.""" if path == '': return '.' initial_slash = (path[0] == '/') comps = path.split('/') new_comps = [] for comp in comps: if comp in ('', '.'): continue if (comp != '..' or (not initial_slash and not new_comps) or (new_comps and new_comps[-1] == '..')): new_comps.append(comp) elif new_comps: new_comps.pop() comps = new_comps path = '/'.join(comps) if initial_slash: path = '/' + path return path or '.' | 3729695dbd60bd4ca526f86d7ba5ed7ac1bd7738 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/3729695dbd60bd4ca526f86d7ba5ed7ac1bd7738/posixpath.py |
if (comp != '..' or (not initial_slash and not new_comps) or | if (comp != '..' or (not initial_slashes and not new_comps) or | def normpath(path): """Normalize path, eliminating double slashes, etc.""" if path == '': return '.' initial_slash = (path[0] == '/') comps = path.split('/') new_comps = [] for comp in comps: if comp in ('', '.'): continue if (comp != '..' or (not initial_slash and not new_comps) or (new_comps and new_comps[-1] == '..')): new_comps.append(comp) elif new_comps: new_comps.pop() comps = new_comps path = '/'.join(comps) if initial_slash: path = '/' + path return path or '.' | 3729695dbd60bd4ca526f86d7ba5ed7ac1bd7738 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/3729695dbd60bd4ca526f86d7ba5ed7ac1bd7738/posixpath.py |
if initial_slash: path = '/' + path | if initial_slashes: path = '/'*initial_slashes + path | def normpath(path): """Normalize path, eliminating double slashes, etc.""" if path == '': return '.' initial_slash = (path[0] == '/') comps = path.split('/') new_comps = [] for comp in comps: if comp in ('', '.'): continue if (comp != '..' or (not initial_slash and not new_comps) or (new_comps and new_comps[-1] == '..')): new_comps.append(comp) elif new_comps: new_comps.pop() comps = new_comps path = '/'.join(comps) if initial_slash: path = '/' + path return path or '.' | 3729695dbd60bd4ca526f86d7ba5ed7ac1bd7738 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/3729695dbd60bd4ca526f86d7ba5ed7ac1bd7738/posixpath.py |
'db4': {'libs': ('db-4.1', 'db-4.0',), | 'db4': {'libs': ('db-4.1', 'db41', 'db-4.0', 'db4',), | def detect_modules(self): # Ensure that /usr/local is always used add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib') add_dir_to_list(self.compiler.include_dirs, '/usr/local/include') | 27b382a8dac25383012f40cf622de2acbadcad25 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/27b382a8dac25383012f40cf622de2acbadcad25/setup.py |
'db3': {'libs': ('db-3.3', 'db-3.2', 'db-3.1'), | 'db3': {'libs': ('db-3.3', 'db-3.2', 'db-3.1', 'db3',), | def detect_modules(self): # Ensure that /usr/local is always used add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib') add_dir_to_list(self.compiler.include_dirs, '/usr/local/include') | 27b382a8dac25383012f40cf622de2acbadcad25 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/27b382a8dac25383012f40cf622de2acbadcad25/setup.py |
def _cmp(a, b): | def _cmp(a, b, sh, st): | def _cmp(a, b): try: return not abs(cmp(a, b)) except os.error: return 2 | d4f5db66816c3edc4c766d8087eaed92a7315099 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/d4f5db66816c3edc4c766d8087eaed92a7315099/filecmp.py |
return not abs(cmp(a, b)) | return not abs(cmp(a, b, sh, st)) | def _cmp(a, b): try: return not abs(cmp(a, b)) except os.error: return 2 | d4f5db66816c3edc4c766d8087eaed92a7315099 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/d4f5db66816c3edc4c766d8087eaed92a7315099/filecmp.py |
import tempfile try: execv(tempfile.mktemp(), ('blah',)) except error, _notfound: pass | if sys.platform[:4] == 'beos': try: unlink('/_ except error, _notfound: pass else: import tempfile t = tempfile.mktemp() try: execv(t, ('blah',)) except error, _notfound: pass | def _execvpe(file, args, env=None): if env is not None: func = execve argrest = (args, env) else: func = execv argrest = (args,) env = environ global _notfound head, tail = path.split(file) if head: apply(func, (file,) + argrest) return if env.has_key('PATH'): envpath = env['PATH'] else: envpath = defpath PATH = envpath.split(pathsep) if not _notfound: import tempfile # Exec a file that is guaranteed not to exist try: execv(tempfile.mktemp(), ('blah',)) except error, _notfound: pass exc, arg = error, _notfound for dir in PATH: fullname = path.join(dir, file) try: apply(func, (fullname,) + argrest) except error, (errno, msg): if errno != arg[0]: exc, arg = error, (errno, msg) raise exc, arg | 5a7a2c56dac0e5b5eba2526fb363ea3321847ac7 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/5a7a2c56dac0e5b5eba2526fb363ea3321847ac7/os.py |
flags = _fcntl.fcntl(fd, _fcntl.F_GETFD, 0) if flags >= 0: | try: flags = _fcntl.fcntl(fd, _fcntl.F_GETFD, 0) except IOError: pass else: | def _set_cloexec(fd): flags = _fcntl.fcntl(fd, _fcntl.F_GETFD, 0) if flags >= 0: # flags read successfully, modify flags |= _fcntl.FD_CLOEXEC _fcntl.fcntl(fd, _fcntl.F_SETFD, flags) | 6708dfc3054f3cae5733478862903bed1cf1586a /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/6708dfc3054f3cae5733478862903bed1cf1586a/tempfile.py |
def extended_linecache_checkcache(orig_checkcache=linecache.checkcache): | def extended_linecache_checkcache(filename=None, orig_checkcache=linecache.checkcache): | def extended_linecache_checkcache(orig_checkcache=linecache.checkcache): """Extend linecache.checkcache to preserve the <pyshell#...> entries Rather than repeating the linecache code, patch it to save the pyshell# entries, call the original linecache.checkcache(), and then restore the saved entries. Assigning the orig_checkcache keyword arg freezes its value at definition time to the (original) method linecache.checkcache(), i.e. makes orig_checkcache lexical. """ cache = linecache.cache save = {} for filename in cache.keys(): if filename[:1] + filename[-1:] == '<>': save[filename] = cache[filename] orig_checkcache() cache.update(save) | 85c95aa365a7cfaa7c40185445fe0648906f12e4 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/85c95aa365a7cfaa7c40185445fe0648906f12e4/PyShell.py |
Rather than repeating the linecache code, patch it to save the pyshell entries, call the original linecache.checkcache(), and then restore the saved entries. Assigning the orig_checkcache keyword arg freezes its value at definition time to the (original) method linecache.checkcache(), i.e. makes orig_checkcache lexical. | Rather than repeating the linecache code, patch it to save the <pyshell (which destroys them), and then restore the saved entries. orig_checkcache is bound at definition time to the original method, allowing it to be patched. | def extended_linecache_checkcache(orig_checkcache=linecache.checkcache): """Extend linecache.checkcache to preserve the <pyshell#...> entries Rather than repeating the linecache code, patch it to save the pyshell# entries, call the original linecache.checkcache(), and then restore the saved entries. Assigning the orig_checkcache keyword arg freezes its value at definition time to the (original) method linecache.checkcache(), i.e. makes orig_checkcache lexical. """ cache = linecache.cache save = {} for filename in cache.keys(): if filename[:1] + filename[-1:] == '<>': save[filename] = cache[filename] orig_checkcache() cache.update(save) | 85c95aa365a7cfaa7c40185445fe0648906f12e4 /local1/tlutelli/issta_data/temp/all_python//python/2006_temp/2006/12029/85c95aa365a7cfaa7c40185445fe0648906f12e4/PyShell.py |