'Return exit code of process or `None` if it has yet to stop'
@property
def exitcode(self):
    if (self._popen is None):
        return self._popen
    return self._popen.poll()
'Return identifier (PID) of process or `None` if it has yet to start'
@property
def ident(self):
    if (self is _current_process):
        return os.getpid()
    else:
        return (self._popen and self._popen.pid)
'Accept a connection on the bound socket or named pipe of `self`. Returns a `Connection` object.'
def accept(self):
    c = self._listener.accept()
    if self._authkey:
        deliver_challenge(c, self._authkey)
        answer_challenge(c, self._authkey)
    return c
'Close the bound socket or named pipe of `self`.'
def close(self):
return self._listener.close()
'Run the server forever'
def serve_forever(self):
    current_process()._manager_server = self
    try:
        while 1:
            try:
                c = self.listener.accept()
            except (OSError, IOError):
                continue
            t = threading.Thread(target=self.handle_request, args=(c,))
            t.daemon = True
            t.start()
    except (KeyboardInterrupt, SystemExit):
        pass
    finally:
        self.stop = 999
        self.listener.close()
'Handle a new connection'
def handle_request(self, c):
    funcname = result = request = None
    try:
        connection.deliver_challenge(c, self.authkey)
        connection.answer_challenge(c, self.authkey)
        request = c.recv()
        (ignore, funcname, args, kwds) = request
        assert (funcname in self.public), ('%r unrecognized' % funcname)
        func = getattr(self, funcname)
    except Exception:
        msg = ('#TRACEBACK', format_exc())
    else:
        try:
            result = func(c, *args, **kwds)
        except Exception:
            msg = ('#TRACEBACK', format_exc())
        else:
            msg = ('#RETURN', result)
    try:
        c.send(msg)
    except Exception as e:
        try:
            c.send(('#TRACEBACK', format_exc()))
        except Exception:
            pass
        util.info('Failure to send message: %r', msg)
        util.info(' ... request was %r', request)
        util.info(' ... exception was %r', e)
    c.close()
'Handle requests from the proxies in a particular process/thread'
def serve_client(self, conn):
    util.debug('starting server thread to service %r', threading.current_thread().name)
    recv = conn.recv
    send = conn.send
    id_to_obj = self.id_to_obj
    while (not self.stop):
        try:
            methodname = obj = None
            request = recv()
            (ident, methodname, args, kwds) = request
            (obj, exposed, gettypeid) = id_to_obj[ident]
            if (methodname not in exposed):
                raise AttributeError(('method %r of %r object is not in exposed=%r' % (methodname, type(obj), exposed)))
            function = getattr(obj, methodname)
            try:
                res = function(*args, **kwds)
            except Exception as e:
                msg = ('#ERROR', e)
            else:
                typeid = (gettypeid and gettypeid.get(methodname, None))
                if typeid:
                    (rident, rexposed) = self.create(conn, typeid, res)
                    token = Token(typeid, self.address, rident)
                    msg = ('#PROXY', (rexposed, token))
                else:
                    msg = ('#RETURN', res)
        except AttributeError:
            if (methodname is None):
                msg = ('#TRACEBACK', format_exc())
            else:
                try:
                    fallback_func = self.fallback_mapping[methodname]
                    result = fallback_func(self, conn, ident, obj, *args, **kwds)
                    msg = ('#RETURN', result)
                except Exception:
                    msg = ('#TRACEBACK', format_exc())
        except EOFError:
            util.debug('got EOF -- exiting thread serving %r', threading.current_thread().name)
            sys.exit(0)
        except Exception:
            msg = ('#TRACEBACK', format_exc())
        try:
            try:
                send(msg)
            except Exception as e:
                send(('#UNSERIALIZABLE', repr(msg)))
        except Exception as e:
            util.info('exception in thread serving %r', threading.current_thread().name)
            util.info(' ... message was %r', msg)
            util.info(' ... exception was %r', e)
            conn.close()
            sys.exit(1)
'Return some info --- useful to spot problems with refcounting'
def debug_info(self, c):
    self.mutex.acquire()
    try:
        result = []
        keys = self.id_to_obj.keys()
        keys.sort()
        for ident in keys:
            if (ident != '0'):
                result.append((' %s: refcount=%s\n %s' % (ident, self.id_to_refcount[ident], str(self.id_to_obj[ident][0])[:75])))
        return '\n'.join(result)
    finally:
        self.mutex.release()
'Number of shared objects'
def number_of_objects(self, c):
return (len(self.id_to_obj) - 1)
'Shutdown this process'
def shutdown(self, c):
    try:
        util.debug('manager received shutdown message')
        c.send(('#RETURN', None))
        if (sys.stdout != sys.__stdout__):
            util.debug('resetting stdout, stderr')
            sys.stdout = sys.__stdout__
            sys.stderr = sys.__stderr__
        util._run_finalizers(0)
        for p in active_children():
            util.debug('terminating a child process of manager')
            p.terminate()
        for p in active_children():
            util.debug('terminating a child process of manager')
            p.join()
        util._run_finalizers()
        util.info('manager exiting with exitcode 0')
    except:
        import traceback
        traceback.print_exc()
    finally:
        exit(0)
'Create a new shared object and return its id'
def create(self, c, typeid, *args, **kwds):
    self.mutex.acquire()
    try:
        (callable, exposed, method_to_typeid, proxytype) = self.registry[typeid]
        if (callable is None):
            assert ((len(args) == 1) and (not kwds))
            obj = args[0]
        else:
            obj = callable(*args, **kwds)
        if (exposed is None):
            exposed = public_methods(obj)
        if (method_to_typeid is not None):
            assert (type(method_to_typeid) is dict)
            exposed = (list(exposed) + list(method_to_typeid))
        ident = ('%x' % id(obj))
        util.debug('%r callable returned object with id %r', typeid, ident)
        self.id_to_obj[ident] = (obj, set(exposed), method_to_typeid)
        if (ident not in self.id_to_refcount):
            self.id_to_refcount[ident] = 0
        self.incref(c, ident)
        return (ident, tuple(exposed))
    finally:
        self.mutex.release()
'Return the methods of the shared object indicated by token'
def get_methods(self, c, token):
return tuple(self.id_to_obj[token.id][1])
'Spawn a new thread to serve this connection'
def accept_connection(self, c, name):
    threading.current_thread().name = name
    c.send(('#RETURN', None))
    self.serve_client(c)
'Return server object with serve_forever() method and address attribute'
def get_server(self):
    assert (self._state.value == State.INITIAL)
    return Server(self._registry, self._address, self._authkey, self._serializer)
'Connect manager object to the server process'
def connect(self):
    (Listener, Client) = listener_client[self._serializer]
    conn = Client(self._address, authkey=self._authkey)
    dispatch(conn, None, 'dummy')
    self._state.value = State.STARTED
'Spawn a server process for this manager object'
def start(self, initializer=None, initargs=()):
    assert (self._state.value == State.INITIAL)
    if ((initializer is not None) and (not hasattr(initializer, '__call__'))):
        raise TypeError('initializer must be a callable')
    (reader, writer) = connection.Pipe(duplex=False)
    self._process = Process(target=type(self)._run_server, args=(self._registry, self._address, self._authkey, self._serializer, writer, initializer, initargs))
    ident = ':'.join((str(i) for i in self._process._identity))
    self._process.name = ((type(self).__name__ + '-') + ident)
    self._process.start()
    writer.close()
    self._address = reader.recv()
    reader.close()
    self._state.value = State.STARTED
    self.shutdown = util.Finalize(self, type(self)._finalize_manager, args=(self._process, self._address, self._authkey, self._state, self._Client), exitpriority=0)
'Create a server, report its address and run it'
@classmethod
def _run_server(cls, registry, address, authkey, serializer, writer, initializer=None, initargs=()):
    if (initializer is not None):
        initializer(*initargs)
    server = cls._Server(registry, address, authkey, serializer)
    writer.send(server.address)
    writer.close()
    util.info('manager serving at %r', server.address)
    server.serve_forever()
'Create a new shared object; return the token and exposed tuple'
def _create(self, typeid, *args, **kwds):
    assert (self._state.value == State.STARTED), 'server not yet started'
    conn = self._Client(self._address, authkey=self._authkey)
    try:
        (id, exposed) = dispatch(conn, None, 'create', ((typeid,) + args), kwds)
    finally:
        conn.close()
    return (Token(typeid, self._address, id), exposed)
'Join the manager process (if it has been spawned)'
def join(self, timeout=None):
self._process.join(timeout)
'Return some info about the server's shared objects and connections'
def _debug_info(self):
    conn = self._Client(self._address, authkey=self._authkey)
    try:
        return dispatch(conn, None, 'debug_info')
    finally:
        conn.close()
'Return the number of shared objects'
def _number_of_objects(self):
    conn = self._Client(self._address, authkey=self._authkey)
    try:
        return dispatch(conn, None, 'number_of_objects')
    finally:
        conn.close()
'Shutdown the manager process; will be registered as a finalizer'
@staticmethod
def _finalize_manager(process, address, authkey, state, _Client):
    if process.is_alive():
        util.info('sending shutdown message to manager')
        try:
            conn = _Client(address, authkey=authkey)
            try:
                dispatch(conn, None, 'shutdown')
            finally:
                conn.close()
        except Exception:
            pass
        process.join(timeout=0.2)
        if process.is_alive():
            util.info('manager still alive')
            if hasattr(process, 'terminate'):
                util.info('trying to `terminate()` manager process')
                process.terminate()
                process.join(timeout=0.1)
                if process.is_alive():
                    util.info('manager still alive after terminate')
    state.value = State.SHUTDOWN
    try:
        del BaseProxy._address_to_local[address]
    except KeyError:
        pass
'Register a typeid with the manager type'
@classmethod
def register(cls, typeid, callable=None, proxytype=None, exposed=None, method_to_typeid=None, create_method=True):
    if ('_registry' not in cls.__dict__):
        cls._registry = cls._registry.copy()
    if (proxytype is None):
        proxytype = AutoProxy
    exposed = (exposed or getattr(proxytype, '_exposed_', None))
    method_to_typeid = (method_to_typeid or getattr(proxytype, '_method_to_typeid_', None))
    if method_to_typeid:
        for (key, value) in method_to_typeid.items():
            assert (type(key) is str), ('%r is not a string' % key)
            assert (type(value) is str), ('%r is not a string' % value)
    cls._registry[typeid] = (callable, exposed, method_to_typeid, proxytype)
    if create_method:
        def temp(self, *args, **kwds):
            util.debug('requesting creation of a shared %r object', typeid)
            (token, exp) = self._create(typeid, *args, **kwds)
            proxy = proxytype(token, self._serializer, manager=self, authkey=self._authkey, exposed=exp)
            conn = self._Client(token.address, authkey=self._authkey)
            dispatch(conn, None, 'decref', (token.id,))
            return proxy
        temp.__name__ = typeid
        setattr(cls, typeid, temp)
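A minimal usage sketch of register() together with start(), assuming the standard multiprocessing.managers.BaseManager; MathsClass and the 'Maths' typeid are illustrative names, not part of the source above.

# Usage sketch only; MathsClass and 'Maths' are hypothetical examples.
from multiprocessing.managers import BaseManager

class MathsClass(object):
    def add(self, x, y):
        return x + y

class MyManager(BaseManager):
    pass

MyManager.register('Maths', MathsClass)

if __name__ == '__main__':
    manager = MyManager()
    manager.start()            # spawns the server process (see start() above)
    maths = manager.Maths()    # created by register(..., create_method=True)
    print(maths.add(4, 3))     # prints 7; the call goes through a proxy
    manager.shutdown()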
'Try to call a method of the referent and return a copy of the result'
def _callmethod(self, methodname, args=(), kwds={}):
    try:
        conn = self._tls.connection
    except AttributeError:
        util.debug('thread %r does not own a connection', threading.current_thread().name)
        self._connect()
        conn = self._tls.connection
    conn.send((self._id, methodname, args, kwds))
    (kind, result) = conn.recv()
    if (kind == '#RETURN'):
        return result
    elif (kind == '#PROXY'):
        (exposed, token) = result
        proxytype = self._manager._registry[token.typeid][(-1)]
        token.address = self._token.address
        proxy = proxytype(token, self._serializer, manager=self._manager, authkey=self._authkey, exposed=exposed)
        conn = self._Client(token.address, authkey=self._authkey)
        dispatch(conn, None, 'decref', (token.id,))
        return proxy
    raise convert_to_error(kind, result)
'Get a copy of the value of the referent'
def _getvalue(self):
return self._callmethod('#GETVALUE')
'Return representation of the referent (or a fall-back if that fails)'
def __str__(self):
    try:
        return self._callmethod('__repr__')
    except Exception:
        return (repr(self)[:(-1)] + "; '__str__()' failed>")
'Clean up after any worker processes which have exited due to reaching their specified lifetime. Returns True if any workers were cleaned up.'
def _join_exited_workers(self):
    cleaned = False
    for i in reversed(range(len(self._pool))):
        worker = self._pool[i]
        if (worker.exitcode is not None):
            debug(('cleaning up worker %d' % i))
            worker.join()
            cleaned = True
            del self._pool[i]
    return cleaned
'Bring the number of pool processes up to the specified number, for use after reaping workers which have exited.'
def _repopulate_pool(self):
    for i in range((self._processes - len(self._pool))):
        w = self.Process(target=worker, args=(self._inqueue, self._outqueue, self._initializer, self._initargs, self._maxtasksperchild))
        self._pool.append(w)
        w.name = w.name.replace('Process', 'PoolWorker')
        w.daemon = True
        w.start()
        debug('added worker')
'Clean up any exited workers and start replacements for them.'
def _maintain_pool(self):
if self._join_exited_workers(): self._repopulate_pool()
'Equivalent of `apply()` builtin'
def apply(self, func, args=(), kwds={}):
    assert (self._state == RUN)
    return self.apply_async(func, args, kwds).get()
'Equivalent of `map()` builtin'
def map(self, func, iterable, chunksize=None):
    assert (self._state == RUN)
    return self.map_async(func, iterable, chunksize).get()
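A rough usage sketch of the blocking and asynchronous pool calls above, assuming the standard multiprocessing.Pool; square() is an illustrative helper, not part of the source.

# Usage sketch only; square() is a hypothetical worker function.
from multiprocessing import Pool

def square(x):
    return x * x

if __name__ == '__main__':
    pool = Pool(4)
    print(pool.apply(square, (3,)))        # 9, blocks until done
    print(pool.map(square, range(5)))      # [0, 1, 4, 9, 16]
    async_result = pool.apply_async(square, (5,))
    print(async_result.get())              # 25, via the ApplyResult object
    pool.close()
    pool.join()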
'Equivalent of `itertools.imap()` -- can be MUCH slower than `Pool.map()`'
def imap(self, func, iterable, chunksize=1):
    assert (self._state == RUN)
    if (chunksize == 1):
        result = IMapIterator(self._cache)
        self._taskqueue.put((((result._job, i, func, (x,), {}) for (i, x) in enumerate(iterable)), result._set_length))
        return result
    else:
        assert (chunksize > 1)
        task_batches = Pool._get_tasks(func, iterable, chunksize)
        result = IMapIterator(self._cache)
        self._taskqueue.put((((result._job, i, mapstar, (x,), {}) for (i, x) in enumerate(task_batches)), result._set_length))
        return (item for chunk in result for item in chunk)
'Like `imap()` method but ordering of results is arbitrary'
def imap_unordered(self, func, iterable, chunksize=1):
    assert (self._state == RUN)
    if (chunksize == 1):
        result = IMapUnorderedIterator(self._cache)
        self._taskqueue.put((((result._job, i, func, (x,), {}) for (i, x) in enumerate(iterable)), result._set_length))
        return result
    else:
        assert (chunksize > 1)
        task_batches = Pool._get_tasks(func, iterable, chunksize)
        result = IMapUnorderedIterator(self._cache)
        self._taskqueue.put((((result._job, i, mapstar, (x,), {}) for (i, x) in enumerate(task_batches)), result._set_length))
        return (item for chunk in result for item in chunk)
'Asynchronous equivalent of `apply()` builtin'
def apply_async(self, func, args=(), kwds={}, callback=None):
    assert (self._state == RUN)
    result = ApplyResult(self._cache, callback)
    self._taskqueue.put(([(result._job, None, func, args, kwds)], None))
    return result
'Asynchronous equivalent of `map()` builtin'
def map_async(self, func, iterable, chunksize=None, callback=None):
    assert (self._state == RUN)
    if (not hasattr(iterable, '__len__')):
        iterable = list(iterable)
    if (chunksize is None):
        (chunksize, extra) = divmod(len(iterable), (len(self._pool) * 4))
        if extra:
            chunksize += 1
    if (len(iterable) == 0):
        chunksize = 0
    task_batches = Pool._get_tasks(func, iterable, chunksize)
    result = MapResult(self._cache, chunksize, len(iterable), callback)
    self._taskqueue.put((((result._job, i, mapstar, (x,), {}) for (i, x) in enumerate(task_batches)), None))
    return result
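A hedged illustration of the chunksize heuristic above, which aims at roughly four task batches per pool worker; the numbers are invented for the example.

# Illustration of map_async()'s chunksize arithmetic; values are made up.
n_items, n_workers = 1000, 4
chunksize, extra = divmod(n_items, n_workers * 4)   # 62 with remainder 8
if extra:
    chunksize += 1   # each task batch then carries 63 items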
'Initialize and reset this instance.'
def __init__(self):
self.reset()
'Reset this instance. Loses all unprocessed data.'
def reset(self):
    self.rawdata = ''
    self.lasttag = '???'
    self.interesting = interesting_normal
    self.cdata_elem = None
    markupbase.ParserBase.reset(self)
'Feed data to the parser. Call this as often as you want, with as little or as much text as you want (may include \'\n\').'
def feed(self, data):
    self.rawdata = (self.rawdata + data)
    self.goahead(0)
'Handle any buffered data.'
def close(self):
self.goahead(1)
'Return full source of start tag: \'<...>\'.'
def get_starttag_text(self):
return self.__starttag_text
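A small sketch of the feed()/close() driving loop described above, assuming the Python 2 HTMLParser module; MyParser and the sample markup are illustrative.

# Usage sketch only; MyParser and the markup are hypothetical examples.
from HTMLParser import HTMLParser

class MyParser(HTMLParser):
    def handle_starttag(self, tag, attrs):
        print('start tag %s, source %r' % (tag, self.get_starttag_text()))

parser = MyParser()
parser.feed('<html><body><a href="x">link</a>')
parser.feed('</body></html>')   # data may be fed in arbitrary pieces
parser.close()                  # flush any buffered data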
'Add a header to be used by the HTTP interface only e.g. u.addheader(\'Accept\', \'sound/basic\')'
def addheader(self, *args):
self.addheaders.append(args)
'Use URLopener().open(file) instead of open(file, \'r\').'
def open(self, fullurl, data=None):
    fullurl = unwrap(toBytes(fullurl))
    fullurl = quote(fullurl, safe="%/:=&?~#+!$,;'@()*[]|")
    if (self.tempcache and (fullurl in self.tempcache)):
        (filename, headers) = self.tempcache[fullurl]
        fp = open(filename, 'rb')
        return addinfourl(fp, headers, fullurl)
    (urltype, url) = splittype(fullurl)
    if (not urltype):
        urltype = 'file'
    if (urltype in self.proxies):
        proxy = self.proxies[urltype]
        (urltype, proxyhost) = splittype(proxy)
        (host, selector) = splithost(proxyhost)
        url = (host, fullurl)
    else:
        proxy = None
    name = ('open_' + urltype)
    self.type = urltype
    name = name.replace('-', '_')
    if (not hasattr(self, name)):
        if proxy:
            return self.open_unknown_proxy(proxy, fullurl, data)
        else:
            return self.open_unknown(fullurl, data)
    try:
        if (data is None):
            return getattr(self, name)(url)
        else:
            return getattr(self, name)(url, data)
    except socket.error as msg:
        raise IOError, ('socket error', msg), sys.exc_info()[2]
'Overridable interface to open unknown URL type.'
def open_unknown(self, fullurl, data=None):
    (type, url) = splittype(fullurl)
    raise IOError, ('url error', 'unknown url type', type)
'Overridable interface to open unknown URL type.'
def open_unknown_proxy(self, proxy, fullurl, data=None):
    (type, url) = splittype(fullurl)
    raise IOError, ('url error', ('invalid proxy for %s' % type), proxy)
'retrieve(url) returns (filename, headers) for a local object or (tempfilename, headers) for a remote object.'
def retrieve(self, url, filename=None, reporthook=None, data=None):
    url = unwrap(toBytes(url))
    if (self.tempcache and (url in self.tempcache)):
        return self.tempcache[url]
    (type, url1) = splittype(url)
    if ((filename is None) and ((not type) or (type == 'file'))):
        try:
            fp = self.open_local_file(url1)
            hdrs = fp.info()
            fp.close()
            return (url2pathname(splithost(url1)[1]), hdrs)
        except IOError:
            pass
    fp = self.open(url, data)
    try:
        headers = fp.info()
        if filename:
            tfp = open(filename, 'wb')
        else:
            import tempfile
            (garbage, path) = splittype(url)
            (garbage, path) = splithost((path or ''))
            (path, garbage) = splitquery((path or ''))
            (path, garbage) = splitattr((path or ''))
            suffix = os.path.splitext(path)[1]
            (fd, filename) = tempfile.mkstemp(suffix)
            self.__tempfiles.append(filename)
            tfp = os.fdopen(fd, 'wb')
        try:
            result = (filename, headers)
            if (self.tempcache is not None):
                self.tempcache[url] = result
            bs = (1024 * 8)
            size = (-1)
            read = 0
            blocknum = 0
            if ('content-length' in headers):
                size = int(headers['Content-Length'])
            if reporthook:
                reporthook(blocknum, bs, size)
            while 1:
                block = fp.read(bs)
                if (block == ''):
                    break
                read += len(block)
                tfp.write(block)
                blocknum += 1
                if reporthook:
                    reporthook(blocknum, bs, size)
        finally:
            tfp.close()
    finally:
        fp.close()
    if ((size >= 0) and (read < size)):
        raise ContentTooShortError(('retrieval incomplete: got only %i out of %i bytes' % (read, size)), result)
    return result
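This retrieve() machinery is what the module-level urllib.urlretrieve() helper drives in Python 2; a hedged usage sketch follows, with the URL and reporthook being examples only.

# Usage sketch only; the URL and report() hook are illustrative.
import urllib

def report(blocknum, blocksize, totalsize):
    print('block %d of %d bytes (total %d)' % (blocknum, blocksize, totalsize))

filename, headers = urllib.urlretrieve('http://example.com/', reporthook=report)
print(filename)            # local (temporary) file holding the body
print(headers.gettype())   # e.g. 'text/html'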
'Use HTTP protocol.'
def open_http(self, url, data=None):
    import httplib
    user_passwd = None
    proxy_passwd = None
    if isinstance(url, str):
        (host, selector) = splithost(url)
        if host:
            (user_passwd, host) = splituser(host)
            host = unquote(host)
        realhost = host
    else:
        (host, selector) = url
        (proxy_passwd, host) = splituser(host)
        (urltype, rest) = splittype(selector)
        url = rest
        user_passwd = None
        if (urltype.lower() != 'http'):
            realhost = None
        else:
            (realhost, rest) = splithost(rest)
            if realhost:
                (user_passwd, realhost) = splituser(realhost)
            if user_passwd:
                selector = ('%s://%s%s' % (urltype, realhost, rest))
            if proxy_bypass(realhost):
                host = realhost
    if (not host):
        raise IOError, ('http error', 'no host given')
    if proxy_passwd:
        proxy_passwd = unquote(proxy_passwd)
        proxy_auth = base64.b64encode(proxy_passwd).strip()
    else:
        proxy_auth = None
    if user_passwd:
        user_passwd = unquote(user_passwd)
        auth = base64.b64encode(user_passwd).strip()
    else:
        auth = None
    h = httplib.HTTP(host)
    if (data is not None):
        h.putrequest('POST', selector)
        h.putheader('Content-Type', 'application/x-www-form-urlencoded')
        h.putheader('Content-Length', ('%d' % len(data)))
    else:
        h.putrequest('GET', selector)
    if proxy_auth:
        h.putheader('Proxy-Authorization', ('Basic %s' % proxy_auth))
    if auth:
        h.putheader('Authorization', ('Basic %s' % auth))
    if realhost:
        h.putheader('Host', realhost)
    for args in self.addheaders:
        h.putheader(*args)
    h.endheaders(data)
    (errcode, errmsg, headers) = h.getreply()
    fp = h.getfile()
    if (errcode == (-1)):
        if fp:
            fp.close()
        raise IOError, ('http protocol error', 0, 'got a bad status line', None)
    if (200 <= errcode < 300):
        return addinfourl(fp, headers, ('http:' + url), errcode)
    elif (data is None):
        return self.http_error(url, fp, errcode, errmsg, headers)
    else:
        return self.http_error(url, fp, errcode, errmsg, headers, data)
'Handle http errors. Derived class can override this, or provide specific handlers named http_error_DDD where DDD is the 3-digit error code.'
def http_error(self, url, fp, errcode, errmsg, headers, data=None):
    name = ('http_error_%d' % errcode)
    if hasattr(self, name):
        method = getattr(self, name)
        if (data is None):
            result = method(url, fp, errcode, errmsg, headers)
        else:
            result = method(url, fp, errcode, errmsg, headers, data)
        if result:
            return result
    return self.http_error_default(url, fp, errcode, errmsg, headers)
'Default error handler: close the connection and raise IOError.'
def http_error_default(self, url, fp, errcode, errmsg, headers):
    fp.close()
    raise IOError, ('http error', errcode, errmsg, headers)
'Use local file or FTP depending on form of URL.'
def open_file(self, url):
    if (not isinstance(url, str)):
        raise IOError, ('file error', 'proxy support for file protocol currently not implemented')
    if ((url[:2] == '//') and (url[2:3] != '/') and (url[2:12].lower() != 'localhost/')):
        return self.open_ftp(url)
    else:
        return self.open_local_file(url)
'Use local file.'
def open_local_file(self, url):
    import mimetypes, mimetools, email.utils
    try:
        from cStringIO import StringIO
    except ImportError:
        from StringIO import StringIO
    (host, file) = splithost(url)
    localname = url2pathname(file)
    try:
        stats = os.stat(localname)
    except OSError as e:
        raise IOError(e.errno, e.strerror, e.filename)
    size = stats.st_size
    modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
    mtype = mimetypes.guess_type(url)[0]
    headers = mimetools.Message(StringIO(('Content-Type: %s\nContent-Length: %d\nLast-modified: %s\n' % ((mtype or 'text/plain'), size, modified))))
    if (not host):
        urlfile = file
        if (file[:1] == '/'):
            urlfile = ('file://' + file)
        elif (file[:2] == './'):
            raise ValueError(('local file url may start with / or file:. Unknown url of type: %s' % url))
        return addinfourl(open(localname, 'rb'), headers, urlfile)
    (host, port) = splitport(host)
    if ((not port) and (socket.gethostbyname(host) in (localhost(), thishost()))):
        urlfile = file
        if (file[:1] == '/'):
            urlfile = ('file://' + file)
        return addinfourl(open(localname, 'rb'), headers, urlfile)
    raise IOError, ('local file error', 'not on local host')
'Use FTP protocol.'
def open_ftp(self, url):
    if (not isinstance(url, str)):
        raise IOError, ('ftp error', 'proxy support for ftp protocol currently not implemented')
    import mimetypes, mimetools
    try:
        from cStringIO import StringIO
    except ImportError:
        from StringIO import StringIO
    (host, path) = splithost(url)
    if (not host):
        raise IOError, ('ftp error', 'no host given')
    (host, port) = splitport(host)
    (user, host) = splituser(host)
    if user:
        (user, passwd) = splitpasswd(user)
    else:
        passwd = None
    host = unquote(host)
    user = (user or '')
    passwd = (passwd or '')
    host = socket.gethostbyname(host)
    if (not port):
        import ftplib
        port = ftplib.FTP_PORT
    else:
        port = int(port)
    (path, attrs) = splitattr(path)
    path = unquote(path)
    dirs = path.split('/')
    (dirs, file) = (dirs[:(-1)], dirs[(-1)])
    if (dirs and (not dirs[0])):
        dirs = dirs[1:]
    if (dirs and (not dirs[0])):
        dirs[0] = '/'
    key = (user, host, port, '/'.join(dirs))
    if (len(self.ftpcache) > MAXFTPCACHE):
        for k in self.ftpcache.keys():
            if (k != key):
                v = self.ftpcache[k]
                del self.ftpcache[k]
                v.close()
    try:
        if (not (key in self.ftpcache)):
            self.ftpcache[key] = ftpwrapper(user, passwd, host, port, dirs)
        if (not file):
            type = 'D'
        else:
            type = 'I'
        for attr in attrs:
            (attr, value) = splitvalue(attr)
            if ((attr.lower() == 'type') and (value in ('a', 'A', 'i', 'I', 'd', 'D'))):
                type = value.upper()
        (fp, retrlen) = self.ftpcache[key].retrfile(file, type)
        mtype = mimetypes.guess_type(('ftp:' + url))[0]
        headers = ''
        if mtype:
            headers += ('Content-Type: %s\n' % mtype)
        if ((retrlen is not None) and (retrlen >= 0)):
            headers += ('Content-Length: %d\n' % retrlen)
        headers = mimetools.Message(StringIO(headers))
        return addinfourl(fp, headers, ('ftp:' + url))
    except ftperrors() as msg:
        raise IOError, ('ftp error', msg), sys.exc_info()[2]
'Use "data" URL.'
def open_data(self, url, data=None):
    if (not isinstance(url, str)):
        raise IOError, ('data error', 'proxy support for data protocol currently not implemented')
    import mimetools
    try:
        from cStringIO import StringIO
    except ImportError:
        from StringIO import StringIO
    try:
        [type, data] = url.split(',', 1)
    except ValueError:
        raise IOError, ('data error', 'bad data URL')
    if (not type):
        type = 'text/plain;charset=US-ASCII'
    semi = type.rfind(';')
    if ((semi >= 0) and ('=' not in type[semi:])):
        encoding = type[(semi + 1):]
        type = type[:semi]
    else:
        encoding = ''
    msg = []
    msg.append(('Date: %s' % time.strftime('%a, %d %b %Y %H:%M:%S GMT', time.gmtime(time.time()))))
    msg.append(('Content-type: %s' % type))
    if (encoding == 'base64'):
        data = base64.decodestring(data)
    else:
        data = unquote(data)
    msg.append(('Content-Length: %d' % len(data)))
    msg.append('')
    msg.append(data)
    msg = '\n'.join(msg)
    f = StringIO(msg)
    headers = mimetools.Message(f, 0)
    return addinfourl(f, headers, url)
'Default error handling -- don\'t raise an exception.'
def http_error_default(self, url, fp, errcode, errmsg, headers):
return addinfourl(fp, headers, ('http:' + url), errcode)
'Error 302 -- relocated (temporarily).'
def http_error_302(self, url, fp, errcode, errmsg, headers, data=None):
    self.tries += 1
    try:
        if (self.maxtries and (self.tries >= self.maxtries)):
            if hasattr(self, 'http_error_500'):
                meth = self.http_error_500
            else:
                meth = self.http_error_default
            return meth(url, fp, 500, 'Internal Server Error: Redirect Recursion', headers)
        result = self.redirect_internal(url, fp, errcode, errmsg, headers, data)
        return result
    finally:
        self.tries = 0
'Error 301 -- also relocated (permanently).'
def http_error_301(self, url, fp, errcode, errmsg, headers, data=None):
return self.http_error_302(url, fp, errcode, errmsg, headers, data)
'Error 303 -- also relocated (essentially identical to 302).'
def http_error_303(self, url, fp, errcode, errmsg, headers, data=None):
return self.http_error_302(url, fp, errcode, errmsg, headers, data)
'Error 307 -- relocated, but turn POST into error.'
def http_error_307(self, url, fp, errcode, errmsg, headers, data=None):
    if (data is None):
        return self.http_error_302(url, fp, errcode, errmsg, headers, data)
    else:
        return self.http_error_default(url, fp, errcode, errmsg, headers)
'Error 401 -- authentication required. This function supports Basic authentication only.'
def http_error_401(self, url, fp, errcode, errmsg, headers, data=None):
    if (not ('www-authenticate' in headers)):
        URLopener.http_error_default(self, url, fp, errcode, errmsg, headers)
    stuff = headers['www-authenticate']
    import re
    match = re.match('[ \t]*([^ \t]+)[ \t]+realm="([^"]*)"', stuff)
    if (not match):
        URLopener.http_error_default(self, url, fp, errcode, errmsg, headers)
    (scheme, realm) = match.groups()
    if (scheme.lower() != 'basic'):
        URLopener.http_error_default(self, url, fp, errcode, errmsg, headers)
    name = (('retry_' + self.type) + '_basic_auth')
    if (data is None):
        return getattr(self, name)(url, realm)
    else:
        return getattr(self, name)(url, realm, data)
'Error 407 -- proxy authentication required. This function supports Basic authentication only.'
def http_error_407(self, url, fp, errcode, errmsg, headers, data=None):
    if (not ('proxy-authenticate' in headers)):
        URLopener.http_error_default(self, url, fp, errcode, errmsg, headers)
    stuff = headers['proxy-authenticate']
    import re
    match = re.match('[ \t]*([^ \t]+)[ \t]+realm="([^"]*)"', stuff)
    if (not match):
        URLopener.http_error_default(self, url, fp, errcode, errmsg, headers)
    (scheme, realm) = match.groups()
    if (scheme.lower() != 'basic'):
        URLopener.http_error_default(self, url, fp, errcode, errmsg, headers)
    name = (('retry_proxy_' + self.type) + '_basic_auth')
    if (data is None):
        return getattr(self, name)(url, realm)
    else:
        return getattr(self, name)(url, realm, data)
'Override this in a GUI environment!'
def prompt_user_passwd(self, host, realm):
    import getpass
    try:
        user = raw_input(('Enter username for %s at %s: ' % (realm, host)))
        passwd = getpass.getpass(('Enter password for %s in %s at %s: ' % (user, realm, host)))
        return (user, passwd)
    except KeyboardInterrupt:
        print
        return (None, None)
'Return current line number and offset.'
def getpos(self):
return (self.lineno, self.offset)
'Creates an instance of the HTMLParser class. The formatter parameter is the formatter instance associated with the parser.'
def __init__(self, formatter, verbose=0):
    sgmllib.SGMLParser.__init__(self, verbose)
    self.formatter = formatter
'Begins saving character data in a buffer instead of sending it to the formatter object. Retrieve the stored data via the save_end() method. Use of the save_bgn() / save_end() pair may not be nested.'
def save_bgn(self):
self.savedata = ''
'Ends buffering character data and returns all data saved since the preceding call to the save_bgn() method. If the nofill flag is false, whitespace is collapsed to single spaces. A call to this method without a preceding call to the save_bgn() method will raise a TypeError exception.'
def save_end(self):
    data = self.savedata
    self.savedata = None
    if (not self.nofill):
        data = ' '.join(data.split())
    return data
'This method is called at the start of an anchor region. The arguments correspond to the attributes of the <A> tag with the same names. The default implementation maintains a list of hyperlinks (defined by the HREF attribute for <A> tags) within the document. The list of hyperlinks is available as the data attribute anchorlist.'
def anchor_bgn(self, href, name, type):
    self.anchor = href
    if self.anchor:
        self.anchorlist.append(href)
'This method is called at the end of an anchor region. The default implementation adds a textual footnote marker using an index into the list of hyperlinks created by the anchor_bgn() method.'
def anchor_end(self):
    if self.anchor:
        self.handle_data(('[%d]' % len(self.anchorlist)))
        self.anchor = None
'This method is called to handle images. The default implementation simply passes the alt value to the handle_data() method.'
def handle_image(self, src, alt, *args):
self.handle_data(alt)
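A brief sketch of how the anchorlist described above is typically collected, assuming the Python 2 htmllib and formatter modules; the markup is illustrative.

# Usage sketch only; the markup string is a made-up example.
import htmllib, formatter

parser = htmllib.HTMLParser(formatter.NullFormatter())
parser.feed('<a href="http://example.com/">example</a> and <a href="/local">more</a>')
parser.close()
print(parser.anchorlist)   # ['http://example.com/', '/local']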
'Constructor. The optional \'locals\' argument specifies the dictionary in which code will be executed; it defaults to a newly created dictionary with key "__name__" set to "__console__" and key "__doc__" set to None.'
def __init__(self, locals=None):
    if (locals is None):
        locals = {'__name__': '__console__', '__doc__': None}
    self.locals = locals
    self.compile = CommandCompiler()
'Compile and run some source in the interpreter. Arguments are as for compile_command(). One of several things can happen: 1) The input is incorrect; compile_command() raised an exception (SyntaxError or OverflowError). A syntax traceback will be printed by calling the showsyntaxerror() method. 2) The input is incomplete, and more input is required; compile_command() returned None. Nothing happens. 3) The input is complete; compile_command() returned a code object. The code is executed by calling self.runcode() (which also handles run-time exceptions, except for SystemExit). The return value is True in case 2, False in the other cases (unless an exception is raised). The return value can be used to decide whether to use sys.ps1 or sys.ps2 to prompt the next line.'
def runsource(self, source, filename='<input>', symbol='single'):
    try:
        code = self.compile(source, filename, symbol)
    except (OverflowError, SyntaxError, ValueError):
        self.showsyntaxerror(filename)
        return False
    if (code is None):
        return True
    self.runcode(code)
    return False
'Execute a code object. When an exception occurs, self.showtraceback() is called to display a traceback. All exceptions are caught except SystemExit, which is reraised. A note about KeyboardInterrupt: this exception may occur elsewhere in this code, and may not always be caught. The caller should be prepared to deal with it.'
def runcode(self, code):
    try:
        exec code in self.locals
    except SystemExit:
        raise
    except:
        self.showtraceback()
    else:
        if softspace(sys.stdout, 0):
            print
'Display the syntax error that just occurred. This doesn\'t display a stack trace because there isn\'t one. If a filename is given, it is stuffed in the exception instead of what was there before (because Python\'s parser always uses "<string>" when reading from a string). The output is written by self.write(), below.'
def showsyntaxerror(self, filename=None):
    (type, value, sys.last_traceback) = sys.exc_info()
    sys.last_type = type
    sys.last_value = value
    if (filename and (type is SyntaxError)):
        try:
            (msg, (dummy_filename, lineno, offset, line)) = value
        except:
            pass
        else:
            value = SyntaxError(msg, (filename, lineno, offset, line))
            sys.last_value = value
    list = traceback.format_exception_only(type, value)
    map(self.write, list)
'Display the exception that just occurred. We remove the first stack item because it is our own code. The output is written by self.write(), below.'
def showtraceback(self):
    try:
        (type, value, tb) = sys.exc_info()
        sys.last_type = type
        sys.last_value = value
        sys.last_traceback = tb
        tblist = traceback.extract_tb(tb)
        del tblist[:1]
        list = traceback.format_list(tblist)
        if list:
            list.insert(0, 'Traceback (most recent call last):\n')
        list[len(list):] = traceback.format_exception_only(type, value)
    finally:
        tblist = tb = None
    map(self.write, list)
'Write a string. The base implementation writes to sys.stderr; a subclass may replace this with a different implementation.'
def write(self, data):
sys.stderr.write(data)
'Constructor. The optional locals argument will be passed to the InteractiveInterpreter base class. The optional filename argument should specify the (file)name of the input stream; it will show up in tracebacks.'
def __init__(self, locals=None, filename='<console>'):
    InteractiveInterpreter.__init__(self, locals)
    self.filename = filename
    self.resetbuffer()
'Reset the input buffer.'
def resetbuffer(self):
self.buffer = []
'Closely emulate the interactive Python console. The optional banner argument specifies the banner to print before the first interaction; by default it prints a banner similar to the one printed by the real Python interpreter, followed by the current class name in parentheses (so as not to confuse this with the real interpreter -- since it\'s so close!).'
def interact(self, banner=None):
    try:
        sys.ps1
    except AttributeError:
        sys.ps1 = '>>> '
    try:
        sys.ps2
    except AttributeError:
        sys.ps2 = '... '
    cprt = 'Type "help", "copyright", "credits" or "license" for more information.'
    if (banner is None):
        self.write(('Python %s on %s\n%s\n(%s)\n' % (sys.version, sys.platform, cprt, self.__class__.__name__)))
    else:
        self.write(('%s\n' % str(banner)))
    more = 0
    while 1:
        try:
            if more:
                prompt = sys.ps2
            else:
                prompt = sys.ps1
            try:
                line = self.raw_input(prompt)
                encoding = getattr(sys.stdin, 'encoding', None)
                if (encoding and (not isinstance(line, unicode))):
                    line = line.decode(encoding)
            except EOFError:
                self.write('\n')
                break
            else:
                more = self.push(line)
        except KeyboardInterrupt:
            self.write('\nKeyboardInterrupt\n')
            self.resetbuffer()
            more = 0
'Push a line to the interpreter. The line should not have a trailing newline; it may have internal newlines. The line is appended to a buffer and the interpreter\'s runsource() method is called with the concatenated contents of the buffer as source. If this indicates that the command was executed or invalid, the buffer is reset; otherwise, the command is incomplete, and the buffer is left as it was after the line was appended. The return value is 1 if more input is required, 0 if the line was dealt with in some way (this is the same as runsource()).'
def push(self, line):
    self.buffer.append(line)
    source = '\n'.join(self.buffer)
    more = self.runsource(source, self.filename)
    if (not more):
        self.resetbuffer()
    return more
'Write a prompt and read a line. The returned line does not include the trailing newline. When the user enters the EOF key sequence, EOFError is raised. The base implementation uses the built-in function raw_input(); a subclass may replace this with a different implementation.'
def raw_input(self, prompt=''):
return raw_input(prompt)
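A minimal sketch of how push() and runsource() fit together, assuming the standard 'code' module; the statements fed in are illustrative.

# Usage sketch only; the pushed source lines are made-up examples.
import code

console = code.InteractiveConsole(locals={'__name__': '__console__'})
console.push('x = 21')          # complete statement: executed immediately
more = console.push('if x:')    # incomplete: returns true, buffer kept
console.push('    x = x * 2')
console.push('')                # blank line completes and runs the block
console.push('print(x)')        # prints 42
# console.interact() would instead run a full read-eval-print loop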
'Return a Request or None in response to a redirect. This is called by the http_error_30x methods when a redirection response is received. If a redirection should take place, return a new Request to allow http_error_30x to perform the redirect. Otherwise, raise HTTPError if no-one else should try to handle this url. Return None if you can\'t but another Handler might.'
def redirect_request(self, req, fp, code, msg, headers, newurl):
    m = req.get_method()
    if (((code in (301, 302, 303, 307)) and (m in ('GET', 'HEAD'))) or ((code in (301, 302, 303)) and (m == 'POST'))):
        newurl = newurl.replace(' ', '%20')
        newheaders = dict(((k, v) for (k, v) in req.headers.items() if (k.lower() not in ('content-length', 'content-type'))))
        return Request(newurl, headers=newheaders, origin_req_host=req.get_origin_req_host(), unverifiable=True)
    else:
        raise HTTPError(req.get_full_url(), code, msg, headers, fp)
'Accept authority or URI and extract only the authority and path.'
def reduce_uri(self, uri, default_port=True):
    parts = urlparse.urlsplit(uri)
    if parts[1]:
        scheme = parts[0]
        authority = parts[1]
        path = (parts[2] or '/')
    else:
        scheme = None
        authority = uri
        path = '/'
    (host, port) = splitport(authority)
    if (default_port and (port is None) and (scheme is not None)):
        dport = {'http': 80, 'https': 443}.get(scheme)
        if (dport is not None):
            authority = ('%s:%d' % (host, dport))
    return (authority, path)
'Check if test is below base in a URI tree. Both args must be URIs in reduced form.'
def is_suburi(self, base, test):
    if (base == test):
        return True
    if (base[0] != test[0]):
        return False
    common = posixpath.commonprefix((base[1], test[1]))
    if (len(common) == len(base[1])):
        return True
    return False
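A hedged illustration of what reduce_uri() and is_suburi() compute; it assumes the Python 2 urllib2.HTTPPasswordMgr, which provides these methods, and the URIs are made up.

# Illustration only; the URIs are example values.
import urllib2

mgr = urllib2.HTTPPasswordMgr()
print(mgr.reduce_uri('http://example.com/path/page'))   # ('example.com:80', '/path/page')
print(mgr.is_suburi(('example.com:80', '/path'),
                    ('example.com:80', '/path/page')))  # True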
'Return an addinfourl object for the request, using http_class. http_class must implement the HTTPConnection API from httplib. The addinfourl return value is a file-like object. It also has methods and attributes including: - info(): return a mimetools.Message object for the headers - geturl(): return the original request URL - code: HTTP status code'
def do_open(self, http_class, req, **http_conn_args):
    host = req.get_host()
    if (not host):
        raise URLError('no host given')
    h = http_class(host, timeout=req.timeout, **http_conn_args)
    h.set_debuglevel(self._debuglevel)
    headers = dict(req.unredirected_hdrs)
    headers.update(dict(((k, v) for (k, v) in req.headers.items() if (k not in headers))))
    headers['Connection'] = 'close'
    headers = dict(((name.title(), val) for (name, val) in headers.items()))
    if req._tunnel_host:
        tunnel_headers = {}
        proxy_auth_hdr = 'Proxy-Authorization'
        if (proxy_auth_hdr in headers):
            tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr]
            del headers[proxy_auth_hdr]
        h.set_tunnel(req._tunnel_host, headers=tunnel_headers)
    try:
        h.request(req.get_method(), req.get_selector(), req.data, headers)
    except socket.error as err:
        h.close()
        raise URLError(err)
    else:
        try:
            r = h.getresponse(buffering=True)
        except TypeError:
            r = h.getresponse()
    r.recv = r.read
    fp = socket._fileobject(r, close=True)
    resp = addinfourl(fp, r.msg, req.get_full_url())
    resp.code = r.status
    resp.msg = r.reason
    return resp
'Add header for field key handling repeats.'
def addheader(self, key, value):
    prev = self.dict.get(key)
    if (prev is None):
        self.dict[key] = value
    else:
        combined = ', '.join((prev, value))
        self.dict[key] = combined
'Add more field data from a continuation line.'
def addcontinue(self, key, more):
    prev = self.dict[key]
    self.dict[key] = ((prev + '\n ') + more)
'Read header lines. Read header lines up to the entirely blank line that terminates them. The (normally blank) line that ends the headers is skipped, but not included in the returned list. If a non-header line ends the headers (which is an error), an attempt is made to backspace over it; it is never included in the returned list. The variable self.status is set to the empty string if all went well, otherwise it is an error message. The variable self.headers is a completely uninterpreted list of lines contained in the header (so printing them will reproduce the header exactly as it appears in the file). If multiple header fields with the same name occur, they are combined according to the rules in RFC 2616 sec 4.2: Appending each subsequent field-value to the first, each separated by a comma. The order in which header fields with the same field-name are received is significant to the interpretation of the combined field value.'
def readheaders(self):
    self.dict = {}
    self.unixfrom = ''
    self.headers = hlist = []
    self.status = ''
    headerseen = ''
    firstline = 1
    startofline = unread = tell = None
    if hasattr(self.fp, 'unread'):
        unread = self.fp.unread
    elif self.seekable:
        tell = self.fp.tell
    while True:
        if (len(hlist) > _MAXHEADERS):
            raise HTTPException(('got more than %d headers' % _MAXHEADERS))
        if tell:
            try:
                startofline = tell()
            except IOError:
                startofline = tell = None
                self.seekable = 0
        line = self.fp.readline((_MAXLINE + 1))
        if (len(line) > _MAXLINE):
            raise LineTooLong('header line')
        if (not line):
            self.status = 'EOF in headers'
            break
        if (firstline and line.startswith('From ')):
            self.unixfrom = (self.unixfrom + line)
            continue
        firstline = 0
        if (headerseen and (line[0] in ' \t')):
            hlist.append(line)
            self.addcontinue(headerseen, line.strip())
            continue
        elif self.iscomment(line):
            continue
        elif self.islast(line):
            break
        headerseen = self.isheader(line)
        if headerseen:
            hlist.append(line)
            self.addheader(headerseen, line[(len(headerseen) + 1):].strip())
            continue
        elif (headerseen is not None):
            continue
        else:
            if (not self.dict):
                self.status = 'No headers'
            else:
                self.status = 'Non-header line where header expected'
            if unread:
                unread(line)
            elif tell:
                self.fp.seek(startofline)
            else:
                self.status = (self.status + '; bad seek')
            break
'Read the number of bytes requested, compensating for partial reads. Normally, we have a blocking socket, but a read() can be interrupted by a signal (resulting in a partial read). Note that we cannot distinguish between EOF and an interrupt when zero bytes have been read. IncompleteRead() will be raised in this situation. This function should be used when <amt> bytes "should" be present for reading. If the bytes are truly not available (due to EOF), then the IncompleteRead exception can be used to detect the problem.'
def _safe_read(self, amt):
    s = []
    while (amt > 0):
        chunk = self.fp.read(min(amt, MAXAMOUNT))
        if (not chunk):
            raise IncompleteRead(''.join(s), amt)
        s.append(chunk)
        amt -= len(chunk)
    return ''.join(s)
'Return list of (header, value) tuples.'
def getheaders(self):
    if (self.msg is None):
        raise ResponseNotReady()
    return self.msg.items()
'Set up host and port for HTTP CONNECT tunnelling. In a connection that uses HTTP CONNECT tunnelling, the host passed to the constructor is used as a proxy server that relays all communication to the endpoint passed to set_tunnel. This is done by sending an HTTP CONNECT request to the proxy server when the connection is established. This method must be called before the HTTP connection has been established. The headers argument should be a mapping of extra HTTP headers to send with the CONNECT request.'
def set_tunnel(self, host, port=None, headers=None):
    if self.sock:
        raise RuntimeError("Can't setup tunnel for established connection.")
    (self._tunnel_host, self._tunnel_port) = self._get_hostport(host, port)
    if headers:
        self._tunnel_headers = headers
    else:
        self._tunnel_headers.clear()
'Connect to the host and port specified in __init__.'
def connect(self):
    self.sock = self._create_connection((self.host, self.port), self.timeout, self.source_address)
    if self._tunnel_host:
        self._tunnel()
'Close the connection to the HTTP server.'
def close(self):
    self.__state = _CS_IDLE
    try:
        sock = self.sock
        if sock:
            self.sock = None
            sock.close()
    finally:
        response = self.__response
        if response:
            self.__response = None
            response.close()
'Send `data\' to the server.'
def send(self, data):
    if (self.sock is None):
        if self.auto_open:
            self.connect()
        else:
            raise NotConnected()
    if (self.debuglevel > 0):
        print 'send:', repr(data)
    blocksize = 8192
    if (hasattr(data, 'read') and (not isinstance(data, array))):
        if (self.debuglevel > 0):
            print 'sendIng a read()able'
        datablock = data.read(blocksize)
        while datablock:
            self.sock.sendall(datablock)
            datablock = data.read(blocksize)
    else:
        self.sock.sendall(data)
'Add a line of output to the current request buffer. Assumes that the line does *not* end with \r\n.'
def _output(self, s):
self._buffer.append(s)
'Send the currently buffered request and clear the buffer. Appends an extra \r\n to the buffer. A message_body may be specified, to be appended to the request.'
def _send_output(self, message_body=None):
    self._buffer.extend(('', ''))
    msg = '\r\n'.join(self._buffer)
    del self._buffer[:]
    if isinstance(message_body, str):
        msg += message_body
        message_body = None
    self.send(msg)
    if (message_body is not None):
        self.send(message_body)
'Send a request to the server. `method\' specifies an HTTP request method, e.g. \'GET\'. `url\' specifies the object being requested, e.g. \'/index.html\'. `skip_host\', if True, does not automatically add a \'Host:\' header. `skip_accept_encoding\', if True, does not automatically add an \'Accept-Encoding:\' header.'
def putrequest(self, method, url, skip_host=0, skip_accept_encoding=0):
    if (self.__response and self.__response.isclosed()):
        self.__response = None
    if (self.__state == _CS_IDLE):
        self.__state = _CS_REQ_STARTED
    else:
        raise CannotSendRequest()
    self._method = method
    if (not url):
        url = '/'
    hdr = ('%s %s %s' % (method, url, self._http_vsn_str))
    self._output(hdr)
    if (self._http_vsn == 11):
        if (not skip_host):
            netloc = ''
            if url.startswith('http'):
                (nil, netloc, nil, nil, nil) = urlsplit(url)
            if netloc:
                try:
                    netloc_enc = netloc.encode('ascii')
                except UnicodeEncodeError:
                    netloc_enc = netloc.encode('idna')
                self.putheader('Host', netloc_enc)
            else:
                if self._tunnel_host:
                    host = self._tunnel_host
                    port = self._tunnel_port
                else:
                    host = self.host
                    port = self.port
                try:
                    host_enc = host.encode('ascii')
                except UnicodeEncodeError:
                    host_enc = host.encode('idna')
                if (host_enc.find(':') >= 0):
                    host_enc = (('[' + host_enc) + ']')
                if (port == self.default_port):
                    self.putheader('Host', host_enc)
                else:
                    self.putheader('Host', ('%s:%s' % (host_enc, port)))
        if (not skip_accept_encoding):
            self.putheader('Accept-Encoding', 'identity')
    else:
        pass
'Send a request header line to the server. For example: h.putheader(\'Accept\', \'text/html\')'
def putheader(self, header, *values):
    if (self.__state != _CS_REQ_STARTED):
        raise CannotSendHeader()
    header = ('%s' % header)
    if (not _is_legal_header_name(header)):
        raise ValueError(('Invalid header name %r' % (header,)))
    values = [str(v) for v in values]
    for one_value in values:
        if _is_illegal_header_value(one_value):
            raise ValueError(('Invalid header value %r' % (one_value,)))
    hdr = ('%s: %s' % (header, '\r\n\t'.join(values)))
    self._output(hdr)
'Indicate that the last header line has been sent to the server. This method sends the request to the server. The optional message_body argument can be used to pass a message body associated with the request. The message body will be sent in the same packet as the message headers if it is string, otherwise it is sent as a separate packet.'
def endheaders(self, message_body=None):
    if (self.__state == _CS_REQ_STARTED):
        self.__state = _CS_REQ_SENT
    else:
        raise CannotSendHeader()
    self._send_output(message_body)
'Send a complete request to the server.'
def request(self, method, url, body=None, headers={}):
self._send_request(method, url, body, headers)
'Get the response from the server.'
def getresponse(self, buffering=False):
    if (self.__response and self.__response.isclosed()):
        self.__response = None
    if ((self.__state != _CS_REQ_SENT) or self.__response):
        raise ResponseNotReady()
    args = (self.sock,)
    kwds = {'strict': self.strict, 'method': self._method}
    if (self.debuglevel > 0):
        args += (self.debuglevel,)
    if buffering:
        kwds['buffering'] = True
    response = self.response_class(*args, **kwds)
    try:
        response.begin()
        assert (response.will_close != _UNKNOWN)
        self.__state = _CS_IDLE
        if response.will_close:
            self.close()
        else:
            self.__response = response
        return response
    except:
        response.close()
        raise
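A sketch of the manual request sequence the methods above implement, assuming the Python 2 httplib module: putrequest(), putheader(), endheaders(), then getresponse(); the host, path, and header are illustrative.

# Usage sketch only; example.com and the header are made-up values.
import httplib

conn = httplib.HTTPConnection('example.com')
conn.putrequest('GET', '/')              # start the request line
conn.putheader('Accept', 'text/html')    # extra headers, one per call
conn.endheaders()                        # finish headers and send the request
resp = conn.getresponse()
print('%s %s' % (resp.status, resp.reason))   # e.g. 200 OK
print(len(resp.read()))
conn.close()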
'Provide a default host, since the superclass requires one.'
def __init__(self, host='', port=None, strict=None):
    if (port == 0):
        port = None
    self._setup(self._connection_class(host, port, strict))
'Accept arguments to set the host/port, since the superclass doesn\'t.'
def connect(self, host=None, port=None):
    if (host is not None):
        (self._conn.host, self._conn.port) = self._conn._get_hostport(host, port)
    self._conn.connect()