desc
stringlengths
3
26.7k
decl
stringlengths
11
7.89k
bodies
stringlengths
8
553k
def open_http(self, url, data=None):
    """Use HTTP protocol.

    `url` is either a string (direct request, possibly with embedded
    user:password) or a (proxy-host, full-selector) tuple.  `data`, if
    given, turns the request into a POST.  Returns an addinfourl on
    2xx, otherwise defers to self.http_error().  (Python 2 code.)
    """
    import httplib
    user_passwd = None
    proxy_passwd = None
    if isinstance(url, str):
        # Direct request: split host (may carry user:password@) from selector.
        (host, selector) = splithost(url)
        if host:
            (user_passwd, host) = splituser(host)
            host = unquote(host)
        realhost = host
    else:
        # Proxied request: url is (proxyhost, full-url-selector).
        (host, selector) = url
        (proxy_passwd, host) = splituser(host)
        (urltype, rest) = splittype(selector)
        url = rest
        user_passwd = None
        if (urltype.lower() != 'http'):
            realhost = None
        else:
            (realhost, rest) = splithost(rest)
            if realhost:
                (user_passwd, realhost) = splituser(realhost)
            if user_passwd:
                selector = ('%s://%s%s' % (urltype, realhost, rest))
            if proxy_bypass(realhost):
                # Target host is exempt from proxying -- talk to it directly.
                host = realhost
    if (not host):
        raise IOError, ('http error', 'no host given')
    if proxy_passwd:
        import base64
        proxy_auth = base64.b64encode(proxy_passwd).strip()
    else:
        proxy_auth = None
    if user_passwd:
        import base64
        auth = base64.b64encode(user_passwd).strip()
    else:
        auth = None
    h = httplib.HTTP(host)
    if (data is not None):
        h.putrequest('POST', selector)
        h.putheader('Content-Type', 'application/x-www-form-urlencoded')
        h.putheader('Content-Length', ('%d' % len(data)))
    else:
        h.putrequest('GET', selector)
    if proxy_auth:
        h.putheader('Proxy-Authorization', ('Basic %s' % proxy_auth))
    if auth:
        h.putheader('Authorization', ('Basic %s' % auth))
    if realhost:
        h.putheader('Host', realhost)
    for args in self.addheaders:
        h.putheader(*args)
    h.endheaders(data)
    (errcode, errmsg, headers) = h.getreply()
    fp = h.getfile()
    if (errcode == (-1)):
        if fp:
            fp.close()
        # -1 from getreply() means the server sent a malformed status line.
        raise IOError, ('http protocol error', 0, 'got a bad status line', None)
    if (200 <= errcode < 300):
        return addinfourl(fp, headers, ('http:' + url), errcode)
    elif (data is None):
        return self.http_error(url, fp, errcode, errmsg, headers)
    else:
        return self.http_error(url, fp, errcode, errmsg, headers, data)
def http_error(self, url, fp, errcode, errmsg, headers, data=None):
    """Handle http errors.

    Derived classes can override this, or provide specific handlers
    named http_error_DDD where DDD is the 3-digit error code.  A handler
    that returns a falsy value falls through to http_error_default.
    """
    handler_name = 'http_error_%d' % errcode
    if hasattr(self, handler_name):
        handler = getattr(self, handler_name)
        call_args = (url, fp, errcode, errmsg, headers)
        if data is not None:
            call_args += (data,)
        outcome = handler(*call_args)
        if outcome:
            return outcome
    return self.http_error_default(url, fp, errcode, errmsg, headers)
def http_error_default(self, url, fp, errcode, errmsg, headers):
    """Default error handler: close the connection and raise IOError."""
    # Drain the body before closing so the connection shuts down cleanly.
    void = fp.read()
    fp.close()
    raise IOError, ('http error', errcode, errmsg, headers)
def open_file(self, url):
    """Use local file or FTP depending on form of URL.

    '//host/...' (with a non-local host) is dispatched to open_ftp;
    everything else is treated as a local file path.
    """
    if (not isinstance(url, str)):
        raise IOError, ('file error', 'proxy support for file protocol currently not implemented')
    # '//' followed by something other than '/' or 'localhost/' names a remote host.
    if ((url[:2] == '//') and (url[2:3] != '/') and (url[2:12].lower() != 'localhost/')):
        return self.open_ftp(url)
    else:
        return self.open_local_file(url)
def open_local_file(self, url):
    """Use local file.

    Builds synthetic Content-Type/Content-Length/Last-modified headers
    from os.stat() and returns an addinfourl over the opened file.
    Raises IOError if the URL names a host that is not this machine.
    """
    import mimetypes, mimetools, email.utils
    try:
        from cStringIO import StringIO
    except ImportError:
        from StringIO import StringIO
    (host, file) = splithost(url)
    localname = url2pathname(file)
    try:
        stats = os.stat(localname)
    except OSError as e:
        # Re-raise stat failures as IOError to match the opener's contract.
        raise IOError(e.errno, e.strerror, e.filename)
    size = stats.st_size
    modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
    mtype = mimetypes.guess_type(url)[0]
    headers = mimetools.Message(StringIO(('Content-Type: %s\nContent-Length: %d\nLast-modified: %s\n' % ((mtype or 'text/plain'), size, modified))))
    if (not host):
        # No host part: plain local path.
        urlfile = file
        if (file[:1] == '/'):
            urlfile = ('file://' + file)
        return addinfourl(open(localname, 'rb'), headers, urlfile)
    (host, port) = splitport(host)
    # A host is acceptable only if it resolves to this machine.
    if ((not port) and (socket.gethostbyname(host) in (localhost(), thishost()))):
        urlfile = file
        if (file[:1] == '/'):
            urlfile = ('file://' + file)
        return addinfourl(open(localname, 'rb'), headers, urlfile)
    raise IOError, ('local file error', 'not on local host')
def open_ftp(self, url):
    """Use FTP protocol.

    Parses user/password/host/port/path out of the URL, keeps a cache of
    ftpwrapper connections keyed by (user, host, port, dirs), and returns
    an addinfourl over the retrieved file.  A trailing 'type=X' attribute
    can force ASCII ('A'), image/binary ('I'), or directory ('D') mode.
    """
    if (not isinstance(url, str)):
        raise IOError, ('ftp error', 'proxy support for ftp protocol currently not implemented')
    import mimetypes, mimetools
    try:
        from cStringIO import StringIO
    except ImportError:
        from StringIO import StringIO
    (host, path) = splithost(url)
    if (not host):
        raise IOError, ('ftp error', 'no host given')
    (host, port) = splitport(host)
    (user, host) = splituser(host)
    if user:
        (user, passwd) = splitpasswd(user)
    else:
        passwd = None
    host = unquote(host)
    user = (user or '')
    passwd = (passwd or '')
    host = socket.gethostbyname(host)
    if (not port):
        import ftplib
        port = ftplib.FTP_PORT
    else:
        port = int(port)
    (path, attrs) = splitattr(path)
    path = unquote(path)
    dirs = path.split('/')
    (dirs, file) = (dirs[:(-1)], dirs[(-1)])
    if (dirs and (not dirs[0])):
        dirs = dirs[1:]
    if (dirs and (not dirs[0])):
        dirs[0] = '/'
    key = (user, host, port, '/'.join(dirs))
    # Prune the connection cache (rather arbitrarily) when it grows too big.
    if (len(self.ftpcache) > MAXFTPCACHE):
        for k in self.ftpcache.keys():
            if (k != key):
                v = self.ftpcache[k]
                del self.ftpcache[k]
                v.close()
    try:
        if (not (key in self.ftpcache)):
            self.ftpcache[key] = ftpwrapper(user, passwd, host, port, dirs)
        # Empty filename means a directory listing.
        if (not file):
            type = 'D'
        else:
            type = 'I'
        for attr in attrs:
            (attr, value) = splitvalue(attr)
            if ((attr.lower() == 'type') and (value in ('a', 'A', 'i', 'I', 'd', 'D'))):
                type = value.upper()
        (fp, retrlen) = self.ftpcache[key].retrfile(file, type)
        mtype = mimetypes.guess_type(('ftp:' + url))[0]
        headers = ''
        if mtype:
            headers += ('Content-Type: %s\n' % mtype)
        if ((retrlen is not None) and (retrlen >= 0)):
            headers += ('Content-Length: %d\n' % retrlen)
        headers = mimetools.Message(StringIO(headers))
        return addinfourl(fp, headers, ('ftp:' + url))
    except ftperrors() as msg:
        # Re-raise as IOError but keep the original traceback (py2 3-arg raise).
        raise IOError, ('ftp error', msg), sys.exc_info()[2]
def open_data(self, url, data=None):
    """Use "data" URL.

    Decodes a data: URL of the form mediatype[;base64],payload into a
    file-like addinfourl with synthesized Date/Content-type/Content-Length
    headers.  `data` is accepted for signature compatibility but unused.
    """
    if (not isinstance(url, str)):
        raise IOError, ('data error', 'proxy support for data protocol currently not implemented')
    import mimetools
    try:
        from cStringIO import StringIO
    except ImportError:
        from StringIO import StringIO
    try:
        [type, data] = url.split(',', 1)
    except ValueError:
        raise IOError, ('data error', 'bad data URL')
    if (not type):
        # RFC 2397 default media type.
        type = 'text/plain;charset=US-ASCII'
    semi = type.rfind(';')
    # The last ;token is an encoding only if it is not a key=value parameter.
    if ((semi >= 0) and ('=' not in type[semi:])):
        encoding = type[(semi + 1):]
        type = type[:semi]
    else:
        encoding = ''
    msg = []
    msg.append(('Date: %s' % time.strftime('%a, %d %b %Y %H:%M:%S GMT', time.gmtime(time.time()))))
    msg.append(('Content-type: %s' % type))
    if (encoding == 'base64'):
        import base64
        data = base64.decodestring(data)
    else:
        data = unquote(data)
    msg.append(('Content-Length: %d' % len(data)))
    msg.append('')
    msg.append(data)
    msg = '\n'.join(msg)
    f = StringIO(msg)
    headers = mimetools.Message(f, 0)
    return addinfourl(f, headers, url)
def http_error_default(self, url, fp, errcode, errmsg, headers):
    """Default error handling -- don't raise an exception.

    Unlike the strict opener, simply hand back the error page wrapped
    in an addinfourl carrying the status code.
    """
    full_url = 'http:' + url
    return addinfourl(fp, headers, full_url, errcode)
def http_error_302(self, url, fp, errcode, errmsg, headers, data=None):
    """Error 302 -- relocated (temporarily).

    Tracks redirect depth in self.tries; once self.maxtries is reached,
    the chain is aborted with a synthetic 500 instead of recursing forever.
    """
    self.tries += 1
    if self.maxtries and self.tries >= self.maxtries:
        # Too many hops: report redirect recursion as a server error.
        handler = getattr(self, 'http_error_500', self.http_error_default)
        self.tries = 0
        return handler(url, fp, 500,
                       'Internal Server Error: Redirect Recursion', headers)
    outcome = self.redirect_internal(url, fp, errcode, errmsg, headers, data)
    self.tries = 0
    return outcome
def http_error_301(self, url, fp, errcode, errmsg, headers, data=None):
    """Error 301 -- also relocated (permanently); reuse the 302 handler."""
    relocate = self.http_error_302
    return relocate(url, fp, errcode, errmsg, headers, data)
def http_error_303(self, url, fp, errcode, errmsg, headers, data=None):
    """Error 303 -- also relocated (essentially identical to 302)."""
    relocate = self.http_error_302
    return relocate(url, fp, errcode, errmsg, headers, data)
def http_error_307(self, url, fp, errcode, errmsg, headers, data=None):
    """Error 307 -- relocated, but turn POST into error.

    A 307 must not silently re-send a request body, so only data-less
    requests are redirected; requests with a body fall through to the
    default error handling.
    """
    if data is not None:
        return self.http_error_default(url, fp, errcode, errmsg, headers)
    return self.http_error_302(url, fp, errcode, errmsg, headers, data)
def http_error_401(self, url, fp, errcode, errmsg, headers, data=None):
    """Error 401 -- authentication required.

    This function supports Basic authentication only.  It parses the
    WWW-Authenticate challenge and dispatches to
    retry_<scheme>_basic_auth (e.g. retry_http_basic_auth) to retry the
    request with credentials.  Unsupported challenges fall through to
    URLopener.http_error_default, which raises.
    """
    if not ('www-authenticate' in headers):
        URLopener.http_error_default(self, url, fp, errcode, errmsg, headers)
    stuff = headers['www-authenticate']
    import re
    # FIX: the whitespace classes had been garbled to ' DCTB ' (a mangled
    # tab) in this copy; the upstream pattern matches space or tab.
    match = re.match('[ \t]*([^ \t]+)[ \t]+realm="([^"]*)"', stuff)
    if not match:
        URLopener.http_error_default(self, url, fp, errcode, errmsg, headers)
    scheme, realm = match.groups()
    if scheme.lower() != 'basic':
        URLopener.http_error_default(self, url, fp, errcode, errmsg, headers)
    name = 'retry_' + self.type + '_basic_auth'
    if data is None:
        return getattr(self, name)(url, realm)
    else:
        return getattr(self, name)(url, realm, data)
def http_error_407(self, url, fp, errcode, errmsg, headers, data=None):
    """Error 407 -- proxy authentication required.

    This function supports Basic authentication only.  It parses the
    Proxy-Authenticate challenge and dispatches to
    retry_proxy_<scheme>_basic_auth to retry through the proxy with
    credentials.  Unsupported challenges fall through to
    URLopener.http_error_default, which raises.
    """
    if not ('proxy-authenticate' in headers):
        URLopener.http_error_default(self, url, fp, errcode, errmsg, headers)
    stuff = headers['proxy-authenticate']
    import re
    # FIX: the whitespace classes had been garbled to ' DCTB ' (a mangled
    # tab) in this copy; the upstream pattern matches space or tab.
    match = re.match('[ \t]*([^ \t]+)[ \t]+realm="([^"]*)"', stuff)
    if not match:
        URLopener.http_error_default(self, url, fp, errcode, errmsg, headers)
    scheme, realm = match.groups()
    if scheme.lower() != 'basic':
        URLopener.http_error_default(self, url, fp, errcode, errmsg, headers)
    name = 'retry_proxy_' + self.type + '_basic_auth'
    if data is None:
        return getattr(self, name)(url, realm)
    else:
        return getattr(self, name)(url, realm, data)
def prompt_user_passwd(self, host, realm):
    """Override this in a GUI environment!

    Interactively asks for a (user, password) pair on the terminal.
    Returns (None, None) if the user interrupts with Ctrl-C.
    (Python 2: raw_input / bare print.)
    """
    import getpass
    try:
        user = raw_input(('Enter username for %s at %s: ' % (realm, host)))
        passwd = getpass.getpass(('Enter password for %s in %s at %s: ' % (user, realm, host)))
        return (user, passwd)
    except KeyboardInterrupt:
        # Emit a newline so the next prompt starts cleanly.
        print
        return (None, None)
def getpos(self):
    """Return the current (line number, offset) pair."""
    position = (self.lineno, self.offset)
    return position
def __init__(self, formatter, verbose=0):
    """Creates an instance of the HTMLParser class.

    The formatter parameter is the formatter instance associated with
    the parser; `verbose` is forwarded to the SGMLParser base class.
    """
    sgmllib.SGMLParser.__init__(self, verbose)
    self.formatter = formatter
def save_bgn(self):
    """Begin buffering character data instead of sending it to the
    formatter.  Retrieve the buffered text with save_end(); the
    save_bgn()/save_end() pair may not be nested.
    """
    self.savedata = ''
def save_end(self):
    """Stop buffering character data and return everything saved since
    the preceding save_bgn().  Unless self.nofill is set, whitespace is
    collapsed to single spaces.  Calling this without a prior
    save_bgn() raises TypeError (savedata is None).
    """
    buffered, self.savedata = self.savedata, None
    if not self.nofill:
        # Collapse runs of whitespace down to single spaces.
        buffered = ' '.join(buffered.split())
    return buffered
def anchor_bgn(self, href, name, type):
    """Called at the start of an anchor region; arguments mirror the
    attributes of the <A> tag.  Records the HREF (when non-empty) in
    self.anchorlist so footnote markers can refer to it later.
    """
    self.anchor = href
    if href:
        self.anchorlist.append(href)
def anchor_end(self):
    """Called at the end of an anchor region.  Emits a textual footnote
    marker ("[N]") indexing into self.anchorlist, then clears the
    current anchor.
    """
    if not self.anchor:
        return
    marker = '[%d]' % len(self.anchorlist)
    self.handle_data(marker)
    self.anchor = None
def handle_image(self, src, alt, *args):
    """Handle an image by passing its alt text to handle_data()."""
    self.handle_data(alt)
def __init__(self, locals=None):
    """Constructor.

    The optional 'locals' argument specifies the dictionary in which
    code will be executed; it defaults to a newly created dictionary
    with key "__name__" set to "__console__" and key "__doc__" set to
    None.
    """
    if (locals is None):
        locals = {'__name__': '__console__', '__doc__': None}
    self.locals = locals
    # Incremental compiler that returns None for incomplete input.
    self.compile = CommandCompiler()
def runsource(self, source, filename='<input>', symbol='single'):
    """Compile and run some source in the interpreter.

    Three outcomes:
    1) invalid input -- a syntax traceback is printed via
       showsyntaxerror() and False is returned;
    2) incomplete input -- nothing happens, True is returned (caller
       should prompt for more with sys.ps2);
    3) complete input -- the code is executed via runcode() and False
       is returned.
    """
    try:
        compiled = self.compile(source, filename, symbol)
    except (OverflowError, SyntaxError, ValueError):
        # Case 1: the input is incorrect.
        self.showsyntaxerror(filename)
        return False
    if compiled is None:
        # Case 2: more input is required.
        return True
    # Case 3: the input is complete.
    self.runcode(compiled)
    return False
def runcode(self, code):
    """Execute a code object.

    When an exception occurs, self.showtraceback() is called to display
    a traceback.  All exceptions are caught except SystemExit, which is
    reraised.  KeyboardInterrupt may also surface from elsewhere.
    (Python 2: exec statement / bare print.)
    """
    try:
        exec code in self.locals
    except SystemExit:
        raise
    except:
        self.showtraceback()
    else:
        # Emit a pending newline if the softspace flag was set by print.
        if softspace(sys.stdout, 0):
            print
def showsyntaxerror(self, filename=None):
    """Display the syntax error that just occurred.

    No stack trace is shown because there isn't one.  If a filename is
    given it replaces the one stuffed in the exception (Python's parser
    always reports "<string>" when compiling from a string).  Output
    goes through self.write().
    """
    (type, value, sys.last_traceback) = sys.exc_info()
    sys.last_type = type
    sys.last_value = value
    if (filename and (type is SyntaxError)):
        try:
            # Unpack the SyntaxError's args to substitute the real filename.
            (msg, (dummy_filename, lineno, offset, line)) = value
        except:
            # Not a normally-shaped SyntaxError; leave it untouched.
            pass
        else:
            value = SyntaxError(msg, (filename, lineno, offset, line))
            sys.last_value = value
    list = traceback.format_exception_only(type, value)
    # NOTE(review): map() drives the writes eagerly in Python 2; under
    # Python 3 this would be a lazy map and write nothing -- py2 code.
    map(self.write, list)
def showtraceback(self):
    """Display the exception that just occurred.

    The first stack item is removed because it is this interpreter's
    own frame.  Output goes through self.write().
    """
    try:
        (type, value, tb) = sys.exc_info()
        sys.last_type = type
        sys.last_value = value
        sys.last_traceback = tb
        tblist = traceback.extract_tb(tb)
        # Drop our own frame from the top of the traceback.
        del tblist[:1]
        list = traceback.format_list(tblist)
        if list:
            list.insert(0, 'Traceback (most recent call last):\n')
        list[len(list):] = traceback.format_exception_only(type, value)
    finally:
        # Break reference cycles through the traceback object.
        tblist = tb = None
    # NOTE(review): eager in Python 2; a lazy no-op map under Python 3.
    map(self.write, list)
def write(self, data):
    """Write a string to sys.stderr.

    A subclass may replace this to redirect interpreter output.
    """
    sys.stderr.write(data)
def __init__(self, locals=None, filename='<console>'):
    """Constructor.

    The optional locals argument is passed to the
    InteractiveInterpreter base class.  The optional filename argument
    names the input stream and shows up in tracebacks.
    """
    InteractiveInterpreter.__init__(self, locals)
    self.filename = filename
    self.resetbuffer()
def resetbuffer(self):
    """Discard any buffered, not-yet-executed input lines."""
    self.buffer = []
def interact(self, banner=None):
    """Closely emulate the interactive Python console.

    The optional banner argument specifies the banner printed before
    the first interaction; by default a banner similar to the real
    interpreter's is printed, followed by the class name in parentheses
    so as not to confuse this with the real interpreter.
    (Python 2: uses the `unicode` type.)
    """
    # Make sure the prompts exist, as in the real REPL.
    try:
        sys.ps1
    except AttributeError:
        sys.ps1 = '>>> '
    try:
        sys.ps2
    except AttributeError:
        sys.ps2 = '... '
    cprt = 'Type "help", "copyright", "credits" or "license" for more information.'
    if (banner is None):
        self.write(('Python %s on %s\n%s\n(%s)\n' % (sys.version, sys.platform, cprt, self.__class__.__name__)))
    else:
        self.write(('%s\n' % str(banner)))
    more = 0
    while 1:
        try:
            if more:
                prompt = sys.ps2
            else:
                prompt = sys.ps1
            try:
                line = self.raw_input(prompt)
                # Decode byte input to unicode using stdin's encoding when known.
                encoding = getattr(sys.stdin, 'encoding', None)
                if (encoding and (not isinstance(line, unicode))):
                    line = line.decode(encoding)
            except EOFError:
                self.write('\n')
                break
            else:
                more = self.push(line)
        except KeyboardInterrupt:
            # Ctrl-C aborts the current (possibly multi-line) statement.
            self.write('\nKeyboardInterrupt\n')
            self.resetbuffer()
            more = 0
def push(self, line):
    """Push a line (no trailing newline) to the interpreter.

    The line is appended to the buffer and runsource() is called with
    the buffer's concatenated contents.  The buffer is reset when the
    command was executed or was invalid; it is kept when more input is
    required.  Returns the same flag as runsource(): true when more
    input is needed.
    """
    self.buffer.append(line)
    pending = '\n'.join(self.buffer)
    needs_more = self.runsource(pending, self.filename)
    if not needs_more:
        self.resetbuffer()
    return needs_more
def raw_input(self, prompt=''):
    """Write a prompt and read a line.

    The returned line does not include the trailing newline; EOFError
    is raised at end of input.  Delegates to the Python 2 built-in
    raw_input(); a subclass may replace this with a different
    implementation.
    """
    return raw_input(prompt)
def redirect_request(self, req, fp, code, msg, headers, newurl):
    """Return a Request, or raise, in response to a redirect.

    Called by the http_error_30x methods when a redirection response is
    received.  Returns a new Request for the redirect target when the
    (code, method) combination is one browsers traditionally follow;
    otherwise raises HTTPError so nobody else retries this URL.
    """
    method = req.get_method()
    safe_redirect = (
        (code in (301, 302, 303, 307) and method in ('GET', 'HEAD'))
        or (code in (301, 302, 303) and method == 'POST'))
    if not safe_redirect:
        raise HTTPError(req.get_full_url(), code, msg, headers, fp)
    # Some servers emit Location headers with unescaped spaces.
    newurl = newurl.replace(' ', '%20')
    stripped = dict((k, v) for k, v in req.headers.items()
                    if k.lower() not in ('content-length', 'content-type'))
    return Request(newurl,
                   headers=stripped,
                   origin_req_host=req.get_origin_req_host(),
                   unverifiable=True)
def reduce_uri(self, uri, default_port=True):
    """Accept authority or URI and extract only the authority and path.

    When the scheme is known and no explicit port is present, the
    scheme's default port (http=80, https=443) is appended to the
    authority so equivalent URIs compare equal.
    """
    parts = urlparse.urlsplit(uri)
    if parts[1]:
        # Full URI: scheme://authority/path...
        scheme = parts[0]
        authority = parts[1]
        path = parts[2] or '/'
    else:
        # Bare authority string.
        scheme = None
        authority = uri
        path = '/'
    host, port = splitport(authority)
    if default_port and port is None and scheme is not None:
        dport = {'http': 80, 'https': 443}.get(scheme)
        if dport is not None:
            authority = '%s:%d' % (host, dport)
    return authority, path
def is_suburi(self, base, test):
    """Check if test is below base in a URI tree.

    Both arguments must be (authority, path) pairs in reduced form.
    Equal URIs count as sub-URIs; otherwise the authorities must match
    and base's path must be a (string) prefix of test's path.
    """
    if base == test:
        return True
    if base[0] != test[0]:
        return False
    shared = posixpath.commonprefix((base[1], test[1]))
    return len(shared) == len(base[1])
def do_open(self, http_class, req):
    """Return an addinfourl object for the request, using http_class.

    http_class must implement the HTTPConnection API from httplib.
    The returned addinfourl is file-like and also offers:
    - info(): mimetools.Message with the response headers
    - geturl(): the original request URL
    - code: the HTTP status code
    """
    host = req.get_host()
    if (not host):
        raise URLError('no host given')
    h = http_class(host, timeout=req.timeout)
    h.set_debuglevel(self._debuglevel)
    # Merge unredirected headers with the request headers; the former win.
    headers = dict(req.unredirected_hdrs)
    headers.update(dict(((k, v) for (k, v) in req.headers.items() if (k not in headers))))
    # This opener does not support persistent connections.
    headers['Connection'] = 'close'
    headers = dict(((name.title(), val) for (name, val) in headers.items()))
    if req._tunnel_host:
        # HTTPS through a proxy: move Proxy-Authorization onto the CONNECT.
        tunnel_headers = {}
        proxy_auth_hdr = 'Proxy-Authorization'
        if (proxy_auth_hdr in headers):
            tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr]
            del headers[proxy_auth_hdr]
        h.set_tunnel(req._tunnel_host, headers=tunnel_headers)
    try:
        h.request(req.get_method(), req.get_selector(), req.data, headers)
        try:
            # Not all connection classes accept the buffering keyword.
            r = h.getresponse(buffering=True)
        except TypeError:
            r = h.getresponse()
    except socket.error as err:
        raise URLError(err)
    # Adapt the response so socket._fileobject can wrap it.
    r.recv = r.read
    fp = socket._fileobject(r, close=True)
    resp = addinfourl(fp, r.msg, req.get_full_url())
    resp.code = r.status
    resp.msg = r.reason
    return resp
def addheader(self, key, value):
    """Add header for field key handling repeats.

    Repeated fields are combined by appending the new value to the
    existing one, comma-separated, per RFC 2616 sec 4.2.
    """
    existing = self.dict.get(key)
    if existing is None:
        self.dict[key] = value
    else:
        self.dict[key] = ', '.join((existing, value))
def addcontinue(self, key, more):
    """Add more field data from a continuation line.

    The continuation text is appended to the existing value with a
    newline-plus-space separator.
    """
    self.dict[key] = self.dict[key] + '\n ' + more
def readheaders(self):
    """Read header lines up to the entirely blank line that terminates them.

    The blank terminator line is skipped and not included in the
    returned list.  If a non-header line ends the headers (an error),
    an attempt is made to backspace over it; it is never included.
    self.status is '' on success, else an error message.  self.headers
    is the uninterpreted list of raw header lines.  Repeated fields are
    combined per RFC 2616 sec 4.2 (comma-joined, order preserved).
    """
    self.dict = {}
    self.unixfrom = ''
    self.headers = hlist = []
    self.status = ''
    headerseen = ''
    firstline = 1
    startofline = unread = tell = None
    # Prefer an unread() hook; otherwise remember positions via tell().
    if hasattr(self.fp, 'unread'):
        unread = self.fp.unread
    elif self.seekable:
        tell = self.fp.tell
    while True:
        if tell:
            try:
                startofline = tell()
            except IOError:
                startofline = tell = None
                self.seekable = 0
        line = self.fp.readline(_MAXLINE + 1)
        if len(line) > _MAXLINE:
            raise LineTooLong('header line')
        if not line:
            self.status = 'EOF in headers'
            break
        # Skip a Unix "From " envelope line at the very start.
        if firstline and line.startswith('From '):
            self.unixfrom = self.unixfrom + line
            continue
        firstline = 0
        # FIX: the continuation test had been garbled to ' DCTB ' (a
        # mangled tab) in this copy; upstream checks space-or-tab.
        if headerseen and line[0] in ' \t':
            # Continuation line: extend the previous header's value.
            hlist.append(line)
            self.addcontinue(headerseen, line.strip())
            continue
        elif self.iscomment(line):
            continue
        elif self.islast(line):
            break
        headerseen = self.isheader(line)
        if headerseen:
            hlist.append(line)
            self.addheader(headerseen, line[len(headerseen) + 1:].strip())
            continue
        else:
            # Not a header and not the terminator: record why and back up.
            if not self.dict:
                self.status = 'No headers'
            else:
                self.status = 'Non-header line where header expected'
            if unread:
                unread(line)
            elif tell:
                self.fp.seek(startofline)
            else:
                self.status = self.status + '; bad seek'
            break
def _safe_read(self, amt):
    """Read exactly `amt` bytes, compensating for partial reads.

    A blocking socket read can be interrupted by a signal and return
    short; keep reading until `amt` bytes have arrived.  If the stream
    ends first, raise IncompleteRead with what was collected (EOF and
    interrupt are indistinguishable at zero bytes).
    """
    chunks = []
    remaining = amt
    while remaining > 0:
        piece = self.fp.read(min(remaining, MAXAMOUNT))
        if not piece:
            raise IncompleteRead(''.join(chunks), remaining)
        chunks.append(piece)
        remaining -= len(piece)
    return ''.join(chunks)
def getheaders(self):
    """Return list of (header, value) tuples.

    Raises ResponseNotReady when no response message has been parsed.
    """
    msg = self.msg
    if msg is None:
        raise ResponseNotReady()
    return msg.items()
def set_tunnel(self, host, port=None, headers=None):
    """Set up the host and port for HTTP CONNECT tunnelling.

    `headers` is a mapping of extra HTTP headers to send with the
    CONNECT request; a falsy value clears any previously-set headers.
    """
    self._tunnel_host = host
    self._tunnel_port = port
    if not headers:
        self._tunnel_headers.clear()
    else:
        self._tunnel_headers = headers
def connect(self):
    """Connect to the host and port specified in __init__.

    Opens the TCP socket; if a CONNECT tunnel was configured via
    set_tunnel(), establish it immediately after connecting.
    """
    endpoint = (self.host, self.port)
    self.sock = socket.create_connection(endpoint, self.timeout,
                                         self.source_address)
    if self._tunnel_host:
        self._tunnel()
def close(self):
    """Close the connection to the HTTP server.

    Releases the socket and any pending response object, then returns
    the connection state machine to idle.
    """
    if self.sock:
        self.sock.close()
        self.sock = None
    if self.__response:
        self.__response.close()
        self.__response = None
    self.__state = _CS_IDLE
def send(self, data):
    """Send `data' to the server.

    Auto-connects first when allowed.  `data` may be a string/buffer or
    any object with a read() method (streamed in 8 KiB blocks), except
    an array, which is sent directly.  (Python 2: print statements.)
    """
    if (self.sock is None):
        if self.auto_open:
            self.connect()
        else:
            raise NotConnected()
    if (self.debuglevel > 0):
        print 'send:', repr(data)
    blocksize = 8192
    # File-like objects are streamed; arrays have read() absent anyway,
    # but are excluded explicitly and sent in one sendall().
    if (hasattr(data, 'read') and (not isinstance(data, array))):
        if (self.debuglevel > 0):
            print 'sendIng a read()able'
        datablock = data.read(blocksize)
        while datablock:
            self.sock.sendall(datablock)
            datablock = data.read(blocksize)
    else:
        self.sock.sendall(data)
def _output(self, s):
    """Append a line to the current request buffer.

    The line must *not* end with \r\n; the terminator is added when the
    buffer is flushed by _send_output().
    """
    self._buffer.append(s)
def _send_output(self, message_body=None):
    """Send the buffered request lines and clear the buffer.

    A blank line pair (CRLF CRLF) terminates the headers.  A string
    message_body is piggybacked onto the same send() call so headers
    and body can share a packet; any other body is sent separately.
    """
    self._buffer.extend(('', ''))
    payload = '\r\n'.join(self._buffer)
    del self._buffer[:]
    if isinstance(message_body, str):
        payload += message_body
        message_body = None
    self.send(payload)
    if message_body is not None:
        self.send(message_body)
def putrequest(self, method, url, skip_host=0, skip_accept_encoding=0):
    """Send a request to the server.

    `method' is an HTTP request method, e.g. 'GET'.  `url' is the
    object being requested, e.g. '/index.html'.  `skip_host' suppresses
    the automatic 'Host:' header; `skip_accept_encoding' suppresses the
    automatic 'Accept-Encoding:' header.
    """
    # A fully-consumed previous response no longer blocks a new request.
    if (self.__response and self.__response.isclosed()):
        self.__response = None
    if (self.__state == _CS_IDLE):
        self.__state = _CS_REQ_STARTED
    else:
        raise CannotSendRequest()
    self._method = method
    if (not url):
        url = '/'
    hdr = ('%s %s %s' % (method, url, self._http_vsn_str))
    self._output(hdr)
    if (self._http_vsn == 11):
        # HTTP/1.1 requires a Host header on every request.
        if (not skip_host):
            netloc = ''
            if url.startswith('http'):
                (nil, netloc, nil, nil, nil) = urlsplit(url)
            if netloc:
                try:
                    netloc_enc = netloc.encode('ascii')
                except UnicodeEncodeError:
                    # Non-ASCII hostnames are encoded with IDNA.
                    netloc_enc = netloc.encode('idna')
                self.putheader('Host', netloc_enc)
            else:
                try:
                    host_enc = self.host.encode('ascii')
                except UnicodeEncodeError:
                    host_enc = self.host.encode('idna')
                # An IPv6 literal must be bracketed in the Host header.
                if (host_enc.find(':') >= 0):
                    host_enc = (('[' + host_enc) + ']')
                if (self.port == self.default_port):
                    self.putheader('Host', host_enc)
                else:
                    self.putheader('Host', ('%s:%s' % (host_enc, self.port)))
        # Advertise identity encoding so the server doesn't compress.
        if (not skip_accept_encoding):
            self.putheader('Accept-Encoding', 'identity')
    else:
        pass
def putheader(self, header, *values):
    """Send a request header line to the server.

    For example: h.putheader('Accept', 'text/html').  Multiple values
    are folded into one header using CRLF-plus-tab continuation lines,
    per RFC 2616 header folding.  Raises CannotSendHeader unless a
    request has been started with putrequest().
    """
    if self.__state != _CS_REQ_STARTED:
        raise CannotSendHeader()
    # FIX: the fold separator had been garbled to '\r\n DCTB ' (a mangled
    # tab) in this copy; upstream joins continuation values with '\r\n\t'.
    hdr = '%s: %s' % (header, '\r\n\t'.join([str(v) for v in values]))
    self._output(hdr)
def endheaders(self, message_body=None):
    """Indicate that the last header line has been sent to the server.

    Flushes the request to the server.  The optional message_body is
    sent along with the headers (in the same packet when it is a
    string).  Raises CannotSendHeader outside a started request.
    """
    if self.__state != _CS_REQ_STARTED:
        raise CannotSendHeader()
    self.__state = _CS_REQ_SENT
    self._send_output(message_body)
def request(self, method, url, body=None, headers={}):
    """Send a complete request (request line, headers, body) to the server."""
    self._send_request(method, url, body, headers)
def getresponse(self, buffering=False):
    """Get the response from the server.

    Must be called after the request has been fully sent; otherwise
    raises ResponseNotReady.  When the response will close the
    connection, the connection is closed here; otherwise it is kept for
    reuse and remembered in self.__response until fully read.
    """
    # A fully-consumed previous response no longer blocks.
    if (self.__response and self.__response.isclosed()):
        self.__response = None
    if ((self.__state != _CS_REQ_SENT) or self.__response):
        raise ResponseNotReady()
    args = (self.sock,)
    kwds = {'strict': self.strict, 'method': self._method}
    if (self.debuglevel > 0):
        args += (self.debuglevel,)
    if buffering:
        # Only pass buffering when requested, for response_class compatibility.
        kwds['buffering'] = True
    response = self.response_class(*args, **kwds)
    response.begin()
    assert (response.will_close != _UNKNOWN)
    self.__state = _CS_IDLE
    if response.will_close:
        # This response does not allow connection reuse.
        self.close()
    else:
        # Remember it so a new request can't start until it is read.
        self.__response = response
    return response
def __init__(self, host='', port=None, strict=None):
    """Provide a default host, since the superclass requires one.

    A port of 0 is treated the same as no port at all.
    """
    if port == 0:
        port = None
    connection = self._connection_class(host, port, strict)
    self._setup(connection)
def connect(self, host=None, port=None):
    """Accept arguments to set the host/port, since the superclass doesn't.

    When a host is given, the underlying connection's host/port are
    updated before connecting.
    """
    if host is not None:
        self._conn._set_hostport(host, port)
    self._conn.connect()
def getfile(self):
    """Provide a getfile, since the superclass' does not use this concept.

    Returns the file object saved by the last getreply() call.
    """
    return self.file
def getreply(self, buffering=False):
    """Compat definition since superclass does not define it.

    Returns a tuple of:
    - server status code (e.g. 200 if all goes well)
    - server "reason" corresponding to status code
    - any RFC822 headers in the response from the server
    On a malformed status line, returns (-1, raw-line, None) and keeps
    the raw socket stream available via getfile().
    """
    try:
        if buffering:
            response = self._conn.getresponse(buffering)
        else:
            response = self._conn.getresponse()
    except BadStatusLine as e:
        # Keep the raw data around for the caller, then shut the
        # connection down cleanly.
        self.file = self._conn.sock.makefile('rb', 0)
        self.close()
        self.headers = None
        return -1, e.line, None
    self.headers = response.msg
    self.file = response.fp
    return response.status, response.reason, response.msg
def resolve(self, s):
    """Resolve strings to objects using standard import and attribute syntax.

    The first dotted component is imported via self.importer; each
    following fragment is looked up with getattr, importing the longer
    dotted prefix when the attribute is missing (e.g. submodules).
    Import failures are re-raised as ValueError with cause/traceback
    attached.
    """
    fragments = s.split('.')
    used = fragments.pop(0)
    try:
        found = self.importer(used)
        for frag in fragments:
            used += '.' + frag
            try:
                found = getattr(found, frag)
            except AttributeError:
                # Probably a not-yet-imported submodule: import and retry.
                self.importer(used)
                found = getattr(found, frag)
        return found
    except ImportError:
        e, tb = sys.exc_info()[1:]
        v = ValueError('Cannot resolve %r: %s' % (s, e))
        v.__cause__, v.__traceback__ = e, tb
        raise v
def ext_convert(self, value):
    """Default converter for the ext:// protocol: resolve to an object."""
    return self.resolve(value)
def cfg_convert(self, value):
    """Default converter for the cfg:// protocol.

    Walks the configuration dictionary along the given path: a leading
    word indexes self.config, then '.word' segments index mappings and
    '[idx]' segments index by key or, for all-digit indices, by int
    with a string-key fallback.  Raises ValueError when the path cannot
    be parsed.
    """
    rest = value
    m = self.WORD_PATTERN.match(rest)
    if (m is None):
        raise ValueError(('Unable to convert %r' % value))
    else:
        rest = rest[m.end():]
        d = self.config[m.groups()[0]]
        while rest:
            m = self.DOT_PATTERN.match(rest)
            if m:
                d = d[m.groups()[0]]
            else:
                m = self.INDEX_PATTERN.match(rest)
                if m:
                    idx = m.groups()[0]
                    if (not self.DIGIT_PATTERN.match(idx)):
                        d = d[idx]
                    else:
                        try:
                            # Prefer integer indexing for all-digit keys...
                            n = int(idx)
                            d = d[n]
                        except TypeError:
                            # ...but fall back to the string key for mappings.
                            d = d[idx]
            if m:
                rest = rest[m.end():]
            else:
                raise ValueError(('Unable to convert %r at %r' % (value, rest)))
    return d
def convert(self, value):
    """Convert values to an appropriate type.

    dicts, lists and tuples are replaced by their Converting*
    alternatives (which lazily convert their members).  Strings are
    checked for a 'prefix://suffix' conversion format and converted via
    the registered value_converters when one matches.
    (Python 2: uses basestring.)
    """
    if ((not isinstance(value, ConvertingDict)) and isinstance(value, dict)):
        value = ConvertingDict(value)
        value.configurator = self
    elif ((not isinstance(value, ConvertingList)) and isinstance(value, list)):
        value = ConvertingList(value)
        value.configurator = self
    elif ((not isinstance(value, ConvertingTuple)) and isinstance(value, tuple)):
        value = ConvertingTuple(value)
        value.configurator = self
    elif isinstance(value, basestring):
        m = self.CONVERT_PATTERN.match(value)
        if m:
            d = m.groupdict()
            prefix = d['prefix']
            converter = self.value_converters.get(prefix, None)
            if converter:
                suffix = d['suffix']
                # value_converters maps prefix -> method name on self.
                converter = getattr(self, converter)
                value = converter(suffix)
    return value
def configure_custom(self, config):
    """Configure an object with a user-supplied factory.

    The '()' key names the factory (resolved from a string when not
    callable), '.' optionally maps attribute names to values set on the
    result, and remaining valid-identifier keys become keyword
    arguments to the factory.
    """
    factory = config.pop('()')
    if (not hasattr(factory, '__call__')
            and hasattr(types, 'ClassType')
            and type(factory) != types.ClassType):
        # Not callable (and not a py2 old-style class): a dotted name.
        factory = self.resolve(factory)
    props = config.pop('.', None)
    kwargs = dict([(k, config[k]) for k in config if valid_ident(k)])
    result = factory(**kwargs)
    if props:
        for name, value in props.items():
            setattr(result, name, value)
    return result
def as_tuple(self, value):
    """Utility function which converts lists to tuples.

    Non-list values are returned unchanged.
    """
    return tuple(value) if isinstance(value, list) else value
def configure(self):
    """Do the configuration.

    Applies self.config to the logging module under the module lock.
    Supports an 'incremental' mode (adjust levels on existing handlers
    and loggers only) and a full mode, which rebuilds formatters,
    filters, handlers and loggers from scratch.
    """
    config = self.config
    if 'version' not in config:
        raise ValueError("dictionary doesn't specify a version")
    if config['version'] != 1:
        raise ValueError('Unsupported version: %s' % config['version'])
    incremental = config.pop('incremental', False)
    EMPTY_DICT = {}
    logging._acquireLock()
    try:
        if incremental:
            # Incremental: only levels of existing handlers/loggers change.
            handlers = config.get('handlers', EMPTY_DICT)
            for name in handlers:
                if name not in logging._handlers:
                    raise ValueError('No handler found with name %r' % name)
                else:
                    try:
                        handler = logging._handlers[name]
                        handler_config = handlers[name]
                        level = handler_config.get('level', None)
                        if level:
                            handler.setLevel(logging._checkLevel(level))
                    except StandardError as e:
                        raise ValueError('Unable to configure handler %r: %s' % (name, e))
            loggers = config.get('loggers', EMPTY_DICT)
            for name in loggers:
                try:
                    self.configure_logger(name, loggers[name], True)
                except StandardError as e:
                    raise ValueError('Unable to configure logger %r: %s' % (name, e))
            root = config.get('root', None)
            if root:
                try:
                    self.configure_root(root, True)
                except StandardError as e:
                    raise ValueError('Unable to configure root logger: %s' % e)
        else:
            disable_existing = config.pop('disable_existing_loggers', True)
            # Full configuration: drop all previously registered handlers.
            logging._handlers.clear()
            del logging._handlerList[:]
            formatters = config.get('formatters', EMPTY_DICT)
            for name in formatters:
                try:
                    formatters[name] = self.configure_formatter(formatters[name])
                except StandardError as e:
                    raise ValueError('Unable to configure formatter %r: %s' % (name, e))
            filters = config.get('filters', EMPTY_DICT)
            for name in filters:
                try:
                    filters[name] = self.configure_filter(filters[name])
                except StandardError as e:
                    raise ValueError('Unable to configure filter %r: %s' % (name, e))
            # Handlers are configured in sorted order so that deterministic
            # behaviour is obtained when, e.g., a MemoryHandler refers to a
            # target handler by name.
            handlers = config.get('handlers', EMPTY_DICT)
            for name in sorted(handlers):
                try:
                    handler = self.configure_handler(handlers[name])
                    handler.name = name
                    handlers[name] = handler
                except StandardError as e:
                    raise ValueError('Unable to configure handler %r: %s' % (name, e))
            root = logging.root
            existing = root.manager.loggerDict.keys()
            # Sort so that a parent logger is always seen before its
            # children (allowing child detection by prefix scan below).
            existing.sort(key=_encoded)
            child_loggers = []
            loggers = config.get('loggers', EMPTY_DICT)
            for name in loggers:
                if name in existing:
                    i = existing.index(name)
                    prefixed = name + '.'
                    pflen = len(prefixed)
                    num_existing = len(existing)
                    i = i + 1
                    # Collect existing children of this configured logger.
                    while i < num_existing and existing[i][:pflen] == prefixed:
                        child_loggers.append(existing[i])
                        i = i + 1
                    existing.remove(name)
                try:
                    self.configure_logger(name, loggers[name])
                except StandardError as e:
                    raise ValueError('Unable to configure logger %r: %s' % (name, e))
            # Loggers left over from a previous configuration: children of
            # configured loggers are reset, others optionally disabled.
            for log in existing:
                logger = root.manager.loggerDict[log]
                if log in child_loggers:
                    logger.level = logging.NOTSET
                    logger.handlers = []
                    logger.propagate = True
                elif disable_existing:
                    logger.disabled = True
            root = config.get('root', None)
            if root:
                try:
                    self.configure_root(root)
                except StandardError as e:
                    raise ValueError('Unable to configure root logger: %s' % e)
    finally:
        logging._releaseLock()
def configure_formatter(self, config):
    """Configure a formatter from a dictionary.

    A '()' key selects a custom factory; otherwise a plain
    logging.Formatter is built from 'format' and 'datefmt'.
    """
    if '()' in config:
        factory = config['()']
        try:
            result = self.configure_custom(config)
        except TypeError as te:
            if "'format'" not in str(te):
                raise
            # logging.Formatter's parameter is named 'fmt', not 'format';
            # retry with the renamed keyword and the factory restored.
            config['fmt'] = config.pop('format')
            config['()'] = factory
            result = self.configure_custom(config)
    else:
        fmt = config.get('format', None)
        dfmt = config.get('datefmt', None)
        result = logging.Formatter(fmt, dfmt)
    return result
def configure_filter(self, config):
    """Configure a filter from a dictionary.

    A '()' key selects a custom factory; otherwise a logging.Filter is
    created with the optional 'name' (default: match all records).
    """
    if '()' in config:
        return self.configure_custom(config)
    return logging.Filter(config.get('name', ''))
def add_filters(self, filterer, filters):
    """Add filters to a filterer from a list of names.

    Names are looked up in self.config['filters']; any failure is
    re-raised as ValueError.
    """
    for fname in filters:
        try:
            filterer.addFilter(self.config['filters'][fname])
        except StandardError as e:
            raise ValueError('Unable to add filter %r: %s' % (fname, e))
def configure_handler(self, config):
    """Configure a handler from a dictionary.

    Uses a '()' factory when present, otherwise resolves 'class'.
    'formatter', 'level' and 'filters' are applied to the result;
    remaining valid identifiers become constructor keyword arguments.
    """
    formatter = config.pop('formatter', None)
    if formatter:
        try:
            formatter = self.config['formatters'][formatter]
        except StandardError as e:
            raise ValueError('Unable to set formatter %r: %s' % (formatter, e))
    level = config.pop('level', None)
    filters = config.pop('filters', None)
    if '()' in config:
        factory = config.pop('()')
        # Old-style classes are callable without __call__ on Python 2.
        if (not hasattr(factory, '__call__')) and hasattr(types, 'ClassType') and type(factory) != types.ClassType:
            factory = self.resolve(factory)
    else:
        klass = self.resolve(config.pop('class'))
        # Special-case handlers whose constructor arguments need
        # pre-processing (named target handler, tuple addresses).
        if issubclass(klass, logging.handlers.MemoryHandler) and 'target' in config:
            try:
                config['target'] = self.config['handlers'][config['target']]
            except StandardError as e:
                raise ValueError('Unable to set target handler %r: %s' % (config['target'], e))
        elif issubclass(klass, logging.handlers.SMTPHandler) and 'mailhost' in config:
            config['mailhost'] = self.as_tuple(config['mailhost'])
        elif issubclass(klass, logging.handlers.SysLogHandler) and 'address' in config:
            config['address'] = self.as_tuple(config['address'])
        factory = klass
    kwargs = dict((k, config[k]) for k in config if valid_ident(k))
    try:
        result = factory(**kwargs)
    except TypeError as te:
        if "'stream'" not in str(te):
            raise
        # Older handler signatures use 'strm' rather than 'stream'.
        kwargs['strm'] = kwargs.pop('stream')
        result = factory(**kwargs)
    if formatter:
        result.setFormatter(formatter)
    if level is not None:
        result.setLevel(logging._checkLevel(level))
    if filters:
        self.add_filters(result, filters)
    return result
def add_handlers(self, logger, handlers):
    """Add handlers to a logger from a list of names.

    Names are looked up in self.config['handlers']; any failure is
    re-raised as ValueError.
    """
    for hname in handlers:
        try:
            logger.addHandler(self.config['handlers'][hname])
        except StandardError as e:
            raise ValueError('Unable to add handler %r: %s' % (hname, e))
def common_logger_config(self, logger, config, incremental=False):
    """Perform configuration which is common to root and non-root loggers.

    Sets the level if given; in non-incremental mode also replaces the
    logger's handlers and filters with the configured ones.
    """
    level = config.get('level', None)
    if level is not None:
        logger.setLevel(logging._checkLevel(level))
    if not incremental:
        # Remove any handlers left over from a previous configuration.
        for handler in logger.handlers[:]:
            logger.removeHandler(handler)
        handlers = config.get('handlers', None)
        if handlers:
            self.add_handlers(logger, handlers)
        filters = config.get('filters', None)
        if filters:
            self.add_filters(logger, filters)
def configure_logger(self, name, config, incremental=False):
    """Configure a non-root logger from a dictionary."""
    logger = logging.getLogger(name)
    self.common_logger_config(logger, config, incremental)
    propagate = config.get('propagate', None)
    if propagate is not None:
        logger.propagate = propagate
def configure_root(self, config, incremental=False):
    """Configure a root logger from a dictionary."""
    # logging.getLogger() with no name returns the root logger.
    self.common_logger_config(logging.getLogger(), config, incremental)
def __init__(self, filename, mode, encoding=None, delay=0):
    """Use the specified filename for streamed logging."""
    # Encoded streams require the codecs module; without it, fall back
    # to plain (unencoded) file output.
    if codecs is None:
        encoding = None
    logging.FileHandler.__init__(self, filename, mode, encoding, delay)
    self.mode = mode
    self.encoding = encoding
def emit(self, record):
    """Emit a record.

    Output the record to the file, catering for rollover as described
    in doRollover().
    """
    try:
        if self.shouldRollover(record):
            self.doRollover()
        logging.FileHandler.emit(self, record)
    except (KeyboardInterrupt, SystemExit):
        raise
    except:
        self.handleError(record)
def __init__(self, filename, mode='a', maxBytes=0, backupCount=0, encoding=None, delay=0):
    """Open the specified file and use it as the stream for logging.

    The file grows to nearly maxBytes, then rolls over; backupCount
    rotated files ("app.log.1" ... "app.log.N") are kept. With
    maxBytes == 0, rollover never occurs.
    """
    # Rollover implies appending to the current file, so force append
    # mode whenever a size limit is in effect.
    if maxBytes > 0:
        mode = 'a'
    BaseRotatingHandler.__init__(self, filename, mode, encoding, delay)
    self.maxBytes = maxBytes
    self.backupCount = backupCount
def doRollover(self):
    """Do a rollover, as described in __init__()."""
    if self.stream:
        self.stream.close()
        self.stream = None
    if self.backupCount > 0:
        # Shift app.log.1 -> app.log.2, ..., dropping the oldest backup.
        for i in range(self.backupCount - 1, 0, -1):
            sfn = '%s.%d' % (self.baseFilename, i)
            dfn = '%s.%d' % (self.baseFilename, i + 1)
            if os.path.exists(sfn):
                if os.path.exists(dfn):
                    os.remove(dfn)
                os.rename(sfn, dfn)
        dfn = self.baseFilename + '.1'
        if os.path.exists(dfn):
            os.remove(dfn)
        os.rename(self.baseFilename, dfn)
    # Start a fresh (truncated) log file.
    self.mode = 'w'
    self.stream = self._open()
def shouldRollover(self, record):
    """Determine if rollover should occur.

    Basically, see if the supplied record would cause the file to
    exceed the size limit we have.
    """
    if self.stream is None:
        # Delayed open: make sure there is a stream to measure.
        self.stream = self._open()
    if self.maxBytes > 0:
        msg = '%s\n' % self.format(record)
        self.stream.seek(0, 2)  # seek to end (Windows may not keep position)
        if self.stream.tell() + len(msg) >= self.maxBytes:
            return 1
    return 0
def computeRollover(self, currentTime):
    """Work out the rollover time based on the specified time."""
    result = currentTime + self.interval
    # For midnight or weekly rollover, the target is not simply
    # currentTime + interval: first find the seconds left until the
    # next midnight, then (weekly) until the configured weekday.
    if self.when == 'MIDNIGHT' or self.when.startswith('W'):
        if self.utc:
            t = time.gmtime(currentTime)
        else:
            t = time.localtime(currentTime)
        currentHour = t[3]
        currentMinute = t[4]
        currentSecond = t[5]
        r = _MIDNIGHT - ((currentHour * 60 + currentMinute) * 60 + currentSecond)
        result = currentTime + r
        if self.when.startswith('W'):
            day = t[6]  # 0 == Monday
            if day != self.dayOfWeek:
                if day < self.dayOfWeek:
                    daysToWait = self.dayOfWeek - day
                else:
                    daysToWait = 6 - day + self.dayOfWeek + 1
                newRolloverAt = result + daysToWait * (60 * 60 * 24)
                if not self.utc:
                    # Compensate for a DST transition between now and
                    # the computed rollover moment.
                    dstNow = t[-1]
                    dstAtRollover = time.localtime(newRolloverAt)[-1]
                    if dstNow != dstAtRollover:
                        if not dstNow:
                            newRolloverAt = newRolloverAt - 3600
                        else:
                            newRolloverAt = newRolloverAt + 3600
                result = newRolloverAt
    return result
def shouldRollover(self, record):
    """Determine if rollover should occur.

    record is not used, as we are just comparing times, but it is
    needed so the method signatures are the same.
    """
    now = int(time.time())
    if now >= self.rolloverAt:
        return 1
    return 0
def getFilesToDelete(self):
    """Determine the files to delete when rolling over.

    Collects rotated siblings of the base file (matching extMatch),
    sorts them, and returns all but the newest backupCount entries.
    """
    dirName, baseName = os.path.split(self.baseFilename)
    prefix = baseName + '.'
    plen = len(prefix)
    result = []
    for fileName in os.listdir(dirName):
        if fileName[:plen] == prefix:
            suffix = fileName[plen:]
            if self.extMatch.match(suffix):
                result.append(os.path.join(dirName, fileName))
    result.sort()
    if len(result) < self.backupCount:
        result = []
    else:
        # Keep the newest backupCount files; delete the rest.
        result = result[:len(result) - self.backupCount]
    return result
def doRollover(self):
    """Do a rollover.

    A date/time stamp (for the start of the just-finished interval,
    not the current time) is appended to the filename; backups beyond
    backupCount are removed; the next rollover time is computed, with
    a DST adjustment for local-time midnight/weekly schedules.
    """
    if self.stream:
        self.stream.close()
        self.stream = None
    # Name the rotated file for the start of the interval.
    t = self.rolloverAt - self.interval
    if self.utc:
        timeTuple = time.gmtime(t)
    else:
        timeTuple = time.localtime(t)
    dfn = self.baseFilename + '.' + time.strftime(self.suffix, timeTuple)
    if os.path.exists(dfn):
        os.remove(dfn)
    os.rename(self.baseFilename, dfn)
    if self.backupCount > 0:
        for s in self.getFilesToDelete():
            os.remove(s)
    self.mode = 'w'
    self.stream = self._open()
    currentTime = int(time.time())
    newRolloverAt = self.computeRollover(currentTime)
    # Advance in whole intervals until the rollover time is in the future.
    while newRolloverAt <= currentTime:
        newRolloverAt = newRolloverAt + self.interval
    if (self.when == 'MIDNIGHT' or self.when.startswith('W')) and not self.utc:
        # If DST changes between now and the next rollover, shift by an hour.
        dstNow = time.localtime(currentTime)[-1]
        dstAtRollover = time.localtime(newRolloverAt)[-1]
        if dstNow != dstAtRollover:
            if not dstNow:
                newRolloverAt = newRolloverAt - 3600
            else:
                newRolloverAt = newRolloverAt + 3600
    self.rolloverAt = newRolloverAt
def emit(self, record):
    """Emit a record.

    First check if the underlying file has changed (moved/replaced,
    e.g. by an external log-rotation tool); if it has, close the old
    stream and reopen the file to get the current stream.
    """
    if not os.path.exists(self.baseFilename):
        stat = None
        changed = 1
    else:
        stat = os.stat(self.baseFilename)
        # Compare device and inode with those recorded at open time.
        changed = (stat[ST_DEV] != self.dev) or (stat[ST_INO] != self.ino)
    if changed and self.stream is not None:
        self.stream.flush()
        self.stream.close()
        self.stream = self._open()
        if stat is None:
            stat = os.stat(self.baseFilename)
        self.dev, self.ino = stat[ST_DEV], stat[ST_INO]
    logging.FileHandler.emit(self, record)
def __init__(self, host, port):
    """Initializes the handler with a specific host address and port.

    The attribute 'closeOnError' is set to 1 - which means that if a
    socket error occurs, the socket is silently closed and then
    reopened on the next logging call.
    """
    logging.Handler.__init__(self)
    self.host = host
    self.port = port
    self.sock = None
    self.closeOnError = 0
    self.retryTime = None
    # Exponential backoff parameters (seconds) for reconnect attempts.
    self.retryStart = 1.0
    self.retryMax = 30.0
    self.retryFactor = 2.0
def makeSocket(self, timeout=1):
    """A factory method which allows subclasses to define the precise
    type of socket they want.
    """
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    # settimeout may be absent on very old Python versions.
    if hasattr(sock, 'settimeout'):
        sock.settimeout(timeout)
    sock.connect((self.host, self.port))
    return sock
def createSocket(self):
    """Try to create a socket, using an exponential backoff with a max
    retry time.

    Thanks to Robert Olson for the original patch (SF #815911) which
    has been slightly refactored.
    """
    now = time.time()
    # Either this is the first attempt, or enough time has elapsed
    # since the last failure to try again.
    if self.retryTime is None:
        attempt = 1
    else:
        attempt = (now >= self.retryTime)
    if attempt:
        try:
            self.sock = self.makeSocket()
            self.retryTime = None  # connected: reset backoff state
        except socket.error:
            # Creation failed: schedule the next attempt, doubling the
            # delay up to retryMax.
            if self.retryTime is None:
                self.retryPeriod = self.retryStart
            else:
                self.retryPeriod = self.retryPeriod * self.retryFactor
                if self.retryPeriod > self.retryMax:
                    self.retryPeriod = self.retryMax
            self.retryTime = now + self.retryPeriod
def send(self, s):
    """Send a pickled string to the socket.

    This function allows for partial sends which can happen when the
    network is busy.
    """
    if self.sock is None:
        self.createSocket()
    # self.sock can still be None if createSocket() could not connect.
    if self.sock:
        try:
            if hasattr(self.sock, 'sendall'):
                self.sock.sendall(s)
            else:
                # Manual partial-send loop for sockets without sendall.
                sentsofar = 0
                left = len(s)
                while left > 0:
                    sent = self.sock.send(s[sentsofar:])
                    sentsofar = sentsofar + sent
                    left = left - sent
        except socket.error:
            # Connection lost: drop the socket so the next call retries.
            self.sock.close()
            self.sock = None
def makePickle(self, record):
    """Pickles the record in binary format with a length prefix, and
    returns it ready for transmission across the socket.

    Uses pickle protocol 1 and a big-endian 4-byte length prefix.
    """
    # Portability fix: cPickle exists only on Python 2; fall back to
    # the pickle module, which produces identical output for protocol 1.
    try:
        import cPickle as pickle_mod
    except ImportError:
        import pickle as pickle_mod
    ei = record.exc_info
    if ei:
        # Formatting caches exc_text (which is picklable) on the record;
        # a live traceback in exc_info is not picklable, so drop it.
        dummy = self.format(record)
        record.exc_info = None
    s = pickle_mod.dumps(record.__dict__, 1)
    if ei:
        # Restore exc_info for any other handlers processing this record.
        record.exc_info = ei
    slen = struct.pack('>L', len(s))
    return slen + s
def handleError(self, record):
    """Handle an error during logging.

    An error has occurred during logging. Most likely cause -
    connection lost. Close the socket so that we can retry on the
    next event.
    """
    if self.closeOnError and self.sock:
        self.sock.close()
        self.sock = None  # try to reconnect next time
    else:
        logging.Handler.handleError(self, record)
def emit(self, record):
    """Emit a record.

    Pickles the record and writes it to the socket in binary format.
    If there is an error with the socket, silently drop the packet;
    the socket is re-established on a later call.
    """
    try:
        payload = self.makePickle(record)
        self.send(payload)
    except (KeyboardInterrupt, SystemExit):
        raise
    except:
        self.handleError(record)
def close(self):
    """Closes the socket."""
    if self.sock:
        self.sock.close()
        self.sock = None
    logging.Handler.close(self)
def __init__(self, host, port):
    """Initializes the handler with a specific host address and port."""
    SocketHandler.__init__(self, host, port)
    # UDP is connectionless, so there is no connection to lose/close.
    self.closeOnError = 0
def makeSocket(self):
    """The factory method of SocketHandler is here overridden to create
    a UDP socket (SOCK_DGRAM).
    """
    return socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
def send(self, s):
    """Send a pickled string to a socket.

    This function no longer allows for partial sends which can happen
    when the network is busy - UDP does not guarantee delivery and can
    deliver packets out of sequence.
    """
    if self.sock is None:
        self.createSocket()
    self.sock.sendto(s, (self.host, self.port))
def __init__(self, address=('localhost', SYSLOG_UDP_PORT), facility=LOG_USER, socktype=socket.SOCK_DGRAM):
    """Initialize a handler.

    If address is specified as a string, a UNIX socket is used. To log
    to a local syslogd, "SysLogHandler(address="/dev/log")" can be
    used. If facility is not specified, LOG_USER is used.
    """
    logging.Handler.__init__(self)
    self.address = address
    self.facility = facility
    self.socktype = socktype
    if isinstance(address, basestring):
        # A string address names a UNIX domain socket (e.g. /dev/log).
        self.unixsocket = 1
        self._connect_unixsocket(address)
    else:
        self.unixsocket = 0
        self.socket = socket.socket(socket.AF_INET, socktype)
        if socktype == socket.SOCK_STREAM:
            self.socket.connect(address)
    self.formatter = None
def encodePriority(self, facility, priority):
    """Encode the facility and priority.

    You can pass in strings or integers - if strings are passed, the
    facility_names and priority_names mapping dictionaries are used to
    convert them to integers.
    """
    if isinstance(facility, basestring):
        facility = self.facility_names[facility]
    if isinstance(priority, basestring):
        priority = self.priority_names[priority]
    # Syslog PRI value: facility in the high bits, severity in the low 3.
    return (facility << 3) | priority
def close(self):
    """Closes the socket."""
    # Only UNIX-domain sockets are owned exclusively by this handler.
    if self.unixsocket:
        self.socket.close()
    logging.Handler.close(self)
def mapPriority(self, levelName):
    """Map a logging level name to a key in the priority_names map.

    Unknown (e.g. custom or locale-affected) level names fall back to
    'warning' (see SF #1524081).
    """
    return self.priority_map.get(levelName, 'warning')
def emit(self, record):
    """Emit a record.

    The record is formatted, and then sent to the syslog server. If
    exception information is present, it is NOT sent to the server.
    """
    msg = self.format(record) + '\x00'
    # We need to convert record level to lowercase, maybe this will
    # change in the future.
    prio = '<%d>' % self.encodePriority(self.facility, self.mapPriority(record.levelname))
    if type(msg) is unicode:
        # Encode Unicode as UTF-8, prefixed with a BOM so receivers can
        # detect the encoding.
        msg = msg.encode('utf-8')
        if codecs:
            msg = codecs.BOM_UTF8 + msg
    msg = prio + msg
    try:
        if self.unixsocket:
            try:
                self.socket.send(msg)
            except socket.error:
                # syslogd may have been restarted: reconnect and retry once.
                self._connect_unixsocket(self.address)
                self.socket.send(msg)
        elif self.socktype == socket.SOCK_DGRAM:
            self.socket.sendto(msg, self.address)
        else:
            self.socket.sendall(msg)
    except (KeyboardInterrupt, SystemExit):
        raise
    except:
        self.handleError(record)
def __init__(self, mailhost, fromaddr, toaddrs, subject, credentials=None, secure=None):
    """Initialize the handler.

    Initialize the instance with the from and to addresses and subject
    line of the email. To specify a non-standard SMTP port, use the
    (host, port) tuple format for the mailhost argument. To specify
    authentication credentials, supply a (username, password) tuple
    for the credentials argument. To specify the use of a secure
    protocol (TLS), pass in a tuple for the secure argument (passed to
    the `starttls` method); it is only used when credentials are
    supplied.
    """
    logging.Handler.__init__(self)
    if isinstance(mailhost, tuple):
        self.mailhost, self.mailport = mailhost
    else:
        self.mailhost, self.mailport = mailhost, None
    if isinstance(credentials, tuple):
        self.username, self.password = credentials
    else:
        self.username = None
    self.fromaddr = fromaddr
    # Accept a single recipient address as a plain string.
    if isinstance(toaddrs, basestring):
        toaddrs = [toaddrs]
    self.toaddrs = toaddrs
    self.subject = subject
    self.secure = secure
def getSubject(self, record):
    """Determine the subject for the email.

    If you want to specify a subject line which is record-dependent,
    override this method.
    """
    return self.subject