Dataset columns:
  desc: string, 3 to 26.7k characters
  decl: string, 11 to 7.89k characters
  bodies: string, 8 to 553k characters
'Produce text documentation for a given module object.'
def docmodule(self, object, name=None, mod=None):
    name = object.__name__
    synop, desc = splitdoc(getdoc(object))
    result = self.section('NAME', name + (synop and ' - ' + synop))

    try:
        all = object.__all__
    except AttributeError:
        all = None

    try:
        file = inspect.getabsfile(object)
    except TypeError:
        file = '(built-in)'
    result = result + self.section('FILE', file)

    docloc = self.getdocloc(object)
    if docloc is not None:
        result = result + self.section('MODULE DOCS', docloc)

    if desc:
        result = result + self.section('DESCRIPTION', desc)

    classes = []
    for key, value in inspect.getmembers(object, inspect.isclass):
        if all is not None or (inspect.getmodule(value) or object) is object:
            if visiblename(key, all, object):
                classes.append((key, value))
    funcs = []
    for key, value in inspect.getmembers(object, inspect.isroutine):
        if all is not None or inspect.isbuiltin(value) or inspect.getmodule(value) is object:
            if visiblename(key, all, object):
                funcs.append((key, value))
    data = []
    for key, value in inspect.getmembers(object, isdata):
        if visiblename(key, all, object):
            data.append((key, value))

    modpkgs = []
    modpkgs_names = set()
    if hasattr(object, '__path__'):
        for importer, modname, ispkg in pkgutil.iter_modules(object.__path__):
            modpkgs_names.add(modname)
            if ispkg:
                modpkgs.append(modname + ' (package)')
            else:
                modpkgs.append(modname)
        modpkgs.sort()
        result = result + self.section('PACKAGE CONTENTS', join(modpkgs, '\n'))

    submodules = []
    for key, value in inspect.getmembers(object, inspect.ismodule):
        if value.__name__.startswith(name + '.') and key not in modpkgs_names:
            submodules.append(key)
    if submodules:
        submodules.sort()
        result = result + self.section('SUBMODULES', join(submodules, '\n'))

    if classes:
        classlist = map(lambda key_value: key_value[1], classes)
        contents = [self.formattree(inspect.getclasstree(classlist, 1), name)]
        for key, value in classes:
            contents.append(self.document(value, key, name))
        result = result + self.section('CLASSES', join(contents, '\n'))

    if funcs:
        contents = []
        for key, value in funcs:
            contents.append(self.document(value, key, name))
        result = result + self.section('FUNCTIONS', join(contents, '\n'))

    if data:
        contents = []
        for key, value in data:
            contents.append(self.docother(value, key, name, maxlen=70))
        result = result + self.section('DATA', join(contents, '\n'))

    if hasattr(object, '__version__'):
        version = str(object.__version__)
        if version[:11] == '$' + 'Revision: ' and version[-1:] == '$':
            version = strip(version[11:-1])
        result = result + self.section('VERSION', version)
    if hasattr(object, '__date__'):
        result = result + self.section('DATE', str(object.__date__))
    if hasattr(object, '__author__'):
        result = result + self.section('AUTHOR', str(object.__author__))
    if hasattr(object, '__credits__'):
        result = result + self.section('CREDITS', str(object.__credits__))
    return result
'Produce text documentation for a given class object.'
def docclass(self, object, name=None, mod=None, *ignored):
    realname = object.__name__
    name = name or realname
    bases = object.__bases__

    def makename(c, m=object.__module__):
        return classname(c, m)

    if name == realname:
        title = 'class ' + self.bold(realname)
    else:
        title = self.bold(name) + ' = class ' + realname
    if bases:
        parents = map(makename, bases)
        title = title + '(%s)' % join(parents, ', ')

    doc = getdoc(object)
    contents = doc and [doc + '\n'] or []
    push = contents.append

    mro = deque(inspect.getmro(object))
    if len(mro) > 2:
        push('Method resolution order:')
        for base in mro:
            push('    ' + makename(base))
        push('')

    class HorizontalRule:
        def __init__(self):
            self.needone = 0
        def maybe(self):
            if self.needone:
                push('-' * 70)
            self.needone = 1
    hr = HorizontalRule()

    def spill(msg, attrs, predicate):
        ok, attrs = _split_list(attrs, predicate)
        if ok:
            hr.maybe()
            push(msg)
            for name, kind, homecls, value in ok:
                push(self.document(getattr(object, name), name, mod, object))
        return attrs

    def spilldescriptors(msg, attrs, predicate):
        ok, attrs = _split_list(attrs, predicate)
        if ok:
            hr.maybe()
            push(msg)
            for name, kind, homecls, value in ok:
                push(self._docdescriptor(name, value, mod))
        return attrs

    def spilldata(msg, attrs, predicate):
        ok, attrs = _split_list(attrs, predicate)
        if ok:
            hr.maybe()
            push(msg)
            for name, kind, homecls, value in ok:
                if hasattr(value, '__call__') or inspect.isdatadescriptor(value):
                    doc = getdoc(value)
                else:
                    doc = None
                push(self.docother(getattr(object, name), name, mod,
                                   maxlen=70, doc=doc) + '\n')
        return attrs

    attrs = filter(lambda data: visiblename(data[0], obj=object),
                   classify_class_attrs(object))
    while attrs:
        if mro:
            thisclass = mro.popleft()
        else:
            thisclass = attrs[0][2]
        attrs, inherited = _split_list(attrs, lambda t: t[2] is thisclass)

        if thisclass is __builtin__.object:
            attrs = inherited
            continue
        elif thisclass is object:
            tag = 'defined here'
        else:
            tag = 'inherited from %s' % classname(thisclass, object.__module__)
        attrs.sort()

        attrs = spill('Methods %s:\n' % tag, attrs,
                      lambda t: t[1] == 'method')
        attrs = spill('Class methods %s:\n' % tag, attrs,
                      lambda t: t[1] == 'class method')
        attrs = spill('Static methods %s:\n' % tag, attrs,
                      lambda t: t[1] == 'static method')
        attrs = spilldescriptors('Data descriptors %s:\n' % tag, attrs,
                                 lambda t: t[1] == 'data descriptor')
        attrs = spilldata('Data and other attributes %s:\n' % tag, attrs,
                          lambda t: t[1] == 'data')
        assert attrs == []
        attrs = inherited

    contents = '\n'.join(contents)
    if not contents:
        return title + '\n'
    return title + '\n' + self.indent(rstrip(contents), ' | ') + '\n'
'Format an argument default value as text.'
def formatvalue(self, object):
    return '=' + self.repr(object)
'Produce text documentation for a function or method object.'
def docroutine(self, object, name=None, mod=None, cl=None):
    realname = object.__name__
    name = name or realname
    note = ''
    skipdocs = 0
    if inspect.ismethod(object):
        imclass = object.im_class
        if cl:
            if imclass is not cl:
                note = ' from ' + classname(imclass, mod)
        elif object.im_self is not None:
            note = ' method of %s instance' % classname(
                object.im_self.__class__, mod)
        else:
            note = ' unbound %s method' % classname(imclass, mod)
        object = object.im_func

    if name == realname:
        title = self.bold(realname)
    else:
        if cl and realname in cl.__dict__ and cl.__dict__[realname] is object:
            skipdocs = 1
        title = self.bold(name) + ' = ' + realname
    if inspect.isfunction(object):
        args, varargs, varkw, defaults = inspect.getargspec(object)
        argspec = inspect.formatargspec(
            args, varargs, varkw, defaults, formatvalue=self.formatvalue)
        if realname == '<lambda>':
            title = self.bold(name) + ' lambda '
            argspec = argspec[1:-1]
    else:
        argspec = '(...)'
    decl = title + argspec + note

    if skipdocs:
        return decl + '\n'
    else:
        doc = getdoc(object) or ''
        return decl + '\n' + (doc and rstrip(self.indent(doc)) + '\n')
'Produce text documentation for a property.'
def docproperty(self, object, name=None, mod=None, cl=None):
    return self._docdescriptor(name, object, mod)
'Produce text documentation for a data descriptor.'
def docdata(self, object, name=None, mod=None, cl=None):
    return self._docdescriptor(name, object, mod)
'Produce text documentation for a data object.'
def docother(self, object, name=None, mod=None, parent=None, maxlen=None, doc=None):
    repr = self.repr(object)
    if maxlen:
        line = (name and name + ' = ' or '') + repr
        chop = maxlen - len(line)
        if chop < 0:
            repr = repr[:chop] + '...'
    line = (name and self.bold(name) + ' = ' or '') + repr
    if doc is not None:
        line += '\n' + self.indent(str(doc))
    return line
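
If these are the pydoc.TextDoc methods, as the signatures suggest, a minimal usage sketch would be the following; pydoc.text (a TextDoc instance) and pydoc.plain are standard:

import pydoc, os
text = pydoc.text.docmodule(os)   # the docmodule() shown above
print pydoc.plain(text)[:200]     # strip the overstrike 'bold' markup
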
'Read one line, using raw_input when available.'
def getline(self, prompt):
    if self.input is sys.stdin:
        return raw_input(prompt)
    else:
        self.output.write(prompt)
        self.output.flush()
        return self.input.readline()
'Return scope of name. The scope of a name could be LOCAL, GLOBAL, FREE, or CELL.'
def check_name(self, name):
    if name in self.globals:
        return SC_GLOBAL_EXPLICT
    if name in self.cells:
        return SC_CELL
    if name in self.defs:
        return SC_LOCAL
    if self.nested and (name in self.frees or name in self.uses):
        return SC_FREE
    if self.nested:
        return SC_UNKNOWN
    else:
        return SC_GLOBAL_IMPLICIT
'Force name to be global in scope. Some child of the current node had a free reference to name. When the child was processed, it was labelled a free variable. Now that all its enclosing scopes have been processed, the name is known to be a global or builtin. So walk back down the child chain and set the name to be global rather than free. Be careful to stop if a child does not think the name is free.'
def force_global(self, name):
    self.globals[name] = 1
    if name in self.frees:
        del self.frees[name]
    for child in self.children:
        if child.check_name(name) == SC_FREE:
            child.force_global(name)
'Process list of free vars from nested scope. Returns a list of names that are either 1) declared global in the parent or 2) undefined in a top-level parent. In either case, the nested scope should treat them as globals.'
def add_frees(self, names):
    child_globals = []
    for name in names:
        sc = self.check_name(name)
        if self.nested:
            if sc == SC_UNKNOWN or sc == SC_FREE or isinstance(self, ClassScope):
                self.frees[name] = 1
            elif sc == SC_GLOBAL_IMPLICIT:
                child_globals.append(name)
            elif isinstance(self, FunctionScope) and sc == SC_LOCAL:
                self.cells[name] = 1
            elif sc != SC_CELL:
                child_globals.append(name)
        else:
            if sc == SC_LOCAL:
                self.cells[name] = 1
            elif sc != SC_CELL:
                child_globals.append(name)
    return child_globals
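
The three scope methods above implement the classifier used by the old compiler package. A minimal sketch of the same local/free/cell distinctions using the stdlib symtable module (the module swap is an illustration, not the code above):

import symtable

src = """
def outer():
    cell = 1          # local to outer, stored in a cell for inner
    def inner():
        return cell   # free in inner
    return inner
"""
table = symtable.symtable(src, '<example>', 'exec')
outer = table.get_children()[0]
inner = outer.get_children()[0]
print outer.lookup('cell').is_local()   # True
print inner.lookup('cell').is_free()    # True
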
'Propagate assignment flag down to child nodes. The Assign node doesn\'t itself contain the variables being assigned to. Instead, the children in node.nodes are visited with the assign flag set to true. When the names occur in those nodes, they are marked as defs. Some names that occur in an assignment target are not bound by the assignment, e.g. a name occurring inside a slice. The visitor handles these nodes specially; they do not propagate the assign flag to their children.'
def visitAssign(self, node, scope):
    for n in node.nodes:
        self.visit(n, scope, 1)
    self.visit(node.expr, scope)
'Verify that class is constructed correctly'
def checkClass(self):
    try:
        assert hasattr(self, 'graph')
        assert getattr(self, 'NameFinder')
        assert getattr(self, 'FunctionGen')
        assert getattr(self, 'ClassGen')
    except AssertionError as msg:
        intro = 'Bad class construction for %s' % self.__class__.__name__
        raise AssertionError, intro
'Return a code object'
def getCode(self):
    return self.graph.getCode()
'Emit name ops for names generated implicitly by for loops. The interpreter generates names that start with a period or dollar sign. The symbol table ignores these names because they aren\'t present in the program text.'
def _implicitNameOp(self, prefix, name):
    if self.optimized:
        self.emit(prefix + '_FAST', name)
    else:
        self.emit(prefix + '_NAME', name)
'Emit SET_LINENO if necessary. The instruction is considered necessary if the node has a lineno attribute and it is different than the last lineno emitted. Returns true if SET_LINENO was emitted. There are no rules for when an AST node should have a lineno attribute. The transformer and AST code need to be reviewed and a consistent policy implemented and documented. Until then, this method works around missing line numbers.'
def set_lineno(self, node, force=False):
    lineno = getattr(node, 'lineno', None)
    if lineno is not None and (lineno != self.last_lineno or force):
        self.emit('SET_LINENO', lineno)
        self.last_lineno = lineno
        return True
    return False
'Transform an AST into a modified parse tree.'
def transform(self, tree):
    if not (isinstance(tree, tuple) or isinstance(tree, list)):
        tree = parser.st2tuple(tree, line_info=1)
    return self.compile_node(tree)
'Return a modified parse tree for the given suite text.'
def parsesuite(self, text):
    return self.transform(parser.suite(text))
'Return a modified parse tree for the given expression text.'
def parseexpr(self, text):
    return self.transform(parser.expr(text))
'Return a modified parse tree for the contents of the given file.'
def parsefile(self, file):
    if type(file) == type(''):
        file = open(file)
    return self.parsesuite(file.read())
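
A hedged sketch of feeding these helpers, assuming this is the Python 2 compiler.transformer.Transformer; the parser module calls are standard library:

import parser
from compiler.transformer import Transformer

t = Transformer()
ast = t.parsesuite('x = 1 + 2\n')           # parsesuite() as defined above
st = parser.expr('x + y')                   # a raw parser.st object
print parser.st2tuple(st, line_info=1)[0]   # the tuple form transform() expects
ast2 = t.transform(st)                      # st objects are converted on entry
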
'Return node suitable for lvalue of augmented assignment. Names, slices, and attributes are the only allowable nodes.'
def com_augassign(self, node):
    l = self.com_node(node)
    if l.__class__ in (Name, Slice, Subscript, Getattr):
        return l
    raise SyntaxError, "can't assign to %s" % l.__class__.__name__
'Compile \'NODE (OP NODE)*\' into (type, [ node1, ..., nodeN ]).'
def com_binary(self, constructor, nodelist):
    l = len(nodelist)
    if l == 1:
        n = nodelist[0]
        return self.lookup_node(n)(n[1:])
    items = []
    for i in range(0, l, 2):
        n = nodelist[i]
        items.append(self.lookup_node(n)(n[1:]))
    return constructor(items, lineno=extractLineNo(nodelist))
'Do preorder walk of tree using visitor'
def preorder(self, tree, visitor, *args):
    self.visitor = visitor
    visitor.visit = self.dispatch
    self.dispatch(tree, *args)
'Return list of features enabled by future statements'
def get_features(self):
    return self.found.keys()
'Return the blocks in reverse postorder, i.e. each node appears before all of its successors.'
def getBlocksInOrder(self):
    order = order_blocks(self.entry, self.exit)
    return order
'Return nodes appropriate for use with dominator'
def getRoot(self):
    return self.entry
'Returns True if there is an unconditional transfer to another block at the end of this block. This means there is no risk for the bytecode executor to go past this block\'s bytecode.'
def has_unconditional_transfer(self):
    try:
        op, arg = self.insts[-1]
    except (IndexError, ValueError):
        return
    return op in self._uncond_transfer
'Get the whole list of followers, including the next block.'
def get_followers(self):
    followers = set(self.next)
    for inst in self.insts:
        if inst[0] in PyFlowGraph.hasjrel:
            followers.add(inst[1])
    return followers
'Return all graphs contained within this block. For example, a MAKE_FUNCTION block will contain a reference to the graph for the function body.'
def getContainedGraphs(self):
    contained = []
    for inst in self.insts:
        if len(inst) == 1:
            continue
        op = inst[1]
        if hasattr(op, 'graph'):
            contained.append(op.graph)
    return contained
'Get a Python code object'
def getCode(self):
    assert self.stage == RAW
    self.computeStackDepth()
    self.flattenGraph()
    assert self.stage == FLAT
    self.convertArgs()
    assert self.stage == CONV
    self.makeByteCode()
    assert self.stage == DONE
    return self.newCodeObject()
'Compute the max stack depth. Approach is to compute the stack effect of each basic block. Then find the path through the code with the largest total effect.'
def computeStackDepth(self):
    depth = {}
    exit = None
    for b in self.getBlocks():
        depth[b] = findDepth(b.getInstructions())

    seen = {}

    def max_depth(b, d):
        if b in seen:
            return d
        seen[b] = 1
        d = d + depth[b]
        children = b.get_children()
        if children:
            return max([max_depth(c, d) for c in children])
        elif not b.label == 'exit':
            return max_depth(self.exit, d)
        else:
            return d

    self.stacksize = max_depth(self.entry, 0)
'Arrange the blocks in order and resolve jumps'
def flattenGraph(self):
    assert self.stage == RAW
    self.insts = insts = []
    pc = 0
    begin = {}
    end = {}
    for b in self.getBlocksInOrder():
        begin[b] = pc
        for inst in b.getInstructions():
            insts.append(inst)
            if len(inst) == 1:
                pc = pc + 1
            elif inst[0] != 'SET_LINENO':
                pc = pc + 3
        end[b] = pc
    pc = 0
    for i in range(len(insts)):
        inst = insts[i]
        if len(inst) == 1:
            pc = pc + 1
        elif inst[0] != 'SET_LINENO':
            pc = pc + 3
        opname = inst[0]
        if opname in self.hasjrel:
            oparg = inst[1]
            offset = begin[oparg] - pc
            insts[i] = (opname, offset)
        elif opname in self.hasjabs:
            insts[i] = (opname, begin[inst[1]])
    self.stage = FLAT
'Convert arguments from symbolic to concrete form'
def convertArgs(self):
    assert self.stage == FLAT
    self.consts.insert(0, self.docstring)
    self.sort_cellvars()
    for i in range(len(self.insts)):
        t = self.insts[i]
        if len(t) == 2:
            opname, oparg = t
            conv = self._converters.get(opname, None)
            if conv:
                self.insts[i] = (opname, conv(self, oparg))
    self.stage = CONV
'Sort cellvars in the order of varnames and prune from freevars.'
def sort_cellvars(self):
    cells = {}
    for name in self.cellvars:
        cells[name] = 1
    self.cellvars = [name for name in self.varnames if name in cells]
    for name in self.cellvars:
        del cells[name]
    self.cellvars = self.cellvars + cells.keys()
    self.closure = self.cellvars + self.freevars
'Return index of name in list, appending if necessary. This routine uses a list instead of a dictionary, because a dictionary can\'t store two different keys if the keys have the same value but different types, e.g. 2 and 2L. The compiler must treat these two separately, so it does an explicit type comparison before comparing the values.'
def _lookupName(self, name, list):
    t = type(name)
    for i in range(len(list)):
        if t == type(list[i]) and list[i] == name:
            return i
    end = len(list)
    list.append(name)
    return end
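
The docstring's point about 2 and 2L can be seen directly; a tiny demonstration (plain Python 2, independent of the method above):

d = {2: 'int'}
d[2L] = 'long'
print d    # {2: 'long'} -- a dict merges the two keys

# The type-then-value comparison used above keeps them distinct:
consts = [2]
print type(2L) == type(consts[0]) and consts[0] == 2L   # False -> 2L appended
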
'Return a tuple for the const slot of the code object. Must convert references to code (MAKE_FUNCTION) to code objects recursively.'
def getConsts(self):
    l = []
    for elt in self.consts:
        if isinstance(elt, PyFlowGraph):
            elt = elt.getCode()
        l.append(elt)
    return tuple(l)
'Create new visitor object. If optional argument multi is not None, then print messages for each error rather than raising a SyntaxError for the first.'
def __init__(self, multi=None):
    self.multi = multi
    self.errors = 0
'Create a new mutex -- initially unlocked.'
def __init__(self):
    self.locked = False
    self.queue = deque()
'Test the locked bit of the mutex.'
def test(self):
    return self.locked
'Atomic test-and-set -- grab the lock if it is not set, return True if it succeeded.'
def testandset(self):
    if not self.locked:
        self.locked = True
        return True
    else:
        return False
'Lock a mutex, call the function with supplied argument when it is acquired. If the mutex is already locked, place function and argument in the queue.'
def lock(self, function, argument):
    if self.testandset():
        function(argument)
    else:
        self.queue.append((function, argument))
'Unlock a mutex. If the queue is not empty, call the next function with its argument.'
def unlock(self):
    if self.queue:
        function, argument = self.queue.popleft()
        function(argument)
    else:
        self.locked = False
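
A usage sketch of the locking protocol above, assuming this is the Python 2 stdlib mutex.mutex class (deprecated since 2.6):

from mutex import mutex

m = mutex()

def job(name):
    print '%s acquired the lock' % name

m.lock(job, 'first')    # lock is free: job runs immediately, lock stays held
m.lock(job, 'second')   # lock held: (job, 'second') is queued
m.unlock()              # dequeues and runs job('second'); lock stays held
m.unlock()              # queue empty: lock is released
print m.test()          # False
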
'Handle pretty printing operations onto a stream using a set of configured parameters. indent: number of spaces to indent for each level of nesting. width: attempted maximum number of columns in the output. depth: the maximum depth to print out nested structures. stream: the desired output stream; if omitted (or false), the standard output stream available at construction will be used.'
def __init__(self, indent=1, width=80, depth=None, stream=None):
    indent = int(indent)
    width = int(width)
    assert indent >= 0, 'indent must be >= 0'
    assert depth is None or depth > 0, 'depth must be > 0'
    assert width, 'width must be != 0'
    self._depth = depth
    self._indent_per_level = indent
    self._width = width
    if stream is not None:
        self._stream = stream
    else:
        self._stream = _sys.stdout
'Format object for a specific context, returning a string and flags indicating whether the representation is \'readable\' and whether the object represents a recursive construct.'
def format(self, object, context, maxlevels, level):
    return _safe_repr(object, context, maxlevels, level)
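
A short usage sketch of the constructor parameters documented above, assuming the standard pprint.PrettyPrinter:

from pprint import PrettyPrinter

pp = PrettyPrinter(indent=2, width=40, depth=2)
pp.pprint({'a': [1, 2, 3], 'b': {'c': {'d': 'hidden'}}})
# depth=2 makes the innermost dict print as {...}
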
'Run the callback unless it has already been called or cancelled'
def __call__(self, wr=None):
    try:
        del _finalizer_registry[self._key]
    except KeyError:
        sub_debug('finalizer no longer registered')
    else:
        sub_debug('finalizer calling %s with args %s and kwargs %s',
                  self._callback, self._args, self._kwargs)
        res = self._callback(*self._args, **self._kwargs)
        self._weakref = self._callback = self._args = \
            self._kwargs = self._key = None
        return res
'Cancel finalization of the object'
def cancel(self):
    try:
        del _finalizer_registry[self._key]
    except KeyError:
        pass
    else:
        self._weakref = self._callback = self._args = \
            self._kwargs = self._key = None
'Return whether this finalizer is still waiting to invoke callback'
def still_active(self):
    return self._key in _finalizer_registry
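
A minimal sketch of the finalizer trio above, assuming multiprocessing.util.Finalize; the Resource class and 'db-handle' argument are made up for illustration:

from multiprocessing import util

class Resource(object):
    pass

def release(name):
    print 'releasing %s' % name

r = Resource()
f = util.Finalize(r, release, args=('db-handle',))
print f.still_active()   # True
f()                      # runs release('db-handle') exactly once
print f.still_active()   # False
f.cancel()               # harmless now; the key is already unregistered
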
'Method to be run in sub-process; can be overridden in sub-class'
def run(self):
    if self._target:
        self._target(*self._args, **self._kwargs)
'Start child process'
def start(self):
    assert self._popen is None, 'cannot start a process twice'
    assert self._parent_pid == os.getpid(), \
        'can only start a process object created by current process'
    assert not _current_process._daemonic, \
        'daemonic processes are not allowed to have children'
    _cleanup()
    if self._Popen is not None:
        Popen = self._Popen
    else:
        from .forking import Popen
    self._popen = Popen(self)
    _current_process._children.add(self)
'Terminate process; sends SIGTERM signal or uses TerminateProcess()'
def terminate(self):
    self._popen.terminate()
'Wait until child process terminates'
def join(self, timeout=None):
    assert self._parent_pid == os.getpid(), 'can only join a child process'
    assert self._popen is not None, 'can only join a started process'
    res = self._popen.wait(timeout)
    if res is not None:
        _current_process._children.discard(self)
'Return whether process is alive'
def is_alive(self):
    if self is _current_process:
        return True
    assert self._parent_pid == os.getpid(), 'can only test a child process'
    if self._popen is None:
        return False
    self._popen.poll()
    return self._popen.returncode is None
'Return whether process is a daemon'
@property
def daemon(self):
    return self._daemonic
'Set whether process is a daemon'
@daemon.setter
def daemon(self, daemonic):
    assert self._popen is None, 'process has already started'
    self._daemonic = daemonic
'Set authorization key of process'
@authkey.setter
def authkey(self, authkey):
    self._authkey = AuthenticationString(authkey)
'Return exit code of process or `None` if it has yet to stop'
@property
def exitcode(self):
    if self._popen is None:
        return self._popen
    return self._popen.poll()
'Return identifier (PID) of process or `None` if it has yet to start'
@property
def ident(self):
    if self is _current_process:
        return os.getpid()
    else:
        return self._popen and self._popen.pid
'Accept a connection on the bound socket or named pipe of `self`. Returns a `Connection` object.'
def accept(self):
    c = self._listener.accept()
    if self._authkey:
        deliver_challenge(c, self._authkey)
        answer_challenge(c, self._authkey)
    return c
'Close the bound socket or named pipe of `self`.'
def close(self):
    return self._listener.close()
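
A hedged sketch pairing accept() with a client, assuming multiprocessing.connection.Listener and Client; the port is hypothetical and may be taken on a given machine:

from multiprocessing.connection import Listener, Client
import threading

address = ('localhost', 6000)   # hypothetical port

def client():
    c = Client(address, authkey='secret')
    c.send('hello')
    c.close()

listener = Listener(address, authkey='secret')
threading.Thread(target=client).start()
conn = listener.accept()        # challenge/response handshake happens here
print conn.recv()               # 'hello'
conn.close()
listener.close()
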
'Run the server forever'
def serve_forever(self):
    current_process()._manager_server = self
    try:
        while 1:
            try:
                c = self.listener.accept()
            except (OSError, IOError):
                continue
            t = threading.Thread(target=self.handle_request, args=(c,))
            t.daemon = True
            t.start()
    except (KeyboardInterrupt, SystemExit):
        pass
    finally:
        self.stop = 999
        self.listener.close()
'Handle a new connection'
def handle_request(self, c):
    funcname = result = request = None
    try:
        connection.deliver_challenge(c, self.authkey)
        connection.answer_challenge(c, self.authkey)
        request = c.recv()
        ignore, funcname, args, kwds = request
        assert funcname in self.public, '%r unrecognized' % funcname
        func = getattr(self, funcname)
    except Exception:
        msg = ('#TRACEBACK', format_exc())
    else:
        try:
            result = func(c, *args, **kwds)
        except Exception:
            msg = ('#TRACEBACK', format_exc())
        else:
            msg = ('#RETURN', result)
    try:
        c.send(msg)
    except Exception as e:
        try:
            c.send(('#TRACEBACK', format_exc()))
        except Exception:
            pass
        util.info('Failure to send message: %r', msg)
        util.info(' ... request was %r', request)
        util.info(' ... exception was %r', e)
    c.close()
'Handle requests from the proxies in a particular process/thread'
def serve_client(self, conn):
    util.debug('starting server thread to service %r',
               threading.current_thread().name)
    recv = conn.recv
    send = conn.send
    id_to_obj = self.id_to_obj

    while not self.stop:
        try:
            methodname = obj = None
            request = recv()
            ident, methodname, args, kwds = request
            obj, exposed, gettypeid = id_to_obj[ident]

            if methodname not in exposed:
                raise AttributeError(
                    'method %r of %r object is not in exposed=%r' %
                    (methodname, type(obj), exposed))

            function = getattr(obj, methodname)

            try:
                res = function(*args, **kwds)
            except Exception as e:
                msg = ('#ERROR', e)
            else:
                typeid = gettypeid and gettypeid.get(methodname, None)
                if typeid:
                    rident, rexposed = self.create(conn, typeid, res)
                    token = Token(typeid, self.address, rident)
                    msg = ('#PROXY', (rexposed, token))
                else:
                    msg = ('#RETURN', res)

        except AttributeError:
            if methodname is None:
                msg = ('#TRACEBACK', format_exc())
            else:
                try:
                    fallback_func = self.fallback_mapping[methodname]
                    result = fallback_func(self, conn, ident, obj,
                                           *args, **kwds)
                    msg = ('#RETURN', result)
                except Exception:
                    msg = ('#TRACEBACK', format_exc())

        except EOFError:
            util.debug('got EOF -- exiting thread serving %r',
                       threading.current_thread().name)
            sys.exit(0)

        except Exception:
            msg = ('#TRACEBACK', format_exc())

        try:
            try:
                send(msg)
            except Exception as e:
                send(('#UNSERIALIZABLE', repr(msg)))
        except Exception as e:
            util.info('exception in thread serving %r',
                      threading.current_thread().name)
            util.info(' ... message was %r', msg)
            util.info(' ... exception was %r', e)
            conn.close()
            sys.exit(1)
'Return some info --- useful to spot problems with refcounting'
def debug_info(self, c):
    self.mutex.acquire()
    try:
        result = []
        keys = self.id_to_obj.keys()
        keys.sort()
        for ident in keys:
            if ident != '0':
                result.append('  %s:       refcount=%s\n    %s' %
                              (ident, self.id_to_refcount[ident],
                               str(self.id_to_obj[ident][0])[:75]))
        return '\n'.join(result)
    finally:
        self.mutex.release()
'Number of shared objects'
def number_of_objects(self, c):
    return len(self.id_to_obj) - 1
'Shutdown this process'
def shutdown(self, c):
    try:
        util.debug('manager received shutdown message')
        c.send(('#RETURN', None))

        if sys.stdout != sys.__stdout__:
            util.debug('resetting stdout, stderr')
            sys.stdout = sys.__stdout__
            sys.stderr = sys.__stderr__

        util._run_finalizers(0)

        for p in active_children():
            util.debug('terminating a child process of manager')
            p.terminate()

        for p in active_children():
            util.debug('terminating a child process of manager')
            p.join()

        util._run_finalizers()
        util.info('manager exiting with exitcode 0')
    except:
        import traceback
        traceback.print_exc()
    finally:
        exit(0)
'Create a new shared object and return its id'
def create(self, c, typeid, *args, **kwds):
    self.mutex.acquire()
    try:
        callable, exposed, method_to_typeid, proxytype = self.registry[typeid]

        if callable is None:
            assert len(args) == 1 and not kwds
            obj = args[0]
        else:
            obj = callable(*args, **kwds)

        if exposed is None:
            exposed = public_methods(obj)
        if method_to_typeid is not None:
            assert type(method_to_typeid) is dict
            exposed = list(exposed) + list(method_to_typeid)

        ident = '%x' % id(obj)
        util.debug('%r callable returned object with id %r', typeid, ident)

        self.id_to_obj[ident] = (obj, set(exposed), method_to_typeid)
        if ident not in self.id_to_refcount:
            self.id_to_refcount[ident] = 0
        self.incref(c, ident)
        return ident, tuple(exposed)
    finally:
        self.mutex.release()
'Return the methods of the shared object indicated by token'
def get_methods(self, c, token):
    return tuple(self.id_to_obj[token.id][1])
'Spawn a new thread to serve this connection'
def accept_connection(self, c, name):
    threading.current_thread().name = name
    c.send(('#RETURN', None))
    self.serve_client(c)
'Return server object with serve_forever() method and address attribute'
def get_server(self):
    assert self._state.value == State.INITIAL
    return Server(self._registry, self._address,
                  self._authkey, self._serializer)
'Connect manager object to the server process'
def connect(self):
    Listener, Client = listener_client[self._serializer]
    conn = Client(self._address, authkey=self._authkey)
    dispatch(conn, None, 'dummy')
    self._state.value = State.STARTED
'Spawn a server process for this manager object'
def start(self, initializer=None, initargs=()):
    assert self._state.value == State.INITIAL
    if initializer is not None and not hasattr(initializer, '__call__'):
        raise TypeError('initializer must be a callable')

    # pipe over which the server's address is reported back
    reader, writer = connection.Pipe(duplex=False)

    # spawn the process that runs the server
    self._process = Process(
        target=type(self)._run_server,
        args=(self._registry, self._address, self._authkey,
              self._serializer, writer, initializer, initargs),
        )
    ident = ':'.join(str(i) for i in self._process._identity)
    self._process.name = type(self).__name__ + '-' + ident
    self._process.start()

    # get the address of the server
    writer.close()
    self._address = reader.recv()
    reader.close()

    # register a finalizer that shuts the server down
    self._state.value = State.STARTED
    self.shutdown = util.Finalize(
        self, type(self)._finalize_manager,
        args=(self._process, self._address, self._authkey,
              self._state, self._Client),
        exitpriority=0
        )
'Create a server, report its address and run it'
@classmethod
def _run_server(cls, registry, address, authkey, serializer, writer, initializer=None, initargs=()):
    if initializer is not None:
        initializer(*initargs)

    # create the server and report its address to the parent process
    server = cls._Server(registry, address, authkey, serializer)
    writer.send(server.address)
    writer.close()

    # run the server loop
    util.info('manager serving at %r', server.address)
    server.serve_forever()
'Create a new shared object; return the token and exposed tuple'
def _create(self, typeid, *args, **kwds):
    assert self._state.value == State.STARTED, 'server not yet started'
    conn = self._Client(self._address, authkey=self._authkey)
    try:
        id, exposed = dispatch(conn, None, 'create', (typeid,) + args, kwds)
    finally:
        conn.close()
    return Token(typeid, self._address, id), exposed
'Join the manager process (if it has been spawned)'
def join(self, timeout=None):
    self._process.join(timeout)
'Return some info about the server\'s shared objects and connections'
def _debug_info(self):
    conn = self._Client(self._address, authkey=self._authkey)
    try:
        return dispatch(conn, None, 'debug_info')
    finally:
        conn.close()
'Return the number of shared objects'
def _number_of_objects(self):
    conn = self._Client(self._address, authkey=self._authkey)
    try:
        return dispatch(conn, None, 'number_of_objects')
    finally:
        conn.close()
'Shutdown the manager process; will be registered as a finalizer'
@staticmethod
def _finalize_manager(process, address, authkey, state, _Client):
    if process.is_alive():
        util.info('sending shutdown message to manager')
        try:
            conn = _Client(address, authkey=authkey)
            try:
                dispatch(conn, None, 'shutdown')
            finally:
                conn.close()
        except Exception:
            pass

        process.join(timeout=0.2)
        if process.is_alive():
            util.info('manager still alive')
            if hasattr(process, 'terminate'):
                util.info('trying to `terminate()` manager process')
                process.terminate()
                process.join(timeout=0.1)
                if process.is_alive():
                    util.info('manager still alive after terminate')

    state.value = State.SHUTDOWN
    try:
        del BaseProxy._address_to_local[address]
    except KeyError:
        pass
'Register a typeid with the manager type'
@classmethod
def register(cls, typeid, callable=None, proxytype=None, exposed=None, method_to_typeid=None, create_method=True):
    if '_registry' not in cls.__dict__:
        cls._registry = cls._registry.copy()

    if proxytype is None:
        proxytype = AutoProxy

    exposed = exposed or getattr(proxytype, '_exposed_', None)
    method_to_typeid = method_to_typeid or \
        getattr(proxytype, '_method_to_typeid_', None)

    if method_to_typeid:
        for key, value in method_to_typeid.items():
            assert type(key) is str, '%r is not a string' % key
            assert type(value) is str, '%r is not a string' % value

    cls._registry[typeid] = (callable, exposed, method_to_typeid, proxytype)

    if create_method:
        def temp(self, *args, **kwds):
            util.debug('requesting creation of a shared %r object', typeid)
            token, exp = self._create(typeid, *args, **kwds)
            proxy = proxytype(
                token, self._serializer, manager=self,
                authkey=self._authkey, exposed=exp
                )
            conn = self._Client(token.address, authkey=self._authkey)
            dispatch(conn, None, 'decref', (token.id,))
            return proxy
        temp.__name__ = typeid
        setattr(cls, typeid, temp)
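
A sketch of register() plus the manager lifecycle methods above (start/shutdown), assuming multiprocessing.managers.BaseManager; Maths is a made-up shared type:

from multiprocessing.managers import BaseManager

class Maths(object):
    def add(self, x, y):
        return x + y

class MathsManager(BaseManager):
    pass

MathsManager.register('Maths', callable=Maths)

if __name__ == '__main__':
    m = MathsManager(address=('', 50000), authkey='abc')
    m.start()                # spawns the server process (see start() above)
    proxy = m.Maths()        # the generated temp() method from register()
    print proxy.add(4, 5)    # 9, computed in the manager process
    m.shutdown()
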
'Try to call a method of the referent and return a copy of the result'
def _callmethod(self, methodname, args=(), kwds={}):
    try:
        conn = self._tls.connection
    except AttributeError:
        util.debug('thread %r does not own a connection',
                   threading.current_thread().name)
        self._connect()
        conn = self._tls.connection

    conn.send((self._id, methodname, args, kwds))
    kind, result = conn.recv()

    if kind == '#RETURN':
        return result
    elif kind == '#PROXY':
        exposed, token = result
        proxytype = self._manager._registry[token.typeid][-1]
        proxy = proxytype(
            token, self._serializer, manager=self._manager,
            authkey=self._authkey, exposed=exposed
            )
        conn = self._Client(token.address, authkey=self._authkey)
        dispatch(conn, None, 'decref', (token.id,))
        return proxy
    raise convert_to_error(kind, result)
'Get a copy of the value of the referent'
def _getvalue(self):
    return self._callmethod('#GETVALUE')
'Return representation of the referent (or a fall-back if that fails)'
def __str__(self):
    try:
        return self._callmethod('__repr__')
    except Exception:
        return repr(self)[:-1] + "; '__str__()' failed>"
'Cleanup after any worker processes which have exited due to reaching their specified lifetime. Returns True if any workers were cleaned up.'
def _join_exited_workers(self):
    cleaned = False
    for i in reversed(range(len(self._pool))):
        worker = self._pool[i]
        if worker.exitcode is not None:
            debug('cleaning up worker %d' % i)
            worker.join()
            cleaned = True
            del self._pool[i]
    return cleaned
'Bring the number of pool processes up to the specified number, for use after reaping workers which have exited.'
def _repopulate_pool(self):
    for i in range(self._processes - len(self._pool)):
        w = self.Process(target=worker,
                         args=(self._inqueue, self._outqueue,
                               self._initializer, self._initargs,
                               self._maxtasksperchild))
        self._pool.append(w)
        w.name = w.name.replace('Process', 'PoolWorker')
        w.daemon = True
        w.start()
        debug('added worker')
'Clean up any exited workers and start replacements for them.'
def _maintain_pool(self):
    if self._join_exited_workers():
        self._repopulate_pool()
'Equivalent of `apply()` builtin'
def apply(self, func, args=(), kwds={}):
    assert self._state == RUN
    return self.apply_async(func, args, kwds).get()
'Equivalent of `map()` builtin'
def map(self, func, iterable, chunksize=None):
    assert self._state == RUN
    return self.map_async(func, iterable, chunksize).get()
'Equivalent of `itertools.imap()` -- can be MUCH slower than `Pool.map()`'
def imap(self, func, iterable, chunksize=1):
    assert self._state == RUN
    if chunksize == 1:
        result = IMapIterator(self._cache)
        self._taskqueue.put((((result._job, i, func, (x,), {})
                              for i, x in enumerate(iterable)),
                             result._set_length))
        return result
    else:
        assert chunksize > 1
        task_batches = Pool._get_tasks(func, iterable, chunksize)
        result = IMapIterator(self._cache)
        self._taskqueue.put((((result._job, i, mapstar, (x,), {})
                              for i, x in enumerate(task_batches)),
                             result._set_length))
        return (item for chunk in result for item in chunk)
'Like `imap()` method but ordering of results is arbitrary'
def imap_unordered(self, func, iterable, chunksize=1):
    assert self._state == RUN
    if chunksize == 1:
        result = IMapUnorderedIterator(self._cache)
        self._taskqueue.put((((result._job, i, func, (x,), {})
                              for i, x in enumerate(iterable)),
                             result._set_length))
        return result
    else:
        assert chunksize > 1
        task_batches = Pool._get_tasks(func, iterable, chunksize)
        result = IMapUnorderedIterator(self._cache)
        self._taskqueue.put((((result._job, i, mapstar, (x,), {})
                              for i, x in enumerate(task_batches)),
                             result._set_length))
        return (item for chunk in result for item in chunk)
'Asynchronous equivalent of `apply()` builtin'
def apply_async(self, func, args=(), kwds={}, callback=None):
    assert self._state == RUN
    result = ApplyResult(self._cache, callback)
    self._taskqueue.put(([(result._job, None, func, args, kwds)], None))
    return result
'Asynchronous equivalent of `map()` builtin'
def map_async(self, func, iterable, chunksize=None, callback=None):
    assert self._state == RUN
    if not hasattr(iterable, '__len__'):
        iterable = list(iterable)

    if chunksize is None:
        chunksize, extra = divmod(len(iterable), len(self._pool) * 4)
        if extra:
            chunksize += 1
    if len(iterable) == 0:
        chunksize = 0

    task_batches = Pool._get_tasks(func, iterable, chunksize)
    result = MapResult(self._cache, chunksize, len(iterable), callback)
    self._taskqueue.put((((result._job, i, mapstar, (x,), {})
                          for i, x in enumerate(task_batches)), None))
    return result
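
A usage sketch covering the Pool entry points above (standard multiprocessing.Pool; square() is a made-up worker function):

from multiprocessing import Pool

def square(x):
    return x * x

if __name__ == '__main__':
    pool = Pool(processes=4)
    print pool.apply(square, (3,))          # 9, synchronous
    print pool.map(square, range(5))        # [0, 1, 4, 9, 16]
    res = pool.apply_async(square, (10,))
    print res.get(timeout=1)                # 100
    for v in pool.imap_unordered(square, range(5)):
        print v                             # results in arbitrary order
    pool.close()
    pool.join()
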
'Initialize and reset this instance.'
def __init__(self):
    self.reset()
'Reset this instance. Loses all unprocessed data.'
def reset(self):
    self.rawdata = ''
    self.lasttag = '???'
    self.interesting = interesting_normal
    markupbase.ParserBase.reset(self)
'Feed data to the parser. Call this as often as you want, with as little or as much text as you want (may include \'\n\').'
def feed(self, data):
    self.rawdata = self.rawdata + data
    self.goahead(0)
'Handle any buffered data.'
def close(self):
    self.goahead(1)
'Return full source of start tag: \'<...>\'.'
def get_starttag_text(self):
    return self.__starttag_text
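
A sketch of the feed()/close() protocol above, assuming the Python 2 HTMLParser.HTMLParser; LinkCollector is a made-up subclass:

from HTMLParser import HTMLParser

class LinkCollector(HTMLParser):
    def __init__(self):
        HTMLParser.__init__(self)
        self.links = []
    def handle_starttag(self, tag, attrs):
        if tag == 'a':
            self.links.append(dict(attrs).get('href'))

p = LinkCollector()
p.feed('<a href="http://example.com">ex')   # data may arrive in fragments
p.feed('ample</a>')
p.close()                                   # flush any buffered data
print p.links                               # ['http://example.com']
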
'Add a header to be used by the HTTP interface only, e.g. u.addheader(\'Accept\', \'sound/basic\')'
def addheader(self, *args):
    self.addheaders.append(args)
'Use URLopener().open(file) instead of open(file, \'r\').'
def open(self, fullurl, data=None):
    fullurl = unwrap(toBytes(fullurl))
    fullurl = quote(fullurl, safe="%/:=&?~#+!$,;'@()*[]|")
    if self.tempcache and fullurl in self.tempcache:
        filename, headers = self.tempcache[fullurl]
        fp = open(filename, 'rb')
        return addinfourl(fp, headers, fullurl)
    urltype, url = splittype(fullurl)
    if not urltype:
        urltype = 'file'
    if urltype in self.proxies:
        proxy = self.proxies[urltype]
        urltype, proxyhost = splittype(proxy)
        host, selector = splithost(proxyhost)
        url = (host, fullurl)
    else:
        proxy = None
    name = 'open_' + urltype
    self.type = urltype
    name = name.replace('-', '_')
    if not hasattr(self, name):
        if proxy:
            return self.open_unknown_proxy(proxy, fullurl, data)
        else:
            return self.open_unknown(fullurl, data)
    try:
        if data is None:
            return getattr(self, name)(url)
        else:
            return getattr(self, name)(url, data)
    except socket.error as msg:
        raise IOError, ('socket error', msg), sys.exc_info()[2]
'Overridable interface to open unknown URL type.'
def open_unknown(self, fullurl, data=None):
    type, url = splittype(fullurl)
    raise IOError, ('url error', 'unknown url type', type)
'Overridable interface to open unknown URL type.'
def open_unknown_proxy(self, proxy, fullurl, data=None):
    type, url = splittype(fullurl)
    raise IOError, ('url error', 'invalid proxy for %s' % type, proxy)
'retrieve(url) returns (filename, headers) for a local object or (tempfilename, headers) for a remote object.'
def retrieve(self, url, filename=None, reporthook=None, data=None):
    url = unwrap(toBytes(url))
    if self.tempcache and url in self.tempcache:
        return self.tempcache[url]
    type, url1 = splittype(url)
    if filename is None and (not type or type == 'file'):
        try:
            fp = self.open_local_file(url1)
            hdrs = fp.info()
            fp.close()
            return url2pathname(splithost(url1)[1]), hdrs
        except IOError:
            pass
    fp = self.open(url, data)
    try:
        headers = fp.info()
        if filename:
            tfp = open(filename, 'wb')
        else:
            import tempfile
            garbage, path = splittype(url)
            garbage, path = splithost(path or '')
            path, garbage = splitquery(path or '')
            path, garbage = splitattr(path or '')
            suffix = os.path.splitext(path)[1]
            fd, filename = tempfile.mkstemp(suffix)
            self.__tempfiles.append(filename)
            tfp = os.fdopen(fd, 'wb')
        try:
            result = filename, headers
            if self.tempcache is not None:
                self.tempcache[url] = result
            bs = 1024 * 8
            size = -1
            read = 0
            blocknum = 0
            if reporthook:
                if 'content-length' in headers:
                    size = int(headers['Content-Length'])
                reporthook(blocknum, bs, size)
            while 1:
                block = fp.read(bs)
                if block == '':
                    break
                read += len(block)
                tfp.write(block)
                blocknum += 1
                if reporthook:
                    reporthook(blocknum, bs, size)
        finally:
            tfp.close()
    finally:
        fp.close()

    if size >= 0 and read < size:
        raise ContentTooShortError(
            'retrieval incomplete: got only %i out of %i bytes'
            % (read, size), result)

    return result
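
A sketch of retrieve() through the module-level urllib.urlretrieve wrapper; the reporthook receives (blocknum, blocksize, totalsize) exactly as in the loop above, and the URL is hypothetical:

import urllib

def progress(blocknum, bs, size):
    if size > 0:
        print 'downloaded %d of %d bytes' % (min(blocknum * bs, size), size)

filename, headers = urllib.urlretrieve(
    'http://example.com/data.txt', 'data.txt', reporthook=progress)
print headers.gettype()   # the Content-Type reported by the server
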