Dataset columns:
  _id               string (lengths 2 to 7)
  title             string (lengths 1 to 88)
  partition         string (3 classes)
  text              string (lengths 31 to 13.1k)
  language          string (1 class)
  meta_information  dict
q280400
QtKillRing.rotate
test
def rotate(self):
    """ Rotate the kill ring, then yank back the new top.
    """
    if self._prev_yank:
        text = self._ring.rotate()
        if text:
            self._skip_cursor = True
python
{ "resource": "" }
q280401
patch_pyzmq
test
def patch_pyzmq():
    """backport a few patches from newer pyzmq

    These can be removed as we bump our minimum pyzmq version
    """
    import zmq

    # ioloop.install, introduced in pyzmq 2.1.7
    from zmq.eventloop import ioloop

    def install():
        import tornado.ioloop
        tornado.ioloop.IOLoop = ioloop.IOLoop

    if not hasattr(ioloop, 'install'):
        ioloop.install = install

    # fix missing DEALER/ROUTER aliases in pyzmq < 2.1.9
    if not hasattr(zmq, 'DEALER'):
        zmq.DEALER = zmq.XREQ
    if not hasattr(zmq, 'ROUTER'):
        zmq.ROUTER = zmq.XREP
python
{ "resource": "" }
q280402
parser_from_schema
test
def parser_from_schema(schema_url, require_version=True):
    """ Returns an XSD-schema-enabled lxml parser from a WSDL or XSD

    `schema_url` can of course be a local path via a file:// url
    """
    schema_tree = etree.parse(schema_url)

    def get_version(element, getter):
        try:
            return getter(element)
        except VersionNotFound:
            if require_version:
                raise
            else:
                return None

    root = schema_tree.getroot()
    if root.tag == '{%s}definitions' % namespaces.WSDL:
python
{ "resource": "" }
q280403
AuthenticatedHandler.ws_url
test
def ws_url(self):
    """websocket url matching the current request

    turns http[s]://host[:port] into
        ws[s]://host[:port]
    """
    proto =
python
{ "resource": "" }
q280404
ZMQStreamHandler._reserialize_reply
test
def _reserialize_reply(self, msg_list):
    """Reserialize a reply message using JSON.

    This takes the msg list from the ZMQ socket, unserializes it using
    self.session and then serializes the result using JSON. This method
    should be used by self._on_zmq_reply to build messages that can
python
{ "resource": "" }
q280405
AuthenticatedZMQStreamHandler._inject_cookie_message
test
def _inject_cookie_message(self, msg):
    """Inject the first message, which is the document cookie, for authentication."""
    if isinstance(msg, unicode):
        # the Cookie constructor doesn't accept unicode strings for some reason
        msg = msg.encode('utf8', 'replace')
    try:
python
{ "resource": "" }
q280406
IOPubHandler.start_hb
test
def start_hb(self, callback):
    """Start the heartbeating and call the callback if the kernel dies."""
    if not self._beating:
        self._kernel_alive = True

        def ping_or_dead():
            self.hb_stream.flush()
            if self._kernel_alive:
                self._kernel_alive = False
                self.hb_stream.send(b'ping')
                # flush stream to force immediate socket send
                self.hb_stream.flush()
            else:
                try:
                    callback()
                except:
                    pass
python
{ "resource": "" }
q280407
IOPubHandler._really_start_hb
test
def _really_start_hb(self):
    """callback for delayed heartbeat start

    Only start the hb loop if we haven't been closed during the wait.
    """
python
{ "resource": "" }
q280408
IOPubHandler.stop_hb
test
def stop_hb(self):
    """Stop the heartbeating and cancel all related callbacks."""
    if self._beating:
python
{ "resource": "" }
q280409
Demo.fload
test
def fload(self):
    """Load file object."""
    # read data and parse into blocks
    if hasattr(self, 'fobj') and self.fobj is not None:
        self.fobj.close()
    if hasattr(self.src, "read"):
        # It seems to be a file or a file-like object
python
{ "resource": "" }
q280410
Demo._get_index
test
def _get_index(self, index):
    """Get the current block index, validating and checking status.

    Returns None if the demo is finished"""
    if index is None:
        if self.finished:
            print >>io.stdout, 'Demo finished. Use <demo_name>.reset() if you
python
{ "resource": "" }
q280411
Demo.seek
test
def seek(self, index):
    """Move the current seek pointer to the given block.

    You can use negative indices to seek from the end, with identical
    semantics to those of Python lists."""
    if index < 0:
python
{ "resource": "" }
q280412
Demo.edit
test
def edit(self, index=None):
    """Edit a block.

    If no number is given, use the last block executed.

    This edits the in-memory copy of the demo, it does NOT modify the
    original source file. If you want to do that, simply open the file in
    an editor and use reload() when you make changes to the file.

    This method is meant to let you change a block during a demonstration
    for explanatory purposes, without damaging your original script."""

    index = self._get_index(index)
    if index is None:
        return
    # decrease the index by one (unless we're at the very beginning), so
    # that the default demo.edit() call
python
{ "resource": "" }
q280413
Demo.show
test
def show(self, index=None):
    """Show a single block on screen"""
    index = self._get_index(index)
    if index is None:
        return
    print >>io.stdout, self.marquee('<%s> block # %s (%s remaining)' %
python
{ "resource": "" }
q280414
Demo.show_all
test
def show_all(self):
    """Show entire demo on screen, block by block"""
    fname = self.title
    title = self.title
    nblocks = self.nblocks
    silent = self._silent
    marquee = self.marquee
    for index, block in enumerate(self.src_blocks_colored):
        if silent[index]:
            print >>io.stdout, marquee('<%s> SILENT block # %s (%s remaining)' %
                                       (title, index, nblocks-index-1))
python
{ "resource": "" }
q280415
series
test
def series(collection, method, prints=15, *args, **kwargs):
    '''
    Processes a collection in series

    Parameters
    ----------
    collection : list
        list of Record objects
    method :
        method to call on each Record
    prints : int
        number of timer prints to the screen

    Returns
    -------
    collection : list
        list of Record objects after going through method called

    If more than one collection is given, the function is called with an
    argument list consisting of the corresponding item of each collection,
    substituting None for missing values when not all collections have the
    same length. If the function is None, return the original collection
    (or a list of tuples if multiple collections).

    Example
    -------
    adding 2 to every number in a range
python
{ "resource": "" }
q280416
batch
test
def batch(collection, method, processes=None, batch_size=None, quiet=False,
          kwargs_to_dump=None, args=None, **kwargs):
    '''Processes a collection in parallel batches, each batch processing in
    series on a single process.

    Running batches in parallel can be more efficient than splitting a list
    across cores as in spin.parallel, because parallel processing has high
    IO requirements.

    Parameters
    ----------
    collection : list
        i.e. list of Record objects
    method :
        method to call on each Record
    processes : int
        number of processes to run on [defaults to number of cores on machine]
    batch_size : int
        length of each batch [defaults to number of elements / number of processes]

    Returns
    -------
    collection : list
        list of Record objects after going through method called

    Example
    -------
    adding 2 to every number in a range

    >>> import turntable
    >>> collection = range(100)
    >>> def jam(record):
    >>>     return record + 2
    >>> collection = turntable.spin.batch(collection, jam)

    Note
    ----
    lambda functions do not work in parallel
    '''
    if processes is None:
python
{ "resource": "" }
q280417
thread
test
def thread(function, sequence, cores=None, runSeries=False, quiet=False):
    '''sets up the threadpool with map for parallel processing'''
    # Make the pool of workers
    if cores is None:
        pool = ThreadPool()
    else:
        pool = ThreadPool(cores)
    # Operate on the list of subjects with the requested function
    # in the split threads
    tic = time.time()
    if runSeries is False:
        try:
            results = pool.map(function, sequence)
            # close the pool and wait for the work to finish
            pool.close()
            pool.join()
        except:
            print 'thread
python
{ "resource": "" }
q280418
parallel
test
def parallel(collection, method, processes=None, args=None, **kwargs):
    '''Processes a collection in parallel.

    Parameters
    ----------
    collection : list
        i.e. list of Record objects
    method :
        method to call on each Record
    processes : int
        number of processes to run on [defaults to number of cores on machine]
    batch_size : int
        length of each batch [defaults to number of elements / number of processes]

    Returns
    -------
    collection : list
        list of Record objects after going through method called

    Example
    -------
    adding 2 to every number in a range

    >>> import turntable
    >>> collection = range(100)
    >>> def jam(record):
    >>>     return record + 2
    >>> collection = turntable.spin.parallel(collection, jam)

    Note
    ----
    lambda functions do not work in parallel
    '''
    if processes is None:
        # default to the number of cores, not exceeding 20
        processes = min(mp.cpu_count(), 20)
    print "Running parallel process on " + str(processes) + " cores. :-)"
    pool = mp.Pool(processes=processes)
    PROC = []
    tic = time.time()
    for main_arg in collection:
        if args is None:
            ARGS
python
{ "resource": "" }
q280419
with_it
test
def with_it(obj):
    '''
    run the decorated func inside `with obj`.

    example:
    ``` py
    @with_it(Lock())
    def func():
        pass
    ```
    '''
    def _wrap(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
python
{ "resource": "" }
q280420
with_objattrs
test
def with_objattrs(*names):
    '''
    like `with_objattr`, but enters the contexts one by one.
    '''
    def _wrap(func):
        @functools.wraps(func)
        def wrapper(self, *args, **kwargs):
            with contextlib.ExitStack() as stack:
                for name
python
{ "resource": "" }
q280421
tbsource
test
def tbsource(tb, context=6):
    """Get source from a traceback object.

    A tuple of two things is returned: a list of lines of context from the
    source code, and the index of the current line within that list. The
    optional second argument specifies the number of lines of context to
    return, which are centered around the current line.

    .. Note ::
       This is adapted from inspect.py in the python 2.4 standard library,
       since a bug in the 2.3 version of inspect prevents it from correctly
       locating source lines in a traceback frame.
    """
    lineno = tb.tb_lineno
    frame = tb.tb_frame

    if context > 0:
        start = lineno - 1 - context//2
        log.debug("lineno: %s start: %s", lineno, start)
        try:
            lines, dummy = inspect.findsource(frame)
        except IOError:
            lines, index = [''], 0
        else:
            all_lines = lines
            start = max(start, 1)
            start =
python
{ "resource": "" }
q280422
find_inspectable_lines
test
def find_inspectable_lines(lines, pos):
    """Find lines in home that are inspectable.

    Walk back from the err line up to 3 lines, but don't walk back over
    changes in indent level.

    Walk forward up to 3 lines, counting \ separated lines as 1. Don't walk
    over changes in indent level (unless part of an extended line)
    """
    cnt = re.compile(r'\\[\s\n]*$')
    df = re.compile(r':[\s\n]*$')
    ind = re.compile(r'^(\s*)')
    toinspect = []
    home = lines[pos]
    home_indent = ind.match(home).groups()[0]

    before = lines[max(pos-3, 0):pos]
    before.reverse()
    after = lines[pos+1:min(pos+4, len(lines))]

    for line in before:
        if ind.match(line).groups()[0] == home_indent:
            toinspect.append(line)
        else:
python
{ "resource": "" }
q280423
countdown
test
def countdown(name, date, description='', id='', granularity='sec',
              start=None, progressbar=False, progressbar_inversed=False,
              showpct=False):
    '''
    Create a countdown.
    '''
    end_date = dateparse.parse_datetime(date)
    end = dateformat.format(end_date, 'U')
    content = '<div class="name">' + name + '</div>'
    content += '<div class="description">' + description + '</div>'
    if progressbar:
        if not start:
            raise Exception('For progressbar, start date is required.')
        parsed_date = datetime.datetime.combine(
            dateparse.parse_date(start), datetime.time())
        start_date = dateparse.parse_datetime(start) or parsed_date
python
{ "resource": "" }
q280424
cleanup
test
def cleanup(controller, engines):
    """Cleanup routine to shut down all subprocesses we opened."""
    import signal, time

    print('Starting cleanup')
    print('Stopping engines...')
    for e in engines:
        e.send_signal(signal.SIGINT)
    print('Stopping controller...')
    # so it can shut down its queues
python
{ "resource": "" }
q280425
ConditionalModifier.pre_call
test
def pre_call(self, ctxt, pre_mod, post_mod, action):
    """
    A modifier hook function.  This is called in priority order prior to
    invoking the ``Action`` for the step.  This allows a modifier to alter
    the context, or to take over subsequent action invocation.

    :param ctxt: The context object.
    :param pre_mod: A list of the modifiers preceding this modifier in
                    the list of modifiers that is applicable to the
                    action.  This list is in priority order.
    :param post_mod: A list of the modifiers following this modifier in
                     the list of modifiers that is applicable to the
                     action.  This list is in priority order.
    :param action: The action that will be performed.

    :returns: A ``None`` return value indicates that the modifier
python
{ "resource": "" }
q280426
IgnoreErrorsModifier.post_call
test
def post_call(self, ctxt, result, action, post_mod, pre_mod):
    """
    A modifier hook function.  This is called in reverse-priority order
    after invoking the ``Action`` for the step.  This allows a modifier to
    inspect or alter the result of the step.

    :param ctxt: The context object.
    :param result: The result of the action.  This will be a
                   ``StepResult`` object.
    :param action: The action that was performed.
    :param post_mod: A list of modifiers following this modifier in the
                     list of modifiers that is applicable to the action.
                     This list is in priority order.
    :param pre_mod: A list of modifiers preceding this modifier in the
                    list of modifiers that is applicable to
python
{ "resource": "" }
q280427
save_ids
test
def save_ids(f, self, *args, **kwargs):
    """Keep our history and outstanding attributes up to date after a method call."""
    n_previous = len(self.client.history)
    try:
        ret = f(self, *args, **kwargs)
    finally:
        nmsgs = len(self.client.history) - n_previous
python
{ "resource": "" }
q280428
sync_results
test
def sync_results(f, self, *args, **kwargs):
    """sync relevant results from self.client to our results attribute."""
    ret = f(self, *args, **kwargs)
    delta = self.outstanding.difference(self.client.outstanding)
python
{ "resource": "" }
q280429
spin_after
test
def spin_after(f, self, *args, **kwargs):
    """call spin after the method."""
    ret =
python
{ "resource": "" }
q280430
BlockingSubSocketChannel.get_msgs
test
def get_msgs(self):
    """Get all messages that are currently ready."""
    msgs = []
    while
python
{ "resource": "" }
q280431
BlockingStdInSocketChannel.get_msg
test
def get_msg(self, block=True, timeout=None):
    "Gets a message if there is one that is ready."
python
{ "resource": "" }
q280432
prop
test
def prop(func=None, *, field=_UNSET,
         get: bool = True, set: bool = True, del_: bool = False,
         default=_UNSET, types: tuple = _UNSET):
    '''
    `prop` is a sugar for `property`.

    ``` py
    @prop
    def value(self):
        pass

    # equals:

    @property
    def value(self):
        return self._value

    @value.setter
    def value(self, val):
        self._value = val
    ```
    '''
    def wrap(func):
        if not callable(func):
            raise TypeError
        prop_name = func.__name__
        key = field
        if key is _UNSET:
            key = '_' + prop_name
        fget, fset, fdel = None, None, None
        if get:
            def fget(self):
                try:
                    return self.__dict__[key]
                except KeyError:
                    if default is not _UNSET:
                        return default
                    raise AttributeError(f"'{type(self).__name__}' object
python
{ "resource": "" }
q280433
get_onlys
test
def get_onlys(*fields):
    '''
    `get_onlys` is a sugar for multi-`property`.

    ``` py
    name, age = get_onlys('_name', '_age')

    # equals:

    @property
    def name(self):
        return getattr(self, '_name')

    @property
    def age(self):
python
{ "resource": "" }
q280434
parse
test
def parse(url):
    """Parses a database URL."""
    config = {}

    if not isinstance(url, six.string_types):
        url = ''

    url = urlparse.urlparse(url)

    # Remove query strings.
    path = url.path[1:]
    path = path.split('?', 2)[0]

    # Update with environment configuration.
    config.update({
        'NAME': path,
python
{ "resource": "" }
q280435
module_list
test
def module_list(path):
    """
    Return the list containing the names of the modules available in the given
    folder.
    """
    # sys.path has the cwd as an empty string, but isdir/listdir need it as '.'
    if path == '':
        path = '.'

    if os.path.isdir(path):
        folder_list = os.listdir(path)
    elif path.endswith('.egg'):
        try:
            folder_list = [f for f in zipimporter(path)._files]
        except:
            folder_list = []
    else:
        folder_list = []

    if not folder_list:
        return []

    # A few local constants to be used in loops below
    isfile = os.path.isfile
    pjoin = os.path.join
    basename = os.path.basename

    def is_importable_file(path):
python
{ "resource": "" }
q280436
get_root_modules
test
def get_root_modules():
    """
    Returns a list containing the names of all the modules available in the
    folders of the pythonpath.
    """
    ip = get_ipython()

    if 'rootmodules' in ip.db:
        return ip.db['rootmodules']

    t = time()
    store = False
    modules = list(sys.builtin_module_names)
    for path in sys.path:
        modules += module_list(path)
        if time() - t >= TIMEOUT_STORAGE and not store:
            store = True
            print("\nCaching the list of root modules, please wait!")
            print("(This will only be done once - type '%rehashx' to "
python
{ "resource": "" }
q280437
quick_completer
test
def quick_completer(cmd, completions):
    """ Easily create a trivial completer for a command.

    Takes either a list of completions, or all completions in string (that will
    be split on whitespace).

    Example::

        [d:\ipython]|1> import ipy_completers
        [d:\ipython]|2> ipy_completers.quick_completer('foo', ['bar','baz'])
        [d:\ipython]|3> foo b<TAB>
        bar baz
python
{ "resource": "" }
q280438
module_completion
test
def module_completion(line):
    """
    Returns a list containing the completion possibilities for an import line.

    The line looks like this :
    'import xml.d'
    'from xml.dom import'
    """
    words = line.split(' ')
    nwords = len(words)

    # from whatever <tab> -> 'import '
    if nwords == 3 and words[0] == 'from':
        return ['import ']

    # 'from xy<tab>' or 'import xy<tab>'
    if nwords < 3 and (words[0] in ['import', 'from']):
        if nwords == 1:
            return get_root_modules()
        mod = words[1].split('.')
        if len(mod) <
python
{ "resource": "" }
q280439
magic_run_completer
test
def magic_run_completer(self, event):
    """Complete files that end in .py or .ipy for the %run command.
    """
    comps = arg_split(event.line, strict=False)
    relpath = (len(comps) > 1 and comps[-1] or '').strip("'\"")

    #print("\nev=", event) # dbg
    #print("rp=", relpath) # dbg
    #print('comps=', comps) # dbg

    lglob = glob.glob
    isdir = os.path.isdir
    relpath, tilde_expand, tilde_val = expand_user(relpath)
    dirs = [f.replace('\\','/') + "/" for f in lglob(relpath+'*') if isdir(f)]

    # Find if the user has already typed the first filename, after which we
    # should complete on all files, since after the first one other files may
    # be arguments to the input script.
    if filter(magic_run_re.match, comps):
python
{ "resource": "" }
q280440
cd_completer
test
def cd_completer(self, event):
    """Completer function for cd, which only returns directories."""
    ip = get_ipython()
    relpath = event.symbol

    #print(event) # dbg
    if event.line.endswith('-b') or ' -b ' in event.line:
        # return only bookmark completions
        bkms = self.db.get('bookmarks', None)
        if bkms:
            return bkms.keys()
        else:
            return []

    if event.symbol == '-':
        width_dh = str(len(str(len(ip.user_ns['_dh']) + 1)))
        # jump in directory history by number
        fmt = '-%0' + width_dh + 'd [%s]'
        ents = [fmt % (i, s) for i, s in enumerate(ip.user_ns['_dh'])]
        if len(ents) > 1:
            return ents
        return []

    if event.symbol.startswith('--'):
        return ["--" + os.path.basename(d) for d in ip.user_ns['_dh']]

    # Expand ~ in path and normalize directory separators.
    relpath, tilde_expand, tilde_val = expand_user(relpath)
    relpath = relpath.replace('\\', '/')

    found = []
    for d in [f.replace('\\', '/') + '/'
              for f in glob.glob(relpath + '*') if os.path.isdir(f)]:
        if ' ' in d:
            # we don't want to deal with any of that, complex code
python
{ "resource": "" }
q280441
Xunit._quoteattr
test
def _quoteattr(self, attr):
    """Escape an XML attribute. Value can be unicode."""
    attr = xml_safe(attr)
    if isinstance(attr, unicode) and not UNICODE_STRINGS:
python
{ "resource": "" }
q280442
Xunit.configure
test
def configure(self, options, config):
    """Configures the xunit plugin."""
    Plugin.configure(self, options, config)
    self.config = config
    if self.enabled:
        self.stats = {'errors': 0,
                      'failures': 0,
                      'passes': 0,
python
{ "resource": "" }
q280443
Xunit.report
test
def report(self, stream):
    """Writes an Xunit-formatted XML file

    The file includes a report of test errors and failures.
    """
    self.stats['encoding'] = self.encoding
    self.stats['total'] = (self.stats['errors'] + self.stats['failures']
                           + self.stats['passes'] + self.stats['skipped'])
    self.error_report_file.write(
        u'<?xml version="1.0"
python
{ "resource": "" }
q280444
Xunit.addError
test
def addError(self, test, err, capt=None):
    """Add error output to Xunit report.
    """
    taken = self._timeTaken()

    if issubclass(err[0], SkipTest):
        type = 'skipped'
        self.stats['skipped'] += 1
    else:
        type = 'error'
        self.stats['errors'] += 1
    tb = ''.join(traceback.format_exception(*err))
    id = test.id()
python
{ "resource": "" }
q280445
Xunit.addFailure
test
def addFailure(self, test, err, capt=None, tb_info=None):
    """Add failure output to Xunit report.
    """
    taken = self._timeTaken()
    tb = ''.join(traceback.format_exception(*err))
    self.stats['failures'] += 1
    id = test.id()

    self.errorlist.append(
        '<testcase classname=%(cls)s name=%(name)s time="%(taken).3f">'
        '<failure type=%(errtype)s message=%(message)s><![CDATA[%(tb)s]]>'
        '</failure></testcase>' %
python
{ "resource": "" }
q280446
Xunit.addSuccess
test
def addSuccess(self, test, capt=None):
    """Add success output to Xunit report.
    """
    taken = self._timeTaken()
    self.stats['passes'] += 1
    id = test.id()
    self.errorlist.append(
        '<testcase classname=%(cls)s name=%(name)s '
python
{ "resource": "" }
q280447
twobin
test
def twobin(loads):
    """Pick two at random, use the LRU of the two.

    The content of loads is ignored.

    Assumes LRU ordering of loads, with oldest first.
    """
python
{ "resource": "" }
q280448
weighted
test
def weighted(loads):
    """Pick two at random using inverse load as weight.

    Return the less loaded of the two.
    """
    # weight 0 a million times more than 1:
    weights = 1./(1e-6+numpy.array(loads))
    sums = weights.cumsum()
    t = sums[-1]
    x = random()*t
    y = random()*t
    idx = 0
python
{ "resource": "" }
q280449
TaskScheduler._register_engine
test
def _register_engine(self, uid):
    """New engine with ident `uid` became available."""
    # head of the line:
    self.targets.insert(0, uid)
    self.loads.insert(0, 0)
    # initialize sets
    self.completed[uid] = set()
python
{ "resource": "" }
q280450
TaskScheduler._unregister_engine
test
def _unregister_engine(self, uid):
    """Existing engine with ident `uid` became unavailable."""
    if len(self.targets) == 1:
        # this was our only engine
        pass

    # handle any potentially finished tasks:
    self.engine_stream.flush()

    # don't pop destinations, because they might be used later
    # map(self.destinations.pop, self.completed.pop(uid))
    # map(self.destinations.pop, self.failed.pop(uid))

    # prevent this engine from receiving work
    idx = self.targets.index(uid)
    self.targets.pop(idx)
    self.loads.pop(idx)

    # wait 5 seconds before cleaning up pending
python
{ "resource": "" }
q280451
TaskScheduler.handle_stranded_tasks
test
def handle_stranded_tasks(self, engine):
    """Deal with jobs resident in an engine that died."""
    lost = self.pending[engine]
    for msg_id in lost.keys():
        if msg_id not in self.pending[engine]:
            # prevent double-handling of messages
            continue

        raw_msg = lost[msg_id].raw_msg
        idents, msg = self.session.feed_identities(raw_msg, copy=False)
        parent = self.session.unpack(msg[1].bytes)
        idents = [engine, idents[0]]

        # build fake error reply
        try:
            raise error.EngineError("Engine %r died while running task %r" % (engine, msg_id))
        except:
            content = error.wrap_exception()
        # build fake header
        header = dict(
            status='error',
python
{ "resource": "" }
q280452
TaskScheduler.dispatch_submission
test
def dispatch_submission(self, raw_msg):
    """Dispatch job submission to appropriate handlers."""
    # ensure targets up to date:
    self.notifier_stream.flush()
    try:
        idents, msg = self.session.feed_identities(raw_msg, copy=False)
        msg = self.session.unserialize(msg, content=False, copy=False)
    except Exception:
        self.log.error("task::Invalid task msg: %r" % raw_msg, exc_info=True)
        return

    # send to monitor
    self.mon_stream.send_multipart([b'intask'] + raw_msg, copy=False)

    header = msg['header']
    msg_id = header['msg_id']
    self.all_ids.add(msg_id)

    # get targets as a set of bytes objects
    # from a list of unicode objects
    targets = header.get('targets', [])
    targets = map(cast_bytes, targets)
    targets = set(targets)

    retries = header.get('retries', 0)
    self.retries[msg_id] = retries

    # time dependencies
    after = header.get('after', None)
    if after:
        after = Dependency(after)
        if after.all:
            if after.success:
                after = Dependency(after.difference(self.all_completed),
                                   success=after.success,
                                   failure=after.failure,
                                   all=after.all,
                                   )
            if after.failure:
                after = Dependency(after.difference(self.all_failed),
                                   success=after.success,
                                   failure=after.failure,
                                   all=after.all,
                                   )
        if after.check(self.all_completed, self.all_failed):
            # recast as empty set, if `after` already met,
            # to prevent unnecessary set comparisons
            after = MET
    else:
        after = MET

    # location dependencies
python
{ "resource": "" }
q280453
TaskScheduler.audit_timeouts
test
def audit_timeouts(self):
    """Audit all waiting tasks for expired timeouts."""
    now = datetime.now()
    for msg_id in self.depending.keys():
        # must recheck, in case one failure cascaded to another:
        if msg_id in self.depending:
python
{ "resource": "" }
q280454
TaskScheduler.fail_unreachable
test
def fail_unreachable(self, msg_id, why=error.ImpossibleDependency):
    """a task has become unreachable, send a reply with an
    ImpossibleDependency error."""
    if msg_id not in self.depending:
        self.log.error("msg %r already failed!", msg_id)
        return
    job = self.depending.pop(msg_id)
    for mid in job.dependents:
        if mid in self.graph:
            self.graph[mid].remove(msg_id)

    try:
        raise why()
    except:
python
{ "resource": "" }
q280455
TaskScheduler.maybe_run
test
def maybe_run(self, job):
    """check location dependencies, and run if they are met."""
    msg_id = job.msg_id
    self.log.debug("Attempting to assign task %s", msg_id)
    if not self.targets:
        # no engines, definitely can't run
        return False

    if job.follow or job.targets or job.blacklist or self.hwm:
        # we need a can_run filter
        def can_run(idx):
            # check hwm
            if self.hwm and self.loads[idx] == self.hwm:
                return False
            target = self.targets[idx]
            # check blacklist
            if target in job.blacklist:
                return False
            # check targets
            if job.targets and target not in job.targets:
                return False
            # check follow
            return job.follow.check(self.completed[target], self.failed[target])

        indices = filter(can_run, range(len(self.targets)))

        if not indices:
            # couldn't run
            if job.follow.all:
                # check follow for impossibility
                dests = set()
                relevant = set()
                if job.follow.success:
                    relevant = self.all_completed
                if job.follow.failure:
                    relevant = relevant.union(self.all_failed)
                for m in job.follow.intersection(relevant):
                    dests.add(self.destinations[m])
python
{ "resource": "" }
q280456
TaskScheduler.save_unmet
test
def save_unmet(self, job):
    """Save a message for later submission when its dependencies are met."""
    msg_id = job.msg_id
    self.depending[msg_id] = job
    # track the ids in follow or after, but not those already finished
python
{ "resource": "" }
q280457
TaskScheduler.submit_task
test
def submit_task(self, job, indices=None):
    """Submit a task to any of a subset of our targets."""
    if indices:
        loads = [self.loads[i] for i in indices]
    else:
        loads = self.loads
    idx = self.scheme(loads)
    if indices:
        idx = indices[idx]
    target = self.targets[idx]
    # print (target, map(str, msg[:3]))
python
{ "resource": "" }
q280458
TaskScheduler.dispatch_result
test
def dispatch_result(self, raw_msg):
    """dispatch method for result replies"""
    try:
        idents, msg = self.session.feed_identities(raw_msg, copy=False)
        msg = self.session.unserialize(msg, content=False, copy=False)
        engine = idents[0]
        try:
            idx = self.targets.index(engine)
        except ValueError:
            pass  # skip load-update for dead engines
        else:
            self.finish_job(idx)
    except Exception:
        self.log.error("task::Invalid result: %r", raw_msg, exc_info=True)
        return

    header = msg['header']
    parent = msg['parent_header']
    if header.get('dependencies_met', True):
        success = (header['status'] == 'ok')
        msg_id = parent['msg_id']
        retries = self.retries[msg_id]
        if not success and retries > 0:
            # failed
python
{ "resource": "" }
q280459
TaskScheduler.handle_result
test
def handle_result(self, idents, parent, raw_msg, success=True):
    """handle a real task result, either success or failure"""
    # first, relay result to client
    engine = idents[0]
    client = idents[1]
    # swap_ids for ROUTER-ROUTER mirror
    raw_msg[:2] = [client, engine]
    # print (map(str, raw_msg[:4]))
python
{ "resource": "" }
q280460
TaskScheduler.handle_unmet_dependency
test
def handle_unmet_dependency(self, idents, parent):
    """handle an unmet dependency"""
    engine = idents[0]
    msg_id = parent['msg_id']

    job = self.pending[engine].pop(msg_id)
    job.blacklist.add(engine)

    if job.blacklist == job.targets:
        self.depending[msg_id] = job
        self.fail_unreachable(msg_id)
    elif not self.maybe_run(job):
        # resubmit failed
        if msg_id not in self.all_failed:
            # put it back in our dependency tree
python
{ "resource": "" }
q280461
TaskScheduler.update_graph
test
def update_graph(self, dep_id=None, success=True):
    """dep_id just finished. Update our dependency graph and submit any
    jobs that just became runnable.

    Called with dep_id=None to update entire graph for hwm, but without
    finishing a task.
    """
    # print ("\n\n***********")
    # pprint (dep_id)
    # pprint (self.graph)
    # pprint (self.depending)
    # pprint (self.all_completed)
    # pprint (self.all_failed)
    # print ("\n\n***********\n\n")
    # update any jobs that depended on the dependency
    jobs = self.graph.pop(dep_id, [])

    # recheck *all* jobs if
python
{ "resource": "" }
q280462
Logger.logstart
test
def logstart(self, logfname=None, loghead=None, logmode=None,
             log_output=False, timestamp=False, log_raw_input=False):
    """Generate a new log-file with a default header.

    Raises RuntimeError if the log has already been started"""

    if self.logfile is not None:
        raise RuntimeError('Log file is already active: %s' %
                           self.logfname)

    # The parameters can override constructor defaults
    if logfname is not None: self.logfname = logfname
    if loghead is not None: self.loghead = loghead
    if logmode is not None: self.logmode = logmode

    # Parameters not part of the constructor
    self.timestamp = timestamp
    self.log_output = log_output
    self.log_raw_input = log_raw_input

    # init depending on the log mode requested
    isfile = os.path.isfile
    logmode = self.logmode

    if logmode == 'append':
        self.logfile = io.open(self.logfname, 'a', encoding='utf-8')

    elif logmode == 'backup':
python
{ "resource": "" }
q280463
Logger.logstate
test
def logstate(self):
    """Print a status message about the logger."""
    if self.logfile is None:
        print 'Logging has not been activated.'
    else:
        state = self.log_active and 'active' or 'temporarily suspended'
python
{ "resource": "" }
q280464
Logger.log
test
def log(self, line_mod, line_ori):
    """Write the sources to a log.

    Inputs:

    - line_mod: possibly modified input, such as the transformations made
      by input prefilters or input handlers of various kinds. This should
      always be valid Python.

    - line_ori: unmodified input line from the user. This is not
      necessarily valid Python.
python
{ "resource": "" }
q280465
Logger.log_write
test
def log_write(self, data, kind='input'):
    """Write data to the log file, if active"""

    #print 'data: %r' % data # dbg
    if self.log_active and data:
        write = self.logfile.write
        if kind == 'input':
            if self.timestamp:
                write(str_to_unicode(time.strftime('# %a, %d %b %Y %H:%M:%S\n',
                                                   time.localtime())))
python
{ "resource": "" }
q280466
Logger.logstop
test
def logstop(self):
    """Fully stop logging and close log file.

    In order to start logging again, a new logstart() call needs to be
    made, possibly (though not necessarily) with a new filename, mode and
    other options."""

    if self.logfile is not None:
python
{ "resource": "" }
q280467
new_worksheet
test
def new_worksheet(name=None, cells=None):
    """Create a worksheet by name with a list of cells."""
    ws = NotebookNode()
    if name is not None:
python
{ "resource": "" }
q280468
StrDispatch.add_s
test
def add_s(self, s, obj, priority=0):
    """ Adds a target 'string' for dispatching """
python
{ "resource": "" }
q280469
StrDispatch.add_re
test
def add_re(self, regex, obj, priority=0):
    """ Adds a target regexp for dispatching """
python
{ "resource": "" }
q280470
StrDispatch.dispatch
test
def dispatch(self, key):
    """ Get a seq of Commandchain objects that match key """
    if key in self.strs:
        yield self.strs[key]

    for r, obj in self.regexs.items():
        if re.match(r, key):
python
{ "resource": "" }
q280471
StrDispatch.flat_matches
test
def flat_matches(self, key):
    """ Yield all 'value' targets, without priority """
    for val in self.dispatch(key):
python
{ "resource": "" }
q280472
NotebookManager._notebook_dir_changed
test
def _notebook_dir_changed(self, name, old, new):
    """do a bit of validation of the notebook dir"""
    if os.path.exists(new) and not os.path.isdir(new):
        raise TraitError("notebook dir %r
python
{ "resource": "" }
q280473
NotebookManager.list_notebooks
test
def list_notebooks(self):
    """List all notebooks in the notebook dir.

    This returns a list of dicts of the form::

        dict(notebook_id=notebook,name=name)
    """
    names = glob.glob(os.path.join(self.notebook_dir,
                                   '*' + self.filename_ext))
    names = [os.path.splitext(os.path.basename(name))[0]
             for name in names]

    data = []
    for name in names:
        if name not in self.rev_mapping:
python
{ "resource": "" }
q280474
NotebookManager.new_notebook_id
test
def new_notebook_id(self, name):
    """Generate a new notebook_id for a name and store its mappings."""
    # TODO: the following will give stable urls for notebooks, but unless
    # the notebooks are immediately redirected to their new urls when their
    # filename changes, nasty inconsistencies result. So for now it's
python
{ "resource": "" }
q280475
NotebookManager.delete_notebook_id
test
def delete_notebook_id(self, notebook_id):
    """Delete a notebook's id only. This doesn't delete the
python
{ "resource": "" }
q280476
NotebookManager.notebook_exists
test
def notebook_exists(self, notebook_id):
    """Does a notebook exist?"""
    if notebook_id not in self.mapping:
        return False
python
{ "resource": "" }
q280477
NotebookManager.find_path
test
def find_path(self, notebook_id):
    """Return a full path to a notebook given its notebook_id."""
    try:
        name = self.mapping[notebook_id]
    except KeyError:
python
{ "resource": "" }
q280478
NotebookManager.get_path_by_name
test
def get_path_by_name(self, name):
    """Return a full path to a notebook given its name."""
    filename = name + self.filename_ext
python
{ "resource": "" }
q280479
NotebookManager.get_notebook
test
def get_notebook(self, notebook_id, format=u'json'):
    """Get the representation of a notebook in format by notebook_id."""
    format = unicode(format)
    if format not in self.allowed_formats:
        raise web.HTTPError(415, u'Invalid notebook format: %s' % format)
    last_modified, nb = self.get_notebook_object(notebook_id)
    kwargs = {}
    if format == 'json':
        # don't split lines for sending over the wire, because it
        # should match
python
{ "resource": "" }
q280480
NotebookManager.get_notebook_object
test
def get_notebook_object(self, notebook_id):
    """Get the NotebookNode representation of a notebook by notebook_id."""
    path = self.find_path(notebook_id)
    if not os.path.isfile(path):
        raise web.HTTPError(404, u'Notebook does not exist: %s' % notebook_id)
    info = os.stat(path)
    last_modified = datetime.datetime.utcfromtimestamp(info.st_mtime)
    with open(path, 'r') as f:
        s = f.read()
        try:
            # v1 and v2 and json in the .ipynb files.
python
{ "resource": "" }
q280481
NotebookManager.save_new_notebook
test
def save_new_notebook(self, data, name=None, format=u'json'):
    """Save a new notebook and return its notebook_id.

    If a name is passed in, it overrides any values in the notebook data
    and the value in the data is updated to use that value.
    """
    if format not in self.allowed_formats:
        raise web.HTTPError(415, u'Invalid notebook format: %s' % format)

    try:
        nb = current.reads(data.decode('utf-8'), format)
    except:
        raise web.HTTPError(400, u'Invalid JSON data')

    if name is None:
python
{ "resource": "" }
q280482
NotebookManager.save_notebook
test
def save_notebook(self, notebook_id, data, name=None, format=u'json'):
    """Save an existing notebook by notebook_id."""
    if format not in self.allowed_formats:
        raise web.HTTPError(415, u'Invalid notebook format: %s' % format)

    try:
        nb = current.reads(data.decode('utf-8'), format)
python
{ "resource": "" }
q280483
NotebookManager.save_notebook_object
test
def save_notebook_object(self, notebook_id, nb):
    """Save an existing notebook object by notebook_id."""
    if notebook_id not in self.mapping:
        raise web.HTTPError(404, u'Notebook does not exist: %s' % notebook_id)
    old_name = self.mapping[notebook_id]
    try:
        new_name = nb.metadata.name
    except AttributeError:
        raise web.HTTPError(400, u'Missing notebook name')
    path = self.get_path_by_name(new_name)
    try:
        with open(path, 'w') as f:
            current.write(nb, f, u'json')
    except Exception as e:
        raise web.HTTPError(400, u'Unexpected error while saving notebook: %s' % e)
    # save .py script as well
    if self.save_script:
        pypath = os.path.splitext(path)[0] + '.py'
        try:
            with io.open(pypath, 'w', encoding='utf-8') as f:
                current.write(nb, f, u'py')
        except Exception as e:
            raise web.HTTPError(400, u'Unexpected error while saving notebook as
python
{ "resource": "" }
q280484
NotebookManager.delete_notebook
test
def delete_notebook(self, notebook_id):
    """Delete notebook by notebook_id."""
    path = self.find_path(notebook_id)
    if not os.path.isfile(path):
python
{ "resource": "" }
q280485
NotebookManager.new_notebook
test
def new_notebook(self):
    """Create a new notebook and return its notebook_id."""
    path, name = self.increment_filename('Untitled')
    notebook_id = self.new_notebook_id(name)
    metadata = current.new_metadata(name=name)
python
{ "resource": "" }
q280486
NotebookManager.copy_notebook
test
def copy_notebook(self, notebook_id):
    """Copy an existing notebook and return its notebook_id."""
    last_mod, nb = self.get_notebook_object(notebook_id)
    name = nb.metadata.name + '-Copy'
    path, name = self.increment_filename(name)
    nb.metadata.name = name
python
{ "resource": "" }
q280487
phys_tokens
test
def phys_tokens(toks):
    """Return all physical tokens, even line continuations.

    tokenize.generate_tokens() doesn't return a token for the backslash
    that continues lines.  This wrapper provides those tokens so that we
    can re-create a faithful representation of the original source.

    Returns the same values as generate_tokens()
    """
    last_line = None
    last_lineno = -1
    last_ttype = None
    for ttype, ttext, (slineno, scol), (elineno, ecol), ltext in toks:
        if last_lineno != elineno:
            if last_line and last_line.endswith("\\\n"):
                # We are at the beginning of a new line, and the last line
                # ended with a backslash.  We probably have to inject a
                # backslash token into the stream.  Unfortunately, there's more
                # to figure out.  This code::
                #
                #   usage = """\
                #   HEY THERE
                #   """
                #
                # triggers this condition, but the token text is::
                #
                #   '"""\\\nHEY THERE\n"""'
                #
                # so we need to figure out if the backslash is already in the
                # string token or not.
                inject_backslash = True
                if last_ttype == tokenize.COMMENT:
                    # Comments like this \
                    # should never result in a new token.
                    inject_backslash = False
                elif ttype == token.STRING:
python
{ "resource": "" }
q280488
source_token_lines
test
def source_token_lines(source):
    """Generate a series of lines, one for each line in `source`.

    Each line is a list of pairs, each pair is a token::

        [('key', 'def'), ('ws', ' '), ('nam', 'hello'), ('op', '('), ... ]

    Each pair has a token class, and the token text.

    If you concatenate all the token texts, and then join them with
    newlines, you should have your original `source` back, with two
    differences: trailing whitespace is not preserved, and a final line
    with no newline is indistinguishable from a final line with a newline.
    """
    ws_tokens = set([token.INDENT, token.DEDENT, token.NEWLINE, tokenize.NL])
    line = []
    col = 0
    source = source.expandtabs(8).replace('\r\n', '\n')
    tokgen = generate_tokens(source)
    for ttype, ttext, (_, scol), (_, ecol), _ in phys_tokens(tokgen):
        mark_start = True
        for part in re.split('(\n)', ttext):
            if part == '\n':
                yield line
                line = []
                col = 0
                mark_end = False
python
{ "resource": "" }
q280489
load_default_config
test
def load_default_config(ipython_dir=None):
    """Load the default config file from the default ipython_dir.

    This is useful for embedded shells.
    """
    if ipython_dir is None:
        ipython_dir =
python
{ "resource": "" }
q280490
TerminalIPythonApp._classes_default
test
def _classes_default(self):
    """This has to be in a method, for TerminalIPythonApp to be available."""
    return [
        InteractiveShellApp,  # ShellApp comes before TerminalApp, because
        self.__class__,       # it will also affect subclasses (e.g. QtConsole)
python
{ "resource": "" }
q280491
TerminalIPythonApp.parse_command_line
test
def parse_command_line(self, argv=None):
    """override to allow old '-pylab' flag with deprecation warning"""
    argv = sys.argv[1:] if argv is None else argv

    if '-pylab' in argv:
        # deprecated `-pylab` given,
        # warn and transform into current syntax
        argv = argv[:]  # copy, don't clobber
        idx = argv.index('-pylab')
        warn.warn("`-pylab` flag has been deprecated.\n"
                  "    Use `--pylab` instead, or `--pylab=foo` to specify a backend.")
        sub = '--pylab'
        if len(argv) > idx+1:
python
{ "resource": "" }
q280492
TerminalIPythonApp.initialize
test
def initialize(self, argv=None):
    """Do actions after construct, but before starting the app."""
    super(TerminalIPythonApp, self).initialize(argv)
    if self.subapp is not None:
        # don't bother initializing further, starting subapp
        return
    if not self.ignore_old_config:
python
{ "resource": "" }
q280493
TerminalIPythonApp.init_shell
test
def init_shell(self):
    """initialize the InteractiveShell instance"""
    # Create an InteractiveShell instance.
    # shell.display_banner should always be False for the terminal
    # based app, because we call shell.show_banner() by hand
python
{ "resource": "" }
q280494
TerminalIPythonApp.init_banner
test
def init_banner(self):
    """optionally display the banner"""
    if self.display_banner and self.interact:
        self.shell.show_banner()
python
{ "resource": "" }
q280495
repr_type
test
def repr_type(obj):
    """ Return a string representation of a value and its type for readable
    error messages.
    """
    the_type = type(obj)
    if (not py3compat.PY3) and the_type is InstanceType:
python
{ "resource": "" }
q280496
parse_notifier_name
test
def parse_notifier_name(name):
    """Convert the name argument to a list of names.

    Examples
    --------

    >>> parse_notifier_name('a')
    ['a']
    >>> parse_notifier_name(['a','b'])
    ['a', 'b']
    >>> parse_notifier_name(None)
    ['anytrait']
    """
    if isinstance(name, str):
        return [name]
python
{ "resource": "" }
q280497
TraitType.set_default_value
test
def set_default_value(self, obj):
    """Set the default value on a per instance basis.

    This method is called by :meth:`instance_init` to create and
    validate the default value.  The creation and validation of
    default values must be delayed until the parent :class:`HasTraits`
    class has been instantiated.
    """
    # Check for a deferred initializer defined in the same class as the
    # trait declaration or above.
    mro = type(obj).mro()
    meth_name = '_%s_default' % self.name
    for cls in mro[:mro.index(self.this_class)+1]:
        if meth_name in cls.__dict__:
python
{ "resource": "" }
q280498
HasTraits.on_trait_change
test
def on_trait_change(self, handler, name=None, remove=False):
    """Setup a handler to be called when a trait changes.

    This is used to setup dynamic notifications of trait changes.

    Static handlers can be created by creating methods on a HasTraits
    subclass with the naming convention '_[traitname]_changed'.  Thus,
    to create a static handler for the trait 'a', create the method
    _a_changed(self, name, old, new) (fewer arguments can be used, see
    below).

    Parameters
    ----------
    handler : callable
python
{ "resource": "" }
q280499
HasTraits.class_traits
test
def class_traits(cls, **metadata):
    """Get a list of all the traits of this class.

    This method is just like the :meth:`traits` method, but is unbound.

    The TraitTypes returned don't know anything about the values
    that the various HasTrait's instances are holding.

    This follows the same algorithm as traits does and does not allow
    for any simple way of specifying merely that a metadata name
    exists, but has any value.  This is because get_metadata returns
    None if a metadata key doesn't exist.
    """
    traits = dict([memb for memb in getmembers(cls) if
                   isinstance(memb[1], TraitType)])
python
{ "resource": "" }