Dataset columns:

_id               string (2 to 7 characters)
title             string (1 to 88 characters)
partition         string (3 classes)
text              string (31 to 13.1k characters)
language          string (1 class)
meta_information  dict
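The records below follow this schema; each `text` field holds a (possibly truncated) Python snippet. As a minimal sketch of how such a dump could be consumed — assuming it is exported as JSON Lines with these six fields; the file name "records.jsonl" is a placeholder — one might iterate over it like this:

    # A minimal sketch, assuming a JSON Lines export of the table above.
    # "records.jsonl" is a hypothetical file name, not part of the dataset.
    import json

    with open("records.jsonl") as f:
        for line in f:
            row = json.loads(line)
            # Each record pairs a function name with its source snippet.
            print(row["_id"], row["title"], row["partition"], row["language"])
            print(row["text"][:80])  # the `text` field holds the Python code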
q279900
StdInSocketChannel.input
test
def input(self, string):
    """Send a string of raw input to the kernel."""
    content = dict(value=string)
python
{ "resource": "" }
q279901
KernelManager.start_channels
test
def start_channels(self, shell=True, sub=True, stdin=True, hb=True):
    """Starts the channels for this kernel.

    This will create the channels if they do not exist and then start
    them. If port numbers of 0 are being used (random ports) then you
    must first call :method:`start_kernel`. If the channels have been
    stopped and you call this, :class:`RuntimeError` will be raised.
    """
    if shell:
        self.shell_channel.start()
    if sub:
python
{ "resource": "" }
q279902
KernelManager.stop_channels
test
def stop_channels(self):
    """Stops all the running channels for this kernel."""
    if self.shell_channel.is_alive():
        self.shell_channel.stop()
python
{ "resource": "" }
q279903
KernelManager.channels_running
test
def channels_running(self):
    """Are any of the channels created and running?"""
    return
python
{ "resource": "" }
q279904
KernelManager.load_connection_file
test
def load_connection_file(self):
    """load connection info from JSON dict in self.connection_file"""
    with open(self.connection_file) as f:
        cfg = json.loads(f.read())

    self.ip = cfg['ip']
    self.shell_port = cfg['shell_port']
python
{ "resource": "" }
q279905
KernelManager.write_connection_file
test
def write_connection_file(self):
    """write connection info to JSON dict in self.connection_file"""
    if self._connection_file_written:
        return
    self.connection_file, cfg = write_connection_file(self.connection_file,
        ip=self.ip, key=self.session.key,
        stdin_port=self.stdin_port, iopub_port=self.iopub_port,
        shell_port=self.shell_port, hb_port=self.hb_port)
python
{ "resource": "" }
q279906
KernelManager.start_kernel
test
def start_kernel(self, **kw):
    """Starts a kernel process and configures the manager to use it.

    If random ports (port=0) are being used, this method must be called
    before the channels are created.

    Parameters
    ----------
    launcher : callable, optional (default None)
        A custom function for launching the kernel process (generally a
        wrapper around ``entry_point.base_launch_kernel``). In most cases,
        it should not be necessary to use this parameter.

    **kw : optional
        See respective options for IPython and Python kernels.
    """
    if self.ip not in LOCAL_IPS:
        raise RuntimeError("Can only launch a kernel on a local interface. "
                           "Make sure that the '*_address' attributes are "
python
{ "resource": "" }
q279907
KernelManager.shutdown_kernel
test
def shutdown_kernel(self, restart=False):
    """Attempts to stop the kernel process cleanly.

    If the kernel cannot be stopped, it is killed, if possible.
    """
    # FIXME: Shutdown does not work on Windows due to ZMQ errors!
    if sys.platform == 'win32':
        self.kill_kernel()
        return

    # Pause the heart beat channel if it exists.
    if self._hb_channel is not None:
        self._hb_channel.pause()

    # Don't send any additional kernel kill messages immediately, to give
    # the kernel a chance to properly execute shutdown actions. Wait for at
    # most 1s, checking every 0.1s.
    self.shell_channel.shutdown(restart=restart)
    for i in range(10):
        if self.is_alive:
            time.sleep(0.1)
        else:
            break
    else:
        # OK, we've
python
{ "resource": "" }
q279908
KernelManager.restart_kernel
test
def restart_kernel(self, now=False, **kw):
    """Restarts a kernel with the arguments that were used to launch it.

    If the old kernel was launched with random ports, the same ports will
    be used for the new kernel.

    Parameters
    ----------
    now : bool, optional
        If True, the kernel is forcefully restarted *immediately*, without
        having a chance to do any cleanup action. Otherwise the kernel is
        given 1s to clean up before a forceful restart is issued.

        In all cases the kernel is restarted, the only difference is whether
        it is given a chance to perform a clean shutdown or not.

    **kw : optional
        Any options specified here will replace those used to launch the
        kernel.
    """
    if self._launch_args is None:
        raise RuntimeError("Cannot restart the kernel. "
                           "No previous call to 'start_kernel'.")
python
{ "resource": "" }
q279909
KernelManager.kill_kernel
test
def kill_kernel(self):
    """Kill the running kernel."""
    if self.has_kernel:
        # Pause the heart beat channel if it exists.
        if self._hb_channel is not None:
            self._hb_channel.pause()

        # Attempt to kill the kernel.
        try:
            self.kernel.kill()
        except OSError, e:
            # In Windows, we will get an Access Denied error if the process
python
{ "resource": "" }
q279910
KernelManager.interrupt_kernel
test
def interrupt_kernel(self):
    """Interrupts the kernel.

    Unlike ``signal_kernel``, this operation is well supported on all
    platforms.
    """
    if self.has_kernel:
        if sys.platform == 'win32':
            from parentpoller import ParentPollerWindows as Poller
python
{ "resource": "" }
q279911
KernelManager.signal_kernel
test
def signal_kernel(self, signum):
    """Sends a signal to the kernel.

    Note that since only SIGTERM is supported on Windows, this function
    is only useful on Unix systems.
    """
    if self.has_kernel:
python
{ "resource": "" }
q279912
KernelManager.is_alive
test
def is_alive(self):
    """Is the kernel process still running?"""
    if self.has_kernel:
        if self.kernel.poll() is None:
            return True
        else:
            return False
    elif self._hb_channel is not None:
        # We didn't start the kernel with this KernelManager so we
python
{ "resource": "" }
q279913
KernelManager.shell_channel
test
def shell_channel(self):
    """Get the REQ socket channel object to make requests of the kernel."""
    if self._shell_channel is None:
        self._shell_channel = self.shell_channel_class(self.context,
python
{ "resource": "" }
q279914
KernelManager.sub_channel
test
def sub_channel(self):
    """Get the SUB socket channel object."""
    if self._sub_channel is None:
        self._sub_channel = self.sub_channel_class(self.context,
python
{ "resource": "" }
q279915
KernelManager.hb_channel
test
def hb_channel(self):
    """Get the heartbeat socket channel object to check that the
    kernel is alive."""
    if self._hb_channel is None:
python
{ "resource": "" }
q279916
bind_kernel
test
def bind_kernel(**kwargs):
    """Bind an Engine's Kernel to be used as a full IPython kernel.

    This allows a running Engine to be used simultaneously as a full
    IPython kernel with the QtConsole or other frontends.

    This function returns immediately.
    """
    from IPython.zmq.ipkernel import IPKernelApp
    from IPython.parallel.apps.ipengineapp import IPEngineApp
python
{ "resource": "" }
q279917
ExtensionDebugger.debug
test
def debug(self, level, message):
    """
    Emit a debugging message depending on the debugging level.

    :param level: The debugging level.
    :param
python
{ "resource": "" }
q279918
ExtensionSet._get_extension_classes
test
def _get_extension_classes(cls):
    """
    Retrieve the extension classes in priority order.

    :returns: A list of extension classes, in proper priority order.
    """
    if cls._extension_classes is None:
        exts = {}

        # Iterate over the entrypoints
        for ext in entry.points[NAMESPACE_EXTENSIONS]:
            exts.setdefault(ext.priority, [])
python
{ "resource": "" }
q279919
ExtensionSet.pre_step
test
def pre_step(self, ctxt, step, idx):
    """
    Called prior to executing a step.

    :param ctxt: An instance of ``timid.context.Context``.
    :param step: An instance of ``timid.steps.Step`` describing the
                 step to be executed.
    :param idx: The index of the step in the list of steps.

    :returns: A ``True`` value if the step is to be skipped, ``False``
              otherwise.
    """
    debugger = ExtensionDebugger('pre_step')
python
{ "resource": "" }
q279920
ExtensionSet.post_step
test
def post_step(self, ctxt, step, idx, result):
    """
    Called after executing a step.

    :param ctxt: An instance of ``timid.context.Context``.
    :param step: An instance of ``timid.steps.Step`` describing the
                 step that was executed.
    :param idx: The index of the step in the list of steps.
    :param result: An instance of ``timid.steps.StepResult`` describing
                   the result of executing the step. May be altered by
                   the extension, e.g., to set
python
{ "resource": "" }
q279921
ExtensionSet.finalize
test
def finalize(self, ctxt, result):
    """
    Called at the end of processing.

    This call allows extensions to emit any additional data, such as
    timing information, prior to ``timid``'s exit. Extensions may also
    alter the return value.

    :param ctxt: An instance of ``timid.context.Context``.
    :param result: The return value of the basic ``timid`` call, or an
                   ``Exception`` instance if an exception was raised.
                   Without the extension, this would
python
{ "resource": "" }
q279922
walk_egg
test
def walk_egg(egg_dir):
    """Walk an unpacked egg's contents, skipping the metadata directory"""
    walker = os.walk(egg_dir)
    base, dirs, files = walker.next()
    if 'EGG-INFO' in dirs:
python
{ "resource": "" }
q279923
scan_module
test
def scan_module(egg_dir, base, name, stubs):
    """Check whether module possibly uses unsafe-for-zipfile stuff"""
    filename = os.path.join(base, name)
    if filename[:-1] in stubs:
        return True     # Extension module
    pkg = base[len(egg_dir)+1:].replace(os.sep, '.')
    module = pkg + (pkg and '.' or '') + os.path.splitext(name)[0]
    if sys.version_info < (3, 3):
        skip = 8    # skip magic & date
    else:
        skip = 12   # skip magic & date & file size
    f = open(filename, 'rb')
    f.read(skip)
    code = marshal.load(f)
    f.close()
    safe = True
    symbols = dict.fromkeys(iter_symbols(code))
    for bad in ['__file__', '__path__']:
        if bad in symbols:
            log.warn("%s: module references %s", module, bad)
            safe = False
    if 'inspect' in symbols:
python
{ "resource": "" }
q279924
launch_new_instance
test
def launch_new_instance():
    """Create and run the IPython controller"""
    if sys.platform == 'win32':
        # make sure we don't get called from a multiprocessing subprocess
        # this can result in infinite Controllers being started on Windows
        # which doesn't have a proper fork, so multiprocessing is wonky

        # this only comes up when IPython has been installed using vanilla
        # setuptools, and *not* distribute.
        import multiprocessing
        p = multiprocessing.current_process()
        # the main process has name 'MainProcess'
python
{ "resource": "" }
q279925
IPControllerApp.save_connection_dict
test
def save_connection_dict(self, fname, cdict):
    """save a connection dict to json file."""
    c = self.config
    url = cdict['url']
    location = cdict['location']
    if not location:
        try:
            proto, ip, port = split_url(url)
        except AssertionError:
            pass
        else:
            try:
                location = socket.gethostbyname_ex(socket.gethostname())[2][-1]
            except (socket.gaierror, IndexError):
                self.log.warn("Could not identify this machine's IP, assuming 127.0.0.1."
                              " You may need to specify
python
{ "resource": "" }
q279926
IPControllerApp.load_config_from_json
test
def load_config_from_json(self):
    """load config from existing json connector files."""
    c = self.config
    self.log.debug("loading config from JSON")
    # load from engine config
    fname = os.path.join(self.profile_dir.security_dir, self.engine_json_file)
    self.log.info("loading connection info from %s", fname)
    with open(fname) as f:
        cfg = json.loads(f.read())
    key = cfg['exec_key']
    # json gives unicode, Session.key wants bytes
    c.Session.key = key.encode('ascii')
    xport, addr = cfg['url'].split('://')
    c.HubFactory.engine_transport = xport
    ip, ports = addr.split(':')
    c.HubFactory.engine_ip = ip
    c.HubFactory.regport = int(ports)
    self.location = cfg['location']
    if not self.engine_ssh_server:
        self.engine_ssh_server = cfg['ssh']
python
{ "resource": "" }
q279927
IPControllerApp.load_secondary_config
test
def load_secondary_config(self):
    """secondary config, loading from JSON and setting defaults"""
    if self.reuse_files:
        try:
            self.load_config_from_json()
        except (AssertionError, IOError) as e:
            self.log.error("Could not load config from JSON: %s" % e)
        else:
            # successfully loaded config from JSON, and reuse=True
            # no need to write back the same file
python
{ "resource": "" }
q279928
ParallelMagics.parallel_execute
test
def parallel_execute(self, cell, block=None, groupby='type', save_name=None):
    """implementation used by %px and %%parallel"""

    # defaults:
    block = self.view.block if block is None else block

    base = "Parallel" if block else "Async parallel"

    targets = self.view.targets
    if isinstance(targets, list) and len(targets) > 10:
        str_targets = str(targets[:4])[:-1] + ', ..., ' + str(targets[-4:])[1:]
    else:
        str_targets = str(targets)
    if self.verbose:
        print base + " execution on engine(s): %s" % str_targets
python
{ "resource": "" }
q279929
ParallelMagics._enable_autopx
test
def _enable_autopx(self):
    """Enable %autopx mode by saving the original run_cell and installing
    pxrun_cell.
python
{ "resource": "" }
q279930
ParallelMagics._disable_autopx
test
def _disable_autopx(self):
    """Disable %autopx by restoring the original InteractiveShell.run_cell.
python
{ "resource": "" }
q279931
ParallelMagics.pxrun_cell
test
def pxrun_cell(self, raw_cell, store_history=False, silent=False):
    """drop-in replacement for InteractiveShell.run_cell.

    This executes code remotely, instead of in the local namespace.

    See InteractiveShell.run_cell for details.
    """
    if (not raw_cell) or raw_cell.isspace():
        return

    ipself = self.shell

    with ipself.builtin_trap:
        cell = ipself.prefilter_manager.prefilter_lines(raw_cell)

        # Store raw and processed history
        if store_history:
            ipself.history_manager.store_inputs(ipself.execution_count,
                                                cell, raw_cell)

        # ipself.logger.log(cell, raw_cell)

        cell_name = ipself.compile.cache(cell, ipself.execution_count)

        try:
            ast.parse(cell, filename=cell_name)
        except (OverflowError, SyntaxError, ValueError, TypeError,
python
{ "resource": "" }
q279932
run_heartbeat
test
def run_heartbeat(message):
    """Internal ``CLOCK_CHANNEL`` consumer to process task runs"""
    then = arrow.get(message['time'])
python
{ "resource": "" }
q279933
run_task
test
def run_task(message):
    """Internal ``RUN_TASK`` consumer to run the task's callable"""
    task = Task.objects.get(pk=message['id'])
    if task.allow_overlap:
        task.run(message)
    else:
        if not task.running:
            task.running = True
python
{ "resource": "" }
q279934
remove_task
test
def remove_task(message):
    """Internal ``KILL_TASK`` consumer to remove retired tasks"""
    task
python
{ "resource": "" }
q279935
patch_protocol_for_agent
test
def patch_protocol_for_agent(protocol):
    """
    Patch the protocol's makeConnection and connectionLost methods to make
    the protocol and its transport behave more like what `Agent` expects.

    While `Agent` is the driving force behind this, other clients and
    servers will no doubt have similar requirements.
    """
    old_makeConnection = protocol.makeConnection
    old_connectionLost = protocol.connectionLost

    def new_makeConnection(transport):
        patch_transport_fake_push_producer(transport)
        patch_transport_abortConnection(transport, protocol)
        return old_makeConnection(transport)
python
{ "resource": "" }
q279936
patch_if_missing
test
def patch_if_missing(obj, name, method):
    """
    Patch a method onto an object if it isn't already there.
python
{ "resource": "" }
q279937
FakeConnection.accept_connection
test
def accept_connection(self):
    """
    Accept a pending connection.
    """
    assert self.pending, "Connection is not pending."
    self.server_protocol = self.server.server_factory.buildProtocol(None)
python
{ "resource": "" }
q279938
FakeConnection.reject_connection
test
def reject_connection(self, reason=None):
    """
    Reject a pending connection.
    """
    assert self.pending, "Connection is not pending."
    if reason
python
{ "resource": "" }
q279939
FakeHttpServer.get_agent
test
def get_agent(self, reactor=None, contextFactory=None):
    """
    Returns an IAgent that makes requests to this fake server.
    """
python
{ "resource": "" }
q279940
SaveHookMixin.form_valid
test
def form_valid(self, form):
    """
    Calls pre and post save hooks.
    """
    self.object = form.save(commit=False)

    # Invoke pre_save hook, and allow it to abort the saving
    # process and do a redirect.
    response = self.pre_save(self.object)
    if response:
python
{ "resource": "" }
q279941
SaveHookMixin.delete
test
def delete(self, request, *args, **kwargs):
    """
    Calls pre and post delete hooks for DeleteViews.
    """
    self.object = self.get_object()
    success_url = self.get_success_url()
python
{ "resource": "" }
q279942
UserViewMixin.pre_save
test
def pre_save(self, instance):
    """
    Use SaveHookMixin pre_save to set
    """
    super(UserViewMixin, self).pre_save(instance)
python
{ "resource": "" }
q279943
SummaryReporter.report
test
def report(self, morfs, outfile=None):
    """Writes a report summarizing coverage statistics per module.

    `outfile` is a file object to write the summary to.
    """
    self.find_code_units(morfs)

    # Prepare the formatting strings
    max_name = max([len(cu.name) for cu in self.code_units] + [5])
    fmt_name = "%%- %ds " % max_name
    fmt_err = "%s %s: %s\n"
    header = (fmt_name % "Name") + " Stmts Miss"
    fmt_coverage = fmt_name + "%6d %6d"
    if self.branches:
        header += " Branch BrMiss"
        fmt_coverage += " %6d %6d"
    width100 = Numbers.pc_str_width()
    header += "%*s" % (width100+4, "Cover")
    fmt_coverage += "%%%ds%%%%" % (width100+3,)
    if self.config.show_missing:
        header += " Missing"
        fmt_coverage += " %s"
    rule = "-" * len(header) + "\n"
    header += "\n"
    fmt_coverage += "\n"

    if not outfile:
        outfile = sys.stdout

    # Write the header
    outfile.write(header)
    outfile.write(rule)

    total = Numbers()

    for cu in self.code_units:
        try:
            analysis = self.coverage._analyze(cu)
            nums = analysis.numbers
            args = (cu.name, nums.n_statements, nums.n_missing)
            if self.branches:
                args += (nums.n_branches, nums.n_missing_branches)
            args += (nums.pc_covered_str,)
            if self.config.show_missing:
                args += (analysis.missing_formatted(),)
            outfile.write(fmt_coverage % args)
            total += nums
        except KeyboardInterrupt:  # pragma: not covered
python
{ "resource": "" }
q279944
ModuleReloader.check
test
def check(self, check_all=False):
    """Check whether some modules need to be reloaded."""
    if not self.enabled and not check_all:
        return

    if check_all or self.check_all:
        modules = sys.modules.keys()
    else:
        modules = self.modules.keys()

    for modname in modules:
        m = sys.modules.get(modname, None)

        if modname in self.skip_modules:
            continue

        if not hasattr(m, '__file__'):
            continue

        if m.__name__ == '__main__':
            # we cannot reload(__main__)
            continue

        filename = m.__file__
        path, ext = os.path.splitext(filename)

        if ext.lower() == '.py':
            ext = PY_COMPILED_EXT
            pyc_filename = pyfile.cache_from_source(filename)
            py_filename = filename
        else:
            pyc_filename = filename
            try:
                py_filename = pyfile.source_from_cache(filename)
python
{ "resource": "" }
q279945
editor
test
def editor(self, filename, linenum=None, wait=True):
    """Open the default editor at the given filename and linenumber.

    This is IPython's default editor hook, you can use it as an example to
    write your own modified one. To set your own editor function as the
    new editor hook, call ip.set_hook('editor', yourfunc)."""

    # IPython configures a default editor at startup by reading $EDITOR from
    # the environment, and falling back on vi (unix) or notepad (win32).
    editor = self.editor

    # marker for at which line to open the file (for existing objects)
    if linenum is None or editor == 'notepad':
        linemark = ''
    else:
        linemark = '+%d' %
python
{ "resource": "" }
q279946
fix_error_editor
test
def fix_error_editor(self, filename, linenum, column, msg):
    """Open the editor at the given filename, linenumber, column and
    show an error message. This is used for correcting syntax errors.
    The current implementation only has special support for the VIM editor,
    and falls back on the 'editor' hook if VIM is not used.

    Call ip.set_hook('fix_error_editor', yourfunc) to use your own function.
    """
    def vim_quickfix_file():
        t = tempfile.NamedTemporaryFile()
        t.write('%s:%d:%d:%s\n'
python
{ "resource": "" }
q279947
clipboard_get
test
def clipboard_get(self):
    """ Get text from the clipboard.
    """
    from IPython.lib.clipboard import (
        osx_clipboard_get, tkinter_clipboard_get,
        win32_clipboard_get
    )
    if sys.platform == 'win32':
        chain = [win32_clipboard_get, tkinter_clipboard_get]
    elif sys.platform == 'darwin':
        chain = [osx_clipboard_get, tkinter_clipboard_get]
python
{ "resource": "" }
q279948
CommandChainDispatcher.add
test
def add(self, func, priority=0):
    """ Add a func to the cmd chain with given priority """
python
{ "resource": "" }
q279949
get_metadata
test
def get_metadata(path_or_module, metadata_version=None):
    """ Try to create a Distribution 'path_or_module'.

    o 'path_or_module' may be a module object.

    o If a string, 'path_or_module' may point to an sdist file, a bdist
      file, an installed package, or a working checkout (if it contains
      PKG-INFO).

    o Return None if 'path_or_module' can't be parsed.
    """
    if isinstance(path_or_module, ModuleType):
        try:
            return Installed(path_or_module, metadata_version)
        except (ValueError, IOError):  # pragma NO COVER
            pass

    try:
        __import__(path_or_module)
    except ImportError:
        pass
    else:
        try:
            return Installed(path_or_module, metadata_version)
        except (ValueError, IOError):  # pragma NO COVER
            pass

    if os.path.isfile(path_or_module):
        try:
            return SDist(path_or_module, metadata_version)
        except (ValueError, IOError):
            pass
python
{ "resource": "" }
q279950
Pdb.configure
test
def configure(self, options, conf):
    """Configure which kinds of exceptions trigger plugin.
    """
    self.conf = conf
    self.enabled = options.debugErrors or options.debugFailures
python
{ "resource": "" }
q279951
import_item
test
def import_item(name):
    """Import and return bar given the string foo.bar."""
    package = '.'.join(name.split('.')[0:-1])
    obj = name.split('.')[-1]

    # Note: the original code for this was the following.  We've left it
    # visible for now in case the new implementation shows any problems down
    # the road, to make it easier on anyone looking for a problem.  This code
    # should be removed once we're comfortable we didn't break anything.

    ## execString = 'from %s import %s' % (package, obj)
    ## try:
    ##     exec execString
    ## except SyntaxError:
    ##     raise ImportError("Invalid class specification: %s" %
python
{ "resource": "" }
q279952
try_passwordless_ssh
test
def try_passwordless_ssh(server, keyfile, paramiko=None):
    """Attempt to make an ssh connection without a password.

    This is mainly used for requiring password input only once when many
    tunnels may be connected to the same server.

    If paramiko is None, the default for the platform is chosen.
    """
    if paramiko is None:
python
{ "resource": "" }
q279953
_try_passwordless_openssh
test
def _try_passwordless_openssh(server, keyfile):
    """Try passwordless login with shell ssh command."""
    if pexpect is None:
        raise ImportError("pexpect unavailable, use paramiko")
    cmd = 'ssh -f ' + server
python
{ "resource": "" }
q279954
_try_passwordless_paramiko
test
def _try_passwordless_paramiko(server, keyfile):
    """Try passwordless login with paramiko."""
    if paramiko is None:
        msg = "Paramiko unavailable, "
        if sys.platform == 'win32':
            msg += "Paramiko is required for ssh tunneled connections on Windows."
        else:
            msg += "use OpenSSH."
        raise ImportError(msg)
    username, server, port = _split_server(server)
    client = paramiko.SSHClient()
    client.load_system_host_keys()
python
{ "resource": "" }
q279955
tunnel_connection
test
def tunnel_connection(socket, addr, server, keyfile=None, password=None,
                      paramiko=None, timeout=60):
    """Connect a socket to an address via an ssh tunnel.

    This is a wrapper for socket.connect(addr), when addr is not accessible
    from the local machine.
python
{ "resource": "" }
q279956
open_tunnel
test
def open_tunnel(addr, server, keyfile=None, password=None, paramiko=None,
                timeout=60):
    """Open a tunneled connection from a 0MQ url.

    For use inside tunnel_connection.

    Returns
    -------
    (url, tunnel) : The 0MQ url that has been forwarded, and the tunnel object
    """
    lport = select_random_ports(1)[0]
    transport, addr = addr.split('://')
    ip, rport = addr.split(':')
    rport = int(rport)
    if paramiko is None:
python
{ "resource": "" }
q279957
Client._stop_scheduling_tasks
test
def _stop_scheduling_tasks(self):
    """Stop scheduling tasks because an engine has been unregistered
    from a pure ZMQ scheduler.
    """
    self._task_socket.close()
    self._task_socket = None
    msg = "An engine has been unregistered, and we are using pure " + \
          "ZMQ task scheduling. Task farming will be disabled."
python
{ "resource": "" }
q279958
Client._unwrap_exception
test
def _unwrap_exception(self, content):
    """unwrap exception, and remap engine_id to int."""
    e = error.unwrap_exception(content)
python
{ "resource": "" }
q279959
Client._register_engine
test
def _register_engine(self, msg):
    """Register a new engine, and update our connection info."""
    content = msg['content']
python
{ "resource": "" }
q279960
Client._unregister_engine
test
def _unregister_engine(self, msg):
    """Unregister an engine that has died."""
    content = msg['content']
    eid = int(content['id'])
    if eid in self._ids:
        self._ids.remove(eid)
        uuid = self._engines.pop(eid)
python
{ "resource": "" }
q279961
Client._handle_execute_reply
test
def _handle_execute_reply(self, msg):
    """Save the reply to an execute_request into our results.

    execute messages are never actually used. apply is used instead.
    """
    parent = msg['parent_header']
    msg_id = parent['msg_id']
    if msg_id not in self.outstanding:
        if msg_id in self.history:
            print ("got stale result: %s" % msg_id)
        else:
            print ("got unknown result: %s" % msg_id)
    else:
        self.outstanding.remove(msg_id)
        content = msg['content']
        header = msg['header']

        # construct metadata:
        md = self.metadata[msg_id]
        md.update(self._extract_metadata(header, parent, content))
        # is this redundant?
        self.metadata[msg_id] = md
python
{ "resource": "" }
q279962
Client._flush_notifications
test
def _flush_notifications(self):
    """Flush notifications of engine registrations waiting in ZMQ queue."""
    idents, msg = self.session.recv(self._notification_socket, mode=zmq.NOBLOCK)
    while msg is not None:
        if self.debug:
            pprint(msg)
        msg_type = msg['header']['msg_type']
        handler = self._notification_handlers.get(msg_type, None)
        if handler is None:
python
{ "resource": "" }
q279963
Client._flush_results
test
def _flush_results(self, sock):
    """Flush task or queue results waiting in ZMQ queue."""
    idents, msg = self.session.recv(sock, mode=zmq.NOBLOCK)
    while msg is not None:
        if self.debug:
            pprint(msg)
python
{ "resource": "" }
q279964
Client._flush_control
test
def _flush_control(self, sock):
    """Flush replies from the control channel waiting in the ZMQ queue.

    Currently: ignore them."""
    if self._ignored_control_replies <= 0:
        return
    idents, msg = self.session.recv(sock, mode=zmq.NOBLOCK)
    while
python
{ "resource": "" }
q279965
Client._flush_ignored_control
test
def _flush_ignored_control(self):
    """flush ignored control replies"""
    while self._ignored_control_replies
python
{ "resource": "" }
q279966
Client._flush_iopub
test
def _flush_iopub(self, sock):
    """Flush replies from the iopub channel waiting in the ZMQ queue.
    """
    idents, msg = self.session.recv(sock, mode=zmq.NOBLOCK)
    while msg is not None:
        if self.debug:
            pprint(msg)
        parent = msg['parent_header']
        # ignore IOPub messages with no parent.
        # Caused by print statements or warnings from before the first execution.
        if not parent:
            continue
        msg_id = parent['msg_id']
        content = msg['content']
        header = msg['header']
        msg_type = msg['header']['msg_type']

        # init metadata:
        md = self.metadata[msg_id]

        if msg_type == 'stream':
            name = content['name']
            s = md[name] or ''
            md[name] = s + content['data']
        elif msg_type == 'pyerr':
            md.update({'pyerr': self._unwrap_exception(content)})
python
{ "resource": "" }
q279967
Client._spin_every
test
def _spin_every(self, interval=1):
    """target func for use in spin_thread"""
    while True:
        if self._stop_spinning.is_set():
python
{ "resource": "" }
q279968
Client.stop_spin_thread
test
def stop_spin_thread(self):
    """stop background spin_thread, if any"""
    if self._spin_thread is not None:
        self._stop_spinning.set()
python
{ "resource": "" }
q279969
Client.spin
test
def spin(self):
    """Flush any registration notifications and execution results
    waiting in the ZMQ queue.
    """
    if self._notification_socket:
        self._flush_notifications()
    if self._iopub_socket:
        self._flush_iopub(self._iopub_socket)
    if self._mux_socket:
        self._flush_results(self._mux_socket)
python
{ "resource": "" }
q279970
Client.wait
test
def wait(self, jobs=None, timeout=-1):
    """waits on one or more `jobs`, for up to `timeout` seconds.

    Parameters
    ----------
    jobs : int, str, or list of ints and/or strs, or one or more AsyncResult objects
        ints are indices to self.history
        strs are msg_ids
        default: wait on all outstanding messages
    timeout : float
        a time in seconds, after which to give up.
        default is -1, which means no timeout

    Returns
    -------
    True : when all msg_ids are done
    False : timeout reached, some msg_ids still outstanding
    """
    tic = time.time()
    if jobs is None:
        theids = self.outstanding
    else:
        if isinstance(jobs, (int, basestring, AsyncResult)):
            jobs = [jobs]
        theids = set()
        for job in jobs:
            if isinstance(job, int):
python
{ "resource": "" }
q279971
Client.send_apply_request
test
def send_apply_request(self, socket, f, args=None, kwargs=None,
                       subheader=None, track=False, ident=None):
    """construct and send an apply message via a socket.

    This is the principal method with which all engine execution is
    performed by views.
    """
    if self._closed:
        raise RuntimeError("Client cannot be used after its sockets have been closed")

    # defaults:
    args = args if args is not None else []
    kwargs = kwargs if kwargs is not None else {}
    subheader = subheader if subheader is not None else {}

    # validate arguments
    if not callable(f) and not isinstance(f, Reference):
        raise TypeError("f must be callable, not %s" % type(f))
python
{ "resource": "" }
q279972
Client.send_execute_request
test
def send_execute_request(self, socket, code, silent=True, subheader=None,
                         ident=None):
    """construct and send an execute request via a socket.
    """
    if self._closed:
        raise RuntimeError("Client cannot be used after its sockets have been closed")

    # defaults:
    subheader = subheader if subheader is not None else {}

    # validate arguments
    if not isinstance(code, basestring):
        raise TypeError("code must be text, not %s" % type(code))
    if not isinstance(subheader, dict):
        raise TypeError("subheader must be dict, not %s" % type(subheader))

    content = dict(code=code, silent=bool(silent), user_variables=[],
                   user_expressions={})

    msg = self.session.send(socket, "execute_request", content=content,
                            ident=ident,
python
{ "resource": "" }
q279973
Client.get_result
test
def get_result(self, indices_or_msg_ids=None, block=None):
    """Retrieve a result by msg_id or history index, wrapped in an
    AsyncResult object.

    If the client already has the results, no request to the Hub will be
    made.

    This is a convenient way to construct AsyncResult objects, which are
    wrappers that include metadata about execution, and allow for awaiting
    results that were not submitted by this Client.

    It can also be a convenient way to retrieve the metadata associated
    with blocking execution, since it always retrieves

    Examples
    --------
    ::

        In [10]: r = client.apply()

    Parameters
    ----------
    indices_or_msg_ids : integer history index, str msg_id, or list of either
        The indices or msg_ids of indices to be retrieved
    block : bool
        Whether to wait for the result to be done

    Returns
    -------
    AsyncResult
        A single AsyncResult object will always be returned.
    AsyncHubResult
        A subclass of AsyncResult that retrieves results from the Hub
    """
    block = self.block if block is None else block
python
{ "resource": "" }
q279974
Client.queue_status
test
def queue_status(self, targets='all', verbose=False):
    """Fetch the status of engine queues.

    Parameters
    ----------
    targets : int/str/list of ints/strs
        the engines whose states are to be queried.
        default : all
    verbose : bool
        Whether to return lengths only, or lists of ids for each element
    """
    if targets == 'all':
        # allow 'all' to be evaluated on the engine
        engine_ids = None
    else:
        engine_ids = self._build_targets(targets)[1]
    content = dict(targets=engine_ids, verbose=verbose)
    self.session.send(self._query_socket, "queue_request", content=content)
python
{ "resource": "" }
q279975
Client.purge_results
test
def purge_results(self, jobs=[], targets=[]):
    """Tell the Hub to forget results.

    Individual results can be purged by msg_id, or the entire
    history of specific targets can be purged.

    Use `purge_results('all')` to scrub everything from the Hub's db.

    Parameters
    ----------
    jobs : str or list of str or AsyncResult objects
        the msg_ids whose results should be forgotten.
    targets : int/str/list of ints/strs
        The targets, by int_id, whose entire history is to be purged.

        default : None
    """
    if not targets and not jobs:
        raise ValueError("Must specify at least one of `targets` and `jobs`")
    if targets:
        targets = self._build_targets(targets)[1]

    # construct msg_ids from jobs
    if jobs == 'all':
        msg_ids = jobs
    else:
        msg_ids = []
        if isinstance(jobs, (basestring, AsyncResult)):
            jobs = [jobs]
        bad_ids = filter(lambda obj: not isinstance(obj, (basestring, AsyncResult)), jobs)
python
{ "resource": "" }
q279976
Client.hub_history
test
def hub_history(self):
    """Get the Hub's history

    Just like the Client, the Hub has a history, which is a list of msg_ids.
    This will contain the history of all clients, and, depending on
    configuration, may contain history across multiple cluster sessions.

    Any msg_id returned here is a valid argument to `get_result`.

    Returns
    -------
    msg_ids : list of strs
        list of all msg_ids, ordered by task submission time.
    """
python
{ "resource": "" }
q279977
Client.db_query
test
def db_query(self, query, keys=None):
    """Query the Hub's TaskRecord database

    This will return a list of task record dicts that match `query`

    Parameters
    ----------
    query : mongodb query dict
        The search dict. See mongodb query docs for details.
    keys : list of strs [optional]
        The subset of keys to be returned. The default is to fetch
        everything but buffers. 'msg_id' will *always* be included.
    """
    if isinstance(keys, basestring):
        keys = [keys]
    content = dict(query=query, keys=keys)
    self.session.send(self._query_socket, "db_request", content=content)
    idents, msg = self.session.recv(self._query_socket, 0)
    if self.debug:
        pprint(msg)
    content = msg['content']
    if content['status'] != 'ok':
python
{ "resource": "" }
q279978
_opcode_set
test
def _opcode_set(*names):
    """Return a set of opcodes by the names in `names`."""
    s = set()
python
{ "resource": "" }
q279979
CodeParser._get_byte_parser
test
def _get_byte_parser(self):
    """Create a ByteParser on demand."""
    if not self._byte_parser:
        self._byte_parser = \
python
{ "resource": "" }
q279980
CodeParser.lines_matching
test
def lines_matching(self, *regexes):
    """Find the lines matching one of a list of regexes.

    Returns a set of line numbers, the lines that contain a match for one
    of the regexes in `regexes`.  The entire line needn't match, just a
    part of it.
    """
python
{ "resource": "" }
q279981
CodeParser._raw_parse
test
def _raw_parse(self):
    """Parse the source to find the interesting facts about its lines.

    A handful of member fields are updated.
    """
    # Find lines which match an exclusion pattern.
    if self.exclude:
        self.excluded = self.lines_matching(self.exclude)

    # Tokenize, to find excluded suites, to find docstrings, and to find
    # multi-line statements.
    indent = 0
    exclude_indent = 0
    excluding = False
    prev_toktype = token.INDENT
    first_line = None
    empty = True

    tokgen = generate_tokens(self.text)
    for toktype, ttext, (slineno, _), (elineno, _), ltext in tokgen:
        if self.show_tokens:                # pragma: not covered
            print("%10s %5s %-20r %r" % (
                tokenize.tok_name.get(toktype, toktype),
                nice_pair((slineno, elineno)), ttext, ltext
            ))
        if toktype == token.INDENT:
            indent += 1
        elif toktype == token.DEDENT:
            indent -= 1
        elif toktype == token.NAME and ttext == 'class':
            # Class definitions look like branches in the byte code, so
            # we need to exclude them.  The simplest way is to note the
            # lines with the 'class' keyword.
            self.classdefs.add(slineno)
        elif toktype == token.OP and ttext == ':':
            if not excluding and elineno in self.excluded:
                # Start excluding a suite.  We trigger off of the colon
                # token so that the #pragma comment will be recognized on
                # the same line as the colon.
                exclude_indent = indent
                excluding = True
        elif toktype == token.STRING and prev_toktype == token.INDENT:
            # Strings that are first on an indented line are docstrings.
            # (a trick from trace.py in the stdlib.)  This works for
            # 99.9999% of cases.  For the rest (!) see:
python
{ "resource": "" }
q279982
CodeParser.first_line
test
def first_line(self, line):
    """Return the first line number of the statement including `line`."""
python
{ "resource": "" }
q279983
CodeParser.first_lines
test
def first_lines(self, lines, *ignores):
    """Map the line numbers in `lines` to the correct first line of the
    statement.

    Skip any line mentioned in any of the sequences in `ignores`.

    Returns a set of the first lines.
    """
    ignore = set()
    for ign in ignores:
        ignore.update(ign)
    lset = set()
    for l in lines:
python
{ "resource": "" }
q279984
CodeParser.parse_source
test
def parse_source(self):
    """Parse source text to find executable lines, excluded lines, etc.

    Return values are 1) a set of executable line numbers, and 2) a set of
    excluded line numbers.

    Reported line numbers are normalized to the first line of multi-line
    statements.
    """
    try:
        self._raw_parse()
    except (tokenize.TokenError, IndentationError):
        _, tokerr, _ = sys.exc_info()
        msg, lineno = tokerr.args
        raise NotPython(
python
{ "resource": "" }
q279985
CodeParser.arcs
test
def arcs(self):
    """Get information about the arcs available in the code.

    Returns a sorted list of line number pairs.  Line numbers have been
    normalized to the first line of multiline statements.
    """
    all_arcs = []
    for l1, l2 in
python
{ "resource": "" }
q279986
CodeParser.exit_counts
test
def exit_counts(self):
    """Get a mapping from line numbers to count of exits from that line.

    Excluded lines are excluded.
    """
    excluded_lines = self.first_lines(self.excluded)
    exit_counts = {}
    for l1, l2 in self.arcs():
        if l1 < 0:
            # Don't ever report -1 as a line number
            continue
        if l1 in excluded_lines:
python
{ "resource": "" }
q279987
ByteParser.child_parsers
test
def child_parsers(self):
    """Iterate over all the code objects nested within this one.

    The iteration includes `self` as its first value.
    """
python
{ "resource": "" }
q279988
ByteParser._bytes_lines
test
def _bytes_lines(self):
    """Map byte offsets to line numbers in `code`.

    Uses co_lnotab described in Python/compile.c to map byte offsets to
    line numbers.  Produces a sequence: (b0, l0), (b1, l1), ...

    Only byte offsets that correspond to line numbers are included in the
    results.
    """
    # Adapted from dis.py in the standard library.
    byte_increments = bytes_to_ints(self.code.co_lnotab[0::2])
python
{ "resource": "" }
q279989
ByteParser._find_statements
test
def _find_statements(self):
    """Find the statements in `self.code`.

    Produce a sequence of line numbers that start statements.  Recurses
    into all code objects reachable from `self.code`.
    """
    for bp in self.child_parsers():
python
{ "resource": "" }
q279990
ByteParser._block_stack_repr
test
def _block_stack_repr(self, block_stack):
    """Get a string version of `block_stack`, for debugging."""
    blocks = ", ".join(
python
{ "resource": "" }
q279991
ByteParser._split_into_chunks
test
def _split_into_chunks(self):
    """Split the code object into a list of `Chunk` objects.

    Each chunk is only entered at its first instruction, though there can
    be many exits from a chunk.

    Returns a list of `Chunk` objects.
    """
    # The list of chunks so far, and the one we're working on.
    chunks = []
    chunk = None

    # A dict mapping byte offsets of line starts to the line numbers.
    bytes_lines_map = dict(self._bytes_lines())

    # The block stack: loops and try blocks get pushed here for the
    # implicit jumps that can occur.
    # Each entry is a tuple: (block type, destination)
    block_stack = []

    # Some op codes are followed by branches that should be ignored.  This
    # is a count of how many ignores are left.
    ignore_branch = 0

    # We have to handle the last two bytecodes specially.
    ult = penult = None

    # Get a set of all of the jump-to points.
    jump_to = set()
    bytecodes = list(ByteCodes(self.code.co_code))
    for bc in bytecodes:
        if bc.jump_to >= 0:
            jump_to.add(bc.jump_to)

    chunk_lineno = 0

    # Walk the byte codes building chunks.
    for bc in bytecodes:
        # Maybe have to start a new chunk
        start_new_chunk = False
        first_chunk = False
        if bc.offset in bytes_lines_map:
            # Start a new chunk for each source line number.
            start_new_chunk = True
            chunk_lineno = bytes_lines_map[bc.offset]
            first_chunk = True
        elif bc.offset in jump_to:
            # To make chunks have a single entrance, we have to make a new
            # chunk when we get to a place some bytecode jumps to.
            start_new_chunk = True
        elif bc.op in OPS_CHUNK_BEGIN:
            # Jumps deserve their own unnumbered chunk.  This fixes
            # problems with jumps to jumps getting confused.
            start_new_chunk = True

        if not chunk or start_new_chunk:
            if chunk:
                chunk.exits.add(bc.offset)
            chunk = Chunk(bc.offset, chunk_lineno, first_chunk)
            chunks.append(chunk)

        # Look at the opcode
        if bc.jump_to >= 0 and bc.op not in OPS_NO_JUMP:
            if ignore_branch:
                # Someone earlier wanted us to ignore this branch.
                ignore_branch -= 1
            else:
                # The opcode has a jump, it's an exit for this chunk.
                chunk.exits.add(bc.jump_to)

        if bc.op in OPS_CODE_END:
            # The opcode can exit the code object.
            chunk.exits.add(-self.code.co_firstlineno)
        if bc.op in OPS_PUSH_BLOCK:
            # The opcode adds a block to the block_stack.
            block_stack.append((bc.op, bc.jump_to))
        if bc.op in OPS_POP_BLOCK:
            # The opcode pops a block from the block stack.
            block_stack.pop()
        if bc.op in OPS_CHUNK_END:
            # This opcode forces the end of the chunk.
            if bc.op == OP_BREAK_LOOP:
                # A break is implicit: jump where the top of the
                # block_stack points.
                chunk.exits.add(block_stack[-1][1])
            chunk = None
        if bc.op == OP_END_FINALLY:
            # For the finally clause we need to find the closest exception
python
{ "resource": "" }
q279992
ByteParser.validate_chunks
test
def validate_chunks(self, chunks):
    """Validate the rule that chunks have a single entrance."""
    # starts is the entrances to the chunks
    starts = set([ch.byte for
python
{ "resource": "" }
q279993
ByteParser._arcs
test
def _arcs(self):
    """Find the executable arcs in the code.

    Yields pairs: (from, to).  From and to are integer line numbers.  If
    from is < 0, then the arc is an entrance into the code object.  If to
    is < 0, the arc is an exit from the code object.
    """
    chunks = self._split_into_chunks()

    # A map from byte offsets to chunks jumped into.
    byte_chunks = dict([(c.byte, c) for c in chunks])

    # There's always an entrance at the first chunk.
    yield (-1, byte_chunks[0].line)

    # Traverse from the first chunk in each line, and yield arcs where
    # the trace function will be invoked.
    for chunk in chunks:
        if not chunk.first:
            continue

        chunks_considered = set()
        chunks_to_consider = [chunk]
        while chunks_to_consider:
            # Get the chunk we're considering, and make sure we don't
            # consider it again
            this_chunk = chunks_to_consider.pop()
            chunks_considered.add(this_chunk)

            # For each exit, add the line number if the trace function
            # would be triggered, or add the chunk to those being
            # considered if not.
            for ex in this_chunk.exits:
                if ex < 0:
                    yield (chunk.line, ex)
                else:
                    next_chunk = byte_chunks[ex]
python
{ "resource": "" }
q279994
ByteParser._all_chunks
test
def _all_chunks(self):
    """Returns a list of `Chunk` objects for this code and its children.

    See `_split_into_chunks` for details.
python
{ "resource": "" }
q279995
ByteParser._all_arcs
test
def _all_arcs(self):
    """Get the set of all arcs in this code object and its children.

    See `_arcs` for
python
{ "resource": "" }
q279996
Coverage.options
test
def options(self, parser, env):
    """
    Add options to command line.
    """
    super(Coverage, self).options(parser, env)
    parser.add_option("--cover-package", action="append",
                      default=env.get('NOSE_COVER_PACKAGE'),
                      metavar="PACKAGE",
                      dest="cover_packages",
                      help="Restrict coverage output to selected packages "
                           "[NOSE_COVER_PACKAGE]")
    parser.add_option("--cover-erase", action="store_true",
                      default=env.get('NOSE_COVER_ERASE'),
                      dest="cover_erase",
                      help="Erase previously collected coverage "
                           "statistics before run")
    parser.add_option("--cover-tests", action="store_true",
                      dest="cover_tests",
                      default=env.get('NOSE_COVER_TESTS'),
                      help="Include test modules in coverage report "
                           "[NOSE_COVER_TESTS]")
    parser.add_option("--cover-min-percentage", action="store",
                      dest="cover_min_percentage",
                      default=env.get('NOSE_COVER_MIN_PERCENTAGE'),
                      help="Minimum percentage of coverage for tests "
                           "to pass [NOSE_COVER_MIN_PERCENTAGE]")
    parser.add_option("--cover-inclusive", action="store_true",
                      dest="cover_inclusive",
                      default=env.get('NOSE_COVER_INCLUSIVE'),
                      help="Include all python files under working "
                           "directory in coverage report.  Useful for "
                           "discovering holes in test coverage if not all "
                           "files are imported by the test suite. "
                           "[NOSE_COVER_INCLUSIVE]")
    parser.add_option("--cover-html", action="store_true",
python
{ "resource": "" }
q279997
Coverage.begin
test
def begin(self):
    """
    Begin recording coverage information.
    """
    log.debug("Coverage begin")
    self.skipModules = sys.modules.keys()[:]
    if self.coverErase:
        log.debug("Clearing previously collected coverage statistics")
        self.coverInstance.combine()
python
{ "resource": "" }
q279998
Coverage.report
test
def report(self, stream):
    """
    Output code coverage report.
    """
    log.debug("Coverage report")
    self.coverInstance.stop()
    self.coverInstance.combine()
    self.coverInstance.save()
    modules = [module for name, module in sys.modules.items()
               if self.wantModuleCoverage(name, module)]
    log.debug("Coverage report will cover modules: %s", modules)
    self.coverInstance.report(modules, file=stream)
    if self.coverHtmlDir:
        log.debug("Generating HTML coverage report")
        self.coverInstance.html_report(modules, self.coverHtmlDir)
    if self.coverXmlFile:
        log.debug("Generating XML coverage report")
        self.coverInstance.xml_report(modules, self.coverXmlFile)
    # make sure we have minimum required coverage
    if self.coverMinPercentage:
python
{ "resource": "" }
q279999
Coverage.wantFile
test
def wantFile(self, file, package=None):
    """If inclusive coverage enabled, return true for all source files
    in wanted packages.
    """
    if self.coverInclusive:
        if file.endswith(".py"):
python
{ "resource": "" }