"""The IPython kernel implementation"""

import asyncio
import builtins
import gc
import getpass
import os
import signal
import sys
import threading
import typing as t
from contextlib import contextmanager
from functools import partial

import comm
from IPython.core import release
from IPython.utils.tokenutil import line_at_cursor, token_at_cursor
from jupyter_client.session import extract_header
from traitlets import Any, Bool, HasTraits, Instance, List, Type, observe, observe_compat
from zmq.eventloop.zmqstream import ZMQStream

from .comm.comm import BaseComm
from .comm.manager import CommManager
from .compiler import XCachingCompiler
from .eventloops import _use_appnope
from .iostream import OutStream
from .kernelbase import Kernel as KernelBase
from .kernelbase import _accepts_parameters
from .zmqshell import ZMQInteractiveShell

try:
    from IPython.core.interactiveshell import _asyncio_runner
except ImportError:
    _asyncio_runner = None

try:
    from IPython.core.completer import provisionalcompleter as _provisionalcompleter
    from IPython.core.completer import rectify_completions as _rectify_completions

    _use_experimental_60_completion = True
except ImportError:
    _use_experimental_60_completion = False


_EXPERIMENTAL_KEY_NAME = "_jupyter_types_experimental"


def _create_comm(*args, **kwargs):
    """Create a new Comm."""
    return BaseComm(*args, **kwargs)
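

# There can be only one comm manager in an ipykernel process: create it
# lazily, with a lock guarding against a race between two first callers.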
_comm_lock = threading.Lock()
_comm_manager: t.Optional[CommManager] = None


def _get_comm_manager(*args, **kwargs):
    """Get the kernel's CommManager, creating it on first use."""
    global _comm_manager

    if _comm_manager is None:
        with _comm_lock:
            if _comm_manager is None:
                _comm_manager = CommManager(*args, **kwargs)

    return _comm_manager
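

# Point the generic `comm` package at the kernel's implementations, so that
# consumers such as ipywidgets get kernel-backed comms. For example
# (illustrative; the target name is a placeholder):
#     import comm
#     c = comm.create_comm(target_name="my_target")  # returns a BaseComm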
comm.create_comm = _create_comm
comm.get_comm_manager = _get_comm_manager


class IPythonKernel(KernelBase):
    """The IPython Kernel class."""

    shell = Instance("IPython.core.interactiveshell.InteractiveShellABC", allow_none=True)
    shell_class = Type(ZMQInteractiveShell)

    use_experimental_completions = Bool(
        True,
        help="Set this flag to False to deactivate the use of experimental IPython completion APIs.",
    ).tag(config=True)

    debugpy_stream = Instance(ZMQStream, allow_none=True)

    user_module = Any()

    @observe("user_module")
    @observe_compat
    def _user_module_changed(self, change):
        if self.shell is not None:
            self.shell.user_module = change["new"]

    user_ns = Instance(dict, args=None, allow_none=True)

    @observe("user_ns")
    @observe_compat
    def _user_ns_changed(self, change):
        if self.shell is not None:
            self.shell.user_ns = change["new"]
            self.shell.init_user_ns()
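
    # Slots for saving the builtin input functions that `_forward_input`
    # temporarily replaces during an execute request.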
    _sys_raw_input = Any()
    _sys_eval_input = Any()

    def __init__(self, **kwargs):
        """Initialize the kernel."""
        super().__init__(**kwargs)

        from .debugger import Debugger, _is_debugpy_available
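
        # Initialize the debugger only when debugpy is importable.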
        if _is_debugpy_available:
            self.debugger = Debugger(
                self.log,
                self.debugpy_stream,
                self._publish_debug_event,
                self.debug_shell_socket,
                self.session,
                self.debug_just_my_code,
            )

        self.shell = self.shell_class.instance(
            parent=self,
            profile_dir=self.profile_dir,
            user_module=self.user_module,
            user_ns=self.user_ns,
            kernel=self,
            compiler_class=XCachingCompiler,
        )
        self.shell.displayhook.session = self.session

        jupyter_session_name = os.environ.get("JPY_SESSION_NAME")
        if jupyter_session_name:
            self.shell.user_ns["__session__"] = jupyter_session_name

        self.shell.displayhook.pub_socket = self.iopub_socket
        self.shell.displayhook.topic = self._topic("execute_result")
        self.shell.display_pub.session = self.session
        self.shell.display_pub.pub_socket = self.iopub_socket

        self.comm_manager = comm.get_comm_manager()

        assert isinstance(self.comm_manager, HasTraits)
        self.shell.configurables.append(self.comm_manager)
        comm_msg_types = ["comm_open", "comm_msg", "comm_close"]
        for msg_type in comm_msg_types:
            self.shell_handlers[msg_type] = getattr(self.comm_manager, msg_type)
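
        # On macOS, disable App Nap so timers and the event loop keep running.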
        if _use_appnope() and self._darwin_app_nap:
            import appnope

            appnope.nope()

        self._new_threads_parent_header = {}
        self._initialize_thread_hooks()
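
        # `gc.callbacks` exists on CPython (since 3.3) but not on all
        # interpreters (e.g. PyPy), hence the guard.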
        if hasattr(gc, "callbacks"):
            gc.callbacks.append(self._clean_thread_parent_frames)

    help_links = List(
        [
            {
                "text": "Python Reference",
                "url": "https://docs.python.org/%i.%i" % sys.version_info[:2],
            },
            {
                "text": "IPython Reference",
                "url": "https://ipython.org/documentation.html",
            },
            {
                "text": "NumPy Reference",
                "url": "https://docs.scipy.org/doc/numpy/reference/",
            },
            {
                "text": "SciPy Reference",
                "url": "https://docs.scipy.org/doc/scipy/reference/",
            },
            {
                "text": "Matplotlib Reference",
                "url": "https://matplotlib.org/contents.html",
            },
            {
                "text": "SymPy Reference",
                "url": "http://docs.sympy.org/latest/index.html",
            },
            {
                "text": "pandas Reference",
                "url": "https://pandas.pydata.org/pandas-docs/stable/",
            },
        ]
    ).tag(config=True)
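
    # These attributes populate the kernel_info_reply sent to clients.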
    implementation = "ipython"
    implementation_version = release.version
    language_info = {
        "name": "python",
        "version": sys.version.split()[0],
        "mimetype": "text/x-python",
        "codemirror_mode": {"name": "ipython", "version": sys.version_info[0]},
        "pygments_lexer": "ipython3",
        "nbconvert_exporter": "python",
        "file_extension": ".py",
    }

    def dispatch_debugpy(self, msg):
        from .debugger import _is_debugpy_available

        if _is_debugpy_available:
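            # The first ZMQ frame is the socket identity and can be dropped;
            # the DAP frame itself is in the second frame.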
            frame = msg[1].bytes.decode("utf-8")
            self.log.debug("Debugpy received: %s", frame)
            self.debugger.tcp_client.receive_dap_frame(frame)

    @property
    def banner(self):
        if self.shell:
            return self.shell.banner
        return None

    async def poll_stopped_queue(self):
        """Poll the stopped queue."""
        while True:
            await self.debugger.handle_stopped_event()

    def start(self):
        """Start the kernel."""
        if self.shell:
            self.shell.exit_now = False
        if self.debugpy_stream is None:
            self.log.warning("debugpy_stream undefined, debugging will not be enabled")
        else:
            self.debugpy_stream.on_recv(self.dispatch_debugpy, copy=False)
        super().start()
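
        # Poll the debugger's stopped-event queue on the control thread's loop.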
        if self.debugpy_stream:
            asyncio.run_coroutine_threadsafe(
                self.poll_stopped_queue(), self.control_thread.io_loop.asyncio_loop
            )

    def set_parent(self, ident, parent, channel="shell"):
        """Overridden from parent to tell the display hook and output streams
        about the parent message.
        """
        super().set_parent(ident, parent, channel)
        if channel == "shell" and self.shell:
            self.shell.set_parent(parent)

    def init_metadata(self, parent):
        """Initialize metadata.

        Run at the beginning of each execution request.
        """
        md = super().init_metadata(parent)
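
        # These fields are kept for backwards compatibility with ipyparallel.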
        md.update(
            {
                "dependencies_met": True,
                "engine": self.ident,
            }
        )
        return md

    def finish_metadata(self, parent, metadata, reply_content):
        """Finish populating metadata.

        Run after completing an execution request.
        """
        metadata["status"] = reply_content["status"]
        if reply_content["status"] == "error" and reply_content["ename"] == "UnmetDependency":
            metadata["dependencies_met"] = False

        return metadata

    def _forward_input(self, allow_stdin=False):
        """Forward raw_input and getpass to the current frontend, via input_request."""
        self._allow_stdin = allow_stdin

        self._sys_raw_input = builtins.input
        builtins.input = self.raw_input

        self._save_getpass = getpass.getpass
        getpass.getpass = self.getpass

    def _restore_input(self):
        """Restore raw_input and getpass."""
        builtins.input = self._sys_raw_input
        getpass.getpass = self._save_getpass

    @property
    def execution_count(self):
        if self.shell:
            return self.shell.execution_count
        return None

    @execution_count.setter
    def execution_count(self, value):
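        # Ignore the setter: the shell keeps its own execution counter.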
        pass

    @contextmanager
    def _cancel_on_sigint(self, future):
        """Context manager for capturing SIGINT and cancelling a future.

        SIGINT raises in the event loop when running async code,
        but we want it to halt a coroutine.

        Ideally, it would raise KeyboardInterrupt,
        but this turns it into a CancelledError.
        At least the user gets a decent traceback.
        """
        sigint_future: asyncio.Future[int] = asyncio.Future()
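
        # Whichever of the two futures finishes first cancels the other.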
        def cancel_unless_done(f, _ignored):
            if f.cancelled() or f.done():
                return
            f.cancel()

        sigint_future.add_done_callback(partial(cancel_unless_done, future))
        future.add_done_callback(partial(cancel_unless_done, sigint_future))

        def handle_sigint(*args):
            def set_sigint_result():
                if sigint_future.cancelled() or sigint_future.done():
                    return
                sigint_future.set_result(1)
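
            # Use add_callback for thread safety: the signal handler runs on
            # the main thread, not necessarily on the io_loop's thread.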
            self.io_loop.add_callback(set_sigint_result)

        save_sigint = signal.signal(signal.SIGINT, handle_sigint)
        try:
            yield
        finally:
            signal.signal(signal.SIGINT, save_sigint)

    async def execute_request(self, stream, ident, parent):
        """Overridden to associate the parent message with threads started
        by this cell, for output reconciliation.
        """
        parent_header = extract_header(parent)
        self._associate_new_top_level_threads_with(parent_header)
        await super().execute_request(stream, ident, parent)

    async def do_execute(
        self,
        code,
        silent,
        store_history=True,
        user_expressions=None,
        allow_stdin=False,
        *,
        cell_meta=None,
        cell_id=None,
    ):
        """Handle code execution."""
        shell = self.shell
        assert shell is not None

        self._forward_input(allow_stdin)

        reply_content: t.Dict[str, t.Any] = {}
        if hasattr(shell, "run_cell_async") and hasattr(shell, "should_run_async"):
            run_cell = shell.run_cell_async
            should_run_async = shell.should_run_async
            accepts_params = _accepts_parameters(run_cell, ["cell_id"])
        else:
            should_run_async = lambda cell, *args, **kwargs: False
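            # Older IPython without the async API: never run cells async and
            # wrap the blocking `run_cell` in a coroutine. The lambda accepts
            # (and ignores) the keyword arguments used at the call site below.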

            async def run_cell(*args, **kwargs):
                return shell.run_cell(*args, **kwargs)

            accepts_params = _accepts_parameters(shell.run_cell, ["cell_id"])
        try:
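            # Transform the cell early so we can decide whether it should run
            # asynchronously; keep any transform error so the shell can
            # re-raise it during the run.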
            preprocessing_exc_tuple = None
            try:
                transformed_cell = shell.transform_cell(code)
            except Exception:
                transformed_cell = code
                preprocessing_exc_tuple = sys.exc_info()

            if (
                _asyncio_runner
                and shell.loop_runner is _asyncio_runner
                and asyncio.get_event_loop().is_running()
                and should_run_async(
                    code,
                    transformed_cell=transformed_cell,
                    preprocessing_exc_tuple=preprocessing_exc_tuple,
                )
            ):
                if accepts_params["cell_id"]:
                    coro = run_cell(
                        code,
                        store_history=store_history,
                        silent=silent,
                        transformed_cell=transformed_cell,
                        preprocessing_exc_tuple=preprocessing_exc_tuple,
                        cell_id=cell_id,
                    )
                else:
                    coro = run_cell(
                        code,
                        store_history=store_history,
                        silent=silent,
                        transformed_cell=transformed_cell,
                        preprocessing_exc_tuple=preprocessing_exc_tuple,
                    )

                coro_future = asyncio.ensure_future(coro)

                with self._cancel_on_sigint(coro_future):
                    res = None
                    try:
                        res = await coro_future
                    finally:
                        shell.events.trigger("post_execute")
                        if not silent:
                            shell.events.trigger("post_run_cell", res)
            else:
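                # The runner isn't already running: make a synchronous call
                # and let the shell dispatch to its own loop runner.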
                if accepts_params["cell_id"]:
                    res = shell.run_cell(
                        code,
                        store_history=store_history,
                        silent=silent,
                        cell_id=cell_id,
                    )
                else:
                    res = shell.run_cell(code, store_history=store_history, silent=silent)
        finally:
            self._restore_input()

        err = res.error_before_exec if res.error_before_exec is not None else res.error_in_exec

        if res.success:
            reply_content["status"] = "ok"
        else:
            reply_content["status"] = "error"

            reply_content.update(
                {
                    "traceback": shell._last_traceback or [],
                    "ename": str(type(err).__name__),
                    "evalue": str(err),
                }
            )
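
            # Kept for backwards compatibility with ipyparallel.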
            e_info = dict(engine_uuid=self.ident, engine_id=self.int_id, method="execute")
            reply_content["engine_info"] = e_info
        reply_content["execution_count"] = shell.execution_count - 1

        if "traceback" in reply_content:
            self.log.info(
                "Exception in execute request:\n%s",
                "\n".join(reply_content["traceback"]),
            )

        if reply_content["status"] == "ok":
            reply_content["user_expressions"] = shell.user_expressions(user_expressions or {})
        else:
            reply_content["user_expressions"] = {}
        reply_content["payload"] = shell.payload_manager.read_payload()
        shell.payload_manager.clear_payload()

        return reply_content

    def do_complete(self, code, cursor_pos):
        """Handle code completion."""
        if _use_experimental_60_completion and self.use_experimental_completions:
            return self._experimental_do_complete(code, cursor_pos)
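
        # The classic IPython completer is line-oriented, while completion
        # requests provide multi-line context, so extract the current line
        # from the cell based on cursor_pos.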
        if cursor_pos is None:
            cursor_pos = len(code)
        line, offset = line_at_cursor(code, cursor_pos)
        line_cursor = cursor_pos - offset
        assert self.shell is not None
        txt, matches = self.shell.complete("", line, line_cursor)
        return {
            "matches": matches,
            "cursor_end": cursor_pos,
            "cursor_start": cursor_pos - len(txt),
            "metadata": {},
            "status": "ok",
        }

    async def do_debug_request(self, msg):
        """Handle a debug request."""
        from .debugger import _is_debugpy_available

        if _is_debugpy_available:
            return await self.debugger.process_request(msg)
        return None

    def _experimental_do_complete(self, code, cursor_pos):
        """Experimental completions from IPython, using Jedi."""
        if cursor_pos is None:
            cursor_pos = len(code)
        with _provisionalcompleter():
            assert self.shell is not None
            raw_completions = self.shell.Completer.completions(code, cursor_pos)
            completions = list(_rectify_completions(code, raw_completions))

            comps = []
            for comp in completions:
                comps.append(
                    dict(
                        start=comp.start,
                        end=comp.end,
                        text=comp.text,
                        type=comp.type,
                        signature=comp.signature,
                    )
                )

        if completions:
            s = completions[0].start
            e = completions[0].end
            matches = [c.text for c in completions]
        else:
            s = cursor_pos
            e = cursor_pos
            matches = []

        return {
            "matches": matches,
            "cursor_end": e,
            "cursor_start": s,
            "metadata": {_EXPERIMENTAL_KEY_NAME: comps},
            "status": "ok",
        }

    def do_inspect(self, code, cursor_pos, detail_level=0, omit_sections=()):
        """Handle code inspection."""
        name = token_at_cursor(code, cursor_pos)

        reply_content: t.Dict[str, t.Any] = {"status": "ok"}
        reply_content["data"] = {}
        reply_content["metadata"] = {}
        assert self.shell is not None
        try:
            if release.version_info >= (8,):
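                # The `omit_sections` keyword is only available on IPython 8+.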
                bundle = self.shell.object_inspect_mime(
                    name,
                    detail_level=detail_level,
                    omit_sections=omit_sections,
                )
            else:
                bundle = self.shell.object_inspect_mime(name, detail_level=detail_level)
            reply_content["data"].update(bundle)
            if not self.shell.enable_html_pager:
                reply_content["data"].pop("text/html")
            reply_content["found"] = True
        except KeyError:
            reply_content["found"] = False

        return reply_content

    def do_history(
        self,
        hist_access_type,
        output,
        raw,
        session=0,
        start=0,
        stop=None,
        n=None,
        pattern=None,
        unique=False,
    ):
        """Handle code history."""
        assert self.shell is not None
        if hist_access_type == "tail":
            hist = self.shell.history_manager.get_tail(
                n, raw=raw, output=output, include_latest=True
            )
        elif hist_access_type == "range":
            hist = self.shell.history_manager.get_range(
                session, start, stop, raw=raw, output=output
            )
        elif hist_access_type == "search":
            hist = self.shell.history_manager.search(
                pattern, raw=raw, output=output, n=n, unique=unique
            )
        else:
            hist = []

        return {
            "status": "ok",
            "history": list(hist),
        }

    def do_shutdown(self, restart):
        """Handle kernel shutdown."""
        if self.shell:
            self.shell.exit_now = True
        return dict(status="ok", restart=restart)

    def do_is_complete(self, code):
        """Handle an is_complete request."""
        transformer_manager = getattr(self.shell, "input_transformer_manager", None)
        if transformer_manager is None:
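            # `input_splitter` is deprecated; fall back to it on older IPython.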
            assert self.shell is not None
            transformer_manager = self.shell.input_splitter
        status, indent_spaces = transformer_manager.check_complete(code)
        r = {"status": status}
        if status == "incomplete":
            r["indent"] = " " * indent_spaces
        return r

    def do_apply(self, content, bufs, msg_id, reply_metadata):
        """Handle an apply request."""
        try:
            from ipyparallel.serialize import serialize_object, unpack_apply_message
        except ImportError:
            from .serialize import serialize_object, unpack_apply_message

        shell = self.shell
        assert shell is not None
        try:
            working = shell.user_ns

            prefix = "_" + str(msg_id).replace("-", "") + "_"
            f, args, kwargs = unpack_apply_message(bufs, working, copy=False)
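
            # Bind the function, arguments, and a result slot under names
            # prefixed with the message id, so the call can be exec'd in the
            # user namespace and the temporaries removed afterwards.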
            fname = prefix + "f"
            argname = prefix + "args"
            kwargname = prefix + "kwargs"
            resultname = prefix + "result"

            ns = {fname: f, argname: args, kwargname: kwargs, resultname: None}

            working.update(ns)
            code = f"{resultname} = {fname}(*{argname},**{kwargname})"
            try:
                exec(code, shell.user_global_ns, shell.user_ns)
                result = working.get(resultname)
            finally:
                for key in ns:
                    working.pop(key)

            assert self.session is not None
            result_buf = serialize_object(
                result,
                buffer_threshold=self.session.buffer_threshold,
                item_threshold=self.session.item_threshold,
            )

        except BaseException as e:
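            # Let IPython format and display the traceback before replying.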
            shell.showtraceback()
            reply_content = {
                "traceback": shell._last_traceback or [],
                "ename": str(type(e).__name__),
                "evalue": str(e),
            }
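
            # Kept for backwards compatibility with ipyparallel.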
            e_info = dict(engine_uuid=self.ident, engine_id=self.int_id, method="apply")
            reply_content["engine_info"] = e_info

            self.send_response(
                self.iopub_socket,
                "error",
                reply_content,
                ident=self._topic("error"),
            )
            self.log.info("Exception in apply request:\n%s", "\n".join(reply_content["traceback"]))
            result_buf = []
            reply_content["status"] = "error"
        else:
            reply_content = {"status": "ok"}

        return reply_content, result_buf

    def do_clear(self):
        """Clear the kernel."""
        if self.shell:
            self.shell.reset(False)
        return dict(status="ok")

    def _associate_new_top_level_threads_with(self, parent_header):
        """Store the parent header to associate it with new top-level threads."""
        self._new_threads_parent_header = parent_header

    def _initialize_thread_hooks(self):
        """Store thread hierarchy and thread-parent_header associations."""
        stdout = self._stdout
        stderr = self._stderr
        kernel_thread_ident = threading.get_ident()
        kernel = self
        _threading_Thread_run = threading.Thread.run
        _threading_Thread__init__ = threading.Thread.__init__
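
        # The closures below wrap the originals saved above: `__init__`
        # records the identity of the creating thread, and `run` registers
        # the thread with the kernel's output streams before delegating.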
        def run_closure(self: threading.Thread):
            """Wrap `threading.Thread.run` to intercept the thread's identity.

            This is needed because there is no "start" hook yet, but there
            might be one in the future: https://bugs.python.org/issue14073

            This is a no-op if `self._stdout` and `self._stderr` are not
            instances of `OutStream`.
            """
            try:
                parent = self._ipykernel_parent_thread_ident
            except AttributeError:
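                # The thread was created before the hooks were installed.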
                return
            for stream in [stdout, stderr]:
                if isinstance(stream, OutStream):
                    if parent == kernel_thread_ident:
                        stream._thread_to_parent_header[
                            self.ident
                        ] = kernel._new_threads_parent_header
                    else:
                        stream._thread_to_parent[self.ident] = parent
            _threading_Thread_run(self)

        def init_closure(self: threading.Thread, *args, **kwargs):
            _threading_Thread__init__(self, *args, **kwargs)
            self._ipykernel_parent_thread_ident = threading.get_ident()

        threading.Thread.__init__ = init_closure
        threading.Thread.run = run_closure

    def _clean_thread_parent_frames(
        self, phase: t.Literal["start", "stop"], info: t.Dict[str, t.Any]
    ):
        """Clean parent frames of threads which are no longer running.

        This is meant to be invoked by the garbage collector's callback hook.

        The implementation enumerates the threads because there is no "exit"
        hook yet, but there might be one in the future:
        https://bugs.python.org/issue14073

        This is a no-op if `self._stdout` and `self._stderr` are not
        instances of `OutStream`.
        """
        if phase != "start":
            return
        active_threads = {thread.ident for thread in threading.enumerate()}
        for stream in [self._stdout, self._stderr]:
            if isinstance(stream, OutStream):
                thread_to_parent_header = stream._thread_to_parent_header
                for identity in list(thread_to_parent_header.keys()):
                    if identity not in active_threads:
                        try:
                            del thread_to_parent_header[identity]
                        except KeyError:
                            pass
                thread_to_parent = stream._thread_to_parent
                for identity in list(thread_to_parent.keys()):
                    if identity not in active_threads:
                        try:
                            del thread_to_parent[identity]
                        except KeyError:
                            pass


class Kernel(IPythonKernel):
    """DEPRECATED. An alias for the IPython kernel class."""

    def __init__(self, *args, **kwargs):
        """DEPRECATED."""
        import warnings

        warnings.warn(
            "Kernel is a deprecated alias of ipykernel.ipkernel.IPythonKernel",
            DeprecationWarning,
            stacklevel=2,
        )
        super().__init__(*args, **kwargs)