"""The IPython kernel implementation"""
import asyncio
import builtins
import gc
import getpass
import os
import signal
import sys
import threading
import typing as t
from contextlib import contextmanager
from functools import partial
import comm
from IPython.core import release
from IPython.utils.tokenutil import line_at_cursor, token_at_cursor
from jupyter_client.session import extract_header
from traitlets import Any, Bool, HasTraits, Instance, List, Type, observe, observe_compat
from zmq.eventloop.zmqstream import ZMQStream
from .comm.comm import BaseComm
from .comm.manager import CommManager
from .compiler import XCachingCompiler
from .eventloops import _use_appnope
from .iostream import OutStream
from .kernelbase import Kernel as KernelBase
from .kernelbase import _accepts_parameters
from .zmqshell import ZMQInteractiveShell
try:
from IPython.core.interactiveshell import _asyncio_runner # type:ignore[attr-defined]
except ImportError:
_asyncio_runner = None # type:ignore[assignment]
try:
from IPython.core.completer import provisionalcompleter as _provisionalcompleter
from IPython.core.completer import rectify_completions as _rectify_completions
_use_experimental_60_completion = True
except ImportError:
_use_experimental_60_completion = False
_EXPERIMENTAL_KEY_NAME = "_jupyter_types_experimental"
def _create_comm(*args, **kwargs):
"""Create a new Comm."""
return BaseComm(*args, **kwargs)
# There can be only one comm manager in an ipykernel process.
_comm_lock = threading.Lock()
_comm_manager: t.Optional[CommManager] = None
def _get_comm_manager(*args, **kwargs):
"""Create a new CommManager."""
global _comm_manager # noqa: PLW0603
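    # Double-checked locking: the unlocked test is a fast path; re-testing
    # inside the lock prevents two threads from racing to create two managers.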
if _comm_manager is None:
with _comm_lock:
if _comm_manager is None:
_comm_manager = CommManager(*args, **kwargs)
return _comm_manager
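# Register ipykernel's factories with the `comm` package so that any library
# importing `comm` (e.g. ipywidgets) transparently gets comms wired to this
# kernel.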
comm.create_comm = _create_comm
comm.get_comm_manager = _get_comm_manager
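# Downstream usage is then simply (a sketch; the target name is illustrative):
#
#     import comm
#     my_comm = comm.create_comm(target_name="my_target", data={"value": 1})
#     my_comm.send({"progress": 0.5})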
class IPythonKernel(KernelBase):
"""The IPython Kernel class."""
shell = Instance("IPython.core.interactiveshell.InteractiveShellABC", allow_none=True)
shell_class = Type(ZMQInteractiveShell)
use_experimental_completions = Bool(
True,
help="Set this flag to False to deactivate the use of experimental IPython completion APIs.",
).tag(config=True)
debugpy_stream = Instance(ZMQStream, allow_none=True)
user_module = Any()
@observe("user_module")
@observe_compat
def _user_module_changed(self, change):
if self.shell is not None:
self.shell.user_module = change["new"]
user_ns = Instance(dict, args=None, allow_none=True)
@observe("user_ns")
@observe_compat
def _user_ns_changed(self, change):
if self.shell is not None:
self.shell.user_ns = change["new"]
self.shell.init_user_ns()
    # A reference to the Python builtin 'input' function
    # (`builtins.input`; `raw_input` in Python 2.7)
_sys_raw_input = Any()
_sys_eval_input = Any()
def __init__(self, **kwargs):
"""Initialize the kernel."""
super().__init__(**kwargs)
from .debugger import Debugger, _is_debugpy_available
# Initialize the Debugger
if _is_debugpy_available:
self.debugger = Debugger(
self.log,
self.debugpy_stream,
self._publish_debug_event,
self.debug_shell_socket,
self.session,
self.debug_just_my_code,
)
# Initialize the InteractiveShell subclass
self.shell = self.shell_class.instance(
parent=self,
profile_dir=self.profile_dir,
user_module=self.user_module,
user_ns=self.user_ns,
kernel=self,
compiler_class=XCachingCompiler,
)
self.shell.displayhook.session = self.session # type:ignore[attr-defined]
jupyter_session_name = os.environ.get("JPY_SESSION_NAME")
if jupyter_session_name:
self.shell.user_ns["__session__"] = jupyter_session_name
self.shell.displayhook.pub_socket = self.iopub_socket # type:ignore[attr-defined]
self.shell.displayhook.topic = self._topic("execute_result") # type:ignore[attr-defined]
self.shell.display_pub.session = self.session # type:ignore[attr-defined]
self.shell.display_pub.pub_socket = self.iopub_socket # type:ignore[attr-defined]
self.comm_manager = comm.get_comm_manager()
assert isinstance(self.comm_manager, HasTraits)
self.shell.configurables.append(self.comm_manager) # type:ignore[arg-type]
comm_msg_types = ["comm_open", "comm_msg", "comm_close"]
for msg_type in comm_msg_types:
self.shell_handlers[msg_type] = getattr(self.comm_manager, msg_type)
if _use_appnope() and self._darwin_app_nap:
            # Disable App Nap: the kernel is not a GUI itself, but it may host GUI event loops.
import appnope # type:ignore[import-untyped]
appnope.nope()
self._new_threads_parent_header = {}
self._initialize_thread_hooks()
if hasattr(gc, "callbacks"):
# while `gc.callbacks` exists since Python 3.3, pypy does not
# implement it even as of 3.9.
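            # Each callback receives (phase, info); see _clean_thread_parent_frames.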
gc.callbacks.append(self._clean_thread_parent_frames)
help_links = List(
[
{
"text": "Python Reference",
"url": "https://docs.python.org/%i.%i" % sys.version_info[:2],
},
{
"text": "IPython Reference",
"url": "https://ipython.org/documentation.html",
},
{
"text": "NumPy Reference",
"url": "https://docs.scipy.org/doc/numpy/reference/",
},
{
"text": "SciPy Reference",
"url": "https://docs.scipy.org/doc/scipy/reference/",
},
{
"text": "Matplotlib Reference",
"url": "https://matplotlib.org/contents.html",
},
{
"text": "SymPy Reference",
"url": "http://docs.sympy.org/latest/index.html",
},
{
"text": "pandas Reference",
"url": "https://pandas.pydata.org/pandas-docs/stable/",
},
]
).tag(config=True)
# Kernel info fields
implementation = "ipython"
implementation_version = release.version
language_info = {
"name": "python",
"version": sys.version.split()[0],
"mimetype": "text/x-python",
"codemirror_mode": {"name": "ipython", "version": sys.version_info[0]},
"pygments_lexer": "ipython%d" % 3,
"nbconvert_exporter": "python",
"file_extension": ".py",
}
def dispatch_debugpy(self, msg):
from .debugger import _is_debugpy_available
if _is_debugpy_available:
            # The first frame is the socket id; we can drop it.
frame = msg[1].bytes.decode("utf-8")
self.log.debug("Debugpy received: %s", frame)
self.debugger.tcp_client.receive_dap_frame(frame)
@property
def banner(self):
if self.shell:
return self.shell.banner
return None
async def poll_stopped_queue(self):
"""Poll the stopped queue."""
while True:
await self.debugger.handle_stopped_event()
def start(self):
"""Start the kernel."""
if self.shell:
self.shell.exit_now = False
if self.debugpy_stream is None:
self.log.warning("debugpy_stream undefined, debugging will not be enabled")
else:
self.debugpy_stream.on_recv(self.dispatch_debugpy, copy=False)
super().start()
if self.debugpy_stream:
asyncio.run_coroutine_threadsafe(
self.poll_stopped_queue(), self.control_thread.io_loop.asyncio_loop
)
def set_parent(self, ident, parent, channel="shell"):
"""Overridden from parent to tell the display hook and output streams
about the parent message.
"""
super().set_parent(ident, parent, channel)
if channel == "shell" and self.shell:
self.shell.set_parent(parent)
def init_metadata(self, parent):
"""Initialize metadata.
Run at the beginning of each execution request.
"""
md = super().init_metadata(parent)
# FIXME: remove deprecated ipyparallel-specific code
# This is required for ipyparallel < 5.0
md.update(
{
"dependencies_met": True,
"engine": self.ident,
}
)
return md
def finish_metadata(self, parent, metadata, reply_content):
"""Finish populating metadata.
Run after completing an execution request.
"""
# FIXME: remove deprecated ipyparallel-specific code
# This is required by ipyparallel < 5.0
metadata["status"] = reply_content["status"]
if reply_content["status"] == "error" and reply_content["ename"] == "UnmetDependency":
metadata["dependencies_met"] = False
return metadata
def _forward_input(self, allow_stdin=False):
"""Forward raw_input and getpass to the current frontend.
via input_request
"""
self._allow_stdin = allow_stdin
self._sys_raw_input = builtins.input
builtins.input = self.raw_input
self._save_getpass = getpass.getpass
getpass.getpass = self.getpass
def _restore_input(self):
"""Restore raw_input, getpass"""
builtins.input = self._sys_raw_input
getpass.getpass = self._save_getpass
@property
def execution_count(self):
if self.shell:
return self.shell.execution_count
return None
@execution_count.setter
def execution_count(self, value):
# Ignore the incrementing done by KernelBase, in favour of our shell's
# execution counter.
pass
@contextmanager
def _cancel_on_sigint(self, future):
"""ContextManager for capturing SIGINT and cancelling a future
SIGINT raises in the event loop when running async code,
but we want it to halt a coroutine.
Ideally, it would raise KeyboardInterrupt,
but this turns it into a CancelledError.
At least it gets a decent traceback to the user.
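
        Typical use, mirroring ``do_execute`` below::

            fut = asyncio.ensure_future(coro)
            with self._cancel_on_sigint(fut):
                res = await fut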
"""
sigint_future: asyncio.Future[int] = asyncio.Future()
# whichever future finishes first,
# cancel the other one
def cancel_unless_done(f, _ignored):
if f.cancelled() or f.done():
return
f.cancel()
# when sigint finishes,
# abort the coroutine with CancelledError
sigint_future.add_done_callback(partial(cancel_unless_done, future))
# when the main future finishes,
# stop watching for SIGINT events
future.add_done_callback(partial(cancel_unless_done, sigint_future))
def handle_sigint(*args):
def set_sigint_result():
if sigint_future.cancelled() or sigint_future.done():
return
sigint_future.set_result(1)
# use add_callback for thread safety
self.io_loop.add_callback(set_sigint_result)
# set the custom sigint handler during this context
save_sigint = signal.signal(signal.SIGINT, handle_sigint)
try:
yield
finally:
# restore the previous sigint handler
signal.signal(signal.SIGINT, save_sigint)
async def execute_request(self, stream, ident, parent):
"""Override for cell output - cell reconciliation."""
parent_header = extract_header(parent)
self._associate_new_top_level_threads_with(parent_header)
await super().execute_request(stream, ident, parent)
async def do_execute(
self,
code,
silent,
store_history=True,
user_expressions=None,
allow_stdin=False,
*,
cell_meta=None,
cell_id=None,
):
"""Handle code execution."""
shell = self.shell # we'll need this a lot here
assert shell is not None
self._forward_input(allow_stdin)
reply_content: t.Dict[str, t.Any] = {}
if hasattr(shell, "run_cell_async") and hasattr(shell, "should_run_async"):
run_cell = shell.run_cell_async
should_run_async = shell.should_run_async
accepts_params = _accepts_parameters(run_cell, ["cell_id"])
else:
should_run_async = lambda cell: False # noqa: ARG005, E731
# older IPython,
# use blocking run_cell and wrap it in coroutine
async def run_cell(*args, **kwargs):
return shell.run_cell(*args, **kwargs)
accepts_params = _accepts_parameters(shell.run_cell, ["cell_id"])
try:
# default case: runner is asyncio and asyncio is already running
# TODO: this should check every case for "are we inside the runner",
# not just asyncio
preprocessing_exc_tuple = None
try:
transformed_cell = shell.transform_cell(code)
except Exception:
transformed_cell = code
preprocessing_exc_tuple = sys.exc_info()
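            # On transform failure, fall back to the raw code and remember the
            # exception; the shell will surface it as a normal user traceback.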
if (
_asyncio_runner # type:ignore[truthy-bool]
and shell.loop_runner is _asyncio_runner
and asyncio.get_event_loop().is_running()
and should_run_async(
code,
transformed_cell=transformed_cell,
preprocessing_exc_tuple=preprocessing_exc_tuple,
)
):
if accepts_params["cell_id"]:
coro = run_cell(
code,
store_history=store_history,
silent=silent,
transformed_cell=transformed_cell,
preprocessing_exc_tuple=preprocessing_exc_tuple,
cell_id=cell_id,
)
else:
coro = run_cell(
code,
store_history=store_history,
silent=silent,
transformed_cell=transformed_cell,
preprocessing_exc_tuple=preprocessing_exc_tuple,
)
coro_future = asyncio.ensure_future(coro)
with self._cancel_on_sigint(coro_future):
res = None
try:
res = await coro_future
finally:
shell.events.trigger("post_execute")
if not silent:
shell.events.trigger("post_run_cell", res)
else:
# runner isn't already running,
# make synchronous call,
# letting shell dispatch to loop runners
if accepts_params["cell_id"]:
res = shell.run_cell(
code,
store_history=store_history,
silent=silent,
cell_id=cell_id,
)
else:
res = shell.run_cell(code, store_history=store_history, silent=silent)
finally:
self._restore_input()
err = res.error_before_exec if res.error_before_exec is not None else res.error_in_exec
if res.success:
reply_content["status"] = "ok"
else:
reply_content["status"] = "error"
reply_content.update(
{
"traceback": shell._last_traceback or [],
"ename": str(type(err).__name__),
"evalue": str(err),
}
)
# FIXME: deprecated piece for ipyparallel (remove in 5.0):
e_info = dict(engine_uuid=self.ident, engine_id=self.int_id, method="execute")
reply_content["engine_info"] = e_info
# Return the execution counter so clients can display prompts
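        # (the shell has already advanced its counter to the next cell, hence -1)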
reply_content["execution_count"] = shell.execution_count - 1
if "traceback" in reply_content:
self.log.info(
"Exception in execute request:\n%s",
"\n".join(reply_content["traceback"]),
)
# At this point, we can tell whether the main code execution succeeded
# or not. If it did, we proceed to evaluate user_expressions
if reply_content["status"] == "ok":
reply_content["user_expressions"] = shell.user_expressions(user_expressions or {})
else:
# If there was an error, don't even try to compute expressions
reply_content["user_expressions"] = {}
# Payloads should be retrieved regardless of outcome, so we can both
# recover partial output (that could have been generated early in a
# block, before an error) and always clear the payload system.
reply_content["payload"] = shell.payload_manager.read_payload()
# Be aggressive about clearing the payload because we don't want
# it to sit in memory until the next execute_request comes in.
shell.payload_manager.clear_payload()
return reply_content
def do_complete(self, code, cursor_pos):
"""Handle code completion."""
if _use_experimental_60_completion and self.use_experimental_completions:
return self._experimental_do_complete(code, cursor_pos)
# FIXME: IPython completers currently assume single line,
# but completion messages give multi-line context
# For now, extract line from cell, based on cursor_pos:
if cursor_pos is None:
cursor_pos = len(code)
line, offset = line_at_cursor(code, cursor_pos)
line_cursor = cursor_pos - offset
assert self.shell is not None
txt, matches = self.shell.complete("", line, line_cursor)
return {
"matches": matches,
"cursor_end": cursor_pos,
"cursor_start": cursor_pos - len(txt),
"metadata": {},
"status": "ok",
}
async def do_debug_request(self, msg):
"""Handle a debug request."""
from .debugger import _is_debugpy_available
if _is_debugpy_available:
return await self.debugger.process_request(msg)
return None
def _experimental_do_complete(self, code, cursor_pos):
"""
Experimental completions from IPython, using Jedi.
"""
if cursor_pos is None:
cursor_pos = len(code)
with _provisionalcompleter():
assert self.shell is not None
raw_completions = self.shell.Completer.completions(code, cursor_pos)
completions = list(_rectify_completions(code, raw_completions))
comps = []
for comp in completions:
comps.append(
dict(
start=comp.start,
end=comp.end,
text=comp.text,
type=comp.type,
signature=comp.signature,
)
)
if completions:
s = completions[0].start
e = completions[0].end
matches = [c.text for c in completions]
else:
s = cursor_pos
e = cursor_pos
matches = []
return {
"matches": matches,
"cursor_end": e,
"cursor_start": s,
"metadata": {_EXPERIMENTAL_KEY_NAME: comps},
"status": "ok",
}
def do_inspect(self, code, cursor_pos, detail_level=0, omit_sections=()):
"""Handle code inspection."""
name = token_at_cursor(code, cursor_pos)
reply_content: t.Dict[str, t.Any] = {"status": "ok"}
reply_content["data"] = {}
reply_content["metadata"] = {}
assert self.shell is not None
try:
if release.version_info >= (8,):
# `omit_sections` keyword will be available in IPython 8, see
# https://github.com/ipython/ipython/pull/13343
bundle = self.shell.object_inspect_mime(
name,
detail_level=detail_level,
omit_sections=omit_sections,
)
else:
bundle = self.shell.object_inspect_mime(name, detail_level=detail_level)
reply_content["data"].update(bundle)
            if not self.shell.enable_html_pager:
                reply_content["data"].pop("text/html", None)
reply_content["found"] = True
except KeyError:
reply_content["found"] = False
return reply_content
def do_history(
self,
hist_access_type,
output,
raw,
session=0,
start=0,
stop=None,
n=None,
pattern=None,
unique=False,
):
"""Handle code history."""
assert self.shell is not None
if hist_access_type == "tail":
hist = self.shell.history_manager.get_tail(
n, raw=raw, output=output, include_latest=True
)
elif hist_access_type == "range":
hist = self.shell.history_manager.get_range(
session, start, stop, raw=raw, output=output
)
elif hist_access_type == "search":
hist = self.shell.history_manager.search(
pattern, raw=raw, output=output, n=n, unique=unique
)
else:
hist = []
return {
"status": "ok",
"history": list(hist),
}
def do_shutdown(self, restart):
"""Handle kernel shutdown."""
if self.shell:
self.shell.exit_now = True
return dict(status="ok", restart=restart)
def do_is_complete(self, code):
"""Handle an is_complete request."""
transformer_manager = getattr(self.shell, "input_transformer_manager", None)
if transformer_manager is None:
# input_splitter attribute is deprecated
assert self.shell is not None
transformer_manager = self.shell.input_splitter
status, indent_spaces = transformer_manager.check_complete(code)
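        # `status` is "complete", "incomplete", or "invalid"; the indent hint
        # below only applies to incomplete input.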
r = {"status": status}
if status == "incomplete":
r["indent"] = " " * indent_spaces
return r
def do_apply(self, content, bufs, msg_id, reply_metadata):
"""Handle an apply request."""
try:
from ipyparallel.serialize import serialize_object, unpack_apply_message
except ImportError:
from .serialize import serialize_object, unpack_apply_message
shell = self.shell
assert shell is not None
try:
working = shell.user_ns
prefix = "_" + str(msg_id).replace("-", "") + "_"
f, args, kwargs = unpack_apply_message(bufs, working, copy=False)
            # Bind the function under a generic, prefixed name rather than its
            # own __name__, to avoid clobbering the user namespace.
            fname = prefix + "f"
argname = prefix + "args"
kwargname = prefix + "kwargs"
resultname = prefix + "result"
ns = {fname: f, argname: args, kwargname: kwargs, resultname: None}
            # Inject the function, its arguments, and a result placeholder
            # into the user namespace under collision-resistant names.
working.update(ns)
code = f"{resultname} = {fname}(*{argname},**{kwargname})"
try:
exec(code, shell.user_global_ns, shell.user_ns)
result = working.get(resultname)
finally:
for key in ns:
working.pop(key)
assert self.session is not None
result_buf = serialize_object(
result,
buffer_threshold=self.session.buffer_threshold,
item_threshold=self.session.item_threshold,
)
except BaseException as e:
# invoke IPython traceback formatting
shell.showtraceback()
reply_content = {
"traceback": shell._last_traceback or [],
"ename": str(type(e).__name__),
"evalue": str(e),
}
# FIXME: deprecated piece for ipyparallel (remove in 5.0):
e_info = dict(engine_uuid=self.ident, engine_id=self.int_id, method="apply")
reply_content["engine_info"] = e_info
self.send_response(
self.iopub_socket,
"error",
reply_content,
ident=self._topic("error"),
)
self.log.info("Exception in apply request:\n%s", "\n".join(reply_content["traceback"]))
result_buf = []
reply_content["status"] = "error"
else:
reply_content = {"status": "ok"}
return reply_content, result_buf
def do_clear(self):
"""Clear the kernel."""
if self.shell:
self.shell.reset(False)
return dict(status="ok")
def _associate_new_top_level_threads_with(self, parent_header):
"""Store the parent header to associate it with new top-level threads"""
self._new_threads_parent_header = parent_header
def _initialize_thread_hooks(self):
"""Store thread hierarchy and thread-parent_header associations."""
stdout = self._stdout
stderr = self._stderr
kernel_thread_ident = threading.get_ident()
kernel = self
_threading_Thread_run = threading.Thread.run
_threading_Thread__init__ = threading.Thread.__init__
def run_closure(self: threading.Thread):
"""Wrap the `threading.Thread.start` to intercept thread identity.
This is needed because there is no "start" hook yet, but there
might be one in the future: https://bugs.python.org/issue14073
This is a no-op if the `self._stdout` and `self._stderr` are not
sub-classes of `OutStream`.
"""
try:
parent = self._ipykernel_parent_thread_ident # type:ignore[attr-defined]
except AttributeError:
return
for stream in [stdout, stderr]:
if isinstance(stream, OutStream):
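                    # Threads spawned from the kernel's main thread adopt the
                    # header of the current execute_request; nested threads are
                    # chained to their parent so the header resolves transitively.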
if parent == kernel_thread_ident:
stream._thread_to_parent_header[
self.ident
] = kernel._new_threads_parent_header
else:
stream._thread_to_parent[self.ident] = parent
_threading_Thread_run(self)
def init_closure(self: threading.Thread, *args, **kwargs):
_threading_Thread__init__(self, *args, **kwargs)
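            # __init__ runs in the spawning thread, so get_ident() records the
            # parent's identity, not the new thread's.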
self._ipykernel_parent_thread_ident = threading.get_ident() # type:ignore[attr-defined]
threading.Thread.__init__ = init_closure # type:ignore[method-assign]
threading.Thread.run = run_closure # type:ignore[method-assign]
def _clean_thread_parent_frames(
self, phase: t.Literal["start", "stop"], info: t.Dict[str, t.Any]
):
"""Clean parent frames of threads which are no longer running.
This is meant to be invoked by garbage collector callback hook.
The implementation enumerates the threads because there is no "exit" hook yet,
but there might be one in the future: https://bugs.python.org/issue14073
This is a no-op if the `self._stdout` and `self._stderr` are not
sub-classes of `OutStream`.
"""
# Only run before the garbage collector starts
if phase != "start":
return
active_threads = {thread.ident for thread in threading.enumerate()}
for stream in [self._stdout, self._stderr]:
if isinstance(stream, OutStream):
thread_to_parent_header = stream._thread_to_parent_header
for identity in list(thread_to_parent_header.keys()):
if identity not in active_threads:
try:
del thread_to_parent_header[identity]
except KeyError:
pass
thread_to_parent = stream._thread_to_parent
for identity in list(thread_to_parent.keys()):
if identity not in active_threads:
try:
del thread_to_parent[identity]
except KeyError:
pass
# This exists only for backwards compatibility - use IPythonKernel instead
class Kernel(IPythonKernel):
"""DEPRECATED. An alias for the IPython kernel class."""
def __init__(self, *args, **kwargs): # pragma: no cover
"""DEPRECATED."""
import warnings
warnings.warn(
"Kernel is a deprecated alias of ipykernel.ipkernel.IPythonKernel",
DeprecationWarning,
stacklevel=2,
)
super().__init__(*args, **kwargs)
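

# A typical kernelspec invocation (a sketch; the connection file path varies):
#
#     python -m ipykernel_launcher -f /path/to/connection.json
#
# The launcher instantiates IPythonKernel via IPKernelApp; this module is not
# normally executed directly.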