
Commit b5246e8

Merge pull request #737 from jupyter-incubator/devstein/ipykernel-6
Support ipykernel >= 6
2 parents 691018f + bab2c21 commit b5246e8

8 files changed: +243 additions, -83 deletions

.github/workflows/tests.yml

Lines changed: 1 addition & 0 deletions
@@ -21,6 +21,7 @@ jobs:
         sudo apt-get install -y libkrb5-dev
     - name: Install package dependencies
       run: |
+        python -m pip install --upgrade pip
         pip install -r hdijupyterutils/requirements.txt -e hdijupyterutils
         pip install -r autovizwidget/requirements.txt -e autovizwidget
         pip install -r sparkmagic/requirements.txt -e sparkmagic

hdijupyterutils/requirements.txt

Lines changed: 1 addition & 1 deletion
@@ -2,7 +2,7 @@ ipython>=4.0.2
 nose
 mock
 ipywidgets>5.0.0
-ipykernel>=4.2.2, <6.0.0
+ipykernel>=4.2.2
 jupyter>=1
 pandas>=0.17.1
 numpy>=1.16.5

hdijupyterutils/setup.py

Lines changed: 1 addition & 1 deletion
@@ -60,7 +60,7 @@ def version(path):
         "nose",
         "mock",
         "ipywidgets>5.0.0",
-        "ipykernel>=4.2.2,<6.0.0",
+        "ipykernel>=4.2.2",
         "jupyter>=1",
         "pandas>=0.17.1",
         "numpy",

sparkmagic/requirements.txt

Lines changed: 2 additions & 1 deletion
@@ -6,8 +6,9 @@ mock
 pandas>=0.17.1
 numpy
 requests
-ipykernel>=4.2.2, <6.0.0
+ipykernel>=4.2.2
 ipywidgets>5.0.0
 notebook>=4.2
 tornado>=4
 requests_kerberos>=0.8.0
+nest_asyncio==1.5.5
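
Note: the new nest_asyncio==1.5.5 pin exists because ipykernel >= 6 runs do_execute as a coroutine on an already-running asyncio event loop, while the wrapper kernel still needs to block on the result synchronously. A minimal sketch of what the pinned library enables (illustrative only, not code from this commit; it just demonstrates the re-entrant run_until_complete that nest_asyncio provides):

import asyncio

import nest_asyncio


async def compute():
    # Stand-in for an async do_execute() call.
    return 42


async def caller():
    loop = asyncio.get_running_loop()
    # Without this patch, run_until_complete() below raises
    # "This event loop is already running".
    nest_asyncio.apply(loop)
    return loop.run_until_complete(compute())


print(asyncio.run(caller()))  # prints 42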

sparkmagic/setup.py

Lines changed: 2 additions & 1 deletion
@@ -88,10 +88,11 @@ def version(path):
         "pandas>=0.17.1",
         "numpy",
         "requests",
-        "ipykernel<6.0.0",
+        "ipykernel>=4.2.2",
         "ipywidgets>5.0.0",
         "notebook>=4.2",
         "tornado>=4",
         "requests_kerberos>=0.8.0",
+        "nest_asyncio==1.5.5",
     ],
 )

sparkmagic/sparkmagic/kernels/wrapperkernel/sparkkernelbase.py

Lines changed: 104 additions & 24 deletions
@@ -1,14 +1,10 @@
 # Copyright (c) 2015 [email protected]
 # Distributed under the terms of the Modified BSD License.
-try:
-    from asyncio import Future
-except ImportError:
-
-    class Future(object):
-        """A class nothing will use."""
+import asyncio
+import inspect
+import requests


-import requests
 from ipykernel.ipkernel import IPythonKernel
 from hdijupyterutils.ipythondisplay import IpythonDisplay

@@ -17,6 +13,55 @@ class Future(object):
 from sparkmagic.livyclientlib.exceptions import wrap_unexpected_exceptions
 from sparkmagic.kernels.wrapperkernel.usercodeparser import UserCodeParser

+# NOTE: This is a (hopefully) temporary workaround to accommodate async do_execute in ipykernel>=6
+import nest_asyncio
+
+
+# NOTE: This is a (hopefully) temporary workaround to accommodate async do_execute in ipykernel>=6
+# Taken from: https://github.com/jupyter/notebook/blob/eb3a1c24839205afcef0ba65ace2309d38300a2b/notebook/utils.py#L332
+def run_sync(maybe_async):
+    """If async, runs maybe_async and blocks until it has executed,
+    possibly creating an event loop.
+    If not async, just returns maybe_async as it is the result of something
+    that has already executed.
+    Parameters
+    ----------
+    maybe_async : async or non-async object
+        The object to be executed, if it is async.
+    Returns
+    -------
+    result :
+        Whatever the async object returns, or the object itself.
+    """
+    if not inspect.isawaitable(maybe_async):
+        # that was not something async, just return it
+        return maybe_async
+    # it is async, we need to run it in an event loop
+
+    def wrapped():
+        create_new_event_loop = False
+        result = None
+        loop = None
+        try:
+            loop = asyncio.get_event_loop()
+        except RuntimeError:
+            create_new_event_loop = True
+        else:
+            if loop.is_closed():
+                create_new_event_loop = True
+        if create_new_event_loop:
+            loop = asyncio.new_event_loop()
+            asyncio.set_event_loop(loop)
+        try:
+            result = loop.run_until_complete(maybe_async)
+        except RuntimeError as e:
+            if str(e) == "This event loop is already running":
+                # just return a Future, hoping that it will be awaited
+                result = asyncio.ensure_future(maybe_async)
+        return result
+
+    return wrapped()
+

 class SparkKernelBase(IPythonKernel):
     def __init__(
@@ -40,7 +85,16 @@ def __init__(
         # Override
         self.session_language = session_language

-        super(SparkKernelBase, self).__init__(**kwargs)
+        # NOTE: This is a (hopefully) temporary workaround to accommodate async do_execute in ipykernel>=6
+        # Patch loop.run_until_complete as early as possible
+        try:
+            nest_asyncio.apply()
+        except RuntimeError:
+            # nest_asyncio requires a running loop in order to patch.
+            # In tests the loop may not have been created yet.
+            pass
+
+        super().__init__(**kwargs)

         self.logger = SparkLog("{}_jupyter_kernel".format(self.session_language))
         self._fatal_error = None
@@ -54,11 +108,15 @@ def __init__(
         # Disable warnings for test env in HDI
         requests.packages.urllib3.disable_warnings()

-        if not kwargs.get("testing", False):
-            self._load_magics_extension()
-            self._change_language()
-            if conf.use_auto_viz():
-                self._register_auto_viz()
+        # Do not load magics in testing
+        if kwargs.get("testing", False):
+            return
+
+        # Load magics on init
+        self._load_magics_extension()
+        self._change_language()
+        if conf.use_auto_viz():
+            self._register_auto_viz()

     def do_execute(
         self, code, silent, store_history=True, user_expressions=None, allow_stdin=False
@@ -71,7 +129,9 @@ def f(self):
                 code, silent, store_history, user_expressions, allow_stdin
             )

-        return wrap_unexpected_exceptions(f, self._complete_cell)(self)
+        # Execute the code and handle exceptions
+        wrapped = wrap_unexpected_exceptions(f, self._complete_cell)
+        return wrapped(self)

     def do_shutdown(self, restart):
         # Cleanup
@@ -164,30 +224,50 @@ def _execute_cell(
     def _execute_cell_for_user(
         self, code, silent, store_history=True, user_expressions=None, allow_stdin=False
     ):
-        result = super(SparkKernelBase, self).do_execute(
+        result = super().do_execute(
            code, silent, store_history, user_expressions, allow_stdin
        )
-        if isinstance(result, Future):
-            result = result.result()
+
+        # In ipykernel 6, this returns native asyncio coroutine
+        if asyncio.iscoroutine(result):
+            return run_sync(result)
+
+        # In ipykernel 5, this returns gen.coroutine
+        if asyncio.isfuture(result):
+            return result.result()
+
+        # In ipykernel 4, this func is synchronous
        return result

    def _do_shutdown_ipykernel(self, restart):
-        return super(SparkKernelBase, self).do_shutdown(restart)
+        result = super().do_shutdown(restart)
+
+        # In tests, super() calls this SparkKernelBase.do_shutdown, which is async
+        if asyncio.iscoroutine(result):
+            return run_sync(result)
+
+        return result

    def _complete_cell(self):
-        """A method that runs a cell with no effect. Call this and return the value it
-        returns when there's some sort of error preventing the user's cell from executing; this
-        will register the cell from the Jupyter UI as being completed."""
+        """A method that runs a cell with no effect.
+
+        Call this and return the value it returns when there's some sort
+        of error preventing the user's cell from executing; this will
+        register the cell from the Jupyter UI as being completed.
+        """
        return self._execute_cell("None", False, True, None, False)

    def _show_user_error(self, message):
        self.logger.error(message)
        self.ipython_display.send_error(message)

    def _queue_fatal_error(self, message):
-        """Queues up a fatal error to be thrown when the next cell is executed; does not
-        raise an error immediately. We use this for errors that happen on kernel startup,
-        since IPython crashes if we throw an exception in the __init__ method."""
+        """Queues up a fatal error to be thrown when the next cell is executed;
+        does not raise an error immediately.
+
+        We use this for errors that happen on kernel startup, since
+        IPython crashes if we throw an exception in the __init__ method.
+        """
        self._fatal_error = message

    def _abort_with_fatal_error(self, message):
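
Note: with this change, super().do_execute() can hand back three different things depending on the ipykernel major version: a plain value (ipykernel 4), a future (ipykernel 5), or a native coroutine (ipykernel 6), and _execute_cell_for_user normalizes all of them to a synchronous result via run_sync. A self-contained sketch of that dispatch pattern (illustrative only; it re-implements a simplified run_sync and a hypothetical normalize_result helper inline rather than importing them from sparkmagic):

import asyncio
import inspect


def run_sync(maybe_async):
    # Simplified stand-in for the run_sync helper above: block on awaitables,
    # pass plain values through unchanged.
    if not inspect.isawaitable(maybe_async):
        return maybe_async
    loop = asyncio.new_event_loop()
    try:
        return loop.run_until_complete(maybe_async)
    finally:
        loop.close()


def normalize_result(result):
    # Hypothetical helper mirroring the dispatch in _execute_cell_for_user.
    if asyncio.iscoroutine(result):   # ipykernel >= 6
        return run_sync(result)
    if asyncio.isfuture(result):      # ipykernel 5
        return result.result()
    return result                     # ipykernel 4


async def fake_do_execute():
    return {"status": "ok"}


print(normalize_result(fake_do_execute()))   # ipykernel 6 style
print(normalize_result({"status": "ok"}))    # ipykernel 4 style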

sparkmagic/sparkmagic/livyclientlib/exceptions.py

Lines changed: 76 additions & 22 deletions
@@ -1,61 +1,67 @@
-from __future__ import print_function
+import asyncio
 import sys
 import traceback
 from sparkmagic.utils.constants import EXPECTED_ERROR_MSG, INTERNAL_ERROR_MSG


 # == EXCEPTIONS ==
 class LivyClientLibException(Exception):
-    """Base class for all LivyClientLib exceptions. All exceptions that are explicitly raised by
-    code in this package should be a subclass of LivyClientLibException. If you need to account for a
-    new error condition, either use one of the existing LivyClientLibException subclasses,
-    or create a new subclass with a descriptive name and add it to this file.
+    """Base class for all LivyClientLib exceptions. All exceptions that are
+    explicitly raised by code in this package should be a subclass of
+    LivyClientLibException. If you need to account for a new error condition,
+    either use one of the existing LivyClientLibException subclasses, or create
+    a new subclass with a descriptive name and add it to this file.

-    We distinguish between "expected" errors, which represent errors that a user is likely
-    to encounter in normal use, and "internal" errors, which represents exceptions that happen
-    due to a bug in the library. Check EXPECTED_EXCEPTIONS to see which exceptions
-    are considered "expected"."""
+    We distinguish between "expected" errors, which represent errors
+    that a user is likely to encounter in normal use, and "internal"
+    errors, which represents exceptions that happen due to a bug in the
+    library. Check EXPECTED_EXCEPTIONS to see which exceptions are
+    considered "expected".
+    """


 class HttpClientException(LivyClientLibException):
-    """An exception thrown by the HTTP client when it fails to make a request."""
+    """An exception thrown by the HTTP client when it fails to make a
+    request."""


 class LivyClientTimeoutException(LivyClientLibException):
     """An exception for timeouts while interacting with Livy."""


 class DataFrameParseException(LivyClientLibException):
-    """An internal error which suggests a bad implementation of dataframe parsing from JSON --
-    if we get a JSON parsing error when parsing the results from the Livy server, this exception
-    is thrown."""
+    """An internal error which suggests a bad implementation of dataframe
+    parsing from JSON -- if we get a JSON parsing error when parsing the
+    results from the Livy server, this exception is thrown."""


 class LivyUnexpectedStatusException(LivyClientLibException):
-    """An exception that will be shown if some unexpected error happens on the Livy side."""
+    """An exception that will be shown if some unexpected error happens on the
+    Livy side."""


 class SessionManagementException(LivyClientLibException):
-    """An exception that is thrown by the Session Manager when it is a
-    given session name is invalid in some way."""
+    """An exception that is thrown by the Session Manager when it is a given
+    session name is invalid in some way."""


 class BadUserConfigurationException(LivyClientLibException):
-    """An exception that is thrown when configuration provided by the user is invalid
-    in some way."""
+    """An exception that is thrown when configuration provided by the user is
+    invalid in some way."""


 class BadUserDataException(LivyClientLibException):
-    """An exception that is thrown when data provided by the user is invalid
-    in some way."""
+    """An exception that is thrown when data provided by the user is invalid in
+    some way."""


 class SqlContextNotFoundException(LivyClientLibException):
     """Exception that is thrown when the SQL context is not found."""


 class SparkStatementException(LivyClientLibException):
-    """Exception that is thrown when an error occurs while parsing or executing Spark statements."""
+    """Exception that is thrown when an error occurs while parsing or executing
+    Spark statements."""


 # It has to be a KeyboardInterrupt to interrupt the notebook
@@ -85,7 +91,8 @@ def _show_tb(exc_type, exc_val, tb):


 class SparkStatementCancellationFailedException(KeyboardInterrupt):
-    """Exception that is thrown when a Spark statement is interrupted but fails to be cancelled in Livy."""
+    """Exception that is thrown when a Spark statement is interrupted but fails
+    to be cancelled in Livy."""


 # == DECORATORS FOR EXCEPTION HANDLING ==
@@ -166,3 +173,50 @@ def wrapped(self, *args, **kwargs):
     wrapped.__name__ = f.__name__
     wrapped.__doc__ = f.__doc__
     return wrapped
+
+
+# async_wrap_unexpected_exceptions was created to handle async behavior ipykernel >=6
+# It was safer to create a separate async wrapper than modify the original to accommodate both use-cases
+def async_wrap_unexpected_exceptions(f, execute_if_error=None):
+    """A decorator that catches all exceptions from the async function f and alerts the user about them.
+    Self can be any object with a "logger" attribute and a "ipython_display" attribute.
+    All exceptions are logged as "unexpected" exceptions, and a request is made to the user to file an issue
+    at the Github repository. If there is an error, returns None if execute_if_error is None, or else
+    returns the output of the function execute_if_error.
+    Usage:
+    @async_wrap_unexpected_exceptions
+    async def fn(self, ...):
+        ..etc"""
+    from sparkmagic.utils import configuration as conf
+
+    async def handle_exception(self, e):
+        self.logger.error(
+            "ENCOUNTERED AN INTERNAL ERROR: {}\n\tTraceback:\n{}".format(
+                e, traceback.format_exc()
+            )
+        )
+        self.ipython_display.send_error(INTERNAL_ERROR_MSG.format(e))
+        if execute_if_error is None:
+            return None
+
+        result = execute_if_error()
+        if asyncio.iscoroutine(result):
+            return await result
+
+        return result
+
+    async def wrapped(self, *args, **kwargs):
+        try:
+            out = f(self, *args, **kwargs)
+            if asyncio.iscoroutine(out):
+                out = await out
+        except Exception as err:
+            if conf.all_errors_are_fatal():
+                raise err
+            return await handle_exception(self, err)
+        else:
+            return out
+
+    wrapped.__name__ = f.__name__
+    wrapped.__doc__ = f.__doc__
+    return wrapped
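
Note: async_wrap_unexpected_exceptions expects to decorate an async function whose first argument exposes logger and ipython_display attributes, mirroring the kernel object. A minimal usage sketch (assuming sparkmagic with this change and its dependencies are importable; FakeLogger, FakeDisplay, and kernel_like are stand-ins invented for the example):

import asyncio
from types import SimpleNamespace

from sparkmagic.livyclientlib.exceptions import async_wrap_unexpected_exceptions


class FakeLogger:
    def error(self, message):
        print("LOGGED:", message.splitlines()[0])


class FakeDisplay:
    def send_error(self, message):
        print("SHOWN TO USER:", message.splitlines()[0])


async def flaky(self):
    raise ValueError("boom")


# Any exception raised by the wrapped coroutine is logged and surfaced to the
# user instead of propagating (unless all_errors_are_fatal is configured).
safe_flaky = async_wrap_unexpected_exceptions(flaky)

kernel_like = SimpleNamespace(logger=FakeLogger(), ipython_display=FakeDisplay())
result = asyncio.run(safe_flaky(kernel_like))
print("result:", result)  # None, since no execute_if_error callback was given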
