Commit 24af33cb authored by Theresa Pollinger

welcome message in new notebook via kernel.js, generally using markdown replies now

parent e172bb58
......@@ -7,13 +7,10 @@ from interview_kernel import Interview
from jupyter_client.kernelspec import KernelSpecManager
from IPython.utils.tempdir import TemporaryDirectory
from shutil import copyfile
kernel_json = Interview.kernel_json
#{
# "argv": [sys.executable, "-m", "interview_kernel", "-f", "{connection_file}"],
# "display_name": "Interview",
# "language": "text",
#}
def install_my_kernel_spec(user=True, prefix=None):
with TemporaryDirectory() as td:
......@@ -21,9 +18,35 @@ def install_my_kernel_spec(user=True, prefix=None):
with open(os.path.join(td, 'kernel.json'), 'w') as f:
json.dump(kernel_json, f, sort_keys=True)
# TODO: Copy any resources
try:
# copyfile('./kernel.js', os.path.join(td, 'kernel.js'))
interview = Interview()
with open(os.path.join(td, 'kernel.js'), 'w') as f:
# javascript code that sets an initial markdown cell
js = """define(['base/js/namespace'], function(Jupyter)
{{
function onload()
{{
if (Jupyter.notebook.get_cells().length ===1)
{{
Jupyter.notebook.insert_cell_above('markdown').set_text(`{}`);
}}
console.log("interview kernel.js loaded")
}}
return {{
onload: onload
}};
}});""".format(interview.my_markdown_greeting.replace("`", "\\`"))
f.write(js)
print(js)
except Exception:
print('could not copy kernel.js, will not see initial message in notebook')
raise
print('Installing Jupyter kernel spec')
KernelSpecManager().install_kernel_spec(td, 'Interview', user=user, replace=True, prefix=prefix)
print("Installing Jupyter kernel spec")
KernelSpecManager().install_kernel_spec(td, 'Interview', user=user, prefix=prefix)
def _is_root():
try:
......
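For context: install.py dumps Interview.kernel_json into kernel.json before handing the temporary directory to KernelSpecManager. A rough sketch of that spec, assuming it still matches the commented-out dict removed above (only "argv" and "name" appear verbatim in the class attribute further down in interview_kernel.py):

import json
import sys

# Assumed shape of Interview.kernel_json; "display_name" and "language"
# come from the removed comment, not from the current class attribute.
kernel_json = {
    "argv": [sys.executable, "-m", "interview_kernel", "-f", "{connection_file}"],
    "display_name": "Interview",
    "language": "text",
    "name": "interview_kernel",
}

# This is roughly what install.py writes into <tempdir>/kernel.json.
print(json.dumps(kernel_json, sort_keys=True, indent=2))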
......@@ -15,13 +15,14 @@ from bokeh.io import output_notebook
from pde_state_machine import *
from string_handling import build_url, get_recursively
from distutils.util import strtobool
"""This is a Jupyter kernel derived from MetaKernel. To use it, install it with the install.py script and run
"jupyter notebook --debug --NotebookApp.token='' " from terminal. """
class Interview(MetaKernel):
implementation = 'Interview'
implementation_version = '1.0'
implementation_version = '0.1'
language = 'text'
language_version = '0.1'
language_info = {
......@@ -30,11 +31,6 @@ class Interview(MetaKernel):
'file_extension': '.txt',
'help_links': MetaKernel.help_links,
}
banner = "Interview kernel\n\n" \
"Hello, " + getpass.getuser() + "! I am " + "TheInterview" + \
", your partial differential equations and simulations expert. " \
"Let's set up a simulation together.\n" \
"Please enter anything to start the interview."
kernel_json = {
"argv": [
......@@ -44,6 +40,20 @@ class Interview(MetaKernel):
"name": "interview_kernel"
}
banner = \
"""**Hello, """ + getpass.getuser() + """! I am TheInterview, your partial differential equations and simulations expert.**
Let's set up a model and simulation together.
To get explanations, enter `explain <optional keyword>`.
To see a recap of what we know so far, enter `recap <optional keyword>`.
To interactively visualize the current theory graph, enter `tgview` or `tgview mpd`.
Otherwise, you can always answer with LaTeX-type input.
You can inspect the currently loaded MMT theories under http://localhost:43397
"""
def __init__(self, **kwargs):
self.state_machine = PDE_States(self.poutput, self.update_prompt, self.please_prompt, self.display_html)
......@@ -51,9 +61,6 @@ class Interview(MetaKernel):
# call superclass constructor
super(Interview, self).__init__(**kwargs)
self.do_execute("%matplotlib nbagg")
#plt.ion()
# To make custom magics happen, cf. https://github.com/Calysto/metakernel
# from IPython import get_ipython
# from metakernel import register_ipython_magics
......@@ -62,7 +69,11 @@ class Interview(MetaKernel):
self.update_prompt()
self.poutstring = ""# to collect string output to send
self.outstream_name = 'stdout'
self.richcontent = None # to collect rich contents (images etc)
# already feed some input into the state machine, to capture the initial output and have it displayed via kernel.js
self.state_machine.handle_state_dependent_input("anything")  # TODO: compatibility with non-notebook frontends?
self.my_markdown_greeting = Interview.banner + self.poutstring
self.poutstring = ""
# bokeh notebook setup
output_notebook()
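The greeting capture above works because the state machine reports through self.poutput, which accumulates text into self.poutstring until it is flushed. The actual poutput helper is not part of this diff; a minimal self-contained stand-in illustrating the assumed behaviour:

class OutputCollector:
    """Hypothetical stand-in for the kernel's output collection (not in this diff)."""
    def __init__(self):
        self.poutstring = ""

    def poutput(self, text, end='\n'):
        # Accumulate output so it can later be sent as one markdown reply.
        self.poutstring += str(text) + end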
......@@ -75,7 +86,7 @@ class Interview(MetaKernel):
############# input processing if not explain or undo
# def do_execute(self, code, silent=False, store_history=True, user_expressions=None,
# allow_stdin=False):
def do_execute_direct(self, code, silent=False):
def do_execute_direct(self, code, silent=False, allow_stdin=True):
"""This is where the user input enters our code"""
arg = LatexNodes2Text().latex_to_text(code)
......@@ -85,14 +96,24 @@ class Interview(MetaKernel):
self.state_machine.handle_state_dependent_input(arg)
if not silent:
stream_content = {'name': self.outstream_name, 'text': self.poutstring}
self.send_response(self.iopub_socket, 'stream', stream_content)
if self.richcontent is not None:
# We send the display_data message with the contents.
self.send_response(self.iopub_socket, 'display_data', self.richcontent)
self.richcontent = None
#if self.outstream_name == "stderr": #TODO make errors markdown but red
# # string output
# stream_content = {'name': self.outstream_name, 'text': self.poutstring}
# self.send_response(self.iopub_socket, 'stream', stream_content)
# data_content = {
# "ename": "InterviewError",
# "evalue": self.poutstring,
# "traceback": [self.poutstring],
# }
# self.send_response(self.iopub_socket, 'error', data_content)
#else:
# for other mime types, cf http://ipython.org/ipython-doc/stable/notebook/nbformat.html
data_content = {"data": {
"text/markdown": self.poutstring,
},
"metadata": {}
}
self.send_response(self.iopub_socket, 'display_data', data_content)
self.poutstring = ""
self.outstream_name = 'stdout'
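This block is the heart of the "markdown replies" change from the commit message: instead of a plain 'stream' message, the collected output now goes out as a 'display_data' message carrying a text/markdown bundle. A standalone sketch of the pattern (send_markdown is a made-up helper name, not part of the diff):

def send_markdown(kernel, text):
    # Send collected output as rendered markdown rather than a plain text stream.
    data_content = {
        "data": {"text/markdown": text},
        "metadata": {},
    }
    kernel.send_response(kernel.iopub_socket, 'display_data', data_content)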
......@@ -107,7 +128,7 @@ class Interview(MetaKernel):
self.state_machine.pass_other = pass_other
self.display_widget()
def prompt_input_handling(self, arg):
def prompt_input_handling(self, arg): # TODO make this widget-ed
""" If we asked for a yes-no answer, execute what was specified in please_prompt.
return true if the input was handled here, and false if not."""
if self.state_machine.prompted:
......
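The argv in kernel_json launches the kernel with "python -m interview_kernel", so the package needs a __main__ entry point that is not shown in this diff. Assuming it follows the usual MetaKernel pattern, it would look roughly like this:

# interview_kernel/__main__.py (assumed, not part of this diff)
from interview_kernel import Interview

if __name__ == '__main__':
    Interview.run_as_main()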
/*
cf.
https://github.com/jupyter/notebook/issues/1451
https://jupyter-notebook.readthedocs.io/en/latest/extending/frontend_extensions.html
to install (from shell): jupyter nbextension install preload.js --user
to enable (from shell): jupyter nbextension enable preload
TODO: make the inserted cell markdown,
and only do it if kernel_name == interview_kernel;
so, according to jupyter-book, we are making it a kernel.js now.
*/
define([
'base/js/namespace'
], function(
Jupyter
) {
function onload() {
if (Jupyter.notebook.get_cells().length===1){
Jupyter.notebook.insert_cell_above('code', 0).set_text("pre-text");
}
console.log("interview kernel.js loaded")
}
return {
onload: onload
};
});