Commit 16cbe16

WIP: migrate Jupyternaut code into this package
1 parent e27be18 commit 16cbe16

4 files changed: +172 -2 lines changed

jupyter_ai_jupyternaut/extension_app.py

Lines changed: 9 additions & 2 deletions

```diff
@@ -8,6 +8,7 @@
 
 from .config import ConfigManager, ConfigRestAPI
 from .handlers import RouteHandler
+from .jupyternaut import JupyternautPersona
 from .models import ChatModelsRestAPI, ModelParametersRestAPI
 from .secrets import EnvSecretsManager, SecretsRestAPI
 
@@ -176,7 +177,7 @@ def initialize_settings(self):
         }
 
         # Initialize ConfigManager
-        self.settings["jupyternaut.config_manager"] = ConfigManager(
+        config_manager = ConfigManager(
             config=self.config,
             log=self.log,
             allowed_providers=self.allowed_providers,
@@ -185,8 +186,14 @@ def initialize_settings(self):
             blocked_models=self.blocked_models,
             defaults=defaults,
         )
+
+        # Bind ConfigManager instance to global settings dictionary
+        self.settings["jupyternaut.config_manager"] = config_manager
 
-        # Initialize SecretsManager
+        # Bind ConfigManager instance to Jupyternaut as a class variable
+        JupyternautPersona.config_manager = config_manager
+
+        # Initialize SecretsManager and bind it to global settings dictionary
         self.settings["jupyternaut.secrets_manager"] = EnvSecretsManager(parent=self)
 
     def _link_jupyter_server_extension(self, server_app: ServerApp):
```
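The `JupyternautPersona.config_manager = config_manager` line above relies on ordinary Python class-attribute lookup: any persona instance created afterwards resolves `self.config_manager` through the class, with no per-instance wiring. A minimal standalone sketch of that pattern (the classes below are simplified stand-ins, not the real `ConfigManager` or `BasePersona`):

```python
# Simplified stand-ins for illustration only; not the real ConfigManager/BasePersona.
class FakeConfigManager:
    chat_model = "example-model"  # placeholder value


class Persona:
    # Left unset until the server extension binds a shared instance at startup.
    config_manager = None

    def describe(self) -> str:
        # Instance attribute lookup falls back to the class attribute.
        if getattr(self, "config_manager", None) is None:
            return "no config manager bound"
        return f"using {self.config_manager.chat_model}"


# What the extension does once during initialize_settings():
Persona.config_manager = FakeConfigManager()
print(Persona().describe())  # -> "using example-model"
```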
Lines changed: 1 addition & 0 deletions (new file)

```python
from .jupyternaut import JupyternautPersona
```
Lines changed: 107 additions & 0 deletions (new file)

```python
from typing import Any, Optional

from jupyterlab_chat.models import Message
from litellm import acompletion

from ..base_persona import BasePersona, PersonaDefaults
from ..persona_manager import SYSTEM_USERNAME
from .prompt_template import (
    JUPYTERNAUT_SYSTEM_PROMPT_TEMPLATE,
    JupyternautSystemPromptArgs,
)


class JupyternautPersona(BasePersona):
    """
    The Jupyternaut persona, the main persona provided by Jupyter AI.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

    @property
    def defaults(self):
        return PersonaDefaults(
            name="Jupyternaut",
            avatar_path="/api/ai/static/jupyternaut.svg",
            description="The standard agent provided by JupyterLab. Currently has no tools.",
            system_prompt="...",
        )

    async def process_message(self, message: Message) -> None:
        if not hasattr(self, "config_manager"):
            self.send_message(
                "Jupyternaut requires the `jupyter_ai_jupyternaut` server extension package.\n\n"
                "Please make sure to first install that package in your environment & restart the server."
            )
            return
        if not self.config_manager.chat_model:
            self.send_message(
                "No chat model is configured.\n\n"
                "You must set one first in the Jupyter AI settings, found in 'Settings > AI Settings' from the menu bar."
            )
            return

        model_id = self.config_manager.chat_model
        model_args = self.config_manager.chat_model_args
        context_as_messages = self.get_context_as_messages(model_id, message)
        response_aiter = await acompletion(
            **model_args,
            model=model_id,
            messages=[
                *context_as_messages,
                {
                    "role": "user",
                    "content": message.body,
                },
            ],
            stream=True,
        )

        await self.stream_message(response_aiter)

    def get_context_as_messages(
        self, model_id: str, message: Message
    ) -> list[dict[str, Any]]:
        """
        Returns the current context, including attachments and recent messages,
        as a list of messages accepted by `litellm.acompletion()`.
        """
        system_msg_args = JupyternautSystemPromptArgs(
            model_id=model_id,
            persona_name=self.name,
            context=self.process_attachments(message),
        ).model_dump()

        system_msg = {
            "role": "system",
            "content": JUPYTERNAUT_SYSTEM_PROMPT_TEMPLATE.render(**system_msg_args),
        }

        context_as_messages = [system_msg, *self._get_history_as_messages()]
        return context_as_messages

    def _get_history_as_messages(self, k: Optional[int] = 2) -> list[dict[str, Any]]:
        """
        Returns the current history as a list of messages accepted by
        `litellm.acompletion()`.
        """
        # TODO: consider bounding history based on message size (e.g. total
        # char/token count) instead of message count.
        all_messages = self.ychat.get_messages()

        # Gather the last k * 2 messages and return them. We exclude the last
        # message since that is the human message just submitted by the user.
        start_idx = 0 if k is None else -2 * k - 1
        recent_messages: list[Message] = all_messages[start_idx:-1]

        history: list[dict[str, Any]] = []
        for msg in recent_messages:
            role = (
                "assistant"
                if msg.sender.startswith("jupyter-ai-personas::")
                else "system" if msg.sender == SYSTEM_USERNAME else "user"
            )
            history.append({"role": role, "content": msg.body})

        return history
```
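The history window in `_get_history_as_messages` above is easy to misread: with the default `k=2`, `start_idx` becomes `-5`, so the slice `[start_idx:-1]` keeps the four messages preceding the one just submitted. A toy check of that arithmetic (plain strings instead of the real `YChat` message objects):

```python
# Toy illustration of the slicing in _get_history_as_messages; not the real YChat API.
all_messages = [f"msg{i}" for i in range(10)]  # msg9 is the user's newest message

k = 2
start_idx = 0 if k is None else -2 * k - 1  # -5
recent = all_messages[start_idx:-1]         # newest message excluded
print(recent)  # -> ['msg5', 'msg6', 'msg7', 'msg8']

# k=None keeps the whole history, still excluding the newest message:
print(all_messages[0:-1])  # -> ['msg0', ..., 'msg8']
```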
Lines changed: 55 additions & 0 deletions (new file)

```python
from typing import Optional

from jinja2 import Template
from pydantic import BaseModel

_JUPYTERNAUT_SYSTEM_PROMPT_FORMAT = """
<instructions>

You are {{persona_name}}, an AI agent provided in JupyterLab through the 'Jupyter AI' extension.

Jupyter AI is an installable software package listed on PyPI and Conda Forge as `jupyter-ai`.

When installed, Jupyter AI adds a chat experience in JupyterLab that allows multiple users to collaborate with one or more agents like yourself.

You are not a language model, but rather an AI agent powered by a foundation model `{{model_id}}`.

You are receiving a request from a user in JupyterLab. Your goal is to fulfill this request to the best of your ability.

If you do not know the answer to a question, answer truthfully by responding that you do not know.

You should use Markdown to format your response.

Any code in your response must be enclosed in Markdown fenced code blocks (with triple backticks before and after).

Any mathematical notation in your response must be expressed in LaTeX markup and enclosed in LaTeX delimiters.

- Example of a correct response: The area of a circle is \\(\\pi * r^2\\).

All dollar quantities (of USD) must be formatted in LaTeX, with the `$` symbol escaped by a single backslash `\\`.

- Example of a correct response: `You have \\(\\$80\\) remaining.`

You will receive any provided context and a relevant portion of the chat history.

The user's request is located at the last message. Please fulfill the user's request to the best of your ability.
</instructions>

<context>
{% if context %}The user has shared the following context:

{{context}}
{% else %}The user did not share any additional context.{% endif %}
</context>
""".strip()


JUPYTERNAUT_SYSTEM_PROMPT_TEMPLATE: Template = Template(
    _JUPYTERNAUT_SYSTEM_PROMPT_FORMAT
)


class JupyternautSystemPromptArgs(BaseModel):
    persona_name: str
    model_id: str
    context: Optional[str] = None
```
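The template and args model above can be exercised on their own, which is handy for inspecting the rendered system prompt. A minimal sketch, assuming it runs alongside the definitions above (the model ID and context values are made-up examples):

```python
# Minimal sketch: render the Jupyternaut system prompt outside the server.
# Assumes JupyternautSystemPromptArgs and JUPYTERNAUT_SYSTEM_PROMPT_TEMPLATE
# from the module above are in scope; the argument values are examples only.
args = JupyternautSystemPromptArgs(
    persona_name="Jupyternaut",
    model_id="example-provider/example-model",
    context="The user attached a notebook cell that computes a rolling mean.",
).model_dump()

prompt = JUPYTERNAUT_SYSTEM_PROMPT_TEMPLATE.render(**args)
print(prompt)
```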
