From 39ab6e0756b9eeab58098b030e5908ccfaf267d3 Mon Sep 17 00:00:00 2001
From: "David L. Qiu"
Date: Wed, 15 Jan 2025 15:09:28 -0800
Subject: [PATCH 1/4] use StrOutputParser in default chat

---
 packages/jupyter-ai/jupyter_ai/chat_handlers/default.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/packages/jupyter-ai/jupyter_ai/chat_handlers/default.py b/packages/jupyter-ai/jupyter_ai/chat_handlers/default.py
index 266ad73ad..82b609a11 100644
--- a/packages/jupyter-ai/jupyter_ai/chat_handlers/default.py
+++ b/packages/jupyter-ai/jupyter_ai/chat_handlers/default.py
@@ -5,6 +5,7 @@
 from jupyter_ai_magics.providers import BaseProvider
 from langchain_core.runnables import ConfigurableFieldSpec
 from langchain_core.runnables.history import RunnableWithMessageHistory
+from langchain_core.output_parsers import StrOutputParser
 
 from ..context_providers import ContextProviderException, find_commands
 from .base import BaseChatHandler, SlashCommandRoutingType
@@ -37,7 +38,7 @@ def create_llm_chain(
         self.llm = llm
         self.prompt_template = prompt_template
 
-        runnable = prompt_template | llm  # type:ignore
+        runnable = prompt_template | llm | StrOutputParser()  # type:ignore
         if not llm.manages_history:
             runnable = RunnableWithMessageHistory(
                 runnable=runnable,  # type:ignore[arg-type]

From 78ba46a6989326d7a45d7d1acc94f42b7253d872 Mon Sep 17 00:00:00 2001
From: "David L. Qiu"
Date: Wed, 15 Jan 2025 15:10:26 -0800
Subject: [PATCH 2/4] encourage using StrOutputParser in docs

---
 docs/source/developers/index.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/source/developers/index.md b/docs/source/developers/index.md
index 46b6c5719..465b5d1ee 100644
--- a/docs/source/developers/index.md
+++ b/docs/source/developers/index.md
@@ -492,7 +492,7 @@ def create_llm_chain(
     prompt_template = FIX_PROMPT_TEMPLATE
     self.prompt_template = prompt_template
 
-    runnable = prompt_template | llm  # type:ignore
+    runnable = prompt_template | llm | StrOutputParser()  # type:ignore
     self.llm_chain = runnable
 ```
 

From b69e2f7bc529c146f7a007a66e237835d15cdfc5 Mon Sep 17 00:00:00 2001
From: "David L. Qiu"
Date: Wed, 15 Jan 2025 15:29:03 -0800
Subject: [PATCH 3/4] pre-commit

---
 packages/jupyter-ai/jupyter_ai/chat_handlers/default.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/packages/jupyter-ai/jupyter_ai/chat_handlers/default.py b/packages/jupyter-ai/jupyter_ai/chat_handlers/default.py
index 82b609a11..852220be1 100644
--- a/packages/jupyter-ai/jupyter_ai/chat_handlers/default.py
+++ b/packages/jupyter-ai/jupyter_ai/chat_handlers/default.py
@@ -3,9 +3,9 @@
 
 from jupyter_ai.models import HumanChatMessage
 from jupyter_ai_magics.providers import BaseProvider
+from langchain_core.output_parsers import StrOutputParser
 from langchain_core.runnables import ConfigurableFieldSpec
 from langchain_core.runnables.history import RunnableWithMessageHistory
-from langchain_core.output_parsers import StrOutputParser
 
 from ..context_providers import ContextProviderException, find_commands
 from .base import BaseChatHandler, SlashCommandRoutingType

From c6cfe8f305704f5147d332d52581031c0f74da62 Mon Sep 17 00:00:00 2001
From: "David L. Qiu"
Date: Wed, 15 Jan 2025 15:30:01 -0800
Subject: [PATCH 4/4] use StrOutputParser in /fix

---
 packages/jupyter-ai/jupyter_ai/chat_handlers/fix.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/packages/jupyter-ai/jupyter_ai/chat_handlers/fix.py b/packages/jupyter-ai/jupyter_ai/chat_handlers/fix.py
index 390b93cf6..27ec4d024 100644
--- a/packages/jupyter-ai/jupyter_ai/chat_handlers/fix.py
+++ b/packages/jupyter-ai/jupyter_ai/chat_handlers/fix.py
@@ -3,6 +3,7 @@
 from jupyter_ai.models import CellWithErrorSelection, HumanChatMessage
 from jupyter_ai_magics.providers import BaseProvider
 from langchain.prompts import PromptTemplate
+from langchain_core.output_parsers import StrOutputParser
 
 from .base import BaseChatHandler, SlashCommandRoutingType
 
@@ -76,7 +77,7 @@ def create_llm_chain(
         self.llm = llm
         prompt_template = FIX_PROMPT_TEMPLATE
 
-        runnable = prompt_template | llm  # type:ignore
+        runnable = prompt_template | llm | StrOutputParser()  # type:ignore
         self.llm_chain = runnable
 
     async def process_message(self, message: HumanChatMessage):