-
Checked other resources
Commit to Help
Example Code

import os, json
from dotenv import load_dotenv
from langchain_openai import AzureChatOpenAI
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.runnables.history import RunnableWithMessageHistory
from app_assistant.history import SessionStore
load_dotenv(override=True)
from langchain_core.messages import AIMessage, HumanMessage
# Initialize the AzureChatOpenAI LLM from environment configuration.
# The endpoint, deployment name and API version must be present in the
# environment (e.g. via the .env file loaded above); os.environ[...] raises
# KeyError if any of them is missing.
llm = AzureChatOpenAI(
    azure_endpoint=os.environ["AZURE_OPENAI_ENDPOINT"],
    azure_deployment=os.environ["AZURE_OPENAI_CHAT_DEPLOYMENT_NAME"],
    openai_api_version=os.environ["AZURE_OPENAI_API_VERSION"],
    temperature=0.5,  # moderate randomness for conversational answers
)
# JSON Schema for the structured answer expected from the model.
# Top level requires three keys:
#   - "All_params_filled": bool, true once every mandatory box value is known
#   - "naturalLanguage":   human-readable summary of the box
#   - "proc":              the box itself as nested JSON
# NOTE(review): this schema is currently unused — the
# .with_structured_output(...) call below is commented out; presumably it is
# meant to be passed there. Verify before relying on it.
answer_schema = {
    "title": "box",
    "description": "Schema for the box.",
    "type": "object",
    "properties": {
        "All_params_filled": {
            "type": "boolean",
            "description": "Indicates if all mandatory parameters are filled.",
        },
        "naturalLanguage": {
            "type": "string",
            "description": "Natural language description of the box specifications.",
        },
        "proc": {
            "type": "object",
            "description": "Description of a box by parameters as json",
            "properties": {
                "box": {
                    "type": "object",
                    "description": "Box details.",
                    "properties": {
                        # id and name are nullable: they may be unknown.
                        "id": {
                            "type": ["integer", "null"],
                            "description": "ID of the box.",
                        },
                        "name": {
                            "type": ["string", "null"],
                            "description": "Name of the box.",
                        },
                        "dimensions": {
                            "type": "object",
                            "description": "Dimensions of the box.",
                            "properties": {
                                "height": {
                                    "type": "integer",
                                    "description": "Height of the box.",
                                },
                                "length": {
                                    "type": "integer",
                                    "description": "Length of the box.",
                                },
                                "weight": {
                                    "type": "integer",
                                    "description": "Weight of the box.",
                                },
                                "width": {
                                    "type": "integer",
                                    "description": "Width of the box.",
                                },
                            },
                            # All four dimension values are mandatory.
                            "required": ["height", "length", "weight", "width"],
                        },
                    },
                }
            },
            "required": [
                "box",
            ],
        },
    },
    "required": ["All_params_filled", "naturalLanguage", "proc"],
}
# One-shot example pair injected into the prompt via the "{examples}"
# placeholder: a user request and the matching assistant answer serialized
# as a JSON string in exactly the shape described by answer_schema.
examples = [
    HumanMessage(
        "I want to have a json description of boxes with a length of 300mm, a width of 200mm and a height of 50mm, with a weight of 3kg per box.",
        name="example_user",
    ),
    AIMessage(
        name="example_assistant",
        # The assistant reply is the JSON answer rendered as a string.
        content=json.dumps(
            {
                "All_params_filled": True,
                "naturalLanguage": "This is your box: \nLength: 300mm \nWidth: 200mm \nHeight: 50mm \nWeight: 3kg per box",
                "proc": {
                    "box": {
                        # id/name were not given by the user, hence null.
                        "id": None,
                        "name": None,
                        "dimensions": {
                            "height": 50,
                            "length": 300,
                            "weight": 3,
                            "width": 200,
                        },
                    }
                },
            }
        ),
    ),
]
# System instructions for the model. Fixes spelling/grammar defects in the
# original prompt ("an bot", "missings values", "yourselve", "for it" vs
# "for them") — typos in an LLM prompt degrade instruction-following, and
# this text is sent verbatim to the model.
system_prompt = """
You are a bot that provides a json representation for a box given as natural language. Your output shall be in json format.
The answer shall be in json format as string. The json answer has three keys "All_params_filled", "naturalLanguage" and "proc".
The naturalLanguage part shall contain the summary of the result in a user readable form.
If there are missing values for the mandatory keys, include the request for them in the naturalLanguage part.
The box as json shall be the value of the "proc" key.
If there are missing values for a key, fill them with null values.
If there is a value for every mandatory key, set the key "All_params_filled" in your answer to true, otherwise set it to false.
Now introduce yourself and ask the user for input.
"""
def _get_prompt_template() -> ChatPromptTemplate:
    """Assemble the chat prompt: system instructions, then the few-shot
    examples, then the running chat history, and finally the user turn."""
    message_specs = [
        ("system", system_prompt),
        ("placeholder", "{examples}"),
        ("placeholder", "{chat_history}"),
        ("human", "{input}"),
    ]
    return ChatPromptTemplate.from_messages(message_specs)
# Process-wide store mapping session ids to their chat message histories.
session_store = SessionStore()
def _get_chat_llm_with_history():
    """Wire the prompt template and the LLM into a runnable that records and
    replays per-session chat history via the session store."""
    chat_llm = llm  # .with_structured_output(json_schema, include_raw=True)
    pipeline = _get_prompt_template() | chat_llm
    return RunnableWithMessageHistory(
        runnable=pipeline,
        get_session_history=session_store.get_session_history,
        input_messages_key="input",
        history_messages_key="chat_history",
        output_messages_key=None,
    )
def interact_with_llm(user_question: str, session_id: str):
    """Send one user turn to the history-aware chain and return the raw
    model response; `session_id` selects which conversation to continue."""
    chain = _get_chat_llm_with_history()
    payload = {"examples": examples, "input": user_question}
    return chain.invoke(
        payload,
        config={"configurable": {"session_id": session_id}},
    )
# Manual smoke test: two turns in the same session, so the second message
# ("Weight: 10kg") can complete the box description started by "100x200x300".
response = interact_with_llm("100x200x300", "some-test-session-id")
print(response)
response = interact_with_llm("Weight: 10kg", "some-test-session-id")
print(response)

Description
System Info

langchain-community==0.3.1
platform: Windows |
Beta Was this translation helpful? Give feedback.
Replies: 1 comment 1 reply
-
-
Hey @filgit! I'm here to help you out while you wait for a human maintainer. Let's tackle this issue together! I found a similar discussion that might help you with your issue. In the discussion Use RunnableWithMessageHistory with structured output, several workarounds for combining structured output with message history were mentioned. Here are some suggested solutions from the discussion:
Additionally, you might need to adjust your chain setup accordingly. These approaches might require some adjustments to your current setup, but they could help in resolving the issue with structured output and message history integration. |
Beta Was this translation helpful? Give feedback.
At the time of writing my original post, I was not aware of how to wrap the tool response into an AI message.
I ran some tests and it's working now.