
Commit 34db378

Enhance langgraph integration to preserve metadata (#1878)
# Enhance langgraph integration to preserve AI metadata

## Description

This PR updates the `langgraph.py` integration to ensure that metadata is preserved. This enhancement is crucial for multi-agent scenarios where identifying the source AI is important for evaluation.

## Changes

- Updated `langgraph.py` to ensure AI names and metadata are preserved.

## Motivation and Context

In the current implementation, metadata such as the name assigned to an AI is not saved. In the era of multi-agent systems, it is essential to know which AI made a particular statement for accurate evaluation. This update addresses the issue by preserving the necessary metadata.

### Example Code and Output

```python
import json
from typing import List, Union

from langchain_core.messages import AIMessage, HumanMessage, SystemMessage, ToolMessage

import ragas.messages as r
from ragas.integrations.langgraph import convert_message_with_metadata


def test_convert_message_with_metadata():
    from langchain_core.messages import HumanMessage, AIMessage

    human_message = HumanMessage(
        content="Hello", name="me", additional_kwargs={"key1": "value1"}
    )
    ai_message = AIMessage(
        content="Hi",
        name="ai_1",
        additional_kwargs={
            "tool_calls": [
                {"function": {"name": "tool1", "arguments": '{"arg1": "val1"}'}}
            ]
        },
    )

    converted_messages = convert_message_with_metadata([human_message, ai_message])
    for msg in converted_messages:
        print(f"Content: {msg.content}, Metadata: {msg.metadata}")


if __name__ == "__main__":
    test_convert_message_with_metadata()
```

Output:

```
Content: Hello, Metadata: {'additional_kwargs': {'key1': 'value1'}, 'response_metadata': {}, 'type': 'human', 'name': 'me', 'id': None, 'example': False}
Content: Hi, Metadata: {'additional_kwargs': {'tool_calls': [{'function': {'name': 'tool1', 'arguments': '{"arg1": "val1"}'}}]}, 'response_metadata': {}, 'type': 'ai', 'name': 'ai_1', 'id': None, 'example': False, 'tool_calls': [{'name': 'tool1', 'args': {'arg1': 'val1'}, 'id': None, 'type': 'tool_call'}], 'invalid_tool_calls': [], 'usage_metadata': None}
```
1 parent 0626b5d commit 34db378

File tree

1 file changed: +43 −18 lines changed


src/ragas/integrations/langgraph.py

Lines changed: 43 additions & 18 deletions
```diff
@@ -7,20 +7,22 @@
 
 
 def convert_to_ragas_messages(
-    messages: List[Union[HumanMessage, SystemMessage, AIMessage, ToolMessage]]
+    messages: List[Union[HumanMessage, SystemMessage, AIMessage, ToolMessage]], metadata: bool = False
 ) -> List[Union[r.HumanMessage, r.AIMessage, r.ToolMessage]]:
     """
-    Convert LangChain messages into Ragas messages for agent evaluation.
+    Convert LangChain messages into Ragas messages with metadata for agent evaluation.
 
     Parameters
     ----------
     messages : List[Union[HumanMessage, SystemMessage, AIMessage, ToolMessage]]
        List of LangChain message objects to be converted.
+    metadata : bool, optional (default=False)
+        Whether to include metadata in the converted messages.
 
     Returns
     -------
     List[Union[r.HumanMessage, r.AIMessage, r.ToolMessage]]
-        List of corresponding Ragas message objects.
+        List of corresponding Ragas message objects with metadata.
 
     Raises
     ------
@@ -42,14 +44,30 @@ def _validate_string_content(message, message_type: str) -> str:
             )
         return message.content
 
-    MESSAGE_TYPE_MAP = {
-        HumanMessage: lambda m: r.HumanMessage(
-            content=_validate_string_content(m, "HumanMessage")
-        ),
-        ToolMessage: lambda m: r.ToolMessage(
-            content=_validate_string_content(m, "ToolMessage")
-        ),
-    }
+    def _extract_metadata(message) -> dict:
+
+        return {k: v for k, v in message.__dict__.items() if k != "content"}
+
+    if metadata:
+        MESSAGE_TYPE_MAP = {
+            HumanMessage: lambda m: r.HumanMessage(
+                content=_validate_string_content(m, "HumanMessage"),
+                metadata=_extract_metadata(m)
+            ),
+            ToolMessage: lambda m: r.ToolMessage(
+                content=_validate_string_content(m, "ToolMessage"),
+                metadata=_extract_metadata(m)
+            ),
+        }
+    else:
+        MESSAGE_TYPE_MAP = {
+            HumanMessage: lambda m: r.HumanMessage(
+                content=_validate_string_content(m, "HumanMessage")
+            ),
+            ToolMessage: lambda m: r.ToolMessage(
+                content=_validate_string_content(m, "ToolMessage")
+            ),
+        }
 
     def _extract_tool_calls(message: AIMessage) -> List[r.ToolCall]:
         tool_calls = message.additional_kwargs.get("tool_calls", [])
@@ -61,18 +79,25 @@ def _extract_tool_calls(message: AIMessage) -> List[r.ToolCall]:
             for tool_call in tool_calls
         ]
 
-    def _convert_ai_message(message: AIMessage) -> r.AIMessage:
+    def _convert_ai_message(message: AIMessage, metadata: bool) -> r.AIMessage:
         tool_calls = _extract_tool_calls(message) if message.additional_kwargs else None
-        return r.AIMessage(
-            content=_validate_string_content(message, "AIMessage"),
-            tool_calls=tool_calls,
-        )
+        if metadata:
+            return r.AIMessage(
+                content=_validate_string_content(message, "AIMessage"),
+                tool_calls=tool_calls,
+                metadata=_extract_metadata(message)
+            )
+        else:
+            return r.AIMessage(
+                content=_validate_string_content(message, "AIMessage"),
+                tool_calls=tool_calls
+            )
 
-    def _convert_message(message):
+    def _convert_message(message, metadata: bool = False):
         if isinstance(message, SystemMessage):
             return None  # Skip SystemMessages
         if isinstance(message, AIMessage):
-            return _convert_ai_message(message)
+            return _convert_ai_message(message, metadata)
         converter = MESSAGE_TYPE_MAP.get(type(message))
         if converter is None:
             raise ValueError(f"Unsupported message type: {type(message).__name__}")
```
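
For quick reference, a minimal usage sketch of the `metadata` flag that this commit adds to `convert_to_ragas_messages`. The sample messages mirror the PR description's example; the sketch assumes the `ragas.messages` classes accept the `metadata` field the diff passes to them.

```python
from langchain_core.messages import AIMessage, HumanMessage

from ragas.integrations.langgraph import convert_to_ragas_messages

# Sample messages carrying names and extra kwargs, as in the PR description.
human_message = HumanMessage(
    content="Hello", name="me", additional_kwargs={"key1": "value1"}
)
ai_message = AIMessage(content="Hi", name="ai_1")

# metadata=True (new in this commit) copies every LangChain field except
# `content` into each converted message's `metadata` dict.
ragas_messages = convert_to_ragas_messages([human_message, ai_message], metadata=True)

for msg in ragas_messages:
    # Expected roughly: "Hello me" and "Hi ai_1" -- the AI's name survives conversion.
    print(msg.content, msg.metadata.get("name"))
```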

0 commit comments
