Skip to content

Commit be5612d

Browse files
committed
add azure-ai-agentserver-langgraph package
1 parent 7533353 commit be5612d

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

63 files changed

+3240
-0
lines changed
Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,7 @@
1+
# Release History
2+
3+
## 1.0.0
4+
5+
### Features Added
6+
7+
First version
Lines changed: 29 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,29 @@
1+
# Azure AI Agent Server adapter for LangGraph
2+
3+
## Install
4+
5+
In the current folder, run:
6+
```bash
7+
pip install -e .
8+
```
9+
10+
## Usage
11+
12+
```python
13+
# your existing agent
14+
from my_langgraph_agent import my_awesome_agent
15+
16+
# langgraph utils
17+
from azure.ai.agentserver.langgraph import from_langgraph
18+
19+
if __name__ == "__main__":
20+
# with this simple line, your agent will be hosted on http://localhost:8088
21+
from_langgraph(my_awesome_agent).run()
22+
23+
```
24+
25+
**Note**
26+
If your LangGraph agent does not use LangGraph's built-in [MessagesState](https://langchain-ai.github.io/langgraph/concepts/low_level/?h=messagesstate#messagesstate), you should implement your own `LanggraphStateConverter` and provide it to `from_langgraph`.
27+
28+
Reference this [example](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/ai/azure-ai-agentserver-langgraph/samples/custom_state/main.py) for more details.
29+
Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
# Namespace-package stub: lets other azure.* distributions contribute modules to this package.
__path__ = __import__("pkgutil").extend_path(__path__, __name__)  # type: ignore
Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
# Namespace-package stub: lets other azure.* distributions contribute modules to this package.
__path__ = __import__("pkgutil").extend_path(__path__, __name__)  # type: ignore
Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
# Namespace-package stub: lets other azure.* distributions contribute modules to this package.
__path__ = __import__("pkgutil").extend_path(__path__, __name__)  # type: ignore
Lines changed: 21 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,21 @@
1+
# ---------------------------------------------------------
2+
# Copyright (c) Microsoft Corporation. All rights reserved.
3+
# ---------------------------------------------------------
4+
__path__ = __import__("pkgutil").extend_path(__path__, __name__)
5+
6+
from typing import TYPE_CHECKING, Optional
7+
8+
from ._version import VERSION
9+
10+
if TYPE_CHECKING: # pragma: no cover
11+
from . import models
12+
13+
14+
def from_langgraph(agent, state_converter: Optional["models.LanggraphStateConverter"] = None):
15+
from .langgraph import LangGraphAdapter
16+
17+
return LangGraphAdapter(agent, state_converter=state_converter)
18+
19+
20+
__all__ = ["from_langgraph"]
21+
__version__ = VERSION
Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,9 @@
1+
# coding=utf-8
2+
# --------------------------------------------------------------------------
3+
# Copyright (c) Microsoft Corporation. All rights reserved.
4+
# Licensed under the MIT License. See License.txt in the project root for license information.
5+
# Code generated by Microsoft (R) Python Code Generator.
6+
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
7+
# --------------------------------------------------------------------------
8+
9+
VERSION = "1.0.0a1"
Lines changed: 153 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,153 @@
1+
# ---------------------------------------------------------
2+
# Copyright (c) Microsoft Corporation. All rights reserved.
3+
# ---------------------------------------------------------
4+
import os
5+
import re
6+
from typing import Optional
7+
8+
from azure.ai.agentserver.core.constants import Constants
9+
from azure.ai.agentserver.core.logger import get_logger
10+
from azure.ai.agentserver.core.server.base import FoundryCBAgent
11+
from azure.ai.agentserver.core.server.common.agent_run_context import AgentRunContext
12+
from langchain_core.runnables import RunnableConfig
13+
14+
from langgraph.graph.state import CompiledStateGraph
15+
16+
from .models import (
17+
LanggraphMessageStateConverter,
18+
LanggraphStateConverter,
19+
)
20+
from .models.utils import is_state_schema_valid
21+
22+
logger = get_logger()
23+
24+
25+
class LangGraphAdapter(FoundryCBAgent):
26+
"""
27+
Adapter for LangGraph Agent.
28+
"""
29+
30+
def __init__(self, graph: CompiledStateGraph, state_converter: Optional[LanggraphStateConverter] = None):
31+
"""
32+
Initialize the LangGraphAdapter with a CompiledStateGraph.
33+
34+
Args:
35+
graph (StateGraph): The LangGraph StateGraph to adapt.
36+
state_converter: custom state converter. Required if graph state is not MessagesState.
37+
"""
38+
super().__init__()
39+
self.graph = graph
40+
self.azure_ai_tracer = None
41+
if not state_converter:
42+
if is_state_schema_valid(self.graph.builder.state_schema):
43+
self.state_converter = LanggraphMessageStateConverter()
44+
else:
45+
raise ValueError("state_converter is required for non-MessagesState graph.")
46+
else:
47+
self.state_converter = state_converter
48+
49+
async def agent_run(self, context: AgentRunContext):
50+
input_data = self.state_converter.request_to_state(context)
51+
logger.debug(f"Converted input data: {input_data}")
52+
if not context.stream:
53+
response = await self.agent_run_non_stream(input_data, context)
54+
return response
55+
else:
56+
return self.agent_run_astream(input_data, context)
57+
58+
def init_tracing_internal(self, exporter_endpoint=None, app_insights_conn_str=None):
59+
# set env vars for langsmith
60+
os.environ["LANGSMITH_OTEL_ENABLED"] = "true"
61+
os.environ["LANGSMITH_TRACING"] = "true"
62+
os.environ["LANGSMITH_OTEL_ONLY"] = "true"
63+
if app_insights_conn_str:
64+
# setup azure ai telemetry callbacks
65+
try:
66+
from langchain_azure_ai.callbacks.tracers import AzureAIOpenTelemetryTracer
67+
68+
self.azure_ai_tracer = AzureAIOpenTelemetryTracer(
69+
connection_string=app_insights_conn_str,
70+
enable_content_recording=True,
71+
name=self.get_agent_identifier(),
72+
)
73+
logger.info("AzureAIOpenTelemetryTracer initialized successfully.")
74+
except Exception as e:
75+
logger.error(f"Failed to import AzureAIOpenTelemetryTracer, ignore: {e}")
76+
77+
def setup_otlp_exporter(self, endpoint, provider):
78+
endpoint = self.format_otlp_endpoint(endpoint)
79+
return super().setup_otlp_exporter(endpoint, provider)
80+
81+
def get_trace_attributes(self):
82+
attrs = super().get_trace_attributes()
83+
attrs["service.namespace"] = "azure.ai.agentshosting.langgraph"
84+
return attrs
85+
86+
async def agent_run_non_stream(self, input_data: dict, context: AgentRunContext):
87+
"""
88+
Run the agent with non-streaming response.
89+
90+
Args:
91+
context (AgentRunContext): The context for the agent run.
92+
93+
Returns:
94+
RunObject: The result of the agent run.
95+
"""
96+
97+
try:
98+
config = self.create_runnable_config(context)
99+
stream_mode = self.state_converter.get_stream_mode(context)
100+
result = await self.graph.ainvoke(input_data, config=config, stream_mode=stream_mode)
101+
output = self.state_converter.state_to_response(result, context)
102+
return output
103+
except Exception as e:
104+
logger.error(f"Error during agent run: {e}")
105+
raise e
106+
107+
async def agent_run_astream(self, input_data: dict, context: AgentRunContext):
108+
"""
109+
Run the agent with streaming response.
110+
111+
Args:
112+
request_body (CreateResponse): The request body to run the agent with.
113+
114+
Returns:
115+
StreamingResponse: The streaming response of the agent run.
116+
"""
117+
try:
118+
logger.info(f"Starting streaming agent run {context.response_id}")
119+
config = self.create_runnable_config(context)
120+
stream_mode = self.state_converter.get_stream_mode(context)
121+
stream = self.graph.astream(input=input_data, config=config, stream_mode=stream_mode)
122+
async for result in self.state_converter.state_to_response_stream(stream, context):
123+
yield result
124+
except Exception as e:
125+
logger.error(f"Error during streaming agent run: {e}")
126+
raise e
127+
128+
def create_runnable_config(self, context: AgentRunContext) -> RunnableConfig:
129+
"""
130+
Create a RunnableConfig from the converted request data.
131+
"""
132+
config = RunnableConfig(
133+
configurable={
134+
"thread_id": context.conversation_id,
135+
},
136+
callbacks=[self.azure_ai_tracer] if self.azure_ai_tracer else None,
137+
)
138+
return config
139+
140+
def format_otlp_endpoint(self, endpoint: str) -> str:
141+
m = re.match(r"^(https?://[^/]+)", endpoint)
142+
if m:
143+
return f"{m.group(1)}/v1/traces"
144+
return endpoint
145+
146+
def get_agent_identifier(self) -> str:
147+
agent_name = os.getenv(Constants.AGENT_NAME)
148+
if agent_name:
149+
return agent_name
150+
agent_id = os.getenv(Constants.AGENT_ID)
151+
if agent_id:
152+
return agent_id
153+
return "AgentsHosting-LangGraph"
Lines changed: 15 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,15 @@
1+
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
# Re-export the converter types that make up the public models API.
from .langgraph_request_converter import LangGraphRequestConverter
from .langgraph_response_converter import LangGraphResponseConverter
from .langgraph_state_converter import LanggraphMessageStateConverter, LanggraphStateConverter
from .langgraph_stream_response_converter import LangGraphStreamResponseConverter

__all__ = [
    "LangGraphRequestConverter",
    "LangGraphResponseConverter",
    "LangGraphStreamResponseConverter",
    "LanggraphStateConverter",
    "LanggraphMessageStateConverter",
]
Lines changed: 130 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,130 @@
1+
# ---------------------------------------------------------
2+
# Copyright (c) Microsoft Corporation. All rights reserved.
3+
# ---------------------------------------------------------
4+
import json
5+
from typing import Dict, List
6+
7+
from azure.ai.agentserver.core.logger import get_logger
8+
from azure.ai.agentserver.core.models import CreateResponse, openai as openai_models, projects as project_models
9+
from langchain_core.messages import (
10+
AIMessage,
11+
AnyMessage,
12+
HumanMessage,
13+
SystemMessage,
14+
ToolMessage,
15+
)
16+
from langchain_core.messages.tool import ToolCall
17+
18+
logger = get_logger()
19+
20+
role_mapping = {
21+
project_models.ResponsesMessageRole.USER: HumanMessage,
22+
project_models.ResponsesMessageRole.SYSTEM: SystemMessage,
23+
project_models.ResponsesMessageRole.ASSISTANT: AIMessage,
24+
# project_models.ResponsesMessageRole.DEVELOPER: ToolMessage,
25+
}
26+
27+
item_content_type_mapping = {
28+
project_models.ItemContentType.INPUT_TEXT: "text",
29+
project_models.ItemContentType.INPUT_AUDIO: "audio",
30+
project_models.ItemContentType.INPUT_IMAGE: "image",
31+
project_models.ItemContentType.INPUT_FILE: "file",
32+
project_models.ItemContentType.OUTPUT_TEXT: "text",
33+
project_models.ItemContentType.OUTPUT_AUDIO: "audio",
34+
# project_models.ItemContentType.REFUSAL: "refusal",
35+
}
36+
37+
38+
class LangGraphRequestConverter:
39+
def __init__(self, data: CreateResponse):
40+
self.data: CreateResponse = data
41+
42+
def convert(self) -> dict:
43+
# Convert the CreateRunRequest input to a format suitable for LangGraph
44+
langgraph_input = {"messages": []}
45+
46+
instructions = self.data.get("instructions")
47+
if instructions and isinstance(instructions, str):
48+
langgraph_input["messages"].append(SystemMessage(content=instructions))
49+
50+
input = self.data.get("input")
51+
if isinstance(input, str):
52+
langgraph_input["messages"].append(HumanMessage(content=input))
53+
elif isinstance(input, List):
54+
for inner in input:
55+
message = self.convert_input(inner)
56+
langgraph_input["messages"].append(message)
57+
else:
58+
raise ValueError(f"Unsupported input type: {type(input)}, {input}")
59+
return langgraph_input
60+
61+
def convert_input(self, item: openai_models.ResponseInputItemParam) -> AnyMessage:
62+
"""
63+
Convert ResponseInputItemParam to a LangGraph message
64+
"""
65+
item_type = item.get("type", project_models.ItemType.MESSAGE)
66+
if item_type == project_models.ItemType.MESSAGE:
67+
# this is a message
68+
return self.convert_message(item)
69+
elif item_type == project_models.ItemType.FUNCTION_CALL:
70+
return self.convert_function_call(item)
71+
elif item_type == project_models.ItemType.FUNCTION_CALL_OUTPUT:
72+
return self.convert_function_call_output(item)
73+
else:
74+
raise ValueError(f"Unsupported OpenAIItemParam type: {item_type}, {item}")
75+
76+
def convert_message(self, message: dict) -> AnyMessage:
77+
"""
78+
Convert a message dict to a LangGraph message
79+
"""
80+
content = message.get("content")
81+
role = message.get("role", project_models.ResponsesMessageRole.USER)
82+
if not content:
83+
raise ValueError(f"Message missing content: {message}")
84+
if isinstance(content, str):
85+
return role_mapping[role](content=content)
86+
elif isinstance(content, list):
87+
return role_mapping[role](content=self.convert_OpenAIItemContentList(content))
88+
raise ValueError(f"Unsupported ResponseMessagesItemParam content type: {type(content)}, {content}")
89+
90+
def convert_function_call(self, item: dict) -> AnyMessage:
91+
try:
92+
item = openai_models.ResponseFunctionToolCallParam(**item)
93+
argument = item.get("arguments", None)
94+
args = json.loads(argument) if argument else {}
95+
except json.JSONDecodeError as e:
96+
raise ValueError(f"Invalid JSON in function call arguments: {argument}") from e
97+
except Exception as e:
98+
raise ValueError(f"Invalid function call item: {item}") from e
99+
return AIMessage(tool_calls=[ToolCall(id=item.get("call_id"), name=item.get("name"), args=args)], content="")
100+
101+
def convert_function_call_output(self, item: dict) -> ToolMessage:
102+
try:
103+
item = openai_models.response_input_item_param.FunctionCallOutput(**item)
104+
except Exception as e:
105+
raise ValueError(f"Invalid function call output item: {item}") from e
106+
107+
output = item.get("output", None)
108+
if isinstance(output, str):
109+
return ToolMessage(content=output, tool_call_id=item.get("call_id"))
110+
elif isinstance(output, list):
111+
return ToolMessage(content=self.convert_OpenAIItemContentList(output), tool_call_id=item.get("call_id"))
112+
raise ValueError(f"Unsupported function call output type: {type(output)}, {output}")
113+
114+
def convert_OpenAIItemContentList(self, content: List[Dict]) -> List:
115+
"""
116+
Convert ItemContent to a list format
117+
"""
118+
result = []
119+
for item in content:
120+
result.append(self.convert_OpenAIItemContent(item))
121+
return result
122+
123+
def convert_OpenAIItemContent(self, content: Dict) -> Dict:
124+
"""
125+
Convert ItemContent to a dict format
126+
"""
127+
res = content.copy()
128+
content_type = content.get("type")
129+
res["type"] = item_content_type_mapping.get(content_type, content_type)
130+
return res

0 commit comments

Comments
 (0)