
Commit e5bcd27

dmytrostruk authored and alliscode committed
Create/Get Agent API - fixes and example improvements (microsoft#3246)
1 parent 8147e2e commit e5bcd27

9 files changed: +147 -37 lines changed

python/packages/azure-ai/agent_framework_azure_ai/_project_provider.py

Lines changed: 1 addition & 0 deletions
```diff
@@ -358,6 +358,7 @@ def _to_chat_agent_from_details(
         agent_name=details.name,
         agent_version=details.version,
         agent_description=details.description,
+        model_deployment_name=details.definition.model,
     )

     # Merge tools: hosted tools from definition + user-provided function tools
```
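
The practical effect of this one-line fix is that agents retrieved via the provider now carry the stored definition's model through to the underlying AzureAIClient. A minimal, self-contained sketch of the forwarded parameters, mirroring the assertions added to tests/test_provider.py below; the client construction is replaced by a plain dict so the snippet runs without Azure credentials:

```python
from types import SimpleNamespace

# Stand-in for the stored agent details the provider receives (hypothetical values).
details = SimpleNamespace(
    name="test-agent",
    version="1.0",
    description="Test Agent",
    definition=SimpleNamespace(model="gpt-4"),
)

# Keyword arguments the provider now passes to AzureAIClient, per the updated test.
client_kwargs = {
    "project_client": object(),  # stand-in for the AIProjectClient backing the provider
    "agent_name": details.name,
    "agent_version": details.version,
    "agent_description": details.description,
    "model_deployment_name": details.definition.model,  # newly forwarded by this commit
}
print(client_kwargs["model_deployment_name"])  # -> gpt-4
```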

python/packages/azure-ai/agent_framework_azure_ai/_shared.py

Lines changed: 1 addition & 0 deletions
```diff
@@ -501,6 +501,7 @@ def create_text_format_config(
         return ResponseTextFormatConfigurationJsonSchema(
             name=response_format.__name__,
             schema=schema,
+            strict=True,
         )

     if isinstance(response_format, Mapping):
```
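
The new strict=True flag asks the service to enforce the generated JSON schema exactly. A condensed sketch of the resulting config for a Pydantic model, following the unit test added in tests/test_provider.py below; the import path comes from that test and assumes the agent_framework_azure_ai package is installed, and the ReleaseBrief model here is illustrative:

```python
from pydantic import BaseModel

from agent_framework_azure_ai._shared import create_text_format_config


class ReleaseBrief(BaseModel):
    subject: str
    summary: str


config = create_text_format_config(ReleaseBrief)
assert config["strict"] is True          # now set for Pydantic response formats
assert config["name"] == "ReleaseBrief"  # schema name taken from the class name
assert "schema" in config                # JSON schema generated from the model
```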

python/packages/azure-ai/tests/test_provider.py

Lines changed: 32 additions & 4 deletions
```diff
@@ -317,11 +317,21 @@ def test_provider_as_agent(mock_project_client: MagicMock) -> None:
     mock_agent_version.definition.top_p = 0.9
     mock_agent_version.definition.tools = []

-    agent = provider.as_agent(mock_agent_version)
+    with patch("agent_framework_azure_ai._project_provider.AzureAIClient") as mock_azure_ai_client:
+        agent = provider.as_agent(mock_agent_version)

-    assert isinstance(agent, ChatAgent)
-    assert agent.name == "test-agent"
-    assert agent.description == "Test Agent"
+        assert isinstance(agent, ChatAgent)
+        assert agent.name == "test-agent"
+        assert agent.description == "Test Agent"
+
+        # Verify AzureAIClient was called with correct parameters
+        mock_azure_ai_client.assert_called_once()
+        call_kwargs = mock_azure_ai_client.call_args[1]
+        assert call_kwargs["project_client"] is mock_project_client
+        assert call_kwargs["agent_name"] == "test-agent"
+        assert call_kwargs["agent_version"] == "1.0"
+        assert call_kwargs["agent_description"] == "Test Agent"
+        assert call_kwargs["model_deployment_name"] == "gpt-4"


 async def test_provider_context_manager(mock_project_client: MagicMock) -> None:
@@ -370,6 +380,24 @@ async def test_provider_close_method(mock_project_client: MagicMock) -> None:
     mock_client.close.assert_called_once()


+def test_create_text_format_config_sets_strict_for_pydantic_models() -> None:
+    """Test that create_text_format_config sets strict=True for Pydantic models."""
+    from pydantic import BaseModel
+
+    from agent_framework_azure_ai._shared import create_text_format_config
+
+    class TestSchema(BaseModel):
+        subject: str
+        summary: str
+
+    result = create_text_format_config(TestSchema)
+
+    # Verify strict=True is set
+    assert result["strict"] is True
+    assert result["name"] == "TestSchema"
+    assert "schema" in result
+
+
 @pytest.mark.flaky
 @skip_if_azure_ai_integration_tests_disabled
 async def test_provider_create_and_get_agent_integration() -> None:
```

python/packages/core/agent_framework/openai/_assistants_client.py

Lines changed: 1 addition & 0 deletions
```diff
@@ -660,6 +660,7 @@ def _prepare_options(
                 "json_schema": {
                     "name": response_format.__name__,
                     "schema": response_format.model_json_schema(),
+                    "strict": True,
                 },
             }

```
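
For the OpenAI Assistants client, the same strict flag is now included in the json_schema payload that _prepare_options builds. A standalone sketch of that payload shape, mirroring the new unit test below; the TestResponse model is illustrative, not part of the library:

```python
from pydantic import BaseModel, ConfigDict


class TestResponse(BaseModel):
    name: str
    value: int
    model_config = ConfigDict(extra="forbid")


# Shape of run_options["response_format"] after this change.
response_format = {
    "type": "json_schema",
    "json_schema": {
        "name": TestResponse.__name__,
        "schema": TestResponse.model_json_schema(),
        "strict": True,  # added by this commit: schema adherence is strictly enforced
    },
}
print(response_format["json_schema"]["name"], response_format["json_schema"]["strict"])
```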

python/packages/core/tests/openai/test_openai_assistants_client.py

Lines changed: 21 additions & 0 deletions
```diff
@@ -784,6 +784,27 @@ def test_prepare_options_with_mapping_tool(mock_async_openai: MagicMock) -> None
     assert run_options["tool_choice"] == "auto"


+def test_prepare_options_with_pydantic_response_format(mock_async_openai: MagicMock) -> None:
+    """Test _prepare_options sets strict=True for Pydantic response_format."""
+    from pydantic import BaseModel, ConfigDict
+
+    class TestResponse(BaseModel):
+        name: str
+        value: int
+        model_config = ConfigDict(extra="forbid")
+
+    chat_client = create_test_openai_assistants_client(mock_async_openai)
+    messages = [ChatMessage(role=Role.USER, text="Test")]
+    options = {"response_format": TestResponse}
+
+    run_options, _ = chat_client._prepare_options(messages, options)  # type: ignore
+
+    assert "response_format" in run_options
+    assert run_options["response_format"]["type"] == "json_schema"
+    assert run_options["response_format"]["json_schema"]["name"] == "TestResponse"
+    assert run_options["response_format"]["json_schema"]["strict"] is True
+
+
 def test_prepare_options_with_system_message(mock_async_openai: MagicMock) -> None:
     """Test _prepare_options with system message converted to instructions."""
     chat_client = create_test_openai_assistants_client(mock_async_openai)
```

python/samples/getting_started/agents/azure_ai/azure_ai_with_response_format.py

Lines changed: 1 addition & 1 deletion
```diff
@@ -33,7 +33,7 @@ async def main() -> None:
         agent = await provider.create_agent(
             name="ProductMarketerAgent",
             instructions="Return launch briefs as structured JSON.",
-            # Specify type to use as response
+            # Specify Pydantic model for structured output via default_options
             default_options={"response_format": ReleaseBrief},
         )

```

python/samples/getting_started/agents/azure_ai/azure_ai_with_runtime_json_schema.py

Lines changed: 10 additions & 8 deletions
```diff
@@ -37,17 +37,19 @@ async def main() -> None:
         AzureCliCredential() as credential,
         AzureAIProjectAgentProvider(credential=credential) as provider,
     ):
-        # Pass response_format at agent creation time using dict schema format
+        # Pass response_format via default_options using dict schema format
         agent = await provider.create_agent(
             name="WeatherDigestAgent",
             instructions="Return sample weather digest as structured JSON.",
-            response_format={
-                "type": "json_schema",
-                "json_schema": {
-                    "name": runtime_schema["title"],
-                    "strict": True,
-                    "schema": runtime_schema,
-                },
+            default_options={
+                "response_format": {
+                    "type": "json_schema",
+                    "json_schema": {
+                        "name": runtime_schema["title"],
+                        "strict": True,
+                        "schema": runtime_schema,
+                    },
+                }
             },
         )

```
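The runtime_schema dict is defined earlier in the sample and is not part of this diff. An illustrative example of what such a schema might look like (hypothetical field names), since the sample reads runtime_schema["title"] for the json_schema name:

```python
# Hypothetical runtime schema; any JSON schema dict with a "title" key fits the sample.
runtime_schema = {
    "title": "WeatherDigest",
    "type": "object",
    "properties": {
        "city": {"type": "string"},
        "high_c": {"type": "number"},
        "low_c": {"type": "number"},
        "summary": {"type": "string"},
    },
    "required": ["city", "high_c", "low_c", "summary"],
    "additionalProperties": False,
}
```
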
python/samples/getting_started/agents/azure_ai_agent/azure_ai_with_response_format.py

Lines changed: 42 additions & 14 deletions
```diff
@@ -9,8 +9,10 @@
 """
 Azure AI Agent Provider Response Format Example

-This sample demonstrates using AzureAIAgentsProvider with default_options
-containing response_format for structured outputs.
+This sample demonstrates using AzureAIAgentsProvider with response_format
+for structured outputs in two ways:
+1. Setting default response_format at agent creation time (default_options)
+2. Overriding response_format at runtime (options parameter in agent.run)
 """


@@ -24,31 +26,57 @@ class WeatherInfo(BaseModel):
     model_config = ConfigDict(extra="forbid")


+class CityInfo(BaseModel):
+    """Structured city information."""
+
+    city_name: str
+    population: int
+    country: str
+    model_config = ConfigDict(extra="forbid")
+
+
 async def main() -> None:
-    """Example of using default_options with response_format in AzureAIAgentsProvider."""
+    """Example of using response_format at creation time and runtime."""

     async with (
         AzureCliCredential() as credential,
         AzureAIAgentsProvider(credential=credential) as provider,
     ):
+        # Create agent with default response_format (WeatherInfo)
         agent = await provider.create_agent(
-            name="WeatherReporter",
-            instructions="You provide weather reports in structured JSON format.",
+            name="StructuredReporter",
+            instructions="Return structured JSON based on the requested format.",
             default_options={"response_format": WeatherInfo},
         )

-        query = "What's the weather like in Paris today?"
-        print(f"User: {query}")
+        # Request 1: Uses default response_format from agent creation
+        print("--- Request 1: Using default response_format (WeatherInfo) ---")
+        query1 = "What's the weather like in Paris today?"
+        print(f"User: {query1}")
+
+        result1 = await agent.run(query1)
+
+        if isinstance(result1.value, WeatherInfo):
+            weather = result1.value
+            print("Agent:")
+            print(f" Location: {weather.location}")
+            print(f" Temperature: {weather.temperature}")
+            print(f" Conditions: {weather.conditions}")
+            print(f" Recommendation: {weather.recommendation}")
+
+        # Request 2: Override response_format at runtime with CityInfo
+        print("\n--- Request 2: Runtime override with CityInfo ---")
+        query2 = "Tell me about Tokyo."
+        print(f"User: {query2}")

-        result = await agent.run(query)
+        result2 = await agent.run(query2, options={"response_format": CityInfo})

-        if isinstance(result.value, WeatherInfo):
-            weather = result.value
+        if isinstance(result2.value, CityInfo):
+            city = result2.value
             print("Agent:")
-            print(f"Location: {weather.location}")
-            print(f"Temperature: {weather.temperature}")
-            print(f"Conditions: {weather.conditions}")
-            print(f"Recommendation: {weather.recommendation}")
+            print(f" City: {city.city_name}")
+            print(f" Population: {city.population}")
+            print(f" Country: {city.country}")


 if __name__ == "__main__":
```

python/samples/getting_started/agents/openai/openai_assistants_with_response_format.py

Lines changed: 38 additions & 10 deletions
```diff
@@ -10,8 +10,10 @@
 """
 OpenAI Assistant Provider Response Format Example

-This sample demonstrates using OpenAIAssistantProvider with default_options
-containing response_format for structured outputs.
+This sample demonstrates using OpenAIAssistantProvider with response_format
+for structured outputs in two ways:
+1. Setting default response_format at agent creation time (default_options)
+2. Overriding response_format at runtime (options parameter in agent.run)
 """


@@ -25,33 +27,59 @@ class WeatherInfo(BaseModel):
     model_config = ConfigDict(extra="forbid")


+class CityInfo(BaseModel):
+    """Structured city information."""
+
+    city_name: str
+    population: int
+    country: str
+    model_config = ConfigDict(extra="forbid")
+
+
 async def main() -> None:
-    """Example of using default_options with response_format in OpenAIAssistantProvider."""
+    """Example of using response_format at creation time and runtime."""

     async with (
         AsyncOpenAI() as client,
         OpenAIAssistantProvider(client) as provider,
     ):
+        # Create agent with default response_format (WeatherInfo)
         agent = await provider.create_agent(
-            name="WeatherReporter",
+            name="StructuredReporter",
             model=os.environ.get("OPENAI_CHAT_MODEL_ID", "gpt-4"),
-            instructions="You provide weather reports in structured JSON format.",
+            instructions="Return structured JSON based on the requested format.",
             default_options={"response_format": WeatherInfo},
         )

         try:
-            query = "What's the weather like in Paris today?"
-            print(f"User: {query}")
+            # Request 1: Uses default response_format from agent creation
+            print("--- Request 1: Using default response_format (WeatherInfo) ---")
+            query1 = "What's the weather like in Paris today?"
+            print(f"User: {query1}")

-            result = await agent.run(query)
+            result1 = await agent.run(query1)

-            if isinstance(result.value, WeatherInfo):
-                weather = result.value
+            if isinstance(result1.value, WeatherInfo):
+                weather = result1.value
                 print("Agent:")
                 print(f" Location: {weather.location}")
                 print(f" Temperature: {weather.temperature}")
                 print(f" Conditions: {weather.conditions}")
                 print(f" Recommendation: {weather.recommendation}")
+
+            # Request 2: Override response_format at runtime with CityInfo
+            print("\n--- Request 2: Runtime override with CityInfo ---")
+            query2 = "Tell me about Tokyo."
+            print(f"User: {query2}")
+
+            result2 = await agent.run(query2, options={"response_format": CityInfo})
+
+            if isinstance(result2.value, CityInfo):
+                city = result2.value
+                print("Agent:")
+                print(f" City: {city.city_name}")
+                print(f" Population: {city.population}")
+                print(f" Country: {city.country}")
         finally:
             await client.beta.assistants.delete(agent.id)

```