Skip to content

Commit 15f2678

Browse files
Enable native structured output support for Ollama provider (#4536)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
1 parent 7db7152 commit 15f2678

File tree

2 files changed

+5
-1
lines changed

2 files changed

+5
-1
lines changed

pydantic_ai_slim/pydantic_ai/providers/ollama.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -66,6 +66,8 @@ def model_profile(model_name: str) -> ModelProfile | None:
6666
return OpenAIModelProfile(
6767
json_schema_transformer=OpenAIJsonSchemaTransformer,
6868
openai_chat_thinking_field='reasoning',
69+
supports_json_schema_output=True,
70+
supports_json_object_output=True,
6971
).update(profile)
7072

7173
def __init__(

tests/providers/test_ollama.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -105,7 +105,7 @@ def test_ollama_provider_model_profile(mocker: MockerFixture):
105105
assert qwen_profile is not None
106106
assert qwen_profile.json_schema_transformer == InlineDefsJsonSchemaTransformer
107107
assert qwen_profile.ignore_streamed_leading_whitespace is True
108-
assert qwen_profile.supports_json_schema_output is False
108+
assert qwen_profile.supports_json_schema_output is True
109109

110110
qwen_profile = provider.model_profile('qwen3.5')
111111
qwen_model_profile_mock.assert_called_with('qwen3.5')
@@ -143,3 +143,5 @@ def test_ollama_provider_model_profile(mocker: MockerFixture):
143143
unknown_profile = provider.model_profile('unknown-model')
144144
assert unknown_profile is not None
145145
assert unknown_profile.json_schema_transformer == OpenAIJsonSchemaTransformer
146+
assert unknown_profile.supports_json_schema_output is True
147+
assert unknown_profile.supports_json_object_output is True

0 commit comments

Comments (0)