Commit b610859

openai[patch]: support Responses streaming in AzureChatOpenAI (#31641)
Resolves #31303, #31624
1 parent bc1b5ff commit b610859
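
For orientation (not part of the diff): this change routes streaming calls on AzureChatOpenAI through the Responses API whenever use_responses_api=True is set, directly or via model_kwargs. A minimal usage sketch follows; the endpoint, deployment name, and API version values are placeholders, not values taken from this commit.

    from langchain_openai import AzureChatOpenAI

    # Placeholders: substitute your own Azure resource details.
    llm = AzureChatOpenAI(
        azure_endpoint="https://<resource>.openai.azure.com/",
        deployment_name="<deployment>",
        openai_api_version="<api-version>",
        model="gpt-4o-mini",
        use_responses_api=True,  # opt in to the Responses API
    )

    # With this commit, the chunks below come from the Responses streaming path
    # rather than from Chat Completions.
    for chunk in llm.stream("Write a haiku about streaming."):
        print(chunk.content, end="", flush=True)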

File tree

2 files changed: +53 additions, −2 deletions

libs/partners/openai/langchain_openai/chat_models/azure.py

Lines changed: 20 additions & 2 deletions
@@ -4,14 +4,14 @@
 
 import logging
 import os
-from collections.abc import Awaitable
+from collections.abc import AsyncIterator, Awaitable, Iterator
 from typing import Any, Callable, Optional, TypedDict, TypeVar, Union
 
 import openai
 from langchain_core.language_models import LanguageModelInput
 from langchain_core.language_models.chat_models import LangSmithParams
 from langchain_core.messages import BaseMessage
-from langchain_core.outputs import ChatResult
+from langchain_core.outputs import ChatGenerationChunk, ChatResult
 from langchain_core.runnables import Runnable
 from langchain_core.utils import from_env, secret_from_env
 from langchain_core.utils.pydantic import is_basemodel_subclass
@@ -736,6 +736,24 @@ def _create_chat_result(
 
         return chat_result
 
+    def _stream(self, *args: Any, **kwargs: Any) -> Iterator[ChatGenerationChunk]:
+        """Route to Chat Completions or Responses API."""
+        if self._use_responses_api({**kwargs, **self.model_kwargs}):
+            return super()._stream_responses(*args, **kwargs)
+        else:
+            return super()._stream(*args, **kwargs)
+
+    async def _astream(
+        self, *args: Any, **kwargs: Any
+    ) -> AsyncIterator[ChatGenerationChunk]:
+        """Route to Chat Completions or Responses API."""
+        if self._use_responses_api({**kwargs, **self.model_kwargs}):
+            async for chunk in super()._astream_responses(*args, **kwargs):
+                yield chunk
+        else:
+            async for chunk in super()._astream(*args, **kwargs):
+                yield chunk
+
     def with_structured_output(
         self,
         schema: Optional[_DictOrPydanticClass] = None,
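
Note on the shape of the two overrides above: the synchronous _stream can simply return the iterator produced by the parent implementation, but the asynchronous _astream contains yield, which makes it an async generator, so it must re-yield chunks from _astream_responses or _astream rather than returning the parent's async iterator. A hedged async usage sketch, reusing the llm instance from the earlier sketch (so not self-standing on its own):

    import asyncio

    async def main() -> None:
        # Chunks are routed to the Responses API when use_responses_api=True.
        async for chunk in llm.astream("Summarize this change in one sentence."):
            print(chunk.content, end="", flush=True)

    asyncio.run(main())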

libs/partners/openai/tests/integration_tests/chat_models/test_azure_standard.py

Lines changed: 33 additions & 0 deletions
@@ -2,6 +2,7 @@
 
 import os
 
+import pytest
 from langchain_core.language_models import BaseChatModel
 from langchain_tests.integration_tests import ChatModelIntegrationTests
 
@@ -39,6 +40,38 @@ def supports_json_mode(self) -> bool:
         return True
 
 
+class TestAzureOpenAIResponses(ChatModelIntegrationTests):
+    @property
+    def chat_model_class(self) -> type[BaseChatModel]:
+        return AzureChatOpenAI
+
+    @property
+    def chat_model_params(self) -> dict:
+        return {
+            "deployment_name": os.environ["AZURE_OPENAI_CHAT_DEPLOYMENT_NAME"],
+            "model": "gpt-4o-mini",
+            "openai_api_version": OPENAI_API_VERSION,
+            "azure_endpoint": OPENAI_API_BASE,
+            "use_responses_api": True,
+        }
+
+    @property
+    def supports_image_inputs(self) -> bool:
+        return True
+
+    @property
+    def supports_image_urls(self) -> bool:
+        return True
+
+    @property
+    def supports_json_mode(self) -> bool:
+        return True
+
+    @pytest.mark.xfail(reason="Unsupported.")
+    def test_stop_sequence(self, model: BaseChatModel) -> None:
+        super().test_stop_sequence(model)
+
+
 class TestAzureOpenAIStandardLegacy(ChatModelIntegrationTests):
     """Test a legacy model."""
