@@ -4,8 +4,8 @@
 
 import logging
 import os
-from collections.abc import AsyncIterator, Awaitable, Iterator
-from typing import Any, Callable, Optional, TypeVar, Union
+from collections.abc import AsyncIterator, Awaitable, Callable, Iterator
+from typing import Any, Literal, Optional, TypeVar, Union
 
 import openai
 from langchain_core.language_models import LanguageModelInput
@@ -15,7 +15,7 @@
 from langchain_core.utils import from_env, secret_from_env
 from langchain_core.utils.pydantic import is_basemodel_subclass
 from pydantic import BaseModel, Field, SecretStr, model_validator
-from typing_extensions import Literal, Self
+from typing_extensions import Self
 
 from langchain_openai.chat_models.base import BaseChatOpenAI
 
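The import hunks above appear to follow the modern convention (PEP 585, Python 3.9+) of taking ABCs such as `Callable` from `collections.abc` rather than `typing`, and of importing `Literal` from the standard `typing` module instead of the `typing_extensions` backport. A minimal sketch of the new-style imports; the `Mode` alias and `run` function are hypothetical, for illustration only:

```python
# Modern style: ABCs from collections.abc (PEP 585, Python 3.9+),
# Literal from the standard typing module (available since Python 3.8).
from collections.abc import Callable
from typing import Literal

Mode = Literal["sync", "async"]  # hypothetical alias, for illustration


def run(fn: Callable[[int], int], mode: Mode = "sync") -> int:
    # collections.abc.Callable is subscriptable and type-checks the same
    # as the deprecated typing.Callable form.
    return fn(1)


print(run(lambda x: x + 1))  # -> 2
```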
@@ -758,7 +758,7 @@ def _create_chat_result(
                 "prompt_filter_results"
             ]
         for chat_gen, response_choice in zip(
-            chat_result.generations, response["choices"]
+            chat_result.generations, response["choices"], strict=False
         ):
             chat_gen.generation_info = chat_gen.generation_info or {}
             chat_gen.generation_info["content_filter_results"] = response_choice.get(
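The only change in this hunk is the explicit `strict=False` on `zip()`. Since Python 3.10, `zip()` accepts a `strict` flag; `strict=False` keeps the long-standing behavior of truncating to the shortest iterable while making that choice visible (this is also what lint rules such as ruff's B905 ask for, though the diff itself does not state the motivation). A small sketch of the difference:

```python
# strict=False (the default) silently truncates to the shorter iterable.
pairs = list(zip([1, 2, 3], ["a", "b"], strict=False))
assert pairs == [(1, "a"), (2, "b")]  # the trailing 3 is dropped

# strict=True raises instead of truncating when lengths differ.
try:
    list(zip([1, 2, 3], ["a", "b"], strict=True))
except ValueError:
    print("zip(strict=True) rejects mismatched lengths")
```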