27 changes: 27 additions & 0 deletions libs/aws/langchain_aws/chat_models/bedrock.py
@@ -4,6 +4,7 @@
import warnings
from collections import defaultdict
from operator import itemgetter
from pathlib import Path
from typing import (
Any,
Callable,
@@ -26,6 +27,10 @@
LanguageModelInput,
)
from langchain_core.language_models.chat_models import generate_from_stream
from langchain_core.language_models.profile import ModelProfile, ModelProfileRegistry
from langchain_core.language_models.profile._loader_utils import (
load_profiles_from_data_dir,
)
from langchain_core.messages import (
AIMessage,
AIMessageChunk,
@@ -49,6 +54,7 @@
from langchain_core.utils.pydantic import TypeBaseModel, is_basemodel_subclass
from langchain_core.utils.utils import _build_model_kwargs
from pydantic import BaseModel, ConfigDict, Field, model_validator
from typing_extensions import Self

from langchain_aws.chat_models._compat import _convert_from_v1_to_anthropic
from langchain_aws.chat_models.bedrock_converse import ChatBedrockConverse
@@ -77,6 +83,19 @@
logger = logging.getLogger(__name__)


_MODEL_PROFILES = cast(
"ModelProfileRegistry",
load_profiles_from_data_dir(
Path(__file__).parent.parent / "data", "amazon-bedrock"
),
)


def _get_default_model_profile(model_name: str) -> ModelProfile:
default = _MODEL_PROFILES.get(model_name) or {}
return default.copy()
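
The helper above is a plain registry lookup followed by a defensive copy, so a caller can mutate the returned profile without affecting the shared _MODEL_PROFILES mapping, and an unknown model id falls back to an empty profile instead of raising. A minimal standalone sketch of that behavior (the registry contents and profile keys below are illustrative, not the bundled data):

# Illustrative stand-in for the bundled registry; the real data is loaded from
# the package's data directory via load_profiles_from_data_dir.
_MODEL_PROFILES = {
    "anthropic.claude-3-5-sonnet-20240620-v1:0": {"max_input_tokens": 200_000},
}

def _get_default_model_profile(model_name):
    default = _MODEL_PROFILES.get(model_name) or {}
    return default.copy()

profile = _get_default_model_profile("anthropic.claude-3-5-sonnet-20240620-v1:0")
profile["max_input_tokens"] = 1  # mutates the copy only
assert _MODEL_PROFILES["anthropic.claude-3-5-sonnet-20240620-v1:0"]["max_input_tokens"] == 200_000
assert _get_default_model_profile("unknown-model") == {}  # missing entries yield {}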


def _convert_one_message_to_text_llama(message: BaseMessage) -> str:
if isinstance(message, ChatMessage):
message_text = f"\n\n{message.role.capitalize()}: {message.content}"
@@ -844,6 +863,14 @@ def build_extra(cls, values: dict[str, Any]) -> Any:
}
return values

@model_validator(mode="after")
def _set_model_profile(self) -> Self:
"""Set model profile if not overridden."""
if self.profile is None:
model_id = re.sub(r"^[A-Za-z]{2}\.", "", self.model_id)
self.profile = _get_default_model_profile(model_id)
return self
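
The regex here drops a leading two-letter cross-region inference prefix such as "us." or "eu." from the model id before the profile lookup, so regional and non-regional ids resolve to the same bundled profile, while longer provider prefixes like "meta." or "anthropic." are left alone. A quick illustration (the helper name and model ids are mine, for demonstration only):

import re

def _strip_regional_prefix(model_id):
    # Only a leading two-letter code followed by a dot is removed.
    return re.sub(r"^[A-Za-z]{2}\.", "", model_id)

assert _strip_regional_prefix("us.anthropic.claude-3-5-sonnet-20240620-v1:0") == (
    "anthropic.claude-3-5-sonnet-20240620-v1:0"
)
assert _strip_regional_prefix("meta.llama3-1-70b-instruct-v1:0") == (
    "meta.llama3-1-70b-instruct-v1:0"  # provider prefix untouched
)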

@property
def lc_attributes(self) -> Dict[str, Any]:
attributes: Dict[str, Any] = {}
28 changes: 28 additions & 0 deletions libs/aws/langchain_aws/chat_models/bedrock_converse.py
@@ -5,6 +5,7 @@
import re
import warnings
from operator import itemgetter
from pathlib import Path
from typing import (
Any,
Callable,
@@ -26,6 +27,10 @@
from langchain_core.exceptions import OutputParserException
from langchain_core.language_models import BaseChatModel, LanguageModelInput
from langchain_core.language_models.base import LangSmithParams
from langchain_core.language_models.profile import ModelProfile, ModelProfileRegistry
from langchain_core.language_models.profile._loader_utils import (
load_profiles_from_data_dir,
)
from langchain_core.messages import (
AIMessage,
BaseMessage,
@@ -66,6 +71,21 @@
)

logger = logging.getLogger(__name__)


_MODEL_PROFILES = cast(
"ModelProfileRegistry",
load_profiles_from_data_dir(
Path(__file__).parent.parent / "data", "amazon-bedrock"
),
)


def _get_default_model_profile(model_name: str) -> ModelProfile:
default = _MODEL_PROFILES.get(model_name) or {}
return default.copy()


_BM = TypeVar("_BM", bound=BaseModel)

EMPTY_CONTENT = "."
@@ -837,6 +857,14 @@ def validate_environment(self) -> Self:

return self

@model_validator(mode="after")
def _set_model_profile(self) -> Self:
"""Set model profile if not overridden."""
if self.profile is None:
model_id = re.sub(r"^[A-Za-z]{2}\.", "", self.model_id)
self.profile = _get_default_model_profile(model_id)
return self
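
Because this validator only fills profile when it is None, an explicitly supplied profile is never overwritten by the bundled defaults. A rough usage sketch (assumes AWS credentials and a region are configured in the environment, that the profile field is accepted as a regular constructor argument, and that the override keys shown are merely illustrative):

from langchain_aws import ChatBedrockConverse

# Default: the validator strips the "us." prefix and looks up the bundled profile.
llm = ChatBedrockConverse(model="us.anthropic.claude-3-5-sonnet-20240620-v1:0")
print(llm.profile)

# Explicit override: the validator sees a non-None profile and leaves it as-is.
llm_custom = ChatBedrockConverse(
    model="us.anthropic.claude-3-5-sonnet-20240620-v1:0",
    profile={"max_input_tokens": 100_000},
)
print(llm_custom.profile)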

def _get_base_model(self) -> str:
"""Return base model id, stripping any regional prefix."""
