
Commit b100328

Add test coverage for Bedrock Titan V2 encoding_format parameter
- Test encoding_format='float' parameter mapping and response handling
- Test encoding_format='base64' parameter mapping to the binary format
- Verify parameter transformation and response processing
- Mock AWS API responses for both float and binary formats
- Ensure OpenAI compatibility with the new encoding_format support
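For orientation, a minimal sketch of the parameter mapping these tests exercise, assuming the behavior described above (encoding_format="float" maps to embeddingTypes ["float"], encoding_format="base64" maps to ["binary"] and the OpenAI-style key is dropped from the request). The helper name and structure are illustrative only, not LiteLLM's actual internals:

    # Hypothetical helper illustrating the OpenAI -> Titan V2 parameter translation;
    # not LiteLLM's actual implementation.
    def map_encoding_format_to_titan_v2(encoding_format):
        """Translate OpenAI's encoding_format into Titan V2's embeddingTypes field."""
        if encoding_format is None:
            return {}
        if encoding_format == "float":
            return {"embeddingTypes": ["float"]}
        if encoding_format == "base64":
            # Titan V2 has no base64 option; its binary embedding type is the closest match.
            return {"embeddingTypes": ["binary"]}
        raise ValueError(f"Unsupported encoding_format: {encoding_format}")

    # Example: map_encoding_format_to_titan_v2("base64") -> {"embeddingTypes": ["binary"]}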
1 parent 72b492c commit b100328

File tree: 1 file changed (+85, -3 lines)


tests/test_litellm/llms/bedrock/embed/test_bedrock_embedding.py

Lines changed: 85 additions & 3 deletions
@@ -2,11 +2,12 @@
 import os
 import sys
 from unittest.mock import Mock, patch
+
 import pytest
 
 sys.path.insert(0, os.path.abspath("../../../../.."))  # Adds the parent directory to the system path
 import litellm
-from litellm.llms.custom_httpx.http_handler import HTTPHandler, AsyncHTTPHandler
+from litellm.llms.custom_httpx.http_handler import AsyncHTTPHandler, HTTPHandler
 
 # Mock responses for different embedding models
 titan_embedding_response = {
@@ -137,7 +138,7 @@ def test_bedrock_embedding_with_sigv4():
     """Test embedding falls back to SigV4 auth when no bearer token is provided"""
     litellm.set_verbose = True
     model = "bedrock/amazon.titan-embed-text-v1"
-
+
     with patch("litellm.llms.bedrock.embed.embedding.BedrockEmbedding.embeddings") as mock_bedrock_embed:
         mock_embedding_response = litellm.EmbeddingResponse()
         mock_embedding_response.data = [{"embedding": [0.1, 0.2, 0.3]}]
@@ -150,4 +151,85 @@ def test_bedrock_embedding_with_sigv4():
         )
 
     assert isinstance(response, litellm.EmbeddingResponse)
-    mock_bedrock_embed.assert_called_once()
+    mock_bedrock_embed.assert_called_once()
+
+
+def test_bedrock_titan_v2_encoding_format_float():
+    """Test amazon.titan-embed-text-v2:0 with encoding_format=float parameter"""
+    litellm.set_verbose = True
+    client = HTTPHandler()
+    test_api_key = "test-bearer-token-12345"
+    model = "bedrock/amazon.titan-embed-text-v2:0"
+
+    # Mock response with a float embedding (addressing issue #14680)
+    titan_v2_response = {
+        "embedding": [0.1, 0.2, 0.3],
+        "inputTextTokenCount": 10
+    }
+
+    with patch.object(client, "post") as mock_post:
+        mock_response = Mock()
+        mock_response.status_code = 200
+        mock_response.text = json.dumps(titan_v2_response)
+        mock_response.json = lambda: json.loads(mock_response.text)
+        mock_post.return_value = mock_response
+
+        response = litellm.embedding(
+            model=model,
+            input=test_input,
+            encoding_format="float",  # This should work but currently throws UnsupportedParamsError
+            client=client,
+            aws_region_name="us-east-1",
+            aws_bedrock_runtime_endpoint="https://bedrock-runtime.us-east-1.amazonaws.com",
+            api_key=test_api_key
+        )
+
+        assert isinstance(response, litellm.EmbeddingResponse)
+        assert isinstance(response.data[0]['embedding'], list)
+        assert len(response.data[0]['embedding']) == 3
+
+        # Verify that the request contains embeddingTypes: ["float"] instead of encoding_format
+        request_body = json.loads(mock_post.call_args.kwargs.get("data", "{}"))
+        assert "embeddingTypes" in request_body
+        assert request_body["embeddingTypes"] == ["float"]
+        assert "encoding_format" not in request_body
+
+
+def test_bedrock_titan_v2_encoding_format_base64():
+    """Test amazon.titan-embed-text-v2:0 with encoding_format=base64 parameter (maps to binary)"""
+    litellm.set_verbose = True
+    client = HTTPHandler()
+    test_api_key = "test-bearer-token-12345"
+    model = "bedrock/amazon.titan-embed-text-v2:0"
+
+    # Mock response with embeddingsByType for binary format
+    titan_v2_binary_response = {
+        "embeddingsByType": {
+            "binary": "YmluYXJ5X2VtYmVkZGluZ19kYXRh"  # base64-encoded binary data
+        },
+        "inputTextTokenCount": 10
+    }
+
+    with patch.object(client, "post") as mock_post:
+        mock_response = Mock()
+        mock_response.status_code = 200
+        mock_response.text = json.dumps(titan_v2_binary_response)
+        mock_response.json = lambda: json.loads(mock_response.text)
+        mock_post.return_value = mock_response
+
+        response = litellm.embedding(
+            model=model,
+            input=test_input,
+            encoding_format="base64",  # This should map to embeddingTypes: ["binary"]
+            client=client,
+            aws_region_name="us-east-1",
+            aws_bedrock_runtime_endpoint="https://bedrock-runtime.us-east-1.amazonaws.com",
+            api_key=test_api_key
+        )
+
+        assert isinstance(response, litellm.EmbeddingResponse)
+
+        # Verify that the request contains embeddingTypes: ["binary"] for base64 encoding
+        request_body = json.loads(mock_post.call_args.kwargs.get("data", "{}"))
+        assert "embeddingTypes" in request_body
+        assert request_body["embeddingTypes"] == ["binary"]
