Commit 52a56bd

Merge pull request #14570 from timelfrink/feat/issue-14562-bedrock-converse-request-metadata
feat: Support requestMetadata in Bedrock Converse API
2 parents (cf429e7 + 9096d9c) · commit 52a56bd

File tree

5 files changed (+825 / -115 lines)

docs/my-website/docs/completion/provider_specific_params.md

Lines changed: 50 additions & 0 deletions
````diff
@@ -433,4 +433,54 @@ curl -X POST 'http://0.0.0.0:4000/chat/completions' \
     ],
     "adapater_id": "my-special-adapter-id" # 👈 PROVIDER-SPECIFIC PARAM
 }'
+
+## Provider-Specific Metadata Parameters
+
+| Provider | Parameter | Use Case |
+|----------|-----------|----------|
+| **AWS Bedrock** | `requestMetadata` | Cost attribution, logging |
+| **Gemini/Vertex AI** | `labels` | Resource labeling |
+| **Anthropic** | `metadata` | User identification |
+
+<Tabs>
+<TabItem value="bedrock" label="AWS Bedrock">
+
+```python
+import litellm
+
+response = litellm.completion(
+    model="bedrock/anthropic.claude-3-5-sonnet-20240620-v1:0",
+    messages=[{"role": "user", "content": "Hello!"}],
+    requestMetadata={"cost_center": "engineering"}
+)
+```
+
+</TabItem>
+<TabItem value="gemini" label="Gemini/Vertex AI">
+
+```python
+import litellm
+
+response = litellm.completion(
+    model="vertex_ai/gemini-pro",
+    messages=[{"role": "user", "content": "Hello!"}],
+    labels={"environment": "production"}
+)
+```
+
+</TabItem>
+<TabItem value="anthropic" label="Anthropic">
+
+```python
+import litellm
+
+response = litellm.completion(
+    model="anthropic/claude-3-sonnet-20240229",
+    messages=[{"role": "user", "content": "Hello!"}],
+    metadata={"user_id": "user123"}
+)
+```
+
+</TabItem>
+</Tabs>
 ```
````
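The tabs above pass each provider's parameter straight through `litellm.completion` as a keyword argument. As a minimal sketch (not part of this PR; the helper name and the provider-prefix matching are illustrative assumptions), the table can be wrapped in a tiny dispatcher that picks the right parameter name per provider:

```python
import litellm

# Hypothetical helper (illustration only): pick the provider-specific metadata
# parameter name from the table above, based on the model's provider prefix.
METADATA_PARAM_BY_PROVIDER = {
    "bedrock": "requestMetadata",   # AWS Bedrock Converse API
    "vertex_ai": "labels",          # Gemini / Vertex AI
    "anthropic": "metadata",        # Anthropic
}

def completion_with_metadata(model: str, messages: list, metadata: dict, **kwargs):
    provider = model.split("/", 1)[0]
    param_name = METADATA_PARAM_BY_PROVIDER.get(provider)
    if param_name is not None:
        kwargs[param_name] = metadata
    return litellm.completion(model=model, messages=messages, **kwargs)

# Only the Bedrock path is exercised here; other providers would get their
# parameter from the same mapping.
response = completion_with_metadata(
    model="bedrock/anthropic.claude-3-5-sonnet-20240620-v1:0",
    messages=[{"role": "user", "content": "Hello!"}],
    metadata={"cost_center": "engineering"},
)
```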

docs/my-website/docs/providers/bedrock.md

Lines changed: 59 additions & 0 deletions
````diff
@@ -308,6 +308,65 @@ print(response)
 </TabItem>
 </Tabs>
 
+## Usage - Request Metadata
+
+Attach metadata to Bedrock requests for logging and cost attribution.
+
+<Tabs>
+<TabItem value="sdk" label="SDK">
+
+```python
+import os
+from litellm import completion
+
+os.environ["AWS_ACCESS_KEY_ID"] = ""
+os.environ["AWS_SECRET_ACCESS_KEY"] = ""
+os.environ["AWS_REGION_NAME"] = ""
+
+response = completion(
+    model="bedrock/anthropic.claude-3-5-sonnet-20240620-v1:0",
+    messages=[{"role": "user", "content": "Hello, how are you?"}],
+    requestMetadata={
+        "cost_center": "engineering",
+        "user_id": "user123"
+    }
+)
+```
+</TabItem>
+<TabItem value="proxy" label="PROXY">
+
+**Set on yaml**
+
+```yaml
+model_list:
+  - model_name: bedrock-claude-v1
+    litellm_params:
+      model: bedrock/anthropic.claude-3-5-sonnet-20240620-v1:0
+      requestMetadata:
+        cost_center: "engineering"
+```
+
+**Set on request**
+
+```python
+import openai
+client = openai.OpenAI(
+    api_key="anything",
+    base_url="http://0.0.0.0:4000"
+)
+
+response = client.chat.completions.create(
+    model="bedrock-claude-v1",
+    messages=[{"role": "user", "content": "Hello"}],
+    extra_body={
+        "requestMetadata": {"cost_center": "engineering"}
+    }
+)
+```
+
+</TabItem>
+</Tabs>
+
 ## Usage - Function Calling / Tool calling
 
 LiteLLM supports tool calling via Bedrock's Converse and Invoke API's.
````
litellm/llms/bedrock/chat/converse_transformation.py

Lines changed: 128 additions & 30 deletions
````diff
@@ -175,6 +175,77 @@ def get_config(cls):
             and v is not None
         }
 
+    def _validate_request_metadata(self, metadata: dict) -> None:
+        """
+        Validate requestMetadata according to AWS Bedrock Converse API constraints.
+
+        Constraints:
+        - Maximum of 16 items
+        - Keys: 1-256 characters, pattern [a-zA-Z0-9\\s:_@$#=/+,-.]{1,256}
+        - Values: 0-256 characters, pattern [a-zA-Z0-9\\s:_@$#=/+,-.]{0,256}
+        """
+        import re
+
+        if not isinstance(metadata, dict):
+            raise litellm.exceptions.BadRequestError(
+                message="requestMetadata must be a dictionary",
+                model="bedrock",
+                llm_provider="bedrock",
+            )
+
+        if len(metadata) > 16:
+            raise litellm.exceptions.BadRequestError(
+                message="requestMetadata can contain a maximum of 16 items",
+                model="bedrock",
+                llm_provider="bedrock",
+            )
+
+        key_pattern = re.compile(r'^[a-zA-Z0-9\s:_@$#=/+,.-]{1,256}$')
+        value_pattern = re.compile(r'^[a-zA-Z0-9\s:_@$#=/+,.-]{0,256}$')
+
+        for key, value in metadata.items():
+            if not isinstance(key, str):
+                raise litellm.exceptions.BadRequestError(
+                    message="requestMetadata keys must be strings",
+                    model="bedrock",
+                    llm_provider="bedrock",
+                )
+
+            if not isinstance(value, str):
+                raise litellm.exceptions.BadRequestError(
+                    message="requestMetadata values must be strings",
+                    model="bedrock",
+                    llm_provider="bedrock",
+                )
+
+            if len(key) == 0 or len(key) > 256:
+                raise litellm.exceptions.BadRequestError(
+                    message="requestMetadata key length must be 1-256 characters",
+                    model="bedrock",
+                    llm_provider="bedrock",
+                )
+
+            if len(value) > 256:
+                raise litellm.exceptions.BadRequestError(
+                    message="requestMetadata value length must be 0-256 characters",
+                    model="bedrock",
+                    llm_provider="bedrock",
+                )
+
+            if not key_pattern.match(key):
+                raise litellm.exceptions.BadRequestError(
+                    message=f"requestMetadata key '{key}' contains invalid characters. Allowed: [a-zA-Z0-9\\s:_@$#=/+,.-]",
+                    model="bedrock",
+                    llm_provider="bedrock",
+                )
+
+            if not value_pattern.match(value):
+                raise litellm.exceptions.BadRequestError(
+                    message=f"requestMetadata value '{value}' contains invalid characters. Allowed: [a-zA-Z0-9\\s:_@$#=/+,.-]",
+                    model="bedrock",
+                    llm_provider="bedrock",
+                )
+
     def get_supported_openai_params(self, model: str) -> List[str]:
         from litellm.utils import supports_function_calling
 
````
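The limits in the docstring mirror the Bedrock Converse API's documented constraints on `requestMetadata`. A standalone illustration of the character class used above (a sketch, not code from this PR):

```python
import re

# Same character class as the validator above (allowed: letters, digits,
# whitespace, and : _ @ $ # = / + , - .)
KEY_PATTERN = re.compile(r"^[a-zA-Z0-9\s:_@$#=/+,.-]{1,256}$")

print(bool(KEY_PATTERN.match("cost_center")))       # True
print(bool(KEY_PATTERN.match("team/platform-01")))  # True
print(bool(KEY_PATTERN.match("bad|key")))           # False - '|' is not allowed
print(bool(KEY_PATTERN.match("")))                  # False - keys need 1-256 chars
```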
````diff
@@ -188,6 +259,7 @@ def get_supported_openai_params(self, model: str) -> List[str]:
             "top_p",
             "extra_headers",
             "response_format",
+            "requestMetadata",
         ]
 
         if (
````
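With `requestMetadata` added to the supported-params list, it should be reported for Converse models via LiteLLM's param introspection. A quick check (a sketch; it assumes `litellm.get_supported_openai_params` routes this model to `AmazonConverseConfig`):

```python
import litellm

# Expected to include "requestMetadata" after this change (assumption: the
# bedrock converse model resolves to AmazonConverseConfig's supported params).
params = litellm.get_supported_openai_params(
    model="bedrock/anthropic.claude-3-5-sonnet-20240620-v1:0"
)
print(params is not None and "requestMetadata" in params)  # expected: True
```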
````diff
@@ -497,6 +569,10 @@ def map_openai_params(
                 optional_params["thinking"] = AnthropicConfig._map_reasoning_effort(
                     value
                 )
+            if param == "requestMetadata":
+                if value is not None and isinstance(value, dict):
+                    self._validate_request_metadata(value)  # type: ignore
+                    optional_params["requestMetadata"] = value
 
         # Only update thinking tokens for non-GPT-OSS models
         if "gpt-oss" not in model:
@@ -686,34 +762,8 @@ def _handle_top_k_value(self, model: str, inference_params: dict) -> dict:
 
         return {}
 
-    def _transform_request_helper(
-        self,
-        model: str,
-        system_content_blocks: List[SystemContentBlock],
-        optional_params: dict,
-        messages: Optional[List[AllMessageValues]] = None,
-        headers: Optional[dict] = None,
-    ) -> CommonRequestObject:
-        ## VALIDATE REQUEST
-        """
-        Bedrock doesn't support tool calling without `tools=` param specified.
-        """
-        if (
-            "tools" not in optional_params
-            and messages is not None
-            and has_tool_call_blocks(messages)
-        ):
-            if litellm.modify_params:
-                optional_params["tools"] = add_dummy_tool(
-                    custom_llm_provider="bedrock_converse"
-                )
-            else:
-                raise litellm.UnsupportedParamsError(
-                    message="Bedrock doesn't support tool calling without `tools=` param specified. Pass `tools=` param OR set `litellm.modify_params = True` // `litellm_settings::modify_params: True` to add dummy tool to the request.",
-                    model="",
-                    llm_provider="bedrock",
-                )
-
+    def _prepare_request_params(self, optional_params: dict, model: str) -> tuple[dict, dict, dict]:
+        """Prepare and separate request parameters."""
         inference_params = copy.deepcopy(optional_params)
         supported_converse_params = list(
             AmazonConverseConfig.__annotations__.keys()
@@ -727,6 +777,11 @@ def _transform_request_helper(
         )
         inference_params.pop("json_mode", None) # used for handling json_schema
 
+        # Extract requestMetadata before processing other parameters
+        request_metadata = inference_params.pop("requestMetadata", None)
+        if request_metadata is not None:
+            self._validate_request_metadata(request_metadata)
+
         # keep supported params in 'inference_params', and set all model-specific params in 'additional_request_params'
         additional_request_params = {
             k: v for k, v in inference_params.items() if k not in total_supported_params
@@ -740,9 +795,10 @@ def _transform_request_helper(
             self._handle_top_k_value(model, inference_params)
         )
 
-        original_tools = inference_params.pop("tools", [])
+        return inference_params, additional_request_params, request_metadata
 
-        # Initialize bedrock_tools
+    def _process_tools_and_beta(self, original_tools: list, model: str, headers: Optional[dict], additional_request_params: dict) -> tuple[List[ToolBlock], list]:
+        """Process tools and collect anthropic_beta values."""
         bedrock_tools: List[ToolBlock] = []
 
         # Collect anthropic_beta values from user headers
@@ -784,6 +840,44 @@ def _transform_request_helper(
                     seen.add(beta)
             additional_request_params["anthropic_beta"] = unique_betas
 
+        return bedrock_tools, anthropic_beta_list
+
+    def _transform_request_helper(
+        self,
+        model: str,
+        system_content_blocks: List[SystemContentBlock],
+        optional_params: dict,
+        messages: Optional[List[AllMessageValues]] = None,
+        headers: Optional[dict] = None,
+    ) -> CommonRequestObject:
+        ## VALIDATE REQUEST
+        """
+        Bedrock doesn't support tool calling without `tools=` param specified.
+        """
+        if (
+            "tools" not in optional_params
+            and messages is not None
+            and has_tool_call_blocks(messages)
+        ):
+            if litellm.modify_params:
+                optional_params["tools"] = add_dummy_tool(
+                    custom_llm_provider="bedrock_converse"
+                )
+            else:
+                raise litellm.UnsupportedParamsError(
+                    message="Bedrock doesn't support tool calling without `tools=` param specified. Pass `tools=` param OR set `litellm.modify_params = True` // `litellm_settings::modify_params: True` to add dummy tool to the request.",
+                    model="",
+                    llm_provider="bedrock",
+                )
+
+        # Prepare and separate parameters
+        inference_params, additional_request_params, request_metadata = self._prepare_request_params(optional_params, model)
+
+        original_tools = inference_params.pop("tools", [])
+
+        # Process tools and collect beta values
+        bedrock_tools, anthropic_beta_list = self._process_tools_and_beta(original_tools, model, headers, additional_request_params)
+
         bedrock_tool_config: Optional[ToolConfigBlock] = None
         if len(bedrock_tools) > 0:
             tool_choice_values: ToolChoiceValuesBlock = inference_params.pop(
@@ -813,6 +907,10 @@ def _transform_request_helper(
         if bedrock_tool_config is not None:
             data["toolConfig"] = bedrock_tool_config
 
+        # Request Metadata (top-level field)
+        if request_metadata is not None:
+            data["requestMetadata"] = request_metadata
+
         return data
 
     async def _async_transform_request(
````
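Taken together, the transformation now validates `requestMetadata` while building the Converse request, so malformed metadata should surface as a client-side `BadRequestError` rather than an AWS-side error. A sketch of the expected behavior (not a test from this PR; it assumes validation fires during parameter mapping, before any network call, so the placeholder credentials are never exercised):

```python
import litellm

# 17 items exceeds the 16-item limit enforced by the new validator.
too_many_items = {f"key_{i}": "value" for i in range(17)}

try:
    litellm.completion(
        model="bedrock/anthropic.claude-3-5-sonnet-20240620-v1:0",
        messages=[{"role": "user", "content": "Hello"}],
        requestMetadata=too_many_items,
        aws_access_key_id="fake",        # placeholder creds; expected to fail
        aws_secret_access_key="fake",    # before any request is sent
        aws_region_name="us-east-1",
    )
except litellm.exceptions.BadRequestError as e:
    print(f"Rejected client-side: {e}")
```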

litellm/types/llms/bedrock.py

Lines changed: 2 additions & 1 deletion
````diff
@@ -1,5 +1,5 @@
 import json
-from typing import Any, List, Literal, Optional, Union
+from typing import Any, Dict, List, Literal, Optional, Union
 
 from typing_extensions import (
     TYPE_CHECKING,
@@ -231,6 +231,7 @@ class CommonRequestObject(
     toolConfig: ToolConfigBlock
     guardrailConfig: Optional[GuardrailConfigBlock]
     performanceConfig: Optional[PerformanceConfigBlock]
+    requestMetadata: Optional[Dict[str, str]]
 
 
 class RequestObject(CommonRequestObject, total=False):
````
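On the typing side, the new field makes `requestMetadata` part of the typed Converse request object. A small sketch (typing illustration only; `RequestObject` is the TypedDict extended above, and TypedDicts are plain dicts at runtime):

```python
from typing import Dict, Optional

from litellm.types.llms.bedrock import RequestObject

# Typing illustration only: requestMetadata is now typed as Optional[Dict[str, str]]
# on the Converse request object; unrelated fields are omitted here.
request: RequestObject = {
    "requestMetadata": {"cost_center": "engineering", "user_id": "user123"},
}

metadata: Optional[Dict[str, str]] = request.get("requestMetadata")
print(metadata)
```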
