
Commit 0a1c1a2

Merge pull request #14787 from BerriAI/litellm_dev_09_22_2025_p1
docs(provider_specific_params.md): fix docs
2 parents: 390e47c + 471d646

5 files changed: +296 −167 lines


.github/workflows/test-mcp.yml

Lines changed: 48 additions & 0 deletions
@@ -0,0 +1,48 @@
+name: LiteLLM MCP Tests (folder - tests/mcp_tests)
+
+on:
+  pull_request:
+    branches: [ main ]
+
+jobs:
+  test:
+    runs-on: ubuntu-latest
+    timeout-minutes: 25
+
+    steps:
+      - uses: actions/checkout@v4
+
+      - name: Thank You Message
+        run: |
+          echo "### 🙏 Thank you for contributing to LiteLLM!" >> $GITHUB_STEP_SUMMARY
+          echo "Your PR is being tested now. We appreciate your help in making LiteLLM better!" >> $GITHUB_STEP_SUMMARY
+
+      - name: Set up Python
+        uses: actions/setup-python@v4
+        with:
+          python-version: '3.12'
+
+      - name: Install Poetry
+        uses: snok/install-poetry@v1
+
+      - name: Install dependencies
+        run: |
+          poetry install --with dev,proxy-dev --extras "proxy semantic-router"
+          poetry run pip install "pytest==7.3.1"
+          poetry run pip install "pytest-retry==1.6.3"
+          poetry run pip install "pytest-cov==5.0.0"
+          poetry run pip install "pytest-asyncio==0.21.1"
+          poetry run pip install "respx==0.22.0"
+          poetry run pip install "pydantic==2.10.2"
+          poetry run pip install "mcp==1.10.1"
+          poetry run pip install pytest-xdist
+
+      - name: Setup litellm-enterprise as local package
+        run: |
+          cd enterprise
+          python -m pip install -e .
+          cd ..
+
+      - name: Run MCP tests
+        run: |
+          poetry run pytest tests/mcp_tests -x -vv -n 4 --cov=litellm --cov-report=xml --durations=5
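
A minimal sketch for reproducing the CI step locally, assuming the dependencies from the "Install dependencies" step above are already installed in this checkout's Poetry environment:

```python
# Mirror the "Run MCP tests" step above: fail fast (-x), verbose output (-vv),
# 4 pytest-xdist workers (-n 4), coverage over the litellm package.
import subprocess
import sys

result = subprocess.run(
    [
        "poetry", "run", "pytest", "tests/mcp_tests",
        "-x", "-vv", "-n", "4",
        "--cov=litellm", "--cov-report=xml", "--durations=5",
    ]
)
sys.exit(result.returncode)
```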

docs/my-website/docs/completion/provider_specific_params.md

Lines changed: 5 additions & 5 deletions
@@ -423,16 +423,17 @@ model_list:
 curl -X POST 'http://0.0.0.0:4000/chat/completions' \
 -H 'Content-Type: application/json' \
 -H 'Authorization: Bearer sk-1234' \
--D '{
+-d '{
   "model": "llama-3-8b-instruct",
   "messages": [
     {
       "role": "user",
       "content": "What'\''s the weather like in Boston today?"
     }
   ],
-  "adapater_id": "my-special-adapter-id" # 👈 PROVIDER-SPECIFIC PARAM
-}'
+  "adapater_id": "my-special-adapter-id"
+}'
+```
 
 ## Provider-Specific Metadata Parameters
 
@@ -482,5 +483,4 @@ response = litellm.completion(
 ```
 
 </TabItem>
-</Tabs>
-```
+</Tabs>
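
The fix above replaces curl's `-D` (dump response headers to a file) with `-d` (send a request body), and moves the closing code fence inside the tab so the page renders correctly. A rough SDK-side equivalent of the corrected request, assuming a LiteLLM proxy at http://0.0.0.0:4000 with key sk-1234 as in the docs example (`extra_body` is the OpenAI Python SDK's mechanism for sending extra JSON fields):

```python
# Send the same provider-specific param ("adapater_id") via the OpenAI SDK
# instead of curl. Proxy URL and key match the docs example above.
import openai

client = openai.OpenAI(api_key="sk-1234", base_url="http://0.0.0.0:4000")

response = client.chat.completions.create(
    model="llama-3-8b-instruct",
    messages=[
        {"role": "user", "content": "What's the weather like in Boston today?"}
    ],
    extra_body={"adapater_id": "my-special-adapter-id"},  # provider-specific param
)
print(response.choices[0].message.content)
```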

docs/my-website/sidebars.js

Lines changed: 1 addition & 0 deletions
@@ -546,6 +546,7 @@ const sidebars = {
         items: [
           "set_keys",
           "completion/token_usage",
+          "sdk/headers",
           "sdk_custom_pricing",
           "embedding/async_embedding",
           "embedding/moderation",

litellm/llms/bedrock/chat/converse_transformation.py

Lines changed: 31 additions & 21 deletions
@@ -200,8 +200,8 @@ def _validate_request_metadata(self, metadata: dict) -> None:
             llm_provider="bedrock",
         )
 
-        key_pattern = re.compile(r'^[a-zA-Z0-9\s:_@$#=/+,.-]{1,256}$')
-        value_pattern = re.compile(r'^[a-zA-Z0-9\s:_@$#=/+,.-]{0,256}$')
+        key_pattern = re.compile(r"^[a-zA-Z0-9\s:_@$#=/+,.-]{1,256}$")
+        value_pattern = re.compile(r"^[a-zA-Z0-9\s:_@$#=/+,.-]{0,256}$")
 
         for key, value in metadata.items():
             if not isinstance(key, str):
@@ -762,7 +762,9 @@ def _handle_top_k_value(self, model: str, inference_params: dict) -> dict:
 
         return {}
 
-    def _prepare_request_params(self, optional_params: dict, model: str) -> tuple[dict, dict, dict]:
+    def _prepare_request_params(
+        self, optional_params: dict, model: str
+    ) -> Tuple[dict, dict, dict]:
         """Prepare and separate request parameters."""
         inference_params = copy.deepcopy(optional_params)
         supported_converse_params = list(
@@ -797,7 +799,13 @@ def _prepare_request_params(self, optional_params: dict, model: str) -> tuple[di
 
         return inference_params, additional_request_params, request_metadata
 
-    def _process_tools_and_beta(self, original_tools: list, model: str, headers: Optional[dict], additional_request_params: dict) -> tuple[List[ToolBlock], list]:
+    def _process_tools_and_beta(
+        self,
+        original_tools: list,
+        model: str,
+        headers: Optional[dict],
+        additional_request_params: dict,
+    ) -> tuple[List[ToolBlock], list]:
         """Process tools and collect anthropic_beta values."""
         bedrock_tools: List[ToolBlock] = []
 
@@ -871,12 +879,16 @@ def _transform_request_helper(
         )
 
         # Prepare and separate parameters
-        inference_params, additional_request_params, request_metadata = self._prepare_request_params(optional_params, model)
+        inference_params, additional_request_params, request_metadata = (
+            self._prepare_request_params(optional_params, model)
+        )
 
         original_tools = inference_params.pop("tools", [])
 
         # Process tools and collect beta values
-        bedrock_tools, anthropic_beta_list = self._process_tools_and_beta(original_tools, model, headers, additional_request_params)
+        bedrock_tools, anthropic_beta_list = self._process_tools_and_beta(
+            original_tools, model, headers, additional_request_params
+        )
 
         bedrock_tool_config: Optional[ToolConfigBlock] = None
         if len(bedrock_tools) > 0:
@@ -1157,9 +1169,7 @@ def apply_tool_call_transformation_if_needed(
 
         return message, returned_finish_reason
 
-    def _translate_message_content(
-        self, content_blocks: List[ContentBlock]
-    ) -> Tuple[
+    def _translate_message_content(self, content_blocks: List[ContentBlock]) -> Tuple[
         str,
         List[ChatCompletionToolCallChunk],
         Optional[List[BedrockConverseReasoningContentBlock]],
@@ -1174,9 +1184,9 @@ def _translate_message_content(
         """
         content_str = ""
         tools: List[ChatCompletionToolCallChunk] = []
-        reasoningContentBlocks: Optional[
-            List[BedrockConverseReasoningContentBlock]
-        ] = None
+        reasoningContentBlocks: Optional[List[BedrockConverseReasoningContentBlock]] = (
+            None
+        )
         for idx, content in enumerate(content_blocks):
             """
             - Content is either a tool response or text
@@ -1297,9 +1307,9 @@ def _transform_response(
         chat_completion_message: ChatCompletionResponseMessage = {"role": "assistant"}
         content_str = ""
         tools: List[ChatCompletionToolCallChunk] = []
-        reasoningContentBlocks: Optional[
-            List[BedrockConverseReasoningContentBlock]
-        ] = None
+        reasoningContentBlocks: Optional[List[BedrockConverseReasoningContentBlock]] = (
+            None
+        )
 
         if message is not None:
             (
@@ -1312,12 +1322,12 @@ def _transform_response(
             chat_completion_message["provider_specific_fields"] = {
                 "reasoningContentBlocks": reasoningContentBlocks,
             }
-            chat_completion_message[
-                "reasoning_content"
-            ] = self._transform_reasoning_content(reasoningContentBlocks)
-            chat_completion_message[
-                "thinking_blocks"
-            ] = self._transform_thinking_blocks(reasoningContentBlocks)
+            chat_completion_message["reasoning_content"] = (
+                self._transform_reasoning_content(reasoningContentBlocks)
+            )
+            chat_completion_message["thinking_blocks"] = (
+                self._transform_thinking_blocks(reasoningContentBlocks)
+            )
         chat_completion_message["content"] = content_str
         if (
             json_mode is True
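
The changes in this file are behavior-preserving re-wraps (Black-style line breaks, plus a `tuple` → `typing.Tuple` swap in one return annotation). The requestMetadata validation patterns in the first hunk can be exercised on their own; a small sketch with made-up sample metadata, patterns copied from the diff:

```python
# Standalone check of the Bedrock requestMetadata patterns from
# _validate_request_metadata above. The sample metadata is hypothetical.
import re

key_pattern = re.compile(r"^[a-zA-Z0-9\s:_@$#=/+,.-]{1,256}$")
value_pattern = re.compile(r"^[a-zA-Z0-9\s:_@$#=/+,.-]{0,256}$")

metadata = {
    "trace_id": "abc-123",  # passes: allowed characters, within length limits
    "bad key!": "x" * 300,  # fails: '!' not allowed in keys, value too long
}

for key, value in metadata.items():
    key_ok = bool(key_pattern.match(key))
    value_ok = bool(value_pattern.match(value))
    print(f"{key!r}: key_ok={key_ok}, value_ok={value_ok}")
```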
