@@ -195,70 +195,155 @@ litellm_settings:
195
195
196
196
# # Using your MCP
197
197
198
- <Tabs>
199
- <TabItem value="openai" label="OpenAI API">
198
+ # ## Use on LiteLLM UI
200
199
201
- # ### Connect via OpenAI Responses API
200
+ # ## Use with Responses API
202
201
203
- Use the OpenAI Responses API to connect to your LiteLLM MCP server :
202
+ Replace `http://localhost:4000` with your LiteLLM Proxy base URL.
203
+
204
+ <Tabs>
205
+ <TabItem value="curl" label="cURL">
204
206
205
207
```bash title="cURL Example" showLineNumbers
206
- curl --location 'https ://api.openai.com /v1/responses' \
208
+ curl --location 'http://localhost:4000/v1/responses' \
207
209
--header 'Content-Type: application/json' \
208
- --header "Authorization: Bearer $OPENAI_API_KEY " \
210
+ --header "Authorization: Bearer sk-1234" \
209
211
--data '{
210
- "model": "gpt-4o",
212
+ "model": "gpt-5",
213
+ "input": [
214
+ {
215
+ "role": "user",
216
+ "content": "give me TLDR of what BerriAI/litellm repo is about",
217
+ "type": "message"
218
+ }
219
+ ],
211
220
"tools": [
212
221
{
213
222
"type": "mcp",
214
223
"server_label": "litellm",
215
224
"server_url": "litellm_proxy",
216
- "require_approval": "never",
217
- "headers": {
218
- "x-litellm-api-key": "Bearer YOUR_LITELLM_API_KEY"
219
- }
225
+ "require_approval": "never"
220
226
}
221
227
],
222
- "input ": "Run available tools" ,
228
+ "stream": true,
223
229
"tool_choice": "required"
224
230
}'
225
231
```
226
232
227
233
</TabItem>
234
+ <TabItem value="python" label="Python SDK">
228
235
229
- <TabItem value="litellm" label="LiteLLM Proxy">
236
+ ```python title="Python SDK Example" showLineNumbers
237
+ import openai
230
238
231
- # ### Connect via LiteLLM Proxy Responses API
239
+ client = openai.OpenAI(
240
+ api_key="sk-1234",
241
+ base_url="http://localhost:4000"
242
+ )
232
243
233
- Use this when calling LiteLLM Proxy for LLM API requests to `/v1/responses` endpoint.
244
+ response = client.responses.create(
245
+ model="gpt-5",
246
+ input=[
247
+ {
248
+ "role": "user",
249
+ "content": "give me TLDR of what BerriAI/litellm repo is about",
250
+ "type": "message"
251
+ }
252
+ ],
253
+ tools=[
254
+ {
255
+ "type": "mcp",
256
+ "server_label": "litellm",
257
+ "server_url": "litellm_proxy",
258
+ "require_approval": "never"
259
+ }
260
+ ],
261
+ stream=True,
262
+ tool_choice="required"
263
+ )
234
264
235
- ` ` ` bash title="cURL Example" showLineNumbers
236
- curl --location '<your-litellm-proxy-base-url>/v1/responses' \
265
+ print(response)
266
+ ```
267
+
268
+ </TabItem>
269
+ </Tabs>
270
+
271
+ # ### Specifying MCP Tools
272
+
273
+ You can specify which MCP tools are available by using the `allowed_tools` parameter. This allows you to restrict access to specific tools within an MCP server.
274
+
275
+ To get the list of allowed tools when using LiteLLM MCP Gateway, you can navigate to the LiteLLM UI on MCP Servers > MCP Tools > Click the Tool > Copy Tool Name.
276
+
277
+ <Tabs>
278
+ <TabItem value="curl" label="cURL">
279
+
280
+ ```bash title="cURL Example with allowed_tools" showLineNumbers
281
+ curl --location 'http://localhost:4000/v1/responses' \
237
282
--header 'Content-Type: application/json' \
238
- --header "Authorization: Bearer $LITELLM_API_KEY " \
283
+ --header "Authorization: Bearer sk-1234" \
239
284
--data '{
240
- "model": "gpt-4o",
285
+ "model": "gpt-5",
286
+ "input": [
287
+ {
288
+ "role": "user",
289
+ "content": "give me TLDR of what BerriAI/litellm repo is about",
290
+ "type": "message"
291
+ }
292
+ ],
241
293
"tools": [
242
294
{
243
295
"type": "mcp",
244
296
"server_label": "litellm",
245
- "server_url": "litellm_proxy",
297
+ "server_url": "litellm_proxy/mcp",
246
298
"require_approval": "never",
247
- "headers": {
248
- "x-litellm-api-key": "Bearer YOUR_LITELLM_API_KEY"
249
- }
299
+ "allowed_tools": ["GitMCP-fetch_litellm_documentation"]
250
300
}
251
301
],
252
- "input ": "Run available tools" ,
302
+ "stream": true,
253
303
"tool_choice": "required"
254
304
}'
255
305
```
256
306
257
307
</TabItem>
308
+ <TabItem value="python" label="Python SDK">
258
309
259
- <TabItem value="cursor" label="Cursor IDE">
310
+ ```python title="Python SDK Example with allowed_tools" showLineNumbers
311
+ import openai
312
+
313
+ client = openai.OpenAI(
314
+ api_key="sk-1234",
315
+ base_url="http://localhost:4000"
316
+ )
317
+
318
+ response = client.responses.create(
319
+ model="gpt-5",
320
+ input=[
321
+ {
322
+ "role": "user",
323
+ "content": "give me TLDR of what BerriAI/litellm repo is about",
324
+ "type": "message"
325
+ }
326
+ ],
327
+ tools=[
328
+ {
329
+ "type": "mcp",
330
+ "server_label": "litellm",
331
+ "server_url": "litellm_proxy/mcp",
332
+ "require_approval": "never",
333
+ "allowed_tools": ["GitMCP-fetch_litellm_documentation"]
334
+ }
335
+ ],
336
+ stream=True,
337
+ tool_choice="required"
338
+ )
339
+
340
+ print(response)
341
+ ```
260
342
261
- # ### Connect via Cursor IDE
343
+ </TabItem>
344
+ </Tabs>
345
+
346
+ # ## Use with Cursor IDE
262
347
263
348
Use tools directly from Cursor IDE with LiteLLM MCP:
264
349
@@ -281,9 +366,6 @@ Use tools directly from Cursor IDE with LiteLLM MCP:
281
366
}
282
367
```
283
368
284
- </TabItem>
285
- </Tabs>
286
-
287
369
# ### How it works when server_url="litellm_proxy"
288
370
289
371
When server_url="litellm_proxy", LiteLLM bridges non-MCP providers to your MCP tools.
0 commit comments