
Commit 82fd182

Merge pull request #15196 from BerriAI/litellm_staging_10_04_2025
Litellm staging 10 04 2025
2 parents: b2ffeb8 + f24a7a5 · commit: 82fd182

File tree: 16 files changed (+850, −102 lines)


litellm/proxy/management_endpoints/common_utils.py
Lines changed: 1 addition & 1 deletion

@@ -43,7 +43,7 @@ def _set_object_metadata_field(
         value: Value to set for the field
     """
     if field_name in LiteLLM_ManagementEndpoint_MetadataFields_Premium:
-        _premium_user_check()
+        _premium_user_check(field_name)
     object_data.metadata = object_data.metadata or {}
     object_data.metadata[field_name] = value

litellm/proxy/management_endpoints/key_management_endpoints.py
Lines changed: 1 addition & 1 deletion

@@ -903,7 +903,7 @@ def prepare_metadata_fields(
             if k in LiteLLM_ManagementEndpoint_MetadataFields_Premium:
                 from litellm.proxy.utils import _premium_user_check

-                _premium_user_check()
+                _premium_user_check(k)
             casted_metadata[k] = v

     except Exception as e:

litellm/proxy/utils.py
Lines changed: 7 additions & 2 deletions

@@ -3575,17 +3575,22 @@ def handle_exception_on_proxy(e: Exception) -> ProxyException:
     )


-def _premium_user_check():
+def _premium_user_check(feature:str=None):
     """
     Raises an HTTPException if the user is not a premium user
     """
     from litellm.proxy.proxy_server import premium_user

+    if feature:
+        detail_msg = f"This feature is only available for LiteLLM Enterprise users: {feature}. {CommonProxyErrors.not_premium_user.value}"
+    else:
+        detail_msg = f"This feature is only available for LiteLLM Enterprise users. {CommonProxyErrors.not_premium_user.value}"
+
     if not premium_user:
         raise HTTPException(
             status_code=403,
             detail={
-                "error": f"This feature is only available for LiteLLM Enterprise users. {CommonProxyErrors.not_premium_user.value}"
+                "error": detail_msg
             },
         )
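For context, a minimal sketch of what this change looks like from a dashboard caller's perspective: a 403 from a management endpoint now names the blocked field in its error string. The updateKeyMetadata helper, the /key/update path, and the request body below are illustrative assumptions, not code from this commit.

// Sketch only: the helper name, endpoint path, and body shape are assumptions;
// only the 403 detail format comes from this commit.
async function updateKeyMetadata(
  baseUrl: string,
  accessToken: string,
  metadata: Record<string, unknown>,
): Promise<unknown> {
  const response = await fetch(`${baseUrl}/key/update`, {
    method: "POST",
    headers: {
      Authorization: `Bearer ${accessToken}`,
      "Content-Type": "application/json",
    },
    body: JSON.stringify({ metadata }),
  });
  if (response.status === 403) {
    const body = await response.json();
    // FastAPI nests HTTPException payloads under "detail"; after this change the
    // error string includes the feature name, e.g.
    // "This feature is only available for LiteLLM Enterprise users: guardrails. ..."
    throw new Error(body?.detail?.error ?? "Premium feature required");
  }
  return response.json();
}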

ui/litellm-dashboard/src/app/page.tsx
Lines changed: 1 addition & 1 deletion

@@ -22,7 +22,7 @@ import SpendLogsTable from "@/components/view_logs"
 import ModelHubTable from "@/components/model_hub_table"
 import NewUsagePage from "@/components/new_usage"
 import APIRef from "@/components/api_ref"
-import ChatUI from "@/components/chat_ui"
+import ChatUI from "@/components/chat_ui/ChatUI"
 import Sidebar from "@/components/leftnav"
 import Usage from "@/components/usage"
 import CacheDashboard from "@/components/cache_dashboard"

ui/litellm-dashboard/src/components/chat_ui.tsx renamed to ui/litellm-dashboard/src/components/chat_ui/ChatUI.tsx
Lines changed: 37 additions & 33 deletions

@@ -25,34 +25,34 @@ import {
 import { v4 as uuidv4 } from 'uuid';

 import { message, Select, Spin, Typography, Tooltip, Input, Upload, Modal, Button } from "antd";
-import { makeOpenAIChatCompletionRequest } from "./chat_ui/llm_calls/chat_completion";
-import { makeOpenAIImageGenerationRequest } from "./chat_ui/llm_calls/image_generation";
-import { makeOpenAIImageEditsRequest } from "./chat_ui/llm_calls/image_edits";
-import { makeOpenAIResponsesRequest } from "./chat_ui/llm_calls/responses_api";
-import { makeAnthropicMessagesRequest } from "./chat_ui/llm_calls/anthropic_messages";
-import { fetchAvailableModels, ModelGroup } from "./chat_ui/llm_calls/fetch_models";
-import { fetchAvailableMCPTools } from "./chat_ui/llm_calls/fetch_mcp_tools";
-import type { MCPTool } from "./chat_ui/llm_calls/fetch_mcp_tools";
-import { litellmModeMapping, ModelMode, EndpointType, getEndpointType } from "./chat_ui/mode_endpoint_mapping";
+import { makeOpenAIChatCompletionRequest } from "./llm_calls/chat_completion";
+import { makeOpenAIImageGenerationRequest } from "./llm_calls/image_generation";
+import { makeOpenAIImageEditsRequest } from "./llm_calls/image_edits";
+import { makeOpenAIResponsesRequest } from "./llm_calls/responses_api";
+import { makeAnthropicMessagesRequest } from "./llm_calls/anthropic_messages";
+import { fetchAvailableModels, ModelGroup } from "./llm_calls/fetch_models";
+import { fetchAvailableMCPTools } from "./llm_calls/fetch_mcp_tools";
+import type { MCPTool } from "./llm_calls/fetch_mcp_tools";
+import { litellmModeMapping, ModelMode, EndpointType, getEndpointType } from "./mode_endpoint_mapping";
 import { Prism as SyntaxHighlighter } from "react-syntax-highlighter";
 import { coy } from 'react-syntax-highlighter/dist/esm/styles/prism';
-import EndpointSelector from "./chat_ui/EndpointSelector";
-import TagSelector from "./tag_management/TagSelector";
-import VectorStoreSelector from "./vector_store_management/VectorStoreSelector";
-import GuardrailSelector from "./guardrails/GuardrailSelector";
-import { determineEndpointType } from "./chat_ui/EndpointUtils";
-import { generateCodeSnippet } from "./chat_ui/CodeSnippets";
-import { MessageType } from "./chat_ui/types";
-import ReasoningContent from "./chat_ui/ReasoningContent";
-import ResponseMetrics, { TokenUsage } from "./chat_ui/ResponseMetrics";
-import ResponsesImageUpload from "./chat_ui/ResponsesImageUpload";
-import ResponsesImageRenderer from "./chat_ui/ResponsesImageRenderer";
-import { convertImageToBase64, createMultimodalMessage, createDisplayMessage } from "./chat_ui/ResponsesImageUtils";
-import ChatImageUpload from "./chat_ui/ChatImageUpload";
-import ChatImageRenderer from "./chat_ui/ChatImageRenderer";
-import { createChatMultimodalMessage, createChatDisplayMessage } from "./chat_ui/ChatImageUtils";
-import SessionManagement from "./chat_ui/SessionManagement";
-import MCPEventsDisplay, { MCPEvent } from "./chat_ui/MCPEventsDisplay";
+import EndpointSelector from "./EndpointSelector";
+import TagSelector from "../tag_management/TagSelector";
+import VectorStoreSelector from "../vector_store_management/VectorStoreSelector";
+import GuardrailSelector from "../guardrails/GuardrailSelector";
+import { determineEndpointType } from "./EndpointUtils";
+import { generateCodeSnippet } from "./CodeSnippets";
+import { MessageType } from "./types";
+import ReasoningContent from "./ReasoningContent";
+import ResponseMetrics, { TokenUsage } from "./ResponseMetrics";
+import ResponsesImageUpload from "./ResponsesImageUpload";
+import ResponsesImageRenderer from "./ResponsesImageRenderer";
+import { convertImageToBase64, createMultimodalMessage, createDisplayMessage } from "./ResponsesImageUtils";
+import ChatImageUpload from "./ChatImageUpload";
+import ChatImageRenderer from "./ChatImageRenderer";
+import { createChatMultimodalMessage, createChatDisplayMessage } from "./ChatImageUtils";
+import SessionManagement from "./SessionManagement";
+import MCPEventsDisplay, { MCPEvent } from "./MCPEventsDisplay";
 import {
   SendOutlined,
   ApiOutlined,

@@ -73,7 +73,7 @@ import {
   FilePdfOutlined,
   ArrowUpOutlined
 } from "@ant-design/icons";
-import NotificationsManager from "./molecules/notifications_manager";
+import NotificationsManager from "../molecules/notifications_manager";

 const { TextArea } = Input;
 const { Dragger } = Upload;

@@ -282,13 +282,17 @@ const ChatUI: React.FC<ChatUIProps> = ({
       );

       console.log("Fetched models:", uniqueModels);
-
-      if (uniqueModels.length > 0) {
-        setModelInfo(uniqueModels);
-        if (!selectedModel) {
-          setSelectedModel(uniqueModels[0].model_group);
-        }
+
+      setModelInfo(uniqueModels);
+
+      // check for selection overlap or empty model list
+      const hasSelection = uniqueModels.some(m => m.model_group === selectedModel);
+      if (!uniqueModels.length) {
+        setSelectedModel(undefined);
+      } else if (!hasSelection) {
+        setSelectedModel(uniqueModels[0].model_group);
       }
+
     } catch (error) {
       console.error("Error fetching model info:", error);
     }
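The behavioral fix in the last hunk: previously a stale selectedModel survived a model-list refresh whenever it was non-empty, even if that model no longer existed in the fetched list. The new code keeps the selection only if it still appears in the list. A self-contained restatement of that rule, separate from the component (ModelGroup is redeclared here only to keep the sketch standalone):

// Standalone restatement of the new selection logic from the hunk above.
interface ModelGroup {
  model_group: string;
}

function reconcileSelectedModel(
  models: ModelGroup[],
  selected: string | undefined,
): string | undefined {
  if (models.length === 0) return undefined; // empty list clears the selection
  const stillExists = models.some((m) => m.model_group === selected);
  return stillExists ? selected : models[0].model_group; // keep it, or fall back to the first model
}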

ui/litellm-dashboard/src/components/guardrails/GuardrailSelector.tsx
Lines changed: 5 additions & 2 deletions

@@ -9,13 +9,15 @@ interface GuardrailSelectorProps {
   value?: string[];
   className?: string;
   accessToken: string;
+  disabled?: boolean;
 }

 const GuardrailSelector: React.FC<GuardrailSelectorProps> = ({
   onChange,
   value,
   className,
-  accessToken
+  accessToken,
+  disabled
 }) => {
   const [guardrails, setGuardrails] = useState<Guardrail[]>([]);
   const [loading, setLoading] = useState(false);

@@ -51,7 +53,8 @@ const GuardrailSelector: React.FC<GuardrailSelectorProps> = ({
     <div>
       <Select
         mode="multiple"
-        placeholder="Select guardrails"
+        disabled={disabled}
+        placeholder={disabled ? "Setting guardrails is a premium feature." : "Select guardrails"}
         onChange={handleGuardrailChange}
         value={value}
         loading={loading}
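Illustrative usage of the new prop from a hypothetical parent component; premiumUser, selectedGuardrails, and setSelectedGuardrails are stand-ins, not names from this PR:

// Hypothetical wiring: disabling the selector for non-premium users shows the
// premium-feature placeholder instead of the guardrail list.
<GuardrailSelector
  accessToken={accessToken}
  value={selectedGuardrails}
  onChange={setSelectedGuardrails}
  disabled={!premiumUser}
/>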

ui/litellm-dashboard/src/components/networking.tsx
Lines changed: 3 additions & 3 deletions

@@ -2514,10 +2514,10 @@ export const allTagNamesCall = async (accessToken: String) => {
 export const allEndUsersCall = async (accessToken: String) => {
   try {
     let url = proxyBaseUrl
-      ? `${proxyBaseUrl}/global/all_end_users`
-      : `/global/all_end_users`;
+      ? `${proxyBaseUrl}/customer/list`
+      : `/customer/list`;

-    console.log("in global/all_end_users call", url);
+    console.log("in customer/list", url);
     const response = await fetch(`${url}`, {
       method: "GET",
       headers: {
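Caller-side usage is unchanged by this edit; only the proxy route the helper hits moved from /global/all_end_users to /customer/list. A usage sketch, assuming /customer/list returns a payload compatible with the old endpoint:

// Existing callers keep working; only the underlying route changed.
const endUsers = await allEndUsersCall(accessToken);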
