1
+ """
2
+ REST API endpoint for model parameter definitions.
3
+ """
4
+ from jupyter_server .base .handlers import APIHandler as BaseAPIHandler
5
+ from tornado .web import authenticated , HTTPError
6
+ from pydantic import BaseModel
7
+ from typing import List , Optional
8
+
9
+ from litellm .litellm_core_utils .get_supported_openai_params import get_supported_openai_params
10
+
11
class ModelParametersResponse(BaseModel):
    """Response model for the model-parameters API.

    Serialized (via ``model_dump_json``) as the JSON body returned by
    ``ModelParametersRestAPI.get``.
    """

    # Names of the supported OpenAI-compatible parameters.
    # NOTE: this field was previously commented out, but the handler passes
    # `parameters=` when constructing the response — it must be declared
    # for pydantic to accept and serialize it.
    parameters: List[str]
    # Number of entries in `parameters`.
    count: int
15
+
16
class ModelParametersRestAPI(BaseAPIHandler):
    """
    REST API for model parameters at `/api/ai/model-parameters`

    GET /api/ai/model-parameters: Returns all supported OpenAI parameters
    GET /api/ai/model-parameters?core=true: Returns only core parameters
    """

    # Core/common parameters that work across most OpenAI-compatible
    # providers. NOTE(review): the original code called an undefined
    # `get_core_parameters()` helper (a NameError at runtime, surfaced as a
    # 500); this list is a reasonable reconstruction of the intent — confirm
    # the exact set with the original author.
    CORE_PARAMETERS: List[str] = [
        "temperature",
        "max_tokens",
        "top_p",
        "stream",
        "stop",
        "n",
        "presence_penalty",
        "frequency_penalty",
    ]

    @authenticated
    def get(self):
        """
        Returns list of supported model parameters for OpenAI-compatible models.

        Query Parameters:
        - core (boolean): If true, returns only core/common parameters

        These parameters work with LiteLLM across different providers.

        Raises:
            HTTPError: 500 on any unexpected failure while building the list.
        """
        try:
            # Any value other than (case-insensitive) "true" means "all params".
            core_only = self.get_query_argument("core", default="false").lower() == "true"
            if core_only:
                parameters = self.CORE_PARAMETERS
            else:
                # NOTE(review): litellm's get_supported_openai_params takes a
                # `model` argument; calling it with no arguments may raise
                # TypeError (caught below as a 500). Verify against the
                # installed litellm version.
                parameters = get_supported_openai_params()

            response = ModelParametersResponse(
                parameters=parameters,
                count=len(parameters),
            )

            self.set_status(200)
            self.set_header("Content-Type", "application/json")
            self.finish(response.model_dump_json())

        except Exception as e:
            # Log the full traceback server-side; return a generic 500 to the
            # client with the error message attached.
            self.log.exception("Failed to get model parameters")
            raise HTTPError(500, f"Internal server error: {str(e)}")
0 commit comments