|
1 |
| -""" |
2 |
| -REST API endpoint for model parameter definitions. |
3 |
| -""" |
4 | 1 | from jupyter_server.base.handlers import APIHandler as BaseAPIHandler
|
5 | 2 | from tornado.web import authenticated, HTTPError
|
6 |
| -from pydantic import BaseModel |
7 |
| -from typing import List, Optional |
| 3 | +import json |
8 | 4 |
|
9 | 5 | from litellm.litellm_core_utils.get_supported_openai_params import get_supported_openai_params
|
10 |
| - |
11 |
| -class ModelParametersResponse(BaseModel): |
12 |
| - """Response model for model parameters API.""" |
13 |
| - #parameters: List[] |
14 |
| - count: int |
15 | 6 |
|
class ModelParametersRestAPI(BaseAPIHandler):
    """
    REST API for model parameters at `/api/ai/model-parameters`

    GET /api/ai/model-parameters: Returns common parameters
    GET /api/ai/model-parameters?model=gpt-4: Returns parameters for specific model
    """

    # Fallback set of parameters that work across most OpenAI-compatible
    # models; used when no model is given or the per-model lookup fails.
    COMMON_PARAMS = ["temperature", "max_tokens", "top_p", "stop"]

    @authenticated
    def get(self):
        """
        Returns list of supported model parameters.

        Query Parameters:
        - model (string): Model ID to get parameters for
        - provider (string): Custom LLM provider (optional)
        If no model provided, returns common parameters.

        Responds 200 with JSON `{"parameters": [...], "count": <int>}`.
        Raises HTTPError(500) on unexpected failure so the client gets an
        error response instead of a hung connection.
        """
        try:
            model = self.get_query_argument("model", default=None)
            provider = self.get_query_argument("provider", default=None)

            parameters = self.COMMON_PARAMS
            if model:
                try:
                    # LiteLLM resolves the OpenAI-style parameters supported
                    # by this model/provider combination.
                    supported = get_supported_openai_params(
                        model=model,
                        custom_llm_provider=provider,
                    )
                    if supported:
                        parameters = supported
                except Exception:
                    # Unknown model/provider: keep the common fallback, but
                    # leave a trace rather than swallowing the error silently.
                    self.log.debug(
                        "Could not resolve parameters for model %r (provider %r); "
                        "falling back to common parameters",
                        model,
                        provider,
                    )

            response = {
                "parameters": parameters,
                "count": len(parameters),
            }

            self.set_status(200)
            self.set_header("Content-Type", "application/json")
            self.finish(json.dumps(response))

        except Exception as e:
            # Log the full traceback, then surface a proper HTTP error —
            # without this the request would never be finished.
            self.log.exception("Failed to get model parameters")
            raise HTTPError(500, f"Failed to get model parameters: {e}") from e
|
|
0 commit comments