Skip to content

Commit 0f25a10

Browse files
committed
fixes api code
1 parent eb985ae commit 0f25a10

File tree

1 file changed

+29
-25
lines changed

1 file changed

+29
-25
lines changed

packages/jupyter-ai/jupyter_ai/model_providers/parameters_rest_api.py

Lines changed: 29 additions & 25 deletions
Original file line number · Diff line number · Diff line change
@@ -1,51 +1,55 @@
1-
"""
2-
REST API endpoint for model parameter definitions.
3-
"""
41
from jupyter_server.base.handlers import APIHandler as BaseAPIHandler
52
from tornado.web import authenticated, HTTPError
6-
from pydantic import BaseModel
7-
from typing import List, Optional
3+
import json
84

95
from litellm.litellm_core_utils.get_supported_openai_params import get_supported_openai_params
10-
11-
class ModelParametersResponse(BaseModel):
12-
"""Response model for model parameters API."""
13-
#parameters: List[]
14-
count: int
156

167
class ModelParametersRestAPI(BaseAPIHandler):
178
"""
189
REST API for model parameters at `/api/ai/model-parameters`
1910
20-
GET /api/ai/model-parameters: Returns all supported OpenAI parameters
21-
GET /api/ai/model-parameters?core=true: Returns only core parameters
11+
GET /api/ai/model-parameters: Returns common parameters
12+
GET /api/ai/model-parameters?model=gpt-4: Returns parameters for specific model
2213
"""
2314

2415
@authenticated
2516
def get(self):
2617
"""
27-
Returns list of supported model parameters for OpenAI-compatible models.
18+
Returns list of supported model parameters.
2819
2920
Query Parameters:
30-
- core (boolean): If true, returns only core/common parameters
31-
32-
These parameters work with LiteLLM across different providers.
21+
- model (string): Model ID to get parameters for
22+
- provider (string): Custom LLM provider (optional)
23+
If no model provided, returns common parameters.
3324
"""
3425
try:
35-
core_only = self.get_query_argument("core", default="false").lower() == "true"
36-
if core_only:
37-
parameters = get_core_parameters()
26+
model = self.get_query_argument("model", default=None)
27+
provider = self.get_query_argument("provider", default=None)
28+
29+
# Temporary common parameters that work across most models
30+
common_params = ["temperature", "max_tokens", "top_p", "stop"]
31+
32+
if model:
33+
try:
34+
parameters = get_supported_openai_params(
35+
model=model,
36+
custom_llm_provider=provider
37+
)
38+
if not parameters:
39+
parameters = common_params
40+
except Exception:
41+
parameters = common_params
3842
else:
39-
parameters = get_supported_openai_params()
43+
parameters = common_params
4044

41-
response = ModelParametersResponse(
42-
parameters=parameters,
43-
count=len(parameters)
44-
)
45+
response = {
46+
"parameters": parameters,
47+
"count": len(parameters)
48+
}
4549

4650
self.set_status(200)
4751
self.set_header("Content-Type", "application/json")
48-
self.finish(response.model_dump_json())
52+
self.finish(json.dumps(response))
4953

5054
except Exception as e:
5155
self.log.exception("Failed to get model parameters")

0 commit comments

Comments (0)