Skip to content

Commit 6047e65

Browse files
committed
implemented type mapping
1 parent 0f25a10 commit 6047e65

File tree

2 files changed

+156
-7
lines changed

2 files changed

+156
-7
lines changed
packages/jupyter-ai/jupyter_ai/model_providers/parameter_schemas.py

Lines changed: 144 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,144 @@
1+
2+
# Schema registry for OpenAI-compatible completion parameters.
#
# Each entry maps a parameter name to a small JSON-schema-like dict:
#   "type"        — value type ("number", "integer", "boolean", "array",
#                   "object", "string", or a union such as "string|object")
#   "default"     — default value (None means "unset / provider default")
#   "min" / "max" — numeric bounds, present only where the API defines them
#   "description" — human-readable help text for UI display
PARAMETER_SCHEMAS = {
    # --- Sampling controls -------------------------------------------------
    "temperature": {
        "type": "number",
        "default": 1,
        "min": 0,
        "max": 2,
        "description": "Controls randomness in the output. Lower values make it more focused and deterministic.",
    },
    "top_p": {
        "type": "number",
        "default": 1,
        "min": 0,
        "max": 1,
        "description": "Nucleus sampling parameter. Consider tokens with top_p probability mass.",
    },
    "max_tokens": {
        "type": "integer",
        "default": None,
        "min": 1,
        "description": "The maximum number of tokens to generate in the completion.",
    },
    "max_completion_tokens": {
        "type": "integer",
        "default": None,
        "min": 1,
        "description": "Upper bound for the number of tokens that can be generated for a completion.",
    },
    "n": {
        "type": "integer",
        "default": 1,
        "min": 1,
        "max": 128,
        "description": "How many completion choices to generate for each prompt.",
    },
    "seed": {
        "type": "integer",
        "default": None,
        "description": "Seed for deterministic sampling. Same seed and parameters should return same result.",
    },

    # --- Response formatting -----------------------------------------------
    "stream": {
        "type": "boolean",
        "default": False,
        "description": "Whether to stream partial message deltas.",
    },
    "stop": {
        "type": "array",
        "default": None,
        "description": "Up to 4 sequences where the API will stop generating further tokens.",
    },
    "response_format": {
        "type": "object",
        "default": None,
        "description": "Specify the format that the model must output (e.g., JSON).",
    },

    # --- Model behavior / tool use -----------------------------------------
    "tools": {
        "type": "array",
        "default": None,
        "description": "A list of tools the model may call.",
    },
    "tool_choice": {
        # Accepts either a mode string ("auto", "none", ...) or a tool-spec object.
        "type": "string|object",
        "default": "auto",
        "description": "Controls which function is called by the model.",
    },
    "parallel_tool_calls": {
        "type": "boolean",
        "default": True,
        "description": "Whether to enable parallel function calling during tool use.",
    },

    # --- Penalty / bias parameters -----------------------------------------
    "presence_penalty": {
        "type": "number",
        "default": 0,
        "min": -2,
        "max": 2,
        "description": "Penalize new tokens based on whether they appear in the text so far.",
    },
    "frequency_penalty": {
        "type": "number",
        "default": 0,
        "min": -2,
        "max": 2,
        "description": "Penalize new tokens based on their frequency in the text so far.",
    },
    "logit_bias": {
        "type": "object",
        "default": None,
        "description": "Modify the likelihood of specified tokens appearing in the completion.",
    },

    # --- Logging / debugging -----------------------------------------------
    "logprobs": {
        "type": "boolean",
        "default": False,
        "description": "Whether to return log probabilities of the output tokens.",
    },
    "top_logprobs": {
        "type": "integer",
        "default": None,
        "min": 0,
        "max": 5,
        "description": "Number of most likely tokens to return at each token position.",
    },
    "user": {
        "type": "string",
        "default": None,
        "description": "A unique identifier representing your end-user.",
    },

    # --- Additional technical parameters -----------------------------------
    "timeout": {
        "type": "integer",
        "default": None,
        "min": 1,
        "description": "Request timeout in seconds.",
    },
    "top_k": {
        "type": "integer",
        "default": None,
        "min": 1,
        "description": "Limit the next token selection to the K most probable tokens.",
    },
}
130+
131+
def get_parameter_schema(param_name: str) -> dict:
    """Return the schema dict for *param_name*.

    Falls back to a generic string-typed placeholder schema when the
    parameter has no entry in ``PARAMETER_SCHEMAS``.
    """
    try:
        return PARAMETER_SCHEMAS[param_name]
    except KeyError:
        # Unknown parameter: synthesize a permissive placeholder so callers
        # always receive a schema-shaped dict.
        return {
            "type": "string",
            "default": None,
            "description": f"Parameter {param_name} (schema not defined)",
        }
138+
139+
def get_parameters_with_schemas(param_names: list) -> dict:
    """Map each name in *param_names* to its parameter schema.

    Insertion order of the result follows the order of *param_names*.
    """
    schemas = {}
    for name in param_names:
        schemas[name] = get_parameter_schema(name)
    return schemas

packages/jupyter-ai/jupyter_ai/model_providers/parameters_rest_api.py

Lines changed: 12 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,7 @@
33
import json
44

55
from litellm.litellm_core_utils.get_supported_openai_params import get_supported_openai_params
6+
from .parameter_schemas import get_parameters_with_schemas
67

78
class ModelParametersRestAPI(BaseAPIHandler):
89
"""
@@ -31,20 +32,24 @@ def get(self):
3132

3233
if model:
3334
try:
34-
parameters = get_supported_openai_params(
35+
parameter_names = get_supported_openai_params(
3536
model=model,
3637
custom_llm_provider=provider
3738
)
38-
if not parameters:
39-
parameters = common_params
39+
if not parameter_names:
40+
parameter_names = common_params
4041
except Exception:
41-
parameters = common_params
42+
parameter_names = common_params
4243
else:
43-
parameters = common_params
44+
parameter_names = common_params
45+
46+
# Get parameter schemas with types, defaults, and descriptions
47+
parameters_with_schemas = get_parameters_with_schemas(parameter_names)
4448

4549
response = {
46-
"parameters": parameters,
47-
"count": len(parameters)
50+
"parameters": parameters_with_schemas,
51+
"parameter_names": parameter_names,
52+
"count": len(parameter_names)
4853
}
4954

5055
self.set_status(200)

0 commit comments

Comments (0)