
Commit af616ac

feat: fetch latest models from API
1 parent 4118f57 · commit af616ac

2 files changed: 78 additions & 17 deletions

py-src/data_formulator/agents/client_utils.py

Lines changed: 13 additions & 7 deletions
@@ -16,9 +16,11 @@ def __init__(self, endpoint, model, api_key=None, api_base=None, api_version=No
         # other params, including temperature, max_completion_tokens, api_base, api_version
         self.params = {
             "temperature": 0.7,
-            "max_completion_tokens": 1200,
         }
 
+        if not (model == "o3-mini" or model == "o1"):
+            self.params["max_completion_tokens"] = 1200
+
         if api_key is not None and api_key != "":
             self.params["api_key"] = api_key
         if api_base is not None and api_base != "":

@@ -67,12 +69,16 @@ def get_completion(self, messages):
                 timeout=120
             )
 
-            return client.chat.completions.create(
-                model=self.model,
-                messages=messages,
-                temperature=self.params["temperature"],
-                max_tokens=self.params["max_completion_tokens"],
-            )
+            completion_params = {
+                "model": self.model,
+                "messages": messages,
+            }
+
+            if not (self.model == "o3-mini" or self.model == "o1"):
+                completion_params["temperature"] = self.params["temperature"]
+                completion_params["max_tokens"] = self.params["max_completion_tokens"]
+
+            return client.chat.completions.create(**completion_params)
         else:
             return litellm.completion(
                 model=self.model,

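The client_utils.py change routes every argument to `chat.completions.create` through a single dict, so sampling parameters can be skipped for OpenAI's reasoning models (`o1`, `o3-mini`), which reject a non-default `temperature` and the legacy `max_tokens` field. Below is a minimal sketch of the same pattern, assuming the standard `openai` Python client; the model set and default values are illustrative, not the repository's configuration.

```python
# Sketch only: conditionally build the completion kwargs, then unpack them.
# REASONING_MODELS is an illustrative set, not a list maintained by the repo.
from openai import OpenAI

REASONING_MODELS = {"o1", "o3-mini"}

def get_completion(client: OpenAI, model: str, messages: list[dict]):
    params = {"model": model, "messages": messages}
    if model not in REASONING_MODELS:
        # Sampling controls are only attached for conventional chat models.
        params["temperature"] = 0.7
        params["max_tokens"] = 1200
    return client.chat.completions.create(**params)
```
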
src/views/ModelSelectionDialog.tsx

Lines changed: 65 additions & 10 deletions
@@ -76,6 +76,14 @@ export const ModelSelectionButton: React.FC<{}> = ({ }) => {
     const [modelDialogOpen, setModelDialogOpen] = useState<boolean>(false);
     const [showKeys, setShowKeys] = useState<boolean>(false);
     const [tempSelectedModelId, setTempSelectedModeId] = useState<string | undefined >(selectedModelId);
+    const [providerModelOptions, setProviderModelOptions] = useState<{[key: string]: string[]}>({
+        'openai': [],
+        'azure': [],
+        'anthropic': [],
+        'gemini': [],
+        'ollama': []
+    });
+    const [isLoadingModelOptions, setIsLoadingModelOptions] = useState<boolean>(false);
 
     let updateModelStatus = (model: ModelConfig, status: 'ok' | 'error' | 'testing' | 'unknown', message: string) => {
         dispatch(dfActions.updateModelStatus({id: model.id, status, message}));
@@ -90,23 +98,60 @@ export const ModelSelectionButton: React.FC<{}> = ({ }) => {
     const [newApiBase, setNewApiBase] = useState<string | undefined>(undefined);
     const [newApiVersion, setNewApiVersion] = useState<string | undefined>(undefined);
 
+    // Fetch available models from the API
+    useEffect(() => {
+        const fetchModelOptions = async () => {
+            setIsLoadingModelOptions(true);
+            try {
+                const response = await fetch(getUrls().CHECK_AVAILABLE_MODELS);
+                const data = await response.json();
+
+                // Group models by provider
+                const modelsByProvider: {[key: string]: string[]} = {
+                    'openai': [],
+                    'azure': [],
+                    'anthropic': [],
+                    'gemini': [],
+                    'ollama': []
+                };
+
+                data.forEach((modelConfig: any) => {
+                    const provider = modelConfig.endpoint;
+                    const model = modelConfig.model;
+
+                    if (provider && model && !modelsByProvider[provider].includes(model)) {
+                        modelsByProvider[provider].push(model);
+                    }
+                });
+
+                setProviderModelOptions(modelsByProvider);
+            } catch (error) {
+                console.error("Failed to fetch model options:", error);
+            } finally {
+                setIsLoadingModelOptions(false);
+            }
+        };
+
+        fetchModelOptions();
+    }, []);
+
     useEffect(() => {
         if (newEndpoint == 'ollama') {
             if (!newApiBase) {
                 setNewApiBase('http://localhost:11434');
             }
         }
         if (newEndpoint == "openai") {
-            if (!newModel) {
-                setNewModel('gpt-4o');
+            if (!newModel && providerModelOptions.openai.length > 0) {
+                setNewModel(providerModelOptions.openai[0]);
             }
         }
         if (newEndpoint == "anthropic") {
-            if (!newModel) {
-                setNewModel('claude-3-5-sonnet-20241022');
+            if (!newModel && providerModelOptions.anthropic.length > 0) {
+                setNewModel(providerModelOptions.anthropic[0]);
             }
         }
-    }, [newEndpoint]);
+    }, [newEndpoint, providerModelOptions]);
 
     let modelExists = models.some(m =>
         m.endpoint == newEndpoint && m.model == newModel && m.api_base == newApiBase
@@ -150,8 +195,8 @@ export const ModelSelectionButton: React.FC<{}> = ({ }) => {
                         value={newEndpoint}
                         onChange={(event: any, newValue: string | null) => {
                             setNewEndpoint(newValue || "");
-                            if (newModel == "" && newValue == "openai") {
-                                setNewModel("gpt-4o");
+                            if (newModel == "" && newValue == "openai" && providerModelOptions.openai.length > 0) {
+                                setNewModel(providerModelOptions.openai[0]);
                             }
                             if (!newApiVersion && newValue == "azure") {
                                 setNewApiVersion("2024-02-15");
@@ -202,7 +247,8 @@ export const ModelSelectionButton: React.FC<{}> = ({ }) => {
                         freeSolo
                         onChange={(event: any, newValue: string | null) => { setNewModel(newValue || ""); }}
                         value={newModel}
-                        options={['gpt-4o-mini', 'gpt-4o', 'claude-3-5-sonnet-20241022']}
+                        options={newEndpoint && providerModelOptions[newEndpoint] ? providerModelOptions[newEndpoint] : []}
+                        loading={isLoadingModelOptions}
                         renderOption={(props, option) => {
                             return <Typography {...props} onClick={()=>{ setNewModel(option); }} sx={{fontSize: "small"}}>{option}</Typography>
                         }}
@@ -211,7 +257,16 @@ export const ModelSelectionButton: React.FC<{}> = ({ }) => {
                                 error={newEndpoint != "" && !newModel}
                                 {...params}
                                 placeholder="model name"
-                                InputProps={{ ...params.InputProps, style: { fontSize: "0.875rem" } }}
+                                InputProps={{
+                                    ...params.InputProps,
+                                    style: { fontSize: "0.875rem" },
+                                    endAdornment: (
+                                        <>
+                                            {isLoadingModelOptions ? <CircularProgress color="inherit" size={20} /> : null}
+                                            {params.InputProps.endAdornment}
+                                        </>
+                                    ),
+                                }}
                                 inputProps={{
                                     ...params.inputProps,
                                     'aria-label': 'Select or enter a model',
@@ -226,7 +281,7 @@ export const ModelSelectionButton: React.FC<{}> = ({ }) => {
                         PaperComponent={({ children }) => (
                             <Paper>
                                 <Typography sx={{ p: 1, color: 'gray', fontStyle: 'italic', fontSize: 'small' }}>
-                                    examples
+                                    {isLoadingModelOptions ? 'Loading models...' : 'examples'}
                                 </Typography>
                                 {children}
                             </Paper>

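The new `useEffect` assumes `CHECK_AVAILABLE_MODELS` returns a JSON array of model configs, each carrying an `endpoint` (provider key) and a `model` name, which it then groups per provider. The following is a small Python sketch of that assumed payload and the grouping it drives; the concrete model names are placeholders borrowed from the defaults this commit removes.

```python
# Illustrative only: the response shape the frontend is assumed to receive,
# plus the same per-provider grouping performed in the useEffect above.
import json

sample_response = [
    {"endpoint": "openai", "model": "gpt-4o"},
    {"endpoint": "openai", "model": "gpt-4o-mini"},
    {"endpoint": "anthropic", "model": "claude-3-5-sonnet-20241022"},
]

models_by_provider: dict[str, list[str]] = {}
for config in sample_response:
    provider, model = config.get("endpoint"), config.get("model")
    if provider and model and model not in models_by_provider.setdefault(provider, []):
        models_by_provider[provider].append(model)

print(json.dumps(models_by_provider, indent=2))
# {"openai": ["gpt-4o", "gpt-4o-mini"], "anthropic": ["claude-3-5-sonnet-20241022"]}
```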