55 SILICONFLOW_BASE_URL ,
66 SiliconFlow ,
77 REQUEST_TIMEOUT_MS_FOR_THINKING ,
8+ DEFAULT_MODELS ,
89} from "@/app/constant" ;
910import {
1011 useAccessStore ,
@@ -27,10 +28,19 @@ import {
2728 getMessageTextContentWithoutThinking ,
2829} from "@/app/utils" ;
2930import { RequestPayload } from "./openai" ;
31+
3032import { fetch } from "@/app/utils/stream" ;
/**
 * Response shape of SiliconFlow's model-listing endpoint
 * (`SiliconFlow.ListModelPath`), as consumed by `SiliconflowApi.models()`.
 *
 * NOTE(review): field meanings are assumed from the OpenAI-compatible
 * `GET /models` convention — confirm against SiliconFlow's API docs.
 */
export interface SiliconFlowListModelResponse {
  // presumably always "list" for this endpoint — verify
  object: string;
  data: Array<{
    // model identifier; used as the chat model's display/name key in models()
    id: string;
    object: string;
    root: string;
  }>;
}
3141
3242export class SiliconflowApi implements LLMApi {
33- private disableListModels = true ;
43+ private disableListModels = false ;
3444
3545 path ( path : string ) : string {
3646 const accessStore = useAccessStore . getState ( ) ;
@@ -238,6 +248,36 @@ export class SiliconflowApi implements LLMApi {
238248 }
239249
240250 async models ( ) : Promise < LLMModel [ ] > {
241- return [ ] ;
251+ if ( this . disableListModels ) {
252+ return DEFAULT_MODELS . slice ( ) ;
253+ }
254+
255+ const res = await fetch ( this . path ( SiliconFlow . ListModelPath ) , {
256+ method : "GET" ,
257+ headers : {
258+ ...getHeaders ( ) ,
259+ } ,
260+ } ) ;
261+
262+ const resJson = ( await res . json ( ) ) as SiliconFlowListModelResponse ;
263+ const chatModels = resJson . data ;
264+ console . log ( "[Models]" , chatModels ) ;
265+
266+ if ( ! chatModels ) {
267+ return [ ] ;
268+ }
269+
270+ let seq = 1000 ; //同 Constant.ts 中的排序保持一致
271+ return chatModels . map ( ( m ) => ( {
272+ name : m . id ,
273+ available : true ,
274+ sorted : seq ++ ,
275+ provider : {
276+ id : "siliconflow" ,
277+ providerName : "SiliconFlow" ,
278+ providerType : "siliconflow" ,
279+ sorted : 14 ,
280+ } ,
281+ } ) ) ;
242282 }
243283}
0 commit comments