@@ -5,21 +5,15 @@ import NodeCache from "node-cache"
55
66import { ContextProxy } from "../../../core/config/ContextProxy"
77import { getCacheDirectoryPath } from "../../../shared/storagePathManager"
8+ import { RouterName , ModelRecord } from "../../../shared/api"
89import { fileExistsAtPath } from "../../../utils/fs"
9- import type { ModelInfo } from "../../../schemas"
10+
1011import { getOpenRouterModels } from "./openrouter"
1112import { getRequestyModels } from "./requesty"
1213import { getGlamaModels } from "./glama"
1314import { getUnboundModels } from "./unbound"
1415
15- export type RouterName = "openrouter" | "requesty" | "glama" | "unbound"
16-
17- export type ModelRecord = Record < string , ModelInfo >
18-
19- const memoryCache = new NodeCache ( {
20- stdTTL : 5 * 60 ,
21- checkperiod : 5 * 60 ,
22- } )
16+ const memoryCache = new NodeCache ( { stdTTL : 5 * 60 , checkperiod : 5 * 60 } )
2317
2418async function writeModels ( router : RouterName , data : ModelRecord ) {
2519 const filename = `${ router } _models.json`
@@ -48,6 +42,7 @@ export const getModels = async (router: RouterName): Promise<ModelRecord> => {
4842 let models = memoryCache . get < ModelRecord > ( router )
4943
5044 if ( models ) {
45+ // console.log(`[getModels] NodeCache hit for ${router} -> ${Object.keys(models).length}`)
5146 return models
5247 }
5348
@@ -67,17 +62,20 @@ export const getModels = async (router: RouterName): Promise<ModelRecord> => {
6762 }
6863
6964 if ( Object . keys ( models ) . length > 0 ) {
65+ // console.log(`[getModels] API fetch for ${router} -> ${Object.keys(models).length}`)
7066 memoryCache . set ( router , models )
7167
7268 try {
7369 await writeModels ( router , models )
70+ // console.log(`[getModels] wrote ${router} models to file cache`)
7471 } catch ( error ) { }
7572
7673 return models
7774 }
7875
7976 try {
8077 models = await readModels ( router )
78+ // console.log(`[getModels] read ${router} models from file cache`)
8179 } catch ( error ) { }
8280
8381 return models ?? { }