|
| 1 | +import * as path from "path" |
| 2 | +import fs from "fs/promises" |
| 3 | + |
| 4 | +import NodeCache from "node-cache" |
| 5 | +import sanitize from "sanitize-filename" |
| 6 | + |
| 7 | +import { ContextProxy } from "../../../core/config/ContextProxy" |
| 8 | +import { getCacheDirectoryPath } from "../../../shared/storagePathManager" |
| 9 | +import { RouterName, ModelRecord } from "../../../shared/api" |
| 10 | +import { fileExistsAtPath } from "../../../utils/fs" |
| 11 | + |
| 12 | +import { getOpenRouterModelEndpoints } from "./openrouter" |
| 13 | + |
// In-memory cache for model endpoint records: entries live for 5 minutes
// (stdTTL) and expired keys are swept every 5 minutes (checkperiod).
const memoryCache = new NodeCache({ stdTTL: 5 * 60, checkperiod: 5 * 60 })
| 15 | + |
| 16 | +const getCacheKey = (router: RouterName, modelId: string) => sanitize(`${router}_${modelId}`) |
| 17 | + |
| 18 | +async function writeModelEndpoints(key: string, data: ModelRecord) { |
| 19 | + const filename = `${key}_endpoints.json` |
| 20 | + const cacheDir = await getCacheDirectoryPath(ContextProxy.instance.globalStorageUri.fsPath) |
| 21 | + await fs.writeFile(path.join(cacheDir, filename), JSON.stringify(data, null, 2)) |
| 22 | +} |
| 23 | + |
| 24 | +async function readModelEndpoints(key: string): Promise<ModelRecord | undefined> { |
| 25 | + const filename = `${key}_endpoints.json` |
| 26 | + const cacheDir = await getCacheDirectoryPath(ContextProxy.instance.globalStorageUri.fsPath) |
| 27 | + const filePath = path.join(cacheDir, filename) |
| 28 | + const exists = await fileExistsAtPath(filePath) |
| 29 | + return exists ? JSON.parse(await fs.readFile(filePath, "utf8")) : undefined |
| 30 | +} |
| 31 | + |
| 32 | +export const getModelEndpoints = async (router: RouterName, modelId?: string): Promise<ModelRecord> => { |
| 33 | + // OpenRouter is the only provider that supports model endpoints, but you |
| 34 | + // can see how we'd extend this to other providers in the future. |
| 35 | + if (router !== "openrouter" || !modelId) { |
| 36 | + return {} |
| 37 | + } |
| 38 | + |
| 39 | + const key = getCacheKey(router, modelId) |
| 40 | + let modelProviders = memoryCache.get<ModelRecord>(key) |
| 41 | + |
| 42 | + if (modelProviders) { |
| 43 | + // console.log(`[getModelProviders] NodeCache hit for ${key} -> ${Object.keys(modelProviders).length}`) |
| 44 | + return modelProviders |
| 45 | + } |
| 46 | + |
| 47 | + modelProviders = await getOpenRouterModelEndpoints(modelId) |
| 48 | + |
| 49 | + if (Object.keys(modelProviders).length > 0) { |
| 50 | + // console.log(`[getModelProviders] API fetch for ${key} -> ${Object.keys(modelProviders).length}`) |
| 51 | + memoryCache.set(key, modelProviders) |
| 52 | + |
| 53 | + try { |
| 54 | + await writeModelEndpoints(key, modelProviders) |
| 55 | + // console.log(`[getModelProviders] wrote ${key} endpoints to file cache`) |
| 56 | + } catch (error) { |
| 57 | + console.error(`[getModelProviders] error writing ${key} endpoints to file cache`, error) |
| 58 | + } |
| 59 | + |
| 60 | + return modelProviders |
| 61 | + } |
| 62 | + |
| 63 | + try { |
| 64 | + modelProviders = await readModelEndpoints(router) |
| 65 | + // console.log(`[getModelProviders] read ${key} endpoints from file cache`) |
| 66 | + } catch (error) { |
| 67 | + console.error(`[getModelProviders] error reading ${key} endpoints from file cache`, error) |
| 68 | + } |
| 69 | + |
| 70 | + return modelProviders ?? {} |
| 71 | +} |
| 72 | + |
| 73 | +export const flushModelProviders = async (router: RouterName, modelId: string) => |
| 74 | + memoryCache.del(getCacheKey(router, modelId)) |
0 commit comments