@@ -58,7 +58,6 @@ export interface ApiHandlerOptions {
 	azureApiVersion?: string
 	openRouterUseMiddleOutTransform?: boolean
 	openAiStreamingEnabled?: boolean
-	setAzureApiVersion?: boolean
 	deepSeekBaseUrl?: string
 	deepSeekApiKey?: string
 	includeMaxTokens?: boolean
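With the `setAzureApiVersion` flag gone, the `azureApiVersion` string is presumably the single source of truth for that setting. A minimal usage sketch under that assumption, with all fields optional as shown in the hunk; the import path and values are illustrative, not taken from the repository:

```typescript
import type { ApiHandlerOptions } from "./api" // hypothetical import path

// Illustrative options object: the Azure API version is now conveyed solely by
// azureApiVersion; there is no separate boolean toggle to set alongside it.
const options: ApiHandlerOptions = {
	azureApiVersion: "2024-02-15-preview", // example value only
	openAiStreamingEnabled: true,
	includeMaxTokens: true,
}
```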
@@ -83,12 +82,18 @@ import { GlobalStateKey } from "./globalState"
 
 // Define API configuration keys for dynamic object building
 export const API_CONFIG_KEYS: GlobalStateKey[] = [
-	"apiProvider",
 	"apiModelId",
+	"anthropicBaseUrl",
+	"vsCodeLmModelSelector",
 	"glamaModelId",
 	"glamaModelInfo",
+	"openRouterModelId",
+	"openRouterModelInfo",
+	"openRouterBaseUrl",
 	"awsRegion",
 	"awsUseCrossRegionInference",
+	// "awsUsePromptCache", // NOT exist on GlobalStateKey
+	// "awspromptCacheId", // NOT exist on GlobalStateKey
 	"awsProfile",
 	"awsUseProfile",
 	"vertexProjectId",
@@ -101,24 +106,21 @@ export const API_CONFIG_KEYS: GlobalStateKey[] = [
101106 "ollamaBaseUrl" ,
102107 "lmStudioModelId" ,
103108 "lmStudioBaseUrl" ,
104- "anthropicBaseUrl " ,
105- "modelMaxThinkingTokens " ,
106- "mistralCodestralUrl" ,
109+ "lmStudioDraftModelId " ,
110+ "lmStudioSpeculativeDecodingEnabled " ,
111+ "mistralCodestralUrl" , // New option for Codestral URL
107112 "azureApiVersion" ,
108- "openAiStreamingEnabled" ,
109- "openRouterModelId" ,
110- "openRouterModelInfo" ,
111- "openRouterBaseUrl" ,
112113 "openRouterUseMiddleOutTransform" ,
113- "vsCodeLmModelSelector" ,
114+ "openAiStreamingEnabled" ,
115+ // "deepSeekBaseUrl", // not exist on GlobalStateKey
116+ // "includeMaxTokens", // not exist on GlobalStateKey
114117 "unboundModelId" ,
115118 "unboundModelInfo" ,
116119 "requestyModelId" ,
117120 "requestyModelInfo" ,
118121 "modelTemperature" ,
119122 "modelMaxTokens" ,
120- "lmStudioSpeculativeDecodingEnabled" ,
121- "lmStudioDraftModelId"
123+ "modelMaxThinkingTokens" ,
122124]
123125
124126// Models
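The diff's own comment describes these keys as being used "for dynamic object building". A minimal sketch of what such a builder could look like, assuming a generic `getGlobalState` accessor; the helper name, its signature, and the shortened key list below are assumptions for illustration, not the project's actual API:

```typescript
// Stand-in for the project's GlobalStateKey union type.
type GlobalStateKey = string

// Shortened illustrative key list; the real array is the one in the diff above.
const API_CONFIG_KEYS: GlobalStateKey[] = ["apiModelId", "azureApiVersion", "modelMaxTokens"]

// Hypothetical builder: copy each configured key's value out of global state
// into a plain object, skipping keys that were never set.
function buildApiConfiguration(
	getGlobalState: (key: GlobalStateKey) => unknown,
): Record<string, unknown> {
	const config: Record<string, unknown> = {}
	for (const key of API_CONFIG_KEYS) {
		const value = getGlobalState(key)
		if (value !== undefined) {
			config[key] = value
		}
	}
	return config
}

// Example call with an in-memory stand-in for VS Code's globalState store.
const fakeState = new Map<GlobalStateKey, unknown>([["apiModelId", "some-model-id"]])
const apiConfig = buildApiConfiguration((key) => fakeState.get(key))
```

Because the object is keyed off the same list that enumerates the global-state keys, adding or removing a key in `API_CONFIG_KEYS` (as this commit does) changes what gets copied without touching the builder itself.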