@@ -18,13 +18,13 @@ export const providerNames = [
1818 "lmstudio" ,
1919 "gemini" ,
2020 "openai-native" ,
21- "xai" ,
2221 "mistral" ,
2322 "deepseek" ,
2423 "unbound" ,
2524 "requesty" ,
2625 "human-relay" ,
2726 "fake-ai" ,
27+ "xai" ,
2828] as const
2929
3030export const providerNamesSchema = z . enum ( providerNames )
@@ -41,19 +41,6 @@ export const toolGroupsSchema = z.enum(toolGroups)
 
 export type ToolGroup = z.infer<typeof toolGroupsSchema>
 
-/**
- * CheckpointStorage
- */
-
-export const checkpointStorages = ["task", "workspace"] as const
-
-export const checkpointStoragesSchema = z.enum(checkpointStorages)
-
-export type CheckpointStorage = z.infer<typeof checkpointStoragesSchema>
-
-export const isCheckpointStorage = (value: string): value is CheckpointStorage =>
-	checkpointStorages.includes(value as CheckpointStorage)
-
 /**
  * Language
  */
@@ -93,23 +80,49 @@ export const telemetrySettingsSchema = z.enum(telemetrySettings)
 
 export type TelemetrySetting = z.infer<typeof telemetrySettingsSchema>
 
+/**
+ * ReasoningEffort
+ */
+
+export const reasoningEfforts = ["low", "medium", "high"] as const
+
+export const reasoningEffortsSchema = z.enum(reasoningEfforts)
+
+export type ReasoningEffort = z.infer<typeof reasoningEffortsSchema>
+
 /**
  * ModelInfo
  */
 
 export const modelInfoSchema = z.object({
 	maxTokens: z.number().nullish(),
+	maxThinkingTokens: z.number().nullish(),
 	contextWindow: z.number(),
 	supportsImages: z.boolean().optional(),
 	supportsComputerUse: z.boolean().optional(),
 	supportsPromptCache: z.boolean(),
+	isPromptCacheOptional: z.boolean().optional(),
 	inputPrice: z.number().optional(),
 	outputPrice: z.number().optional(),
 	cacheWritesPrice: z.number().optional(),
 	cacheReadsPrice: z.number().optional(),
 	description: z.string().optional(),
-	reasoningEffort: z.enum(["low", "medium", "high"]).optional(),
+	reasoningEffort: reasoningEffortsSchema.optional(),
 	thinking: z.boolean().optional(),
+	minTokensPerCachePoint: z.number().optional(),
+	maxCachePoints: z.number().optional(),
+	cachableFields: z.array(z.string()).optional(),
+	tiers: z
+		.array(
+			z.object({
+				contextWindow: z.number(),
+				inputPrice: z.number().optional(),
+				outputPrice: z.number().optional(),
+				cacheWritesPrice: z.number().optional(),
+				cacheReadsPrice: z.number().optional(),
+			}),
+		)
+		.optional(),
 })
 
 export type ModelInfo = z.infer<typeof modelInfoSchema>
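Aside (sketch only, not part of this commit): a minimal example of what the extended modelInfoSchema accepts after this change — a shared reasoningEffortsSchema value plus the new tiers array. The import path and the sample numbers are assumptions for illustration.

import { modelInfoSchema } from "./index" // import path is an assumption

// Hypothetical model entry: tiers describe pricing per context-window bracket.
const info = modelInfoSchema.parse({
	maxTokens: 8192,
	contextWindow: 1_000_000,
	supportsPromptCache: true,
	reasoningEffort: "medium", // must be one of reasoningEfforts
	tiers: [
		{ contextWindow: 128_000, inputPrice: 0.1, outputPrice: 0.4 },
		{ contextWindow: 1_000_000, inputPrice: 0.2, outputPrice: 0.8 },
	],
})

console.log(info.tiers?.length) // 2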
@@ -141,6 +154,7 @@ export const historyItemSchema = z.object({
 	cacheReads: z.number().optional(),
 	totalCost: z.number(),
 	size: z.number().optional(),
+	workspace: z.string().optional(),
 })
 
 export type HistoryItem = z.infer<typeof historyItemSchema>
@@ -268,6 +282,29 @@ export const customSupportPromptsSchema = z.record(z.string(), z.string().option
 
 export type CustomSupportPrompts = z.infer<typeof customSupportPromptsSchema>
 
+/**
+ * CommandExecutionStatus
+ */
+
+export const commandExecutionStatusSchema = z.discriminatedUnion("status", [
+	z.object({
+		executionId: z.string(),
+		status: z.literal("running"),
+		pid: z.number().optional(),
+	}),
+	z.object({
+		executionId: z.string(),
+		status: z.literal("exited"),
+		exitCode: z.number().optional(),
+	}),
+	z.object({
+		executionId: z.string(),
+		status: z.literal("fallback"),
+	}),
+])
+
+export type CommandExecutionStatus = z.infer<typeof commandExecutionStatusSchema>
+
 /**
  * ExperimentId
  */
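Aside (sketch only, not from this commit): how a consumer might validate an incoming command-execution message against the new discriminated union; narrowing on status then types pid and exitCode correctly. The import path and sample payload are assumptions.

import { commandExecutionStatusSchema } from "./index" // import path is an assumption

const raw: unknown = { executionId: "cmd-1", status: "exited", exitCode: 0 } // sample payload
const parsed = commandExecutionStatusSchema.safeParse(raw)

if (parsed.success) {
	switch (parsed.data.status) {
		case "running":
			console.log(`running, pid=${parsed.data.pid ?? "unknown"}`)
			break
		case "exited":
			console.log(`exited with code ${parsed.data.exitCode ?? "unknown"}`)
			break
		case "fallback":
			console.log("fallback status received")
			break
	}
}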
@@ -330,12 +367,15 @@ export const providerSettingsSchema = z.object({
 	// OpenAI
 	openAiBaseUrl: z.string().optional(),
 	openAiApiKey: z.string().optional(),
+	openAiHostHeader: z.string().optional(),
+	openAiLegacyFormat: z.boolean().optional(),
 	openAiR1FormatEnabled: z.boolean().optional(),
 	openAiModelId: z.string().optional(),
-	openAiCustomModelInfo: modelInfoSchema.optional(),
+	openAiCustomModelInfo: modelInfoSchema.nullish(),
 	openAiUseAzure: z.boolean().optional(),
 	azureApiVersion: z.string().optional(),
 	openAiStreamingEnabled: z.boolean().optional(),
+	enableReasoningEffort: z.boolean().optional(),
 	// Ollama
 	ollamaModelId: z.string().optional(),
 	ollamaBaseUrl: z.string().optional(),
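Aside on the .optional() → .nullish() change to openAiCustomModelInfo above: .optional() admits only undefined, while .nullish() also admits an explicit null (presumably so a cleared value persisted as null still validates). A standalone zod sketch of the difference:

import { z } from "zod"

const optionalOnly = z.object({ info: z.object({ contextWindow: z.number() }).optional() })
const nullishAlso = z.object({ info: z.object({ contextWindow: z.number() }).nullish() })

console.log(optionalOnly.safeParse({ info: null }).success) // false: null is rejected
console.log(nullishAlso.safeParse({ info: null }).success) // true: null and undefined both pass
console.log(optionalOnly.safeParse({}).success) // true: omitting the key is fine either way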
@@ -373,13 +413,18 @@ export const providerSettingsSchema = z.object({
 	// Requesty
 	requestyApiKey: z.string().optional(),
 	requestyModelId: z.string().optional(),
+	// X.AI (Grok)
+	xaiApiKey: z.string().optional(),
 	// Claude 3.7 Sonnet Thinking
-	modelMaxTokens: z.number().optional(), // Currently only used by Anthropic hybrid thinking models.
-	modelMaxThinkingTokens: z.number().optional(), // Currently only used by Anthropic hybrid thinking models.
+	modelMaxTokens: z.number().optional(),
+	modelMaxThinkingTokens: z.number().optional(),
 	// Generic
 	includeMaxTokens: z.boolean().optional(),
+	reasoningEffort: reasoningEffortsSchema.optional(),
+	promptCachingEnabled: z.boolean().optional(),
+	diffEnabled: z.boolean().optional(),
+	fuzzyMatchThreshold: z.number().optional(),
 	modelTemperature: z.number().nullish(),
-	reasoningEffort: z.enum(["low", "medium", "high"]).optional(),
 	rateLimitSeconds: z.number().optional(),
 	// Fake AI
 	fakeAi: z.unknown().optional(),
@@ -424,14 +469,15 @@ const providerSettingsRecord: ProviderSettingsRecord = {
 	// OpenAI
 	openAiBaseUrl: undefined,
 	openAiApiKey: undefined,
+	openAiHostHeader: undefined,
+	openAiLegacyFormat: undefined,
 	openAiR1FormatEnabled: undefined,
 	openAiModelId: undefined,
 	openAiCustomModelInfo: undefined,
 	openAiUseAzure: undefined,
 	azureApiVersion: undefined,
 	openAiStreamingEnabled: undefined,
-	// xAI
-	xaiApiKey: undefined,
+	enableReasoningEffort: undefined,
 	// Ollama
 	ollamaModelId: undefined,
 	ollamaBaseUrl: undefined,
@@ -464,11 +510,16 @@ const providerSettingsRecord: ProviderSettingsRecord = {
 	modelMaxThinkingTokens: undefined,
 	// Generic
 	includeMaxTokens: undefined,
-	modelTemperature: undefined,
 	reasoningEffort: undefined,
+	promptCachingEnabled: undefined,
+	diffEnabled: undefined,
+	fuzzyMatchThreshold: undefined,
+	modelTemperature: undefined,
 	rateLimitSeconds: undefined,
 	// Fake AI
 	fakeAi: undefined,
+	// X.AI (Grok)
+	xaiApiKey: undefined,
 }
 
 export const PROVIDER_SETTINGS_KEYS = Object.keys(providerSettingsRecord) as Keys<ProviderSettings>[]
@@ -506,9 +557,9 @@ export const globalSettingsSchema = z.object({
 	screenshotQuality: z.number().optional(),
 	remoteBrowserEnabled: z.boolean().optional(),
 	remoteBrowserHost: z.string().optional(),
+	cachedChromeHostUrl: z.string().optional(),
 
 	enableCheckpoints: z.boolean().optional(),
-	checkpointStorage: checkpointStoragesSchema.optional(),
 
 	ttsEnabled: z.boolean().optional(),
 	ttsSpeed: z.number().optional(),
@@ -522,13 +573,16 @@ export const globalSettingsSchema = z.object({
 
 	terminalOutputLineLimit: z.number().optional(),
 	terminalShellIntegrationTimeout: z.number().optional(),
+	terminalShellIntegrationDisabled: z.boolean().optional(),
 	terminalCommandDelay: z.number().optional(),
 	terminalPowershellCounter: z.boolean().optional(),
 	terminalZshClearEolMark: z.boolean().optional(),
 	terminalZshOhMy: z.boolean().optional(),
 	terminalZshP10k: z.boolean().optional(),
 	terminalZdotdir: z.boolean().optional(),
+	terminalCompressProgressBar: z.boolean().optional(),
 
+	rateLimitSeconds: z.number().optional(),
 	diffEnabled: z.boolean().optional(),
 	fuzzyMatchThreshold: z.number().optional(),
 	experiments: experimentsSchema.optional(),
@@ -546,6 +600,7 @@ export const globalSettingsSchema = z.object({
 	customModePrompts: customModePromptsSchema.optional(),
 	customSupportPrompts: customSupportPromptsSchema.optional(),
 	enhancementApiConfigId: z.string().optional(),
+	historyPreviewCollapsed: z.boolean().optional(),
 })
 
 export type GlobalSettings = z.infer<typeof globalSettingsSchema>
@@ -583,7 +638,6 @@ const globalSettingsRecord: GlobalSettingsRecord = {
 	remoteBrowserHost: undefined,
 
 	enableCheckpoints: undefined,
-	checkpointStorage: undefined,
 
 	ttsEnabled: undefined,
 	ttsSpeed: undefined,
@@ -597,13 +651,16 @@ const globalSettingsRecord: GlobalSettingsRecord = {
 
 	terminalOutputLineLimit: undefined,
 	terminalShellIntegrationTimeout: undefined,
+	terminalShellIntegrationDisabled: undefined,
 	terminalCommandDelay: undefined,
 	terminalPowershellCounter: undefined,
 	terminalZshClearEolMark: undefined,
 	terminalZshOhMy: undefined,
 	terminalZshP10k: undefined,
 	terminalZdotdir: undefined,
+	terminalCompressProgressBar: undefined,
 
+	rateLimitSeconds: undefined,
 	diffEnabled: undefined,
 	fuzzyMatchThreshold: undefined,
 	experiments: undefined,
@@ -621,6 +678,8 @@ const globalSettingsRecord: GlobalSettingsRecord = {
 	customModePrompts: undefined,
 	customSupportPrompts: undefined,
 	enhancementApiConfigId: undefined,
+	cachedChromeHostUrl: undefined,
+	historyPreviewCollapsed: undefined,
 }
 
 export const GLOBAL_SETTINGS_KEYS = Object.keys(globalSettingsRecord) as Keys<GlobalSettings>[]
@@ -654,6 +713,7 @@ export type SecretState = Pick<
654713 | "mistralApiKey"
655714 | "unboundApiKey"
656715 | "requestyApiKey"
716+ | "xaiApiKey"
657717>
658718
659719type SecretStateRecord = Record < Keys < SecretState > , undefined >
@@ -672,6 +732,7 @@ const secretStateRecord: SecretStateRecord = {
 	mistralApiKey: undefined,
 	unboundApiKey: undefined,
 	requestyApiKey: undefined,
+	xaiApiKey: undefined,
 }
 
 export const SECRET_STATE_KEYS = Object.keys(secretStateRecord) as Keys<SecretState>[]
@@ -749,6 +810,7 @@ export type ClineSay = z.infer<typeof clineSaySchema>
  */
 
 export const toolProgressStatusSchema = z.object({
+	id: z.string().optional(),
 	icon: z.string().optional(),
 	text: z.string().optional(),
 })
@@ -798,7 +860,6 @@ export const toolNames = [
798860 "execute_command" ,
799861 "read_file" ,
800862 "write_to_file" ,
801- "append_to_file" ,
802863 "apply_diff" ,
803864 "insert_content" ,
804865 "search_and_replace" ,