diff --git a/.changeset/thin-fans-deliver.md b/.changeset/thin-fans-deliver.md new file mode 100644 index 00000000000..5244b7d0d75 --- /dev/null +++ b/.changeset/thin-fans-deliver.md @@ -0,0 +1,5 @@ +--- +"roo-cline": patch +--- + +Add prompt caching to OpenAI-compatible custom model info diff --git a/webview-ui/src/components/settings/ApiOptions.tsx b/webview-ui/src/components/settings/ApiOptions.tsx index 656a6831cba..eb8634c198a 100644 --- a/webview-ui/src/components/settings/ApiOptions.tsx +++ b/webview-ui/src/components/settings/ApiOptions.tsx @@ -847,6 +847,29 @@ const ApiOptions = ({ +
+
+						{ + return { + ...(apiConfiguration?.openAiCustomModelInfo ?? openAiModelInfoSaneDefaults), + supportsPromptCache: checked, + } + })}> + Prompt Caching + +
+
+ Is this model capable of caching prompts? +
+
+
+ {apiConfiguration?.openAiCustomModelInfo?.supportsPromptCache && ( + <> +
+ = 0, + "border-vscode-error": apiConfiguration?.openAiCustomModelInfo?.cacheReadsPrice < 0, + })} + onChange={handleInputChange("openAiCustomModelInfo", (e) => { + const value = (e.target as HTMLInputElement).value + const parsed = parseFloat(value) + + return { + ...(apiConfiguration?.openAiCustomModelInfo ?? + openAiModelInfoSaneDefaults), + cacheReadsPrice: isNaN(parsed) ? 0 : parsed, + } + })} + placeholder="e.g. 0.0001" + className="w-full"> +
+ Cache Reads Price + +
+
+
+
+ = 0, + "border-vscode-error": apiConfiguration?.openAiCustomModelInfo?.cacheWritesPrice < 0, + })} + onChange={handleInputChange("openAiCustomModelInfo", (e) => { + const value = (e.target as HTMLInputElement).value + const parsed = parseFloat(value) + + return { + ...(apiConfiguration?.openAiCustomModelInfo ?? + openAiModelInfoSaneDefaults), + cacheWritesPrice: isNaN(parsed) ? 0 : parsed, + } + })} + placeholder="e.g. 0.00005" + className="w-full"> +
+ Cache Writes Price + +
+
+
+ + )} +