Commit 7ee90f9

Refactor model cache breakpoint logic to use model info in Unbound (#4137)
Co-authored-by: Pugazhendhi <[email protected]>
1 parent 0f22eab commit 7ee90f9

File tree: 1 file changed (+8 additions, -3 deletions)


src/api/providers/unbound.ts

Lines changed: 8 additions & 3 deletions
@@ -5,7 +5,8 @@ import { ApiHandlerOptions, unboundDefaultModelId, unboundDefaultModelInfo } fro
 
 import { ApiStream, ApiStreamUsageChunk } from "../transform/stream"
 import { convertToOpenAiMessages } from "../transform/openai-format"
-import { addCacheBreakpoints } from "../transform/caching/anthropic"
+import { addCacheBreakpoints as addAnthropicCacheBreakpoints } from "../transform/caching/anthropic"
+import { addCacheBreakpoints as addGeminiCacheBreakpoints } from "../transform/caching/gemini"
 
 import type { SingleCompletionHandler, ApiHandlerCreateMessageMetadata } from "../index"
 import { RouterProvider } from "./router-provider"
@@ -44,8 +45,12 @@ export class UnboundHandler extends RouterProvider implements SingleCompletionHa
 			...convertToOpenAiMessages(messages),
 		]
 
-		if (modelId.startsWith("anthropic/claude-3")) {
-			addCacheBreakpoints(systemPrompt, openAiMessages)
+		if (info.supportsPromptCache) {
+			if (modelId.startsWith("google/")) {
+				addGeminiCacheBreakpoints(systemPrompt, openAiMessages)
+			} else if (modelId.startsWith("anthropic/")) {
+				addAnthropicCacheBreakpoints(systemPrompt, openAiMessages)
+			}
 		}
 
 		// Required by Anthropic; other providers default to max tokens allowed.
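
For context, a minimal standalone TypeScript sketch of the new dispatch follows. The applyCacheBreakpoints helper name, the ModelInfo/Message shapes, and the declare stubs are assumptions for illustration only; the supportsPromptCache gate and the provider-prefix branching are taken from the diff above.

// Hypothetical helper illustrating the refactored logic; not part of the commit.
type ModelInfo = { supportsPromptCache?: boolean } // assumed minimal shape
type Message = { role: string; content: unknown } // stand-in for OpenAI chat messages

// Stand-ins for the transforms imported in the diff.
declare function addGeminiCacheBreakpoints(systemPrompt: string, messages: Message[]): void
declare function addAnthropicCacheBreakpoints(systemPrompt: string, messages: Message[]): void

function applyCacheBreakpoints(
	modelId: string,
	info: ModelInfo,
	systemPrompt: string,
	openAiMessages: Message[],
): void {
	// Only add breakpoints when the model reports prompt-cache support,
	// then pick the provider-specific transform from the model id prefix.
	if (!info.supportsPromptCache) return

	if (modelId.startsWith("google/")) {
		addGeminiCacheBreakpoints(systemPrompt, openAiMessages)
	} else if (modelId.startsWith("anthropic/")) {
		addAnthropicCacheBreakpoints(systemPrompt, openAiMessages)
	}
}

Gating on info.supportsPromptCache rather than the hard-coded "anthropic/claude-3" prefix means newly cache-capable models only need their model info updated, not another string check.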
