Skip to content

Commit c280949

Browse files
Anthropic: Bump it up! (RooCodeInc#1938)
* Updated Anthropic SDK and fixed types * adjust export markdown handling * adjust export markdown handling * added changeset
1 parent 87670a3 commit c280949

File tree

10 files changed

+48
-33
lines changed

10 files changed

+48
-33
lines changed
Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+	---
2+	"claude-dev": minor
3+	---
4+
5+	Update anthropic SDK to the latest version

package-lock.json

Lines changed: 13 additions & 11 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

package.json

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -237,8 +237,8 @@
237237
"typescript": "^5.4.5"
238238
},
239239
"dependencies": {
240-
"@anthropic-ai/bedrock-sdk": "^0.10.2",
241-
"@anthropic-ai/sdk": "^0.26.0",
240+
"@anthropic-ai/bedrock-sdk": "^0.12.4",
241+
"@anthropic-ai/sdk": "^0.37.0",
242242
"@anthropic-ai/vertex-sdk": "^0.4.1",
243243
"@google/generative-ai": "^0.18.0",
244244
"@mistralai/mistralai": "^1.5.0",

src/api/providers/anthropic.ts

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,7 @@ export class AnthropicHandler implements ApiHandler {
2020
@withRetry()
2121
async *createMessage(systemPrompt: string, messages: Anthropic.Messages.MessageParam[]): ApiStream {
2222
const model = this.getModel()
23-
let stream: AnthropicStream<Anthropic.Beta.PromptCaching.Messages.RawPromptCachingBetaMessageStreamEvent>
23+
let stream: AnthropicStream<Anthropic.RawMessageStreamEvent>
2424
const modelId = model.id
2525
switch (modelId) {
2626
// 'latest' alias does not support cache_control
@@ -38,7 +38,7 @@ export class AnthropicHandler implements ApiHandler {
3838
)
3939
const lastUserMsgIndex = userMsgIndices[userMsgIndices.length - 1] ?? -1
4040
const secondLastMsgUserIndex = userMsgIndices[userMsgIndices.length - 2] ?? -1
41-
stream = await this.client.beta.promptCaching.messages.create(
41+
stream = await this.client.messages.create(
4242
{
4343
model: modelId,
4444
max_tokens: model.info.maxTokens || 8192,

src/api/transform/gemini-format.ts

Lines changed: 4 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -11,16 +11,7 @@ import {
1111
TextPart,
1212
} from "@google/generative-ai"
1313

14-
export function convertAnthropicContentToGemini(
15-
content:
16-
| string
17-
| Array<
18-
| Anthropic.Messages.TextBlockParam
19-
| Anthropic.Messages.ImageBlockParam
20-
| Anthropic.Messages.ToolUseBlockParam
21-
| Anthropic.Messages.ToolResultBlockParam
22-
>,
23-
): Part[] {
14+
export function convertAnthropicContentToGemini(content: string | Anthropic.ContentBlockParam[]): Part[] {
2415
if (typeof content === "string") {
2516
return [{ text: content } as TextPart]
2617
}
@@ -133,7 +124,7 @@ export function convertGeminiResponseToAnthropic(response: EnhancedGenerateConte
133124
// Add the main text response
134125
const text = response.text()
135126
if (text) {
136-
content.push({ type: "text", text })
127+
content.push({ type: "text", text, citations: null })
137128
}
138129

139130
// Add function calls as tool_use blocks
@@ -183,6 +174,8 @@ export function convertGeminiResponseToAnthropic(response: EnhancedGenerateConte
183174
usage: {
184175
input_tokens: response.usageMetadata?.promptTokenCount ?? 0,
185176
output_tokens: response.usageMetadata?.candidatesTokenCount ?? 0,
177+
cache_creation_input_tokens: null,
178+
cache_read_input_tokens: null,
186179
},
187180
}
188181
}

src/api/transform/o1-format.ts

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -376,6 +376,7 @@ export function convertO1ResponseToAnthropicMessage(
376376
{
377377
type: "text",
378378
text: normalText,
379+
citations: null,
379380
},
380381
],
381382
model: completion.model,
@@ -396,6 +397,8 @@ export function convertO1ResponseToAnthropicMessage(
396397
usage: {
397398
input_tokens: completion.usage?.prompt_tokens || 0,
398399
output_tokens: completion.usage?.completion_tokens || 0,
400+
cache_creation_input_tokens: null,
401+
cache_read_input_tokens: null,
399402
},
400403
}
401404

src/api/transform/openai-format.ts

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -161,6 +161,7 @@ export function convertToAnthropicMessage(completion: OpenAI.Chat.Completions.Ch
161161
{
162162
type: "text",
163163
text: openAiMessage.content || "",
164+
citations: null,
164165
},
165166
],
166167
model: completion.model,
@@ -181,6 +182,8 @@ export function convertToAnthropicMessage(completion: OpenAI.Chat.Completions.Ch
181182
usage: {
182183
input_tokens: completion.usage?.prompt_tokens || 0,
183184
output_tokens: completion.usage?.completion_tokens || 0,
185+
cache_creation_input_tokens: null,
186+
cache_read_input_tokens: null,
184187
},
185188
}
186189

src/api/transform/vscode-lm-format.ts

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -175,6 +175,7 @@ export async function convertToAnthropicMessage(
175175
return {
176176
type: "text",
177177
text: part.value,
178+
citations: null,
178179
}
179180
}
180181

@@ -195,6 +196,8 @@ export async function convertToAnthropicMessage(
195196
usage: {
196197
input_tokens: 0,
197198
output_tokens: 0,
199+
cache_creation_input_tokens: null,
200+
cache_read_input_tokens: null,
198201
},
199202
}
200203
}

src/core/Cline.ts

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -63,9 +63,7 @@ import { ClineProvider, GlobalFileNames } from "./webview/ClineProvider"
6363
const cwd = vscode.workspace.workspaceFolders?.map((folder) => folder.uri.fsPath).at(0) ?? path.join(os.homedir(), "Desktop") // may or may not exist but fs checking existence would immediately ask for permission which would be bad UX, need to come up with a better solution
6464

6565
type ToolResponse = string | Array<Anthropic.TextBlockParam | Anthropic.ImageBlockParam>
66-
type UserContent = Array<
67-
Anthropic.TextBlockParam | Anthropic.ImageBlockParam | Anthropic.ToolUseBlockParam | Anthropic.ToolResultBlockParam
68-
>
66+
type UserContent = Array<Anthropic.ContentBlockParam>
6967

7068
export class Cline {
7169
readonly taskId: string

src/integrations/misc/export-markdown.ts

Lines changed: 12 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -35,21 +35,29 @@ export async function downloadTask(dateTs: number, conversationHistory: Anthropi
3535
})
3636

3737
if (saveUri) {
38-
// Write content to the selected location
39-
await vscode.workspace.fs.writeFile(saveUri, Buffer.from(markdownContent))
40-
vscode.window.showTextDocument(saveUri, { preview: true })
38+
try {
39+
// Write content to the selected location
40+
await vscode.workspace.fs.writeFile(saveUri, new TextEncoder().encode(markdownContent))
41+
vscode.window.showTextDocument(saveUri, { preview: true })
42+
} catch (error) {
43+
vscode.window.showErrorMessage(
44+
`Failed to save markdown file: ${error instanceof Error ? error.message : String(error)}`,
45+
)
46+
}
4147
}
4248
}
4349

4450
export function formatContentBlockToMarkdown(
45-
block: Anthropic.TextBlockParam | Anthropic.ImageBlockParam | Anthropic.ToolUseBlockParam | Anthropic.ToolResultBlockParam,
51+
block: Anthropic.ContentBlockParam,
4652
// messages: Anthropic.MessageParam[]
4753
): string {
4854
switch (block.type) {
4955
case "text":
5056
return block.text
5157
case "image":
5258
return `[Image]`
59+
case "document":
60+
return `[Document]`
5361
case "tool_use":
5462
let input: string
5563
if (typeof block.input === "object" && block.input !== null) {

0 commit comments

Comments (0)