Skip to content

Commit 9e53541

Browse files
mattappersonclaude
and committed
feat: add Anthropic Claude message format support to callModel
Add support for Anthropic Claude-style messages as input and output.

Input formats now supported:
- OpenResponses format (native)
- OpenAI Chat Completions format (Message[])
- Anthropic Claude format (ClaudeMessageParam[])

Output methods added/renamed:
- getChatMessage() - returns OpenAI chat format (renamed from getMessage)
- getClaudeMessage() - returns Anthropic Claude format (new)
- getNewChatMessagesStream() - stream chat messages (renamed from getNewMessagesStream)

Also includes:
- New ClaudeMessage types mirroring Anthropic SDK types
- Automatic format detection and conversion
- convertToClaudeMessage() helper for response transformation
- Updated examples demonstrating all input/output formats
- E2E tests for Claude-style message handling

BREAKING CHANGE: getMessage() renamed to getChatMessage()
BREAKING CHANGE: getNewMessagesStream() renamed to getNewChatMessagesStream()

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <[email protected]>
1 parent bf2a0df commit 9e53541

File tree

13 files changed

+859
-126
lines changed

13 files changed

+859
-126
lines changed

.gitignore

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -33,3 +33,4 @@
3333
/examples/nextjs-example/package-lock.json
3434
/examples/nextjs-example/package.lock.json
3535
/temp
36+
.codanna/**

examples/betaResponsesSend.example.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,7 @@ dotenv.config();
1111
* npm run build && npx tsx betaResponsesSend.example.ts
1212
*/
1313

14-
import { OpenRouter } from "@openrouter/sdk";
14+
import { OpenRouter } from "../src/index.js";
1515

1616
const openRouter = new OpenRouter({
1717
apiKey: process.env["OPENROUTER_API_KEY"] ?? "",

examples/callModel.example.ts

Lines changed: 89 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,89 @@
1+
/*
2+
* Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.
3+
*/
4+
5+
import dotenv from "dotenv";
6+
dotenv.config();
7+
/**
8+
* Example usage of the @openrouter/sdk SDK
9+
*
10+
* To run this example from the examples directory:
11+
* npm run build && npx tsx betaResponsesSend.example.ts
12+
*/
13+
14+
import { OpenRouter } from "../src/index.js";
15+
import { Message, OpenResponsesEasyInputMessage } from "../src/models";
16+
17+
import { MessageParam as AnthropicClaudeMessage } from "@anthropic-ai/sdk/resources/messages";
18+
19+
const openRouter = new OpenRouter({
20+
apiKey: process.env["OPENROUTER_API_KEY"] ?? "",
21+
});
22+
23+
const chatMessages: Message[] = [
24+
{
25+
role: "user",
26+
content: "Hello! What is your name?",
27+
},
28+
];
29+
const text = "Hello! What is your name?";
30+
const responsesMessages: OpenResponsesEasyInputMessage[] = [
31+
{
32+
type: "message",
33+
role: "user",
34+
content: [
35+
{
36+
type: "input_text",
37+
text: "What is your name?",
38+
},
39+
],
40+
},
41+
];
42+
43+
const anthropicClaudeMessages: AnthropicClaudeMessage[] = [
44+
{
45+
role: "user",
46+
content: "Hello! What is your name?",
47+
},
48+
];
49+
50+
const supportedInputSchemas = [
51+
chatMessages,
52+
text,
53+
responsesMessages,
54+
anthropicClaudeMessages,
55+
];
56+
57+
for (const input of supportedInputSchemas) {
58+
const result = await openRouter.callModel({
59+
instructions: "You are a helpful assistant. Your name is Mark",
60+
model: "openai/gpt-4",
61+
input: input,
62+
});
63+
64+
// get resulting output each time in...
65+
66+
// text format
67+
console.log(await result.getText());
68+
69+
// chat message format (was named getMessage)
70+
console.log(await result.getChatMessage());
71+
72+
// response message format
73+
console.log(await result.getResponse().then((response) => response.output));
74+
75+
// AnthropicClaude message format
76+
console.log(await result.getClaudeMessage());
77+
78+
// stream response message format
79+
console.log(await result.getFullResponsesStream());
80+
81+
// stream text format
82+
console.log(await result.getTextStream());
83+
84+
// stream chat message format
85+
console.log(await result.getNewChatMessagesStream());
86+
87+
// stream response message format
88+
console.log(await result.getFullResponsesStream());
89+
}

examples/chatCompletions.example.ts

Lines changed: 19 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@
22
* Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.
33
*/
44

5-
import dotenv from 'dotenv';
5+
import dotenv from "dotenv";
66

77
dotenv.config();
88

@@ -13,39 +13,40 @@ dotenv.config();
1313
* bun run chatCompletions.example.ts
1414
*/
1515

16-
import { OpenRouter } from '@openrouter/sdk';
16+
import { OpenRouter } from "../src/index.js";
1717

18-
if (!process.env['OPENROUTER_API_KEY']) {
19-
throw new Error('Missing OPENROUTER_API_KEY environment variable');
18+
if (!process.env["OPENROUTER_API_KEY"]) {
19+
throw new Error("Missing OPENROUTER_API_KEY environment variable");
2020
}
2121
const openRouter = new OpenRouter({
22-
apiKey: process.env['OPENROUTER_API_KEY'] ?? '',
22+
apiKey: process.env["OPENROUTER_API_KEY"] ?? "",
2323
debugLogger: console,
2424
});
2525

2626
async function nonStreamingExample() {
2727
const result = await openRouter.chat.send({
28-
model: 'qwen/qwen3-max',
28+
model: "qwen/qwen3-max",
2929
messages: [
3030
{
31-
role: 'user',
32-
content: 'Tell me a short joke about programming',
31+
role: "user",
32+
content: "Tell me a short joke about programming",
3333
},
3434
],
3535
stream: false,
3636
});
3737

38-
if ('choices' in result) {
38+
if ("choices" in result) {
39+
console.log(result.choices[0].message.content);
3940
}
4041
}
4142

4243
async function streamingExample() {
4344
const result = await openRouter.chat.send({
44-
model: 'qwen/qwen3-max',
45+
model: "qwen/qwen3-max",
4546
messages: [
4647
{
47-
role: 'user',
48-
content: 'Write a haiku about TypeScript',
48+
role: "user",
49+
content: "Write a haiku about TypeScript",
4950
},
5051
],
5152
stream: true,
@@ -54,9 +55,9 @@ async function streamingExample() {
5455
},
5556
});
5657

57-
if (result && typeof result === 'object' && Symbol.asyncIterator in result) {
58+
if (result && typeof result === "object" && Symbol.asyncIterator in result) {
5859
const stream = result;
59-
let _fullContent = '';
60+
let _fullContent = "";
6061

6162
for await (const chunk of stream) {
6263
if (chunk.choices?.[0]?.delta?.content) {
@@ -66,6 +67,7 @@ async function streamingExample() {
6667
}
6768

6869
if (chunk.usage) {
70+
console.log(chunk.usage);
6971
}
7072
}
7173
}
@@ -75,7 +77,9 @@ async function main() {
7577
try {
7678
await nonStreamingExample();
7779
await streamingExample();
78-
} catch (_error) {}
80+
} catch (_error) {
81+
console.error(_error);
82+
}
7983
}
8084

8185
main();

0 commit comments

Comments
 (0)