
Commit 0a63dd5

lossless anthropic conversion
1 parent 060df24 commit 0a63dd5

4 files changed: +268 −63 lines changed


src/index.ts

Lines changed: 1 addition & 0 deletions
@@ -11,3 +11,4 @@ export * from "./sdk/sdk.js";
 // Message format compatibility helpers
 export { fromClaudeMessages, toClaudeMessage } from "./lib/anthropic-compat.js";
 export { fromChatMessages, toChatMessage } from "./lib/chat-compat.js";
+export { extractUnsupportedContent, hasUnsupportedContent, getUnsupportedContentSummary } from "./lib/stream-transformers.js";
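
With this re-export, the unsupported-content helpers defined in src/lib/stream-transformers.ts become part of the package's public surface. A minimal consumer-side sketch (the relative path is a placeholder for the published package name, which this diff does not show):

import {
  fromClaudeMessages,
  toClaudeMessage,
  extractUnsupportedContent,
  hasUnsupportedContent,
  getUnsupportedContentSummary,
} from "./src/index.js"; // placeholder path; consumers would import the package entry point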

src/lib/anthropic-compat.ts

Lines changed: 39 additions & 24 deletions
@@ -45,15 +45,6 @@ function createFunctionCallOutput(
   };
 }
 
-/**
- * Type guard for Claude text block params
- */
-function isTextBlock(
-  block: models.ClaudeContentBlockParam
-): block is models.ClaudeTextBlockParam {
-  return block.type === "text";
-}
-
 /**
  * Convert Anthropic Claude-style messages to OpenResponses input format.
  *
@@ -86,32 +77,56 @@ export function fromClaudeMessages(
   for (const msg of messages) {
     const { role, content } = msg;
 
-    // Handle string content directly
     if (typeof content === "string") {
       result.push(createEasyInputMessage(role, content));
       continue;
     }
 
-    // Handle array content - extract text and handle tool results
     const textParts: string[] = [];
+
     for (const block of content) {
-      if (isTextBlock(block)) {
-        textParts.push(block.text);
-      } else if (block.type === "tool_result") {
-        // Tool results need special handling - convert to function_call_output
-        const toolContent =
-          typeof block.content === "string"
-            ? block.content
-            : block.content.filter(isTextBlock).map((b: models.ClaudeTextBlockParam) => b.text).join("");
-        result.push(createFunctionCallOutput(block.tool_use_id, toolContent));
+      switch (block.type) {
+        case 'text': {
+          const textBlock = block as models.ClaudeTextBlockParam;
+          textParts.push(textBlock.text);
+          // Note: cache_control is lost in conversion (OpenRouter doesn't support it)
+          break;
+        }
+
+        case 'image': {
+          // Images in input cannot be mapped to OpenRouter easy format
+          // Add text marker to preserve conversation flow
+          textParts.push('[Image content - not supported in OpenRouter format]');
+          break;
+        }
+
+        case 'tool_use': {
+          // Tool use blocks in input are conversation history, skip
+          break;
+        }
+
+        case 'tool_result': {
+          const toolResultBlock = block as models.ClaudeToolResultBlockParam;
+
+          let toolOutput = '';
+          if (typeof toolResultBlock.content === 'string') {
+            toolOutput = toolResultBlock.content;
+          } else {
+            // Extract text, skip images (OpenRouter function_call_output only supports text)
+            toolOutput = toolResultBlock.content
+              .filter((part): part is models.ClaudeTextBlockParam => part.type === 'text')
+              .map(part => part.text)
+              .join('');
+          }
+
+          result.push(createFunctionCallOutput(toolResultBlock.tool_use_id, toolOutput));
+          break;
+        }
       }
-      // Note: tool_use and image blocks in input are typically part of conversation history
-      // They would come from previous assistant responses, we skip them for now
     }
 
-    // If we collected text parts, add them as a message
     if (textParts.length > 0) {
-      result.push(createEasyInputMessage(role, textParts.join("")));
+      result.push(createEasyInputMessage(role, textParts.join('')));
     }
   }
 
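
To make the new per-block dispatch concrete, here is a self-contained TypeScript sketch of the same logic. The ClaudeBlock and InputItem types below are simplified stand-ins for the package's models.* types, and the output shapes are assumptions about what createEasyInputMessage and createFunctionCallOutput produce; only the control flow mirrors the diff above.

// Simplified stand-ins for the models.* types used by fromClaudeMessages.
type ClaudeBlock =
  | { type: "text"; text: string }
  | { type: "image"; source: unknown }
  | { type: "tool_use"; id: string; name: string; input: unknown }
  | {
      type: "tool_result";
      tool_use_id: string;
      content: string | Array<{ type: "text"; text: string } | { type: "image"; source: unknown }>;
    };

// Assumed output shapes (the real ones come from createEasyInputMessage /
// createFunctionCallOutput, which this diff does not show).
type InputItem =
  | { role: string; content: string }
  | { type: "function_call_output"; call_id: string; output: string };

function convertBlocks(role: string, blocks: ClaudeBlock[]): InputItem[] {
  const result: InputItem[] = [];
  const textParts: string[] = [];

  for (const block of blocks) {
    switch (block.type) {
      case "text":
        textParts.push(block.text);
        break;
      case "image":
        // No image support on the OpenRouter side; keep a marker so the turn is not lost.
        textParts.push("[Image content - not supported in OpenRouter format]");
        break;
      case "tool_use":
        // Conversation-history echo of a previous assistant tool call; skipped.
        break;
      case "tool_result": {
        const output =
          typeof block.content === "string"
            ? block.content
            : block.content
                .filter((part): part is { type: "text"; text: string } => part.type === "text")
                .map((part) => part.text)
                .join("");
        result.push({ type: "function_call_output", call_id: block.tool_use_id, output });
        break;
      }
    }
  }

  if (textParts.length > 0) {
    result.push({ role, content: textParts.join("") });
  }

  return result;
}

One design detail worth noting: tool results are pushed as they are encountered, while buffered text is only emitted once after the loop, so a function_call_output always precedes the text message derived from the same Claude message.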

src/lib/stream-transformers.ts

Lines changed: 213 additions & 39 deletions
@@ -449,6 +449,69 @@ export function responseHasToolCalls(response: models.OpenResponsesNonStreamingR
   return response.output.some((item) => 'type' in item && item.type === 'function_call');
 }
 
+/**
+ * Convert OpenRouter annotations to Claude citations
+ */
+function mapAnnotationsToCitations(
+  annotations?: Array<models.OpenAIResponsesAnnotation>
+): models.ClaudeTextCitation[] | undefined {
+  if (!annotations || annotations.length === 0) {
+    return undefined;
+  }
+
+  const citations: models.ClaudeTextCitation[] = [];
+
+  for (const annotation of annotations) {
+    if (!('type' in annotation)) {
+      continue;
+    }
+
+    switch (annotation.type) {
+      case 'file_citation': {
+        const fileCite = annotation as models.FileCitation;
+        citations.push({
+          type: 'char_location',
+          cited_text: '',
+          document_index: fileCite.index,
+          document_title: fileCite.filename,
+          file_id: fileCite.fileId,
+          start_char_index: 0,
+          end_char_index: 0,
+        });
+        break;
+      }
+
+      case 'url_citation': {
+        const urlCite = annotation as models.URLCitation;
+        citations.push({
+          type: 'web_search_result_location',
+          cited_text: '',
+          title: urlCite.title,
+          url: urlCite.url,
+          encrypted_index: '',
+        });
+        break;
+      }
+
+      case 'file_path': {
+        const pathCite = annotation as models.FilePath;
+        citations.push({
+          type: 'char_location',
+          cited_text: '',
+          document_index: pathCite.index,
+          document_title: '',
+          file_id: pathCite.fileId,
+          start_char_index: 0,
+          end_char_index: 0,
+        });
+        break;
+      }
+    }
+  }
+
+  return citations.length > 0 ? citations : undefined;
+}
+
 /**
  * Map OpenResponses status to Claude stop reason
  */
@@ -489,59 +552,126 @@ export function convertToClaudeMessage(
   response: models.OpenResponsesNonStreamingResponse,
 ): models.ClaudeMessage {
   const content: models.ClaudeContentBlock[] = [];
+  const unsupportedContent: models.UnsupportedContent[] = [];
 
   for (const item of response.output) {
     if (!('type' in item)) {
       continue;
     }
 
-    // Handle message output items
-    if (item.type === 'message') {
-      const msgItem = item as models.ResponsesOutputMessage;
-      for (const part of msgItem.content) {
-        if ('type' in part && part.type === 'output_text') {
-          const textPart = part as models.ResponseOutputText;
-          content.push({
-            type: 'text',
-            text: textPart.text,
-          });
+    switch (item.type) {
+      case 'message': {
+        const msgItem = item as models.ResponsesOutputMessage;
+        for (const part of msgItem.content) {
+          if (!('type' in part)) {
+            continue;
+          }
+
+          if (part.type === 'output_text') {
+            const textPart = part as models.ResponseOutputText;
+            const citations = mapAnnotationsToCitations(textPart.annotations);
+
+            content.push({
+              type: 'text',
+              text: textPart.text,
+              ...(citations && { citations }),
+            });
+          } else if (part.type === 'refusal') {
+            const refusalPart = part as models.OpenAIResponsesRefusalContent;
+            unsupportedContent.push({
+              original_type: 'refusal',
+              data: { refusal: refusalPart.refusal },
+              reason: 'Claude does not have a native refusal content type',
+            });
+          }
         }
+        break;
       }
-    }
 
-    // Handle function call output items (tool use)
-    if (item.type === 'function_call') {
-      const fnCall = item as models.ResponsesOutputItemFunctionCall;
-      let parsedInput: Record<string, unknown> = {};
+      case 'function_call': {
+        const fnCall = item as models.ResponsesOutputItemFunctionCall;
+        let parsedInput: Record<string, unknown> = {};
 
-      try {
-        parsedInput = JSON.parse(fnCall.arguments);
-      } catch {
-        // If parsing fails, keep as empty object
-        parsedInput = {};
-      }
+        try {
+          parsedInput = JSON.parse(fnCall.arguments);
+        } catch {
+          parsedInput = {};
+        }
 
-      content.push({
-        type: 'tool_use',
-        id: fnCall.callId,
-        name: fnCall.name,
-        input: parsedInput,
-      });
-    }
+        content.push({
+          type: 'tool_use',
+          id: fnCall.callId,
+          name: fnCall.name,
+          input: parsedInput,
+        });
+        break;
+      }
 
-    // Handle reasoning output items (thinking)
-    if (item.type === 'reasoning') {
-      const reasoningItem = item as models.ResponsesOutputItemReasoning;
-      if (reasoningItem.summary && reasoningItem.summary.length > 0) {
-        for (const summaryItem of reasoningItem.summary) {
-          if (summaryItem.type === 'summary_text' && summaryItem.text) {
-            content.push({
-              type: 'thinking',
-              thinking: summaryItem.text,
-              signature: '',
-            });
+      case 'reasoning': {
+        const reasoningItem = item as models.ResponsesOutputItemReasoning;
+
+        if (reasoningItem.summary && reasoningItem.summary.length > 0) {
+          for (const summaryItem of reasoningItem.summary) {
+            if (summaryItem.type === 'summary_text' && summaryItem.text) {
+              content.push({
+                type: 'thinking',
+                thinking: summaryItem.text,
+                signature: '',
+              });
+            }
           }
         }
+
+        if (reasoningItem.encryptedContent) {
+          unsupportedContent.push({
+            original_type: 'reasoning_encrypted',
+            data: {
+              id: reasoningItem.id,
+              encrypted_content: reasoningItem.encryptedContent,
+            },
+            reason: 'Encrypted reasoning content preserved for round-trip',
+          });
+        }
+        break;
+      }
+
+      case 'web_search_call': {
+        const webSearchItem = item as models.ResponsesWebSearchCallOutput;
+        content.push({
+          type: 'server_tool_use',
+          id: webSearchItem.id,
+          name: 'web_search',
+          input: { status: webSearchItem.status },
+        });
+        break;
+      }
+
+      case 'file_search_call': {
+        const fileSearchItem = item as models.ResponsesOutputItemFileSearchCall;
+        content.push({
+          type: 'tool_use',
+          id: fileSearchItem.id,
+          name: 'file_search',
+          input: {
+            queries: fileSearchItem.queries,
+            status: fileSearchItem.status,
+          },
+        });
+        break;
+      }
+
+      case 'image_generation_call': {
+        const imageGenItem = item as models.ResponsesImageGenerationCall;
+        unsupportedContent.push({
+          original_type: 'image_generation_call',
+          data: {
+            id: imageGenItem.id,
+            result: imageGenItem.result,
+            status: imageGenItem.status,
+          },
+          reason: 'Claude does not support image outputs in assistant messages',
+        });
+        break;
       }
     }
   }
@@ -560,5 +690,49 @@ export function convertToClaudeMessage(
       cache_creation_input_tokens: response.usage?.inputTokensDetails?.cachedTokens ?? 0,
       cache_read_input_tokens: 0,
     },
+    ...(unsupportedContent.length > 0 && { unsupported_content: unsupportedContent }),
   };
 }
+
+/**
+ * Extract unsupported content by original type
+ */
+export function extractUnsupportedContent(
+  message: models.ClaudeMessage,
+  originalType: string
+): models.UnsupportedContent[] {
+  if (!message.unsupported_content) {
+    return [];
+  }
+
+  return message.unsupported_content.filter(
+    item => item.original_type === originalType
+  );
+}
+
+/**
+ * Check if message has any unsupported content
+ */
+export function hasUnsupportedContent(
+  message: models.ClaudeMessage
+): boolean {
+  return !!(message.unsupported_content && message.unsupported_content.length > 0);
+}
+
+/**
+ * Get summary of unsupported content types
+ */
+export function getUnsupportedContentSummary(
+  message: models.ClaudeMessage
+): Record<string, number> {
+  if (!message.unsupported_content) {
+    return {};
+  }
+
+  const summary: Record<string, number> = {};
+  for (const item of message.unsupported_content) {
+    summary[item.original_type] = (summary[item.original_type] || 0) + 1;
+  }
+
+  return summary;
+}
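
A usage sketch for the new lossless-conversion helpers, assuming a non-streaming OpenResponses response obtained elsewhere; the wrapper function, import path, and logged values are illustrative, not part of this commit.

import {
  convertToClaudeMessage,
  extractUnsupportedContent,
  hasUnsupportedContent,
  getUnsupportedContentSummary,
} from "./src/lib/stream-transformers.js";

// Hypothetical wrapper: convert a response and surface anything that could not
// be represented natively in the Claude message shape.
function inspectConversion(response: Parameters<typeof convertToClaudeMessage>[0]) {
  const message = convertToClaudeMessage(response);

  if (hasUnsupportedContent(message)) {
    // e.g. { refusal: 1, reasoning_encrypted: 1, image_generation_call: 2 }
    console.log(getUnsupportedContentSummary(message));

    // Pull the encrypted reasoning payloads back out for a later round-trip.
    for (const item of extractUnsupportedContent(message, "reasoning_encrypted")) {
      console.log(item.reason, item.data);
    }
  }

  return message;
}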
