
Commit c660706

Add support for displaying reasoning for openrouter models
1 parent 663747e commit c660706

6 files changed, 109 insertions(+), 2 deletions(-)

src/api/providers/openrouter.ts
Lines changed: 8 additions & 0 deletions

@@ -10,6 +10,7 @@ import delay from "delay"
 // Add custom interface for OpenRouter params
 type OpenRouterChatCompletionParams = OpenAI.Chat.ChatCompletionCreateParams & {
 	transforms?: string[]
+	include_reasoning?: boolean
 }
 
 // Add custom interface for OpenRouter usage chunk
@@ -126,6 +127,7 @@ export class OpenRouterHandler implements ApiHandler, SingleCompletionHandler {
 			temperature: temperature,
 			messages: openAiMessages,
 			stream: true,
+			include_reasoning: true,
 			// This way, the transforms field will only be included in the parameters when openRouterUseMiddleOutTransform is true.
 			...(this.options.openRouterUseMiddleOutTransform && { transforms: ["middle-out"] }),
 		} as OpenRouterChatCompletionParams)
@@ -145,6 +147,12 @@ export class OpenRouterHandler implements ApiHandler, SingleCompletionHandler {
 			}
 
 			const delta = chunk.choices[0]?.delta
+			if ("reasoning" in delta && delta.reasoning) {
+				yield {
+					type: "reasoning",
+					text: delta.reasoning,
+				} as ApiStreamChunk
+			}
 			if (delta?.content) {
 				fullResponseText += delta.content
 				yield {
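
For context, a minimal standalone sketch of the behavior this hunk relies on, assuming the OpenAI SDK pointed at OpenRouter's OpenAI-compatible endpoint: with `include_reasoning` set, streamed deltas may carry a `reasoning` string alongside the usual `content`. The model name and output handling here are illustrative, not the extension's code.

```ts
import OpenAI from "openai"

// Mirror the commit's approach: widen the SDK params with the OpenRouter-specific flag.
type OpenRouterParams = OpenAI.Chat.ChatCompletionCreateParams & { include_reasoning?: boolean }

const client = new OpenAI({
	baseURL: "https://openrouter.ai/api/v1", // OpenRouter's OpenAI-compatible endpoint
	apiKey: process.env.OPENROUTER_API_KEY,
})

async function streamWithReasoning(prompt: string) {
	const stream = (await client.chat.completions.create({
		model: "deepseek/deepseek-r1", // example only; any reasoning-capable model applies
		messages: [{ role: "user", content: prompt }],
		stream: true,
		include_reasoning: true,
	} as OpenRouterParams)) as AsyncIterable<any>

	for await (const chunk of stream) {
		const delta = chunk.choices?.[0]?.delta
		// OpenRouter interleaves reasoning tokens with normal content tokens.
		if (delta?.reasoning) {
			process.stdout.write(`[reasoning] ${delta.reasoning}`)
		}
		if (delta?.content) {
			process.stdout.write(delta.content)
		}
	}
}
```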

src/api/transform/stream.ts
Lines changed: 6 additions & 1 deletion

@@ -1,11 +1,16 @@
 export type ApiStream = AsyncGenerator<ApiStreamChunk>
-export type ApiStreamChunk = ApiStreamTextChunk | ApiStreamUsageChunk
+export type ApiStreamChunk = ApiStreamTextChunk | ApiStreamUsageChunk | ApiStreamReasoningChunk
 
 export interface ApiStreamTextChunk {
 	type: "text"
 	text: string
 }
 
+export interface ApiStreamReasoningChunk {
+	type: "reasoning"
+	text: string
+}
+
 export interface ApiStreamUsageChunk {
 	type: "usage"
 	inputTokens: number
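
With the new variant in the union, consumers can narrow on the `type` literal. A hedged sketch of a downstream consumer (the helper name is illustrative and not part of the codebase):

```ts
import { ApiStream } from "./stream"

// TypeScript narrows each case by its `type` literal, so chunk.text / chunk.inputTokens are typed.
async function collect(stream: ApiStream): Promise<{ text: string; reasoning: string }> {
	let text = ""
	let reasoning = ""
	for await (const chunk of stream) {
		switch (chunk.type) {
			case "text":
				text += chunk.text
				break
			case "reasoning":
				reasoning += chunk.text
				break
			case "usage":
				// inputTokens / outputTokens are available here; ignored in this sketch
				break
		}
	}
	return { text, reasoning }
}
```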

src/core/Cline.ts
Lines changed: 6 additions & 1 deletion

@@ -2219,7 +2219,7 @@ export class Cline {
 		}
 
 		/*
-		Seeing out of bounds is fine, it means that the next too call is being built up and ready to add to assistantMessageContent to present. 
+		Seeing out of bounds is fine, it means that the next too call is being built up and ready to add to assistantMessageContent to present.
 		When you see the UI inactive during this, it means that a tool is breaking without presenting any UI. For example the write_to_file tool was breaking when relpath was undefined, and for invalid relpath it never presented UI.
 		*/
 		this.presentAssistantMessageLocked = false // this needs to be placed here, if not then calling this.presentAssistantMessage below would fail (sometimes) since it's locked
@@ -2391,9 +2391,14 @@
 
 		const stream = this.attemptApiRequest(previousApiReqIndex) // yields only if the first chunk is successful, otherwise will allow the user to retry the request (most likely due to rate limit error, which gets thrown on the first chunk)
 		let assistantMessage = ""
+		let reasoningMessage = ""
 		try {
 			for await (const chunk of stream) {
 				switch (chunk.type) {
+					case "reasoning":
+						reasoningMessage += chunk.text
+						await this.say("reasoning", reasoningMessage, undefined, true)
+						break
 					case "usage":
 						inputTokens += chunk.inputTokens
 						outputTokens += chunk.outputTokens
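
The pattern in the second hunk is accumulate-and-re-emit: each reasoning delta is appended to `reasoningMessage`, and the `partial: true` flag re-renders the same message row instead of adding a new one per chunk. A rough, simplified sketch of that idea (the `say` signature is inferred from this call site and the `partial` field on `ClineMessage`; this is not the actual implementation):

```ts
// Simplified illustration of the partial-message pattern, not Cline's code.
type Say = (type: string, text: string, images?: string[], partial?: boolean) => Promise<void>

async function streamReasoning(chunks: AsyncIterable<{ type: string; text?: string }>, say: Say) {
	let reasoningMessage = ""
	for await (const chunk of chunks) {
		if (chunk.type === "reasoning" && chunk.text) {
			reasoningMessage += chunk.text
			// partial=true updates the existing UI row in place as text streams in
			await say("reasoning", reasoningMessage, undefined, true)
		}
	}
}
```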

src/shared/ExtensionMessage.ts
Lines changed: 2 additions & 0 deletions

@@ -121,6 +121,7 @@ export interface ClineMessage {
 	text?: string
 	images?: string[]
 	partial?: boolean
+	reasoning?: string
 }
 
 export type ClineAsk =
@@ -142,6 +143,7 @@ export type ClineSay =
 	| "api_req_started"
 	| "api_req_finished"
 	| "text"
+	| "reasoning"
 	| "completion_result"
 	| "user_feedback"
 	| "user_feedback_diff"

webview-ui/src/components/chat/ChatRow.tsx
Lines changed: 17 additions & 0 deletions

@@ -15,6 +15,7 @@ import { vscode } from "../../utils/vscode"
 import CodeAccordian, { removeLeadingNonAlphanumeric } from "../common/CodeAccordian"
 import CodeBlock, { CODE_BLOCK_BG_COLOR } from "../common/CodeBlock"
 import MarkdownBlock from "../common/MarkdownBlock"
+import ReasoningBlock from "./ReasoningBlock"
 import Thumbnails from "../common/Thumbnails"
 import McpResourceRow from "../mcp/McpResourceRow"
 import McpToolRow from "../mcp/McpToolRow"
@@ -79,6 +80,14 @@ export const ChatRowContent = ({
 	isStreaming,
 }: ChatRowContentProps) => {
 	const { mcpServers } = useExtensionState()
+	const [reasoningCollapsed, setReasoningCollapsed] = useState(false)
+
+	// Auto-collapse reasoning when new messages arrive
+	useEffect(() => {
+		if (!isLast && message.say === "reasoning") {
+			setReasoningCollapsed(true)
+		}
+	}, [isLast, message.say])
 	const [cost, apiReqCancelReason, apiReqStreamingFailedMessage] = useMemo(() => {
 		if (message.text != null && message.say === "api_req_started") {
 			const info: ClineApiReqInfo = JSON.parse(message.text)
@@ -472,6 +481,14 @@ export const ChatRowContent = ({
 	switch (message.type) {
 		case "say":
 			switch (message.say) {
+				case "reasoning":
+					return (
+						<ReasoningBlock
+							content={message.text || ""}
+							isCollapsed={reasoningCollapsed}
+							onToggleCollapse={() => setReasoningCollapsed(!reasoningCollapsed)}
+						/>
+					)
 				case "api_req_started":
 					return (
 						<>

webview-ui/src/components/chat/ReasoningBlock.tsx (new file)
Lines changed: 70 additions & 0 deletions

@@ -0,0 +1,70 @@
+import React, { useEffect, useRef } from "react"
+import { CODE_BLOCK_BG_COLOR } from "../common/CodeBlock"
+import MarkdownBlock from "../common/MarkdownBlock"
+
+interface ReasoningBlockProps {
+	content: string
+	isCollapsed?: boolean
+	onToggleCollapse?: () => void
+	autoHeight?: boolean
+}
+
+const ReasoningBlock: React.FC<ReasoningBlockProps> = ({
+	content,
+	isCollapsed = false,
+	onToggleCollapse,
+	autoHeight = false,
+}) => {
+	const contentRef = useRef<HTMLDivElement>(null)
+
+	// Scroll to bottom when content updates
+	useEffect(() => {
+		if (contentRef.current && !isCollapsed) {
+			contentRef.current.scrollTop = contentRef.current.scrollHeight
+		}
+	}, [content, isCollapsed])
+
+	return (
+		<div
+			style={{
+				backgroundColor: CODE_BLOCK_BG_COLOR,
+				border: "1px solid var(--vscode-editorGroup-border)",
+				borderRadius: "3px",
+				overflow: "hidden",
+			}}>
+			<div
+				onClick={onToggleCollapse}
+				style={{
+					padding: "8px 12px",
+					cursor: "pointer",
+					userSelect: "none",
+					display: "flex",
+					alignItems: "center",
+					justifyContent: "space-between",
+					borderBottom: isCollapsed ? "none" : "1px solid var(--vscode-editorGroup-border)",
+				}}>
+				<span style={{ fontWeight: "bold" }}>Reasoning</span>
+				<span className={`codicon codicon-chevron-${isCollapsed ? "right" : "down"}`}></span>
+			</div>
+			{!isCollapsed && (
+				<div
+					ref={contentRef}
+					style={{
+						padding: "8px 12px",
+						maxHeight: autoHeight ? "none" : "160px",
+						overflowY: "auto",
+					}}>
+					<div
+						style={{
+							fontSize: "13px",
+							opacity: 0.9,
+						}}>
+						<MarkdownBlock markdown={content} />
+					</div>
+				</div>
+			)}
+		</div>
+	)
+}
+
+export default ReasoningBlock
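
Besides the ChatRow usage in this commit, the component can be hosted anywhere collapse state is kept locally. A small hypothetical usage sketch (the host component is illustrative; `autoHeight` disables the 160px scroll cap so the full reasoning is visible):

```tsx
import React, { useState } from "react"
import ReasoningBlock from "./ReasoningBlock" // assumes this file sits next to ReasoningBlock.tsx

// Hypothetical host component demonstrating the collapse toggle and autoHeight prop.
const ReasoningDetail: React.FC<{ reasoning: string }> = ({ reasoning }) => {
	const [collapsed, setCollapsed] = useState(false)
	return (
		<ReasoningBlock
			content={reasoning}
			isCollapsed={collapsed}
			onToggleCollapse={() => setCollapsed((prev) => !prev)}
			autoHeight // render full height instead of a scrollable 160px area
		/>
	)
}

export default ReasoningDetail
```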

0 commit comments
