
Commit cd5ea20

fix: timeout error no longer appears after tool call fail or new chat (#131)
This pull request improves error handling and streamlines event signaling in the chat and streaming logic. The most important changes are resetting the `timedOut` state in the chat component when a chat is reset, centralizing the `stream_done` event chunk, and ensuring the `stream_done` event is emitted even when an error occurs during streaming.

Relates to #127

<img width="3692" height="1560" alt="CleanShot 2025-07-11 at 10 23 10@2x" src="https://github.com/user-attachments/assets/f81d438f-3e5e-4c30-979e-7b8d83852fb7" />
1 parent 6786ac7 commit cd5ea20

3 files changed: +32 -3 lines changed

src/components/Chat.tsx

Lines changed: 1 addition & 0 deletions
@@ -905,6 +905,7 @@ export function Chat() {
     setHasStartedChat(false)
     setStreamBuffer([])
     setStreaming(false)
+    setTimedOut(false)
     setMessages([]) // Clear messages completely - initialMessage will be shown via renderEvents logic
     setInput('')
     setFocusTimestamp(Date.now())
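
The reset above sits in the component's chat-reset path; presumably the `timedOut` flag drives the timeout error banner, so clearing it alongside the other per-conversation state is what keeps a stale timeout message from reappearing in a fresh chat. A minimal sketch of that pattern, assuming a React hook with hypothetical names (`useChatReset` and `handleNewChat` are not the repository's actual API):

```ts
// Minimal sketch (hypothetical names, not the actual Chat component): reset
// every piece of per-conversation state, including the timed-out flag, when
// a new chat starts.
import { useState } from 'react'

export function useChatReset() {
  const [streaming, setStreaming] = useState(false)
  const [timedOut, setTimedOut] = useState(false)
  const [messages, setMessages] = useState<string[]>([])
  const [input, setInput] = useState('')

  const handleNewChat = () => {
    setStreaming(false)
    setTimedOut(false) // without this, a previous timeout error could resurface
    setMessages([])
    setInput('')
  }

  return { streaming, timedOut, messages, input, setInput, handleNewChat }
}
```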

src/lib/streaming.test.ts

Lines changed: 22 additions & 0 deletions
@@ -143,4 +143,26 @@ describe('streamText', () => {
     // Should include t:{...stream_done...}
     expect(result).toMatch(/t:{\"type\":\"stream_done\"}/)
   })
+
+  it('emits stream_done when an error occurs', async () => {
+    // Simulate an error thrown during streaming
+    const errorIterable = {
+      async *[Symbol.asyncIterator]() {
+        throw new Error('Simulated streaming error')
+      },
+    }
+    const response = streamText(errorIterable)
+    const reader = response.body!.getReader()
+    let result = ''
+    let done = false
+    while (!done) {
+      const { value, done: d } = await reader.read()
+      if (value) result += new TextDecoder().decode(value)
+      done = d
+    }
+    // Should include t:{...stream_done...} even on error
+    expect(result).toMatch(/t:{"type":"stream_done"}/)
+    // Should also include the error event
+    expect(result).toMatch(/e:{"type":"error".*}/)
+  })
 })
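
The assertions above rely on the stream's wire format: prefixed, newline-delimited JSON events, with `t:` used for control events such as `stream_done` and `e:` for errors. A minimal consumer-side sketch of that format, assuming those two prefixes are the only ones that matter here (`parseStreamEvents` is a hypothetical helper, not part of the repository):

```ts
// Minimal sketch, assuming the prefixed newline-delimited format the tests
// match on: `t:{...}` for control events such as stream_done, `e:{...}` for
// errors. parseStreamEvents is a hypothetical helper, not part of the repo.
type StreamEvent = {
  prefix: 't' | 'e'
  payload: { type: string; [key: string]: unknown }
}

function parseStreamEvents(raw: string): StreamEvent[] {
  return raw
    .split('\n')
    .filter((line) => line.length > 0)
    .flatMap((line) => {
      const match = /^([te]):(.*)$/.exec(line)
      if (!match) return [] // ignore chunks that are not t:/e: events
      try {
        return [{ prefix: match[1] as 't' | 'e', payload: JSON.parse(match[2]) }]
      } catch {
        return [] // skip lines that are not well-formed JSON
      }
    })
}

// Example: detect completion and errors in a captured stream body.
const events = parseStreamEvents('e:{"type":"error"}\nt:{"type":"stream_done"}\n')
const isDone = events.some((e) => e.prefix === 't' && e.payload.type === 'stream_done')
const hasError = events.some((e) => e.prefix === 'e' && e.payload.type === 'error')
console.log({ isDone, hasError }) // { isDone: true, hasError: true }
```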

src/lib/streaming.ts

Lines changed: 9 additions & 3 deletions
@@ -1,5 +1,10 @@
 import { APIError } from 'openai'
 
+// Event chunk for stream completion
+const STREAM_DONE_CHUNK = new TextEncoder().encode(
+  `t:${JSON.stringify({ type: 'stream_done' })}\n`,
+)
+
 export function streamText(
   answer: AsyncIterable<any>,
   onMessageId?: (messageId: string) => void,

@@ -358,9 +363,7 @@
       // Flush any remaining content
       flush()
       // Emit a final done event to signal successful completion
-      controller.enqueue(
-        encoder.encode(`t:${JSON.stringify({ type: 'stream_done' })}\n`),
-      )
+      controller.enqueue(STREAM_DONE_CHUNK)
       controller.close()
     } catch (error: unknown) {
       console.error('Error during streamed response:', error)

@@ -385,6 +388,9 @@
         ),
       )
 
+      // Always emit stream_done after error
+      controller.enqueue(STREAM_DONE_CHUNK)
+
       // Close the stream
       try {
         controller.close()
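
Emitting `stream_done` on the error path matters because the chat client presumably arms a timeout while waiting for the stream and clears it when the done event arrives; before this change an errored stream never sent `stream_done`, so the timer fired and a timeout error was shown on top of the error that had already been reported. A minimal sketch of that client-side pattern (hypothetical helper, not the repository's actual code):

```ts
// Minimal sketch (hypothetical names, not the repo's actual client code):
// arm a timeout while waiting for the stream and clear it as soon as a
// stream_done control event arrives, which after this change happens on both
// the success and the error path.
export function watchForTimeout(
  onTimedOut: () => void,
  timeoutMs = 30_000,
): { handleLine: (line: string) => void; cancel: () => void } {
  const timer = setTimeout(onTimedOut, timeoutMs)

  const handleLine = (line: string) => {
    if (line.startsWith('t:') && line.includes('"stream_done"')) {
      clearTimeout(timer) // stream finished (successfully or not): no timeout error
    }
  }

  return { handleLine, cancel: () => clearTimeout(timer) }
}
```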
