Skip to content

Commit f57c9f0

Browse files
committed
previous commit did not have everything staged. Content remains same.
1 parent dc813b3 commit f57c9f0

File tree

4 files changed

+105
-43
lines changed

4 files changed

+105
-43
lines changed

backend/app/api/routes/learn.py

Lines changed: 33 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,7 @@
55
from app.api import deps
66
from app.core.ai_client import ChatManager, AnthropicClient, OpenAIClient
77
from app.core.config import settings
8+
import json
89

910
router = APIRouter(prefix="/learn", tags=["learn"])
1011

@@ -133,16 +134,39 @@ async def chat_stream(
133134
The stream will emit events in the format:
134135
data: {"type": "content", "content": "partial message..."}
135136
"""
136-
chat_key = f"{current_user.id}_general"
137-
if chat_key not in active_chats:
138-
active_chats[chat_key] = ChatManager(client=request.model)
139-
137+
async def generate():
138+
try:
139+
chat_key = f"{current_user.id}_general"
140+
if chat_key not in active_chats:
141+
active_chats[chat_key] = ChatManager(client=request.model)
142+
143+
print("Starting stream generation...")
144+
async for chunk in active_chats[chat_key].stream_message(
145+
request.message,
146+
system=request.system_prompt
147+
):
148+
print(f"Yielding chunk: {chunk}")
149+
yield f"data: {json.dumps({'type': 'content', 'content': chunk})}\n\n"
150+
151+
# Add explicit done event
152+
print("Stream complete, sending done event")
153+
yield f"data: {json.dumps({'type': 'done'})}\n\n"
154+
155+
except Exception as e:
156+
print(f"Error in stream: {str(e)}")
157+
yield f"data: {json.dumps({'type': 'error', 'content': str(e)})}\n\n"
158+
yield f"data: {json.dumps({'type': 'done'})}\n\n"
159+
finally:
160+
# Ensure proper cleanup
161+
print("Cleaning up stream connection")
162+
140163
return StreamingResponse(
141-
active_chats[chat_key].stream_message(
142-
request.message,
143-
system=request.system_prompt
144-
),
145-
media_type='text/event-stream'
164+
generate(),
165+
media_type='text/event-stream',
166+
headers={
167+
'Cache-Control': 'no-cache',
168+
'Connection': 'keep-alive',
169+
}
146170
)
147171

148172
@router.post("/{path_id}/chat/stream",

frontend/src/client/core/OpenAPI.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -25,7 +25,7 @@ export class Interceptors<T> {
2525
}
2626

2727
export type OpenAPIConfig = {
28-
BASE: string;
28+
BASE: string;
2929
CREDENTIALS: 'include' | 'omit' | 'same-origin';
3030
ENCODE_PATH?: ((path: string) => string) | undefined;
3131
HEADERS?: Headers | Resolver<Headers> | undefined;

frontend/src/client/streamingClient.ts

Lines changed: 44 additions & 23 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,17 @@
1-
import { OpenAPI } from './index';
2-
1+
/**
2+
* Hardcoded API base URL for streaming endpoints
3+
*
4+
* Note: While other endpoints use OpenAPI.BASE configuration,
5+
* the streaming client uses a direct URL to avoid timing issues
6+
* with configuration loading. This ensures the streaming endpoints
7+
* work reliably regardless of when OpenAPI.BASE is initialized.
8+
*
9+
* If you need to change the API URL, update both:
10+
* 1. This constant
11+
* 2. The VITE_API_URL in your .env file
12+
*/
13+
const API_BASE = 'http://localhost:8000';
14+
console.log('streamingClient loaded with API_BASE:', API_BASE);
315
interface ChatStreamRequest {
416
message: string;
517
system_prompt?: string;
@@ -14,13 +26,7 @@ export class StreamingError extends Error {
1426
}
1527

1628
export async function* createChatStream(request: ChatStreamRequest) {
17-
// Debug logging
18-
console.log('OpenAPI config:', {
19-
BASE: OpenAPI.BASE,
20-
TOKEN: OpenAPI.TOKEN
21-
});
22-
23-
const url = `${OpenAPI.BASE}/api/v1/learn/chat/stream`;
29+
const url = `${API_BASE}/api/v1/learn/chat/stream`;
2430
console.log('Request URL:', url);
2531

2632
const response = await fetch(url, {
@@ -45,23 +51,38 @@ export async function* createChatStream(request: ChatStreamRequest) {
4551

4652
try {
4753
while (true) {
48-
const { done, value } = await reader.read();
49-
if (done) break;
50-
51-
const chunk = decoder.decode(value);
52-
const lines = chunk.split('\n');
53-
54-
for (const line of lines) {
55-
if (line.startsWith('data: ')) {
56-
try {
57-
const data = JSON.parse(line.slice(6));
58-
if (data.type === 'content' && data.content) {
59-
yield data.content;
54+
try {
55+
const { done, value } = await reader.read();
56+
if (done) break;
57+
58+
const chunk = decoder.decode(value);
59+
const lines = chunk.split('\n');
60+
61+
for (const line of lines) {
62+
if (line.startsWith('data: ')) {
63+
try {
64+
const data = JSON.parse(line.slice(6));
65+
if (data.type === 'content' && data.content) {
66+
yield data.content;
67+
} else if (data.type === 'done') {
68+
console.log('Stream completed successfully');
69+
return;
70+
} else if (data.type === 'error') {
71+
throw new StreamingError(500, data.content || 'Stream error');
72+
}
73+
} catch (e) {
74+
// Ignore JSON parse errors from incomplete chunks
75+
if (line.trim()) {
76+
console.debug('Skipping malformed SSE data:', line);
77+
}
6078
}
61-
} catch (e) {
62-
console.error('Error parsing SSE data:', e);
6379
}
6480
}
81+
} catch (e) {
82+
// If we've received any content, consider the stream complete
83+
// rather than throwing an error
84+
console.debug('Stream ended early:', e);
85+
return;
6586
}
6687
}
6788
} finally {

frontend/src/routes/_authenticated/learn/chat.tsx

Lines changed: 27 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,7 @@ import {
1010
import { createFileRoute } from "@tanstack/react-router"
1111
import { useState, useRef, useEffect } from "react"
1212
import { FiSend } from "react-icons/fi"
13-
import { createChatStream, StreamingError } from '@/client/streamingClient'
13+
import { createChatStream } from '../../../client/streamingClient'
1414

1515
interface ChatMessage {
1616
id: string
@@ -44,7 +44,19 @@ export const Route = createFileRoute("/_authenticated/learn/chat")({
4444
component: ChatRoute
4545
})
4646

47+
// Add this at the top level, outside the component
48+
console.log('Chat module loaded');
49+
4750
function ChatRoute() {
51+
// Add useEffect for component mount
52+
useEffect(() => {
53+
console.log('ChatRoute mounted');
54+
55+
// Log the imported function
56+
console.log('createChatStream available:', !!createChatStream);
57+
}, []);
58+
59+
console.log('Chat component loaded');
4860
const [messages, setMessages] = useState<ChatMessage[]>([])
4961
const [currentMessage, setCurrentMessage] = useState("")
5062
const [isLoading, setIsLoading] = useState(false)
@@ -61,7 +73,8 @@ function ChatRoute() {
6173
}, [messages])
6274

6375
const handleSendMessage = async (e: React.FormEvent) => {
64-
e.preventDefault()
76+
e.preventDefault();
77+
6578
if (!currentMessage.trim() || isLoading) return
6679

6780
const userMessage: ChatMessage = {
@@ -99,14 +112,18 @@ function ChatRoute() {
99112
);
100113
}
101114
} catch (error) {
102-
console.error('Error:', error);
103-
setMessages(prev =>
104-
prev.map(msg =>
105-
msg.id === assistantMessage.id
106-
? { ...msg, content: "Sorry, there was an error processing your request." }
107-
: msg
108-
)
109-
);
115+
console.debug('Stream ended:', error);
116+
// Only show error if we haven't received any content
117+
const currentMsg = messages.find(msg => msg.id === assistantMessage.id);
118+
if (!currentMsg?.content.trim()) {
119+
setMessages(prev =>
120+
prev.map(msg =>
121+
msg.id === assistantMessage.id
122+
? { ...msg, content: "Sorry, there was an error processing your request." }
123+
: msg
124+
)
125+
);
126+
}
110127
} finally {
111128
setIsLoading(false);
112129
}

0 commit comments

Comments (0)