66from app .core .ai_client import ChatManager , AnthropicClient , OpenAIClient
77from app .core .config import settings
88
# Router for the /learn API; every route below is mounted under this prefix.
router = APIRouter(prefix="/learn", tags=["learn"])

# Store chat managers in memory for now
# Keyed by f"{user_id}_{context}" (see the endpoints below).
# NOTE(review): process-local and never evicted — entries accumulate for the
# lifetime of the process and are lost on restart; confirm this is acceptable.
active_chats: dict[str, ChatManager] = {}
1414class ChatRequest (BaseModel ):
15+ """Request model for chat streaming endpoint."""
1516 message : str
1617 system_prompt : str | None = None
1718 model : Literal ["anthropic" , "openai" ] = "anthropic"
@@ -26,7 +27,30 @@ class TestResponse(BaseModel):
2627 openai_model : str
2728 test_message : str | None = None
2829
29- @router .get ("/learn/test" , response_model = TestResponse )
class ChatStreamResponse(BaseModel):
    """Response model for individual stream messages.

    Describes one server-sent event of the streaming chat endpoints.
    NOTE(review): not referenced by the visible endpoints — they declare the
    event schema inline via ``openapi_extra``; confirm whether this model is
    used elsewhere or can be wired into those declarations.
    """

    # Discriminator for the event payload; only "content" events are described.
    type: Literal["content"]
    # Partial message text carried by this event.
    content: str
34+
35+ class ChatMessageResponse (BaseModel ):
36+ """Response model for non-streaming chat messages."""
37+ message : str
38+
class ChatStreamRequest(BaseModel):
    """Request model for chat streaming endpoint.

    NOTE(review): field-for-field identical to the visible fields of
    ChatRequest, differing only in the OpenAPI example — consider
    consolidating the two models.
    """

    # User message to send to the model.
    message: str
    # Optional system prompt, passed through to the underlying client.
    system_prompt: str | None = None
    # Which backend client the ChatManager should use.
    model: Literal["anthropic", "openai"] = "anthropic"

    class Config:
        # Example payload surfaced in the OpenAPI docs.
        # NOTE(review): `schema_extra` is the Pydantic v1 spelling; under
        # Pydantic v2 this is `model_config = ConfigDict(json_schema_extra=...)`
        # — confirm which major version the project pins.
        schema_extra = {
            'example': {
                'message': 'Write a haiku about coding',
                'model': 'anthropic'
            }
        }
52+
53+ @router .get ("/test" , response_model = TestResponse )
3054async def test_configuration ():
3155 """Test the LLM configuration and basic functionality."""
3256 response = TestResponse (
@@ -50,12 +74,14 @@ async def test_configuration():
5074
5175 return response
5276
53- @router .post ("/learn/ chat" , response_model = ChatResponse )
77+ @router .post ("/chat" , response_model = ChatMessageResponse )
5478async def chat_general (
5579 request : ChatRequest ,
5680 current_user = Depends (deps .get_current_user ),
5781):
58- """General purpose chat endpoint without path context."""
82+ """
83+ Send a message to the AI and get a response.
84+ """
5985 chat_key = f"{ current_user .id } _general"
6086 if chat_key not in active_chats :
6187 active_chats [chat_key ] = ChatManager (client = request .model )
@@ -65,14 +91,48 @@ async def chat_general(
6591 system = request .system_prompt
6692 )
6793
68- return ChatResponse (message = response )
94+ return ChatMessageResponse (message = response )
6995
70- @router .post ("/learn/chat/stream" )
96+ @router .post ("/chat/stream" ,
97+ response_class = StreamingResponse ,
98+ openapi_extra = {
99+ 'responses' : {
100+ '200' : {
101+ 'description' : 'Streaming response' ,
102+ 'headers' : {
103+ 'Transfer-Encoding' : {
104+ 'schema' : {
105+ 'type' : 'string' ,
106+ 'enum' : ['chunked' ]
107+ }
108+ }
109+ },
110+ 'content' : {
111+ 'text/event-stream' : {
112+ 'schema' : {
113+ 'type' : 'object' ,
114+ 'properties' : {
115+ 'type' : {'type' : 'string' , 'enum' : ['content' ]},
116+ 'content' : {'type' : 'string' }
117+ }
118+ }
119+ }
120+ }
121+ }
122+ }
123+ }
124+ )
71125async def chat_stream (
72- request : ChatRequest ,
126+ request : ChatStreamRequest ,
73127 current_user = Depends (deps .get_current_user ),
74- ):
75- """Streaming chat endpoint."""
128+ ) -> StreamingResponse :
129+ """
130+ Send a message to the AI and get a streaming response.
131+ Returns a StreamingResponse with Server-Sent Events containing partial messages.
132+
133+ The stream will emit events in the format:
134+ data: {"type": "content", "content": "partial message..."}
135+ """
76136 chat_key = f"{ current_user .id } _general"
77137 if chat_key not in active_chats :
78138 active_chats [chat_key ] = ChatManager (client = request .model )
@@ -82,28 +142,46 @@ async def chat_stream(
82142 request .message ,
83143 system = request .system_prompt
84144 ),
85- media_type = 'text/event-stream' ,
86- headers = {
87- 'Cache-Control' : 'no-cache' ,
88- 'Connection' : 'keep-alive' ,
89- 'X-Accel-Buffering' : 'no' # Disable buffering in nginx
90- }
145+ media_type = 'text/event-stream'
91146 )
92147
@router.post(
    "/{path_id}/chat/stream",
    # Match the sibling /chat/stream endpoint: without this, OpenAPI documents
    # a default JSON response for a route that actually streams SSE.
    response_class=StreamingResponse,
    openapi_extra={
        'responses': {
            '200': {
                'description': 'Streaming response',
                'content': {
                    'text/event-stream': {
                        'schema': {
                            'type': 'object',
                            'properties': {
                                'type': {'type': 'string', 'enum': ['content']},
                                'content': {'type': 'string'}
                            }
                        }
                    }
                }
            }
        }
    }
)
async def path_chat_stream(
    path_id: str,
    # ChatStreamRequest carries the same fields as ChatRequest (wire-compatible)
    # and is the model the sibling /chat/stream endpoint uses — adopted here so
    # both streaming routes share one documented request schema and example.
    request: ChatStreamRequest,
    current_user=Depends(deps.get_current_user),
) -> StreamingResponse:
    """
    Path-specific chat endpoint that maintains conversation context for each path.
    Returns a StreamingResponse with Server-Sent Events containing partial messages.
    """
    # One ChatManager per (user, path) keeps each learning path's history isolated.
    chat_key = f"{current_user.id}_{path_id}"
    if chat_key not in active_chats:
        # NOTE(review): the backend client is fixed by the first request for this
        # key — a later request with a different `model` is silently ignored;
        # confirm this is the intended session semantics.
        active_chats[chat_key] = ChatManager(client=request.model)

    return StreamingResponse(
        active_chats[chat_key].stream_message(
            request.message,
            system=request.system_prompt
        ),
        media_type='text/event-stream'
    )
108-
109- return ChatResponse (message = response )
0 commit comments