Skip to content

Commit 2ff290d

Browse files
feat: standardize chat typing (#803)
Co-authored-by: jakubduda-dsai <[email protected]>
1 parent 85f6528 commit 2ff290d

File tree

14 files changed

+65
-54
lines changed

14 files changed

+65
-54
lines changed

README.md

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -229,8 +229,8 @@ class MyChat(ChatInterface):
229229
async def chat(
230230
self,
231231
message: str,
232-
history: ChatFormat | None = None,
233-
context: ChatContext | None = None,
232+
history: ChatFormat,
233+
context: ChatContext,
234234
) -> AsyncGenerator[ChatResponse]:
235235
async for result in agent.run_streaming(message):
236236
match result:

docs/how-to/chatbots/api.md

Lines changed: 16 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,8 @@ First, create a chat implementation by subclassing `ChatInterface`. Here's a min
1212
from collections.abc import AsyncGenerator
1313

1414
from ragbits.chat.interface import ChatInterface
15-
from ragbits.chat.interface.types import ChatResponse, Message
15+
from ragbits.chat.interface.types import ChatContext, ChatResponse
16+
from ragbits.core.prompt import ChatFormat
1617
from ragbits.core.llms import LiteLLM
1718

1819
class MyChat(ChatInterface):
@@ -22,8 +23,8 @@ class MyChat(ChatInterface):
2223
async def chat(
2324
self,
2425
message: str,
25-
history: list[Message] | None = None,
26-
context: dict | None = None,
26+
history: ChatFormat,
27+
context: ChatContext,
2728
) -> AsyncGenerator[ChatResponse, None]:
2829
async for chunk in self.llm.generate_streaming([*history, {"role": "user", "content": message}]):
2930
yield self.create_text_response(chunk)
@@ -58,7 +59,7 @@ Ragbits Chat supports multiple response types that can be yielded from your `cha
5859
Text responses are the primary way to stream content to users. Use `create_text_response()` to yield text chunks:
5960

6061
```python
61-
async def chat(self, message: str, history: list[Message] | None = None, context: dict | None = None) -> AsyncGenerator[ChatResponse, None]:
62+
async def chat(self, message: str, history: ChatFormat, context: ChatContext) -> AsyncGenerator[ChatResponse, None]:
6263
# Stream response from LLM
6364
async for chunk in self.llm.generate_streaming([*history, {"role": "user", "content": message}]):
6465
yield self.create_text_response(chunk)
@@ -69,7 +70,7 @@ async def chat(self, message: str, history: list[Message] | None = None, context
6970
References allow you to cite sources, documents, or external links that support your response:
7071

7172
```python
72-
async def chat(self, message: str, history: list[Message] | None = None, context: dict | None = None) -> AsyncGenerator[ChatResponse, None]:
73+
async def chat(self, message: str, history: ChatFormat, context: ChatContext) -> AsyncGenerator[ChatResponse, None]:
7374
# Add a reference
7475
yield self.create_reference(
7576
title="Example Reference",
@@ -85,7 +86,7 @@ You can include images in your responses using `create_image_response()`:
8586
```python
8687
import uuid
8788

88-
async def chat(self, message: str, history: list[Message] | None = None, context: dict | None = None) -> AsyncGenerator[ChatResponse, None]:
89+
async def chat(self, message: str, history: ChatFormat, context: ChatContext) -> AsyncGenerator[ChatResponse, None]:
8990
# Add an image to the response
9091
yield self.create_image_response(
9192
str(uuid.uuid4()), # Unique identifier for the image
@@ -98,7 +99,7 @@ async def chat(self, message: str, history: list[Message] | None = None, context
9899
Provide suggested follow-up questions to guide the conversation:
99100

100101
```python
101-
async def chat(self, message: str, history: list[Message] | None = None, context: dict | None = None) -> AsyncGenerator[ChatResponse, None]:
102+
async def chat(self, message: str, history: ChatFormat, context: ChatContext) -> AsyncGenerator[ChatResponse, None]:
102103
# Main response...
103104
async for chunk in self.llm.generate_streaming([*history, {"role": "user", "content": message}]):
104105
yield self.create_text_response(chunk)
@@ -121,7 +122,7 @@ Live updates show real-time progress for long-running operations (like web searc
121122
import asyncio
122123
from ragbits.chat.interface.types import LiveUpdateType
123124

124-
async def chat(self, message: str, history: list[Message] | None = None, context: dict | None = None) -> AsyncGenerator[ChatResponse, None]:
125+
async def chat(self, message: str, history: ChatFormat, context: ChatContext) -> AsyncGenerator[ChatResponse, None]:
125126
# Start a live update
126127
yield self.create_live_update(
127128
"search_task", # Unique task ID
@@ -163,12 +164,13 @@ Use `create_state_update()` to store state information that persists across conv
163164

164165
```python
165166
from ragbits.chat.interface.types import ChatContext
167+
from ragbits.core.prompt import ChatFormat
166168

167169
async def chat(
168170
self,
169171
message: str,
170-
history: list[Message] | None = None,
171-
context: ChatContext | None = None
172+
history: ChatFormat,
173+
context: ChatContext
172174
) -> AsyncGenerator[ChatResponse, None]:
173175
# Access existing state from context
174176
current_state = context.state if context else {}
@@ -393,7 +395,8 @@ from pydantic import BaseModel, ConfigDict, Field
393395

394396
from ragbits.chat.interface import ChatInterface
395397
from ragbits.chat.interface.forms import FeedbackConfig, UserSettings
396-
from ragbits.chat.interface.types import ChatContext, ChatResponse, LiveUpdateType, Message
398+
from ragbits.chat.interface.types import ChatContext, ChatResponse, LiveUpdateType
399+
from ragbits.core.prompt import ChatFormat
397400
from ragbits.chat.interface.ui_customization import HeaderCustomization, PageMetaCustomization, UICustomization
398401
from ragbits.core.llms import LiteLLM
399402

@@ -486,8 +489,8 @@ class MyChat(ChatInterface):
486489
async def chat(
487490
self,
488491
message: str,
489-
history: list[Message] | None = None,
490-
context: ChatContext | None = None,
492+
history: ChatFormat,
493+
context: ChatContext,
491494
) -> AsyncGenerator[ChatResponse, None]:
492495
"""
493496
Comprehensive chat implementation demonstrating all response types.

docs/index.md

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -258,8 +258,8 @@ class MyChat(ChatInterface):
258258
async def chat(
259259
self,
260260
message: str,
261-
history: ChatFormat | None = None,
262-
context: ChatContext | None = None,
261+
history: ChatFormat,
262+
context: ChatContext,
263263
) -> AsyncGenerator[ChatResponse]:
264264
async for result in agent.run_streaming(message):
265265
match result:

docs/tutorials/chat.md

Lines changed: 11 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -93,8 +93,8 @@ class BasicMountainChat(ChatInterface):
9393
async def chat(
9494
self,
9595
message: str,
96-
history: list[Message] | None = None,
97-
context: ChatContext | None = None,
96+
history: ChatFormat,
97+
context: ChatContext,
9898
) -> AsyncGenerator[ChatResponse, None]:
9999
# Basic streaming implementation
100100
stream = self.agent.run_streaming(
@@ -170,8 +170,8 @@ class BasicMountainChat(ChatInterface):
170170
async def chat(
171171
self,
172172
message: str,
173-
history: list[Message] | None = None,
174-
context: ChatContext | None = None,
173+
history: ChatFormat,
174+
context: ChatContext,
175175
) -> AsyncGenerator[ChatResponse, None]:
176176
# Enhanced streaming with tool handling
177177
stream = self.agent.run_streaming(
@@ -402,8 +402,8 @@ class MountainChatWithUI(ChatInterface):
402402
async def chat(
403403
self,
404404
message: str,
405-
history: list[Message] | None = None,
406-
context: ChatContext | None = None,
405+
history: ChatFormat,
406+
context: ChatContext,
407407
) -> AsyncGenerator[ChatResponse, None]:
408408
# Get user language preference
409409
language = "English"
@@ -530,11 +530,11 @@ class AuthenticatedMountainChat(ChatInterface):
530530
async def chat(
531531
self,
532532
message: str,
533-
history: list[Message] | None = None,
534-
context: ChatContext | None = None,
533+
history: ChatFormat,
534+
context: ChatContext,
535535
) -> AsyncGenerator[ChatResponse, None]:
536536
# Check authentication
537-
user_info = context.state.get("authenticated_user") if context else None
537+
user_info = context.user
538538

539539
if not user_info:
540540
yield self.create_text_response("⚠️ Authentication information not found.")
@@ -741,8 +741,8 @@ class MyChat(ChatInterface):
741741
base64_image = base64.b64encode(image_file.read()).decode("utf-8")
742742
return self.create_image_response(image_filename, f"data:image/png;base64,{base64_image}")
743743

744-
async def chat(self, message: str, history: list[Message] | None = None, context: ChatContext | None = None) -> AsyncGenerator[ChatResponse, None]:
745-
user_info = context.state.get("authenticated_user") if context else None
744+
async def chat(self, message: str, history: ChatFormat, context: ChatContext) -> AsyncGenerator[ChatResponse, None]:
745+
user_info = context.user
746746
if not user_info:
747747
yield self.create_text_response("⚠️ Authentication information not found.")
748748
return

examples/chat/README_authenticated.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -165,7 +165,7 @@ Modify the `chat()` method to customize responses based on user roles:
165165

166166
```python
167167
# Get user info from context
168-
user_info = context.state.get("authenticated_user") if context else None
168+
user_info = context.user
169169
user_roles = user_info.roles if user_info else []
170170

171171
if "admin" in user_roles:

examples/chat/authenticated_chat.py

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -28,9 +28,10 @@
2828

2929
from ragbits.chat.auth import ListAuthenticationBackend
3030
from ragbits.chat.interface import ChatInterface
31-
from ragbits.chat.interface.types import ChatContext, ChatResponse, LiveUpdateType, Message
31+
from ragbits.chat.interface.types import ChatContext, ChatResponse, LiveUpdateType
3232
from ragbits.chat.interface.ui_customization import HeaderCustomization, UICustomization
3333
from ragbits.core.llms import LiteLLM
34+
from ragbits.core.prompt.base import ChatFormat
3435

3536

3637
class MyAuthenticatedChat(ChatInterface):
@@ -61,14 +62,14 @@ def __init__(self):
6162
async def chat(
6263
self,
6364
message: str,
64-
history: list[Message] | None = None,
65-
context: ChatContext | None = None,
65+
history: ChatFormat,
66+
context: ChatContext,
6667
) -> AsyncGenerator[ChatResponse, None]:
6768
"""
6869
Authenticated chat implementation that provides user-specific responses.
6970
7071
This method is called after authentication validation passes.
71-
The user information is available in context.state["authenticated_user"].
72+
The user information is available in context.user.
7273
7374
Args:
7475
message: The current user message
@@ -79,7 +80,7 @@ async def chat(
7980
ChatResponse objects containing different types of content
8081
"""
8182
# Get authenticated user info
82-
user_info = context.state.get("authenticated_user") if context else None
83+
user_info = context.user
8384

8485
if not user_info:
8586
yield self.create_text_response("⚠️ Authentication information not found.")
@@ -100,8 +101,7 @@ async def chat(
100101
# Create user-specific state update
101102
yield self.create_state_update(
102103
{
103-
"authenticated_user_id": user_id,
104-
"session_context": context.session_id if context and context.session_id else "unknown",
104+
"session_context": context.session_id if context.session_id else "unknown",
105105
"user_roles": user_roles,
106106
"chat_timestamp": asyncio.get_event_loop().time(),
107107
}

examples/chat/chat.py

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -28,9 +28,10 @@
2828

2929
from ragbits.chat.interface import ChatInterface
3030
from ragbits.chat.interface.forms import FeedbackConfig, UserSettings
31-
from ragbits.chat.interface.types import ChatContext, ChatResponse, LiveUpdateType, Message
31+
from ragbits.chat.interface.types import ChatContext, ChatResponse, LiveUpdateType
3232
from ragbits.chat.interface.ui_customization import HeaderCustomization, PageMetaCustomization, UICustomization
3333
from ragbits.core.llms import LiteLLM
34+
from ragbits.core.prompt import ChatFormat
3435

3536

3637
class LikeFormExample(BaseModel):
@@ -96,8 +97,8 @@ def __init__(self) -> None:
9697
async def chat(
9798
self,
9899
message: str,
99-
history: list[Message] | None = None,
100-
context: ChatContext | None = None,
100+
history: ChatFormat,
101+
context: ChatContext,
101102
) -> AsyncGenerator[ChatResponse, None]:
102103
"""
103104
Example implementation of the ChatInterface.

examples/chat/offline_chat.py

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -26,9 +26,10 @@
2626

2727
from ragbits.chat.interface import ChatInterface
2828
from ragbits.chat.interface.forms import FeedbackConfig, UserSettings
29-
from ragbits.chat.interface.types import ChatContext, ChatResponse, LiveUpdateType, Message
29+
from ragbits.chat.interface.types import ChatContext, ChatResponse, LiveUpdateType
3030
from ragbits.chat.interface.ui_customization import HeaderCustomization, UICustomization
3131
from ragbits.chat.persistence.file import FileHistoryPersistence
32+
from ragbits.core.prompt import ChatFormat
3233

3334

3435
class LikeFormExample(BaseModel):
@@ -100,8 +101,8 @@ async def _generate_response(message: str) -> AsyncGenerator[str, None]:
100101
async def chat(
101102
self,
102103
message: str,
103-
history: list[Message] | None = None,
104-
context: ChatContext | None = None,
104+
history: ChatFormat,
105+
context: ChatContext,
105106
) -> AsyncGenerator[ChatResponse, None]:
106107
"""
107108
Offline implementation of the ChatInterface.

examples/chat/tutorial.py

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -44,10 +44,11 @@
4444
from ragbits.chat.auth import ListAuthenticationBackend
4545
from ragbits.chat.interface import ChatInterface
4646
from ragbits.chat.interface.forms import FeedbackConfig, UserSettings
47-
from ragbits.chat.interface.types import ChatContext, ChatResponse, LiveUpdateType, Message
47+
from ragbits.chat.interface.types import ChatContext, ChatResponse, LiveUpdateType
4848
from ragbits.chat.interface.ui_customization import HeaderCustomization, PageMetaCustomization, UICustomization
4949
from ragbits.core.llms import LiteLLM, ToolCall
5050
from ragbits.core.prompt import Prompt
51+
from ragbits.core.prompt.base import ChatFormat
5152

5253

5354
class LikeFormExample(BaseModel):
@@ -200,8 +201,8 @@ async def _create_image_response(self, image_path: Path) -> ChatResponse:
200201
async def chat(
201202
self,
202203
message: str,
203-
history: list[Message] | None = None,
204-
context: ChatContext | None = None,
204+
history: ChatFormat,
205+
context: ChatContext,
205206
) -> AsyncGenerator[ChatResponse, None]:
206207
"""
207208
Example implementation of the ChatInterface.
@@ -218,7 +219,7 @@ async def chat(
218219
- Live updates for tool execution status
219220
"""
220221
# Get authenticated user info
221-
user_info = context.state.get("authenticated_user") if context else None
222+
user_info = context.user
222223

223224
if not user_info:
224225
yield self.create_text_response("⚠️ Authentication information not found.")

packages/ragbits-chat/CHANGELOG.md

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,8 @@
11
# CHANGELOG
22

33
## Unreleased
4+
5+
- fix: replace authenticated_user state tracking with direct user field in ChatContext
46
- Refactor chat handlers in the UI to use registry (#805)
57
- Add auth token storage and automatic logout on 401 (#802)
68
- Improve user settings storage when history is disabled (#799)

0 commit comments

Comments
 (0)