Skip to content

Commit fa9d221

Browse files
committed
bug: The selected model was not being sent to the provider
1 parent 7f6105a commit fa9d221

File tree

5 files changed

+24
-24
lines changed

5 files changed

+24
-24
lines changed

chatbot-api/src/chat/provider.py

Lines changed: 6 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -38,31 +38,26 @@ def _format_message(self, message: Message) -> dict:
 
     def generate_response(self, messages: List[Message]) -> ChatResponse:
         """Generate a response for messages"""
-        # Use the model from the last message, or default to claude-3-5-sonnet
-        model = next(
-            (m.model for m in reversed(messages) if m.model), "claude-3-5-sonnet"
-        )
+        # Use the model from the latest message
+        model = messages[-1].model if messages and messages[-1].model else "claude-3-5-sonnet"
         formatted_messages = [self._format_message(m) for m in messages]
         response = self.client.chat.completions.create(
             model=model, messages=formatted_messages
         )
 
         return ChatResponse(
             content=response.choices[0].message.content,
-            model=response.model,
+            model=model,
             timestamp=datetime.utcnow(),
         )
 
     async def generate_stream(self, messages: List[Message]) -> AsyncIterator[str]:
         """Stream response for messages"""
         try:
-            # Use the model from the last message, or default to claude-3-5-sonnet
-            model = next(
-                (m.model for m in reversed(messages) if m.model), "claude-3-5-sonnet"
-            )
+            model = messages[-1].model
+            logger.info(f"Using model: {model}")
             formatted_messages = [self._format_message(m) for m in messages]
+
             stream = self.client.chat.completions.create(
                 model=model, messages=formatted_messages, stream=True
             )

chatbot-api/src/chat/routes.py

Lines changed: 12 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -84,9 +84,13 @@ async def chat(
 async def stream_chat(
     request: ChatRequestSchema, chat_service: ChatService = Depends(get_chat_service)
 ):
-    # Convert request messages to domain models
+    # Get the selected model from the last message
+    selected_model = request.messages[-1].model
+    logger.info(f"Selected model: {selected_model}")
+
+    # Convert previous messages to domain models
     messages = []
-    for msg in request.messages[:-1]:  # All messages except the last one
+    for msg in request.messages[:-1]:
         if isinstance(msg.content, str):
             content = msg.content
         else:
@@ -98,22 +102,18 @@ async def stream_chat(
                 )
                 for c in msg.content
             ]
-        messages.append(
-            Message(
-                role=msg.role,
-                content=content,
-                model=msg.model,
-                timestamp=msg.timestamp or datetime.utcnow(),
-            )
-        )
+        messages.append(Message(role=msg.role, content=content))
 
-    # Extract content from the last message
+    # Extract content from the last message and create it with the selected model
     text, images = extract_message_content(request.messages[-1])
 
     async def generate():
         try:
             async for chunk in chat_service.stream_response(
-                text=text, images=images, conversation_messages=messages
+                text=text,
+                images=images,
+                model=selected_model,
+                conversation_messages=messages
             ):
                 yield f"data: {json.dumps({'content': chunk})}\n\n"
         except Exception as e:

chatbot-api/src/chat/service.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -35,6 +35,7 @@ async def stream_response(
         self,
         text: str = "",
         images: List[str] = None,
+        model: str = None,
         conversation_messages: List[Message] = None,
     ) -> AsyncIterator[str]:
         """Stream response for a message with optional images"""
@@ -49,7 +50,8 @@ async def stream_response(
         else:
             content = text
 
-        messages.append(Message(role="user", content=content))
+        # Add the current message with the selected model
+        messages.append(Message(role="user", content=content, model=model))
 
         async for chunk in self.ai_provider.generate_stream(messages):
             yield chunk

chatbot-api/src/conversation/service.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -48,6 +48,7 @@ def generate_name(self, message: str) -> str:
                 role="user",
                 content=prompt,
                 timestamp=datetime.now(timezone.utc),
+                model="gpt-4o-mini",
             )
         ]
     )

chatbot-ui/src/components/MessageBubble.tsx

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -22,6 +22,8 @@ export type MessageContent = {
 export interface Message {
   role: 'user' | 'assistant';
   content: string | MessageContent[];
+  model?: string;
+  timestamp?: string;
 }
 
 interface MessageBubbleProps {

0 commit comments

Comments (0)