Skip to content

Commit f1bff72

Browse files
refactor: simplify stream_chat_request method by removing unnecessary request_body parameter
1 parent 598cf36 commit f1bff72

File tree

3 files changed

+6
-15
lines changed

3 files changed

+6
-15
lines changed

src/api/api/api_routes.py

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -119,7 +119,7 @@ async def conversation(request: Request):
119119
conversation_id = request_json.get("conversation_id")
120120
query = request_json.get("query")
121121
chat_service = ChatService(request=request)
122-
result = await chat_service.stream_chat_request(request_json, conversation_id, query)
122+
result = await chat_service.stream_chat_request(conversation_id, query)
123123
track_event_if_configured(
124124
"ChatStreamSuccess",
125125
{"conversation_id": conversation_id, "query": query}

src/api/services/chat_service.py

Lines changed: 1 addition & 6 deletions
Original file line number | Diff line number | Diff line change
@@ -8,9 +8,6 @@
88

99
import json
1010
import logging
11-
import time
12-
import uuid
13-
from types import SimpleNamespace
1411
import asyncio
1512
import random
1613
import re
@@ -25,7 +22,6 @@
2522

2623
from cachetools import TTLCache
2724

28-
from helpers.utils import format_stream_response
2925
from common.config.config import Config
3026

3127
# Constants
@@ -135,11 +131,10 @@ async def stream_openai_text(self, conversation_id: str, query: str) -> Streamin
135131
ChatService.thread_cache[corrupt_key] = thread_id
136132
yield "I cannot answer this question with the current data. Please rephrase or add more details."
137133

138-
async def stream_chat_request(self, request_body, conversation_id, query):
134+
async def stream_chat_request(self, conversation_id, query):
139135
"""
140136
Handles streaming chat requests.
141137
"""
142-
history_metadata = request_body.get("history_metadata", {})
143138

144139
async def generate():
145140
try:

src/tests/api/services/test_chat_service.py

Lines changed: 4 additions & 8 deletions
Original file line number | Diff line number | Diff line change
@@ -252,8 +252,7 @@ async def mock_stream_openai_text(conversation_id, query):
252252

253253
chat_service.stream_openai_text = mock_stream_openai_text
254254

255-
request_body = {"history_metadata": {"test": "metadata"}}
256-
generator = await chat_service.stream_chat_request(request_body, "conv_1", "Hello")
255+
generator = await chat_service.stream_chat_request("conv_1", "Hello")
257256

258257
chunks = []
259258
async for chunk in generator:
@@ -280,8 +279,7 @@ async def mock_stream_openai_text_rate_limit_error(conversation_id, query):
280279

281280
chat_service.stream_openai_text = mock_stream_openai_text_rate_limit_error
282281

283-
request_body = {"history_metadata": {}}
284-
generator = await chat_service.stream_chat_request(request_body, "conv_1", "Hello")
282+
generator = await chat_service.stream_chat_request("conv_1", "Hello")
285283

286284
chunks = []
287285
async for chunk in generator:
@@ -304,8 +302,7 @@ async def mock_stream_openai_text_generic_error(conversation_id, query):
304302

305303
chat_service.stream_openai_text = mock_stream_openai_text_generic_error
306304

307-
request_body = {"history_metadata": {}}
308-
generator = await chat_service.stream_chat_request(request_body, "conv_1", "Hello")
305+
generator = await chat_service.stream_chat_request("conv_1", "Hello")
309306

310307
chunks = []
311308
async for chunk in generator:
@@ -328,8 +325,7 @@ async def mock_stream_openai_text_generic_error(conversation_id, query):
328325

329326
chat_service.stream_openai_text = mock_stream_openai_text_generic_error
330327

331-
request_body = {"history_metadata": {}}
332-
generator = await chat_service.stream_chat_request(request_body, "conv_1", "Hello")
328+
generator = await chat_service.stream_chat_request("conv_1", "Hello")
333329

334330
chunks = []
335331
async for chunk in generator:

0 commit comments

Comments (0)