Skip to content

Commit 632d356

Browse files
committed
refactor: chat module
1 parent 46c869f commit 632d356

File tree

1 file changed

+83
-53
lines changed

1 file changed

+83
-53
lines changed

backend/tenantfirstaid/chat.py

Lines changed: 83 additions & 53 deletions
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@
33
from flask.views import View
44
import os
55

6-
from backend.tenantfirstaid.session import TenantSessionData
6+
from .session import TenantSessionData
77

88
API_KEY = os.getenv("OPENAI_API_KEY", os.getenv("GITHUB_API_KEY"))
99
BASE_URL = os.getenv("MODEL_ENDPOINT", "https://api.openai.com/v1")
@@ -31,11 +31,12 @@
3131

3232

3333
class ChatManager:
34-
def __init__(self):
34+
def __init__(self, tenant_session: TenantSessionData = None):
3535
self.client = OpenAI(
3636
api_key=API_KEY,
3737
base_url=BASE_URL,
3838
)
39+
self.tenant_session = tenant_session
3940

4041
def get_client(self):
4142
return self.client
@@ -55,51 +56,92 @@ def prepare_openai_tools(self, current_session: TenantSessionData):
5556
# This filters out other cities in the same state.
5657
# The user is gated into selecting a city in Oregon so we don't worry about
5758
# whether the relevant documents exist or not.
59+
filters = (
60+
{
61+
"type": "or",
62+
"filters": [
63+
{
64+
"type": "and",
65+
"filters": [
66+
{
67+
"type": "eq",
68+
"key": "city",
69+
"value": current_session["city"],
70+
},
71+
{
72+
"type": "eq",
73+
"key": "state",
74+
"value": current_session["state"],
75+
},
76+
],
77+
},
78+
{
79+
"type": "and",
80+
"filters": [
81+
{
82+
"type": "eq",
83+
"key": "city",
84+
"value": "null",
85+
},
86+
{
87+
"type": "eq",
88+
"key": "state",
89+
"value": current_session["state"],
90+
},
91+
],
92+
},
93+
],
94+
}
95+
if current_session["city"] != "null"
96+
else {
97+
# If city is null, we only filter by state
98+
"type": "and",
99+
"filters": [
100+
{
101+
"type": "eq",
102+
"key": "city",
103+
"value": "null",
104+
},
105+
{
106+
"type": "eq",
107+
"key": "state",
108+
"value": current_session["state"],
109+
},
110+
],
111+
}
112+
)
113+
58114
return [
59115
{
60116
"type": "file_search",
61117
"vector_store_ids": [VECTOR_STORE_ID],
62118
"max_num_results": os.getenv("NUM_FILE_SEARCH_RESULTS", 10),
63-
"filters": {
64-
"type": "or",
65-
"filters": [
66-
{
67-
"type": "and",
68-
"filters": [
69-
{
70-
"type": "eq",
71-
"key": "city",
72-
"value": current_session["city"],
73-
},
74-
{
75-
"type": "eq",
76-
"key": "state",
77-
"value": current_session["state"],
78-
},
79-
],
80-
}
81-
if current_session["city"] != "null"
82-
else None,
83-
{
84-
"type": "and",
85-
"filters": [
86-
{
87-
"type": "eq",
88-
"key": "city",
89-
"value": "null",
90-
},
91-
{
92-
"type": "eq",
93-
"key": "state",
94-
"value": current_session["state"],
95-
},
96-
],
97-
},
98-
],
99-
},
119+
"filters": filters,
100120
}
101121
]
102122

123+
def generate_chat_response(
124+
self, current_session: TenantSessionData, user_msg: str, stream=False
125+
):
126+
# Update the session with the user message
127+
current_session["messages"].append({"role": "user", "content": user_msg})
128+
129+
instructions = self.prepare_developer_instructions(current_session)
130+
tools = self.prepare_openai_tools(current_session)
131+
132+
# Use the OpenAI client to generate a response
133+
response_stream = self.client.responses.create(
134+
model=MODEL,
135+
input=current_session["messages"],
136+
instructions=instructions,
137+
reasoning={"effort": MODEL_REASONING_EFFORT},
138+
stream=stream,
139+
include=["file_search_call.results"],
140+
tools=tools if tools else None,
141+
)
142+
143+
return response_stream
144+
103145

104146
class ChatView(View):
105147
client = OpenAI(
@@ -117,23 +159,11 @@ def dispatch_request(self):
117159

118160
current_session = self.tenant_session.get()
119161

120-
# Update our cache with the user message
121-
current_session["messages"].append({"role": "user", "content": user_msg})
122-
123-
instructions = self.chat_manager.prepare_developer_instructions(current_session)
124-
tools = self.chat_manager.prepare_openai_tools(current_session)
125-
126162
def generate():
127163
try:
128164
# Use the new Responses API with streaming
129-
response_stream = self.client.responses.create(
130-
model=MODEL,
131-
input=current_session["messages"],
132-
instructions=instructions,
133-
reasoning={"effort": MODEL_REASONING_EFFORT},
134-
stream=True,
135-
include=["file_search_call.results"],
136-
tools=tools if tools else None,
165+
response_stream = self.chat_manager.generate_chat_response(
166+
current_session, user_msg, stream=True
137167
)
138168

139169
assistant_chunks = []

0 commit comments

Comments (0)