Skip to content

Commit 88ab9b1

Browse files
Merge pull request #9 from Promptly-Technologies-LLC/5-test-and-hook-up-warnings-component
Use Depends to create client in API endpoints
2 parents 9662f13 + c56db0e commit 88ab9b1

File tree

3 files changed

+30
-42
lines changed

3 files changed

+30
-42
lines changed

routers/files.py

Lines changed: 19 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
import os
22
import logging
33
from dotenv import load_dotenv
4-
from fastapi import APIRouter, Request, UploadFile, File, HTTPException
4+
from fastapi import APIRouter, Request, UploadFile, File, HTTPException, Depends
55
from fastapi.responses import StreamingResponse
66
from pydantic import BaseModel
77
from openai import AsyncOpenAI
@@ -14,9 +14,6 @@
1414

1515
router: APIRouter = APIRouter(prefix="/assistants/{assistant_id}/files", tags=["assistants_files"])
1616

17-
# Initialize OpenAI client
18-
openai: AsyncOpenAI = AsyncOpenAI()
19-
2017
# Pydantic model for DELETE request body
2118
class DeleteRequest(BaseModel):
2219
fileId: str
@@ -26,13 +23,13 @@ class FileParams(BaseModel):
2623
file_id: str
2724

2825
# Helper function to get or create a vector store
29-
async def get_or_create_vector_store(assistantId: str) -> str:
30-
assistant = await openai.beta.assistants.retrieve(assistantId)
26+
async def get_or_create_vector_store(assistantId: str, client: AsyncOpenAI = Depends(lambda: AsyncOpenAI())) -> str:
27+
assistant = await client.beta.assistants.retrieve(assistantId)
3128
if assistant.tool_resources and assistant.tool_resources.file_search and assistant.tool_resources.file_search.vector_store_ids:
3229
return assistant.tool_resources.file_search.vector_store_ids[0]
3330

34-
vector_store = await openai.beta.vectorStores.create(name="sample-assistant-vector-store")
35-
await openai.beta.assistants.update(assistantId, {
31+
vector_store = await client.beta.vectorStores.create(name="sample-assistant-vector-store")
32+
await client.beta.assistants.update(assistantId, {
3633
"tool_resources": {
3734
"file_search": {
3835
"vector_store_ids": [vector_store.id],
@@ -43,13 +40,13 @@ async def get_or_create_vector_store(assistantId: str) -> str:
4340

4441

4542
@router.get("/files/{file_id}")
46-
async def get_file(file_id: str):
43+
async def get_file(file_id: str, client: AsyncOpenAI = Depends(lambda: AsyncOpenAI())):
4744
"""
4845
Endpoint to download a file by file ID.
4946
"""
5047
try:
5148
# Retrieve file metadata and content concurrently
52-
file, file_content = await openai.files.retrieve(file_id), await openai.files.content(file_id)
49+
file, file_content = await client.files.retrieve(file_id), await client.files.content(file_id)
5350

5451
# Return the file content as a streaming response
5552
return StreamingResponse(
@@ -62,28 +59,28 @@ async def get_file(file_id: str):
6259

6360

6461
@router.post("/upload")
65-
async def upload_file(file: UploadFile = File(...)):
62+
async def upload_file(file: UploadFile = File(...), client: AsyncOpenAI = Depends(lambda: AsyncOpenAI())):
6663
# Process file and upload to OpenAI
67-
vector_store_id = await get_or_create_vector_store()
68-
openai_file = await openai.files.create(
64+
vector_store_id = await get_or_create_vector_store(assistant_id, client)
65+
openai_file = await client.files.create(
6966
file=file.file,
7067
purpose="assistants"
7168
)
72-
await openai.beta.vectorStores.files.create(vector_store_id, {
69+
await client.beta.vectorStores.files.create(vector_store_id, {
7370
"file_id": openai_file.id
7471
})
7572
return {"message": "File uploaded successfully"}
7673

7774
@router.get("/files")
78-
async def list_files():
75+
async def list_files(client: AsyncOpenAI = Depends(lambda: AsyncOpenAI())):
7976
# List files in the vector store
80-
vector_store_id = await get_or_create_vector_store()
81-
file_list = await openai.beta.vectorStores.files.list(vector_store_id)
77+
vector_store_id = await get_or_create_vector_store(assistant_id, client)
78+
file_list = await client.beta.vectorStores.files.list(vector_store_id)
8279

8380
files_array = []
8481
for file in file_list.data:
85-
file_details = await openai.files.retrieve(file.id)
86-
vector_file_details = await openai.beta.vectorStores.files.retrieve(vector_store_id, file.id)
82+
file_details = await client.files.retrieve(file.id)
83+
vector_file_details = await client.beta.vectorStores.files.retrieve(vector_store_id, file.id)
8784
files_array.append({
8885
"file_id": file.id,
8986
"filename": file_details.filename,
@@ -93,10 +90,10 @@ async def list_files():
9390
return files_array
9491

9592
@router.delete("/delete")
96-
async def delete_file(request: Request):
93+
async def delete_file(request: Request, client: AsyncOpenAI = Depends(lambda: AsyncOpenAI())):
9794
# Delete file from vector store
9895
body = await request.json()
9996
delete_request = DeleteRequest(**body)
100-
vector_store_id = await get_or_create_vector_store()
101-
await openai.beta.vectorStores.files.delete(vector_store_id, delete_request.fileId)
97+
vector_store_id = await get_or_create_vector_store(assistant_id, client)
98+
await client.beta.vectorStores.files.delete(vector_store_id, delete_request.fileId)
10299
return {"message": "File deleted successfully"}

routers/messages.py

Lines changed: 8 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
import os
22
import logging
33
from dotenv import load_dotenv
4-
from fastapi import APIRouter, Request, Form, HTTPException
4+
from fastapi import APIRouter, Form, HTTPException, Depends
55
from fastapi.responses import StreamingResponse
66
from openai import AsyncOpenAI
77
from openai.resources.beta.threads.runs.runs import AsyncAssistantStreamManager
@@ -10,29 +10,22 @@
1010

1111
logger: logging.Logger = logging.getLogger("uvicorn.error")
1212

13-
# Get the assistant ID from .env file
14-
load_dotenv()
15-
assistant_id: str = os.getenv("ASSISTANT_ID")
16-
1713
# Initialize the router
1814
router: APIRouter = APIRouter()
1915

20-
# Initialize the OpenAI client
21-
openai: AsyncOpenAI = AsyncOpenAI()
22-
2316
# Send a new message to a thread
2417
@router.post("/send_message")
2518
async def post_message(
26-
request: Request,
2719
userInput: str = Form(...),
28-
thread_id: str | None = Form(None)
20+
thread_id: str | None = Form(None),
21+
client: AsyncOpenAI = Depends(lambda: AsyncOpenAI())
2922
) -> dict:
3023
# Create a new assistant chat thread if no thread ID is provided
3124
if not thread_id or thread_id == "None" or thread_id == "null":
3225
thread_id: str = await create_thread()
3326

3427
# Create a new message in the thread
35-
await openai.beta.threads.messages.create(
28+
await client.beta.threads.messages.create(
3629
thread_id=thread_id,
3730
role="user",
3831
content=userInput
@@ -42,17 +35,17 @@ async def post_message(
4235

4336
@router.get("/stream_response")
4437
async def stream_response(
45-
request: Request,
4638
thread_id: str | None = None,
39+
client: AsyncOpenAI = Depends(lambda: AsyncOpenAI())
4740
) -> StreamingResponse:
4841
if not thread_id:
4942
raise HTTPException(status_code=400, message="thread_id is required")
5043

5144
# Create a generator to stream the response from the assistant
52-
# Create a generator to stream the response from the assistant
45+
load_dotenv()
5346
async def event_generator():
54-
stream: AsyncAssistantStreamManager = openai.beta.threads.runs.stream(
55-
assistant_id=assistant_id,
47+
stream: AsyncAssistantStreamManager = client.beta.threads.runs.stream(
48+
assistant_id=os.getenv("ASSISTANT_ID"),
5649
thread_id=thread_id
5750
)
5851
async with stream as stream_manager:

routers/tools.py

Lines changed: 3 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,28 +1,26 @@
11
import logging
22
from typing import Any
3-
from fastapi import APIRouter, Request, HTTPException
3+
from fastapi import APIRouter, Request, HTTPException, Depends
44
from pydantic import BaseModel
55
from openai import AsyncOpenAI
66

77
logger = logging.getLogger("uvicorn.error")
88

99
router = APIRouter()
1010

11-
openai = AsyncOpenAI()
12-
1311
class ToolCallOutputs(BaseModel):
1412
tool_outputs: Any
1513
runId: str
1614

1715
@router.post("/send_tool_outputs")
18-
async def post_tool_outputs(thread_id: str, request: Request):
16+
async def post_tool_outputs(thread_id: str, request: Request, client: AsyncOpenAI = Depends(lambda: AsyncOpenAI())):
1917
try:
2018
# Parse the JSON body into the ToolCallOutputs model
2119
data = await request.json()
2220
tool_call_outputs = ToolCallOutputs(**data)
2321

2422
# Submit tool outputs stream
25-
stream = await openai.beta.threads.runs.submit_tool_outputs_stream(
23+
stream = await client.beta.threads.runs.submit_tool_outputs_stream(
2624
thread_id,
2725
tool_call_outputs.runId,
2826
{"tool_outputs": tool_call_outputs.tool_outputs}

0 commit comments

Comments (0)