@@ -1,7 +1,7 @@
 import os
 import logging
 from dotenv import load_dotenv
-from fastapi import APIRouter, Request, UploadFile, File, HTTPException
+from fastapi import APIRouter, Request, UploadFile, File, HTTPException, Depends
 from fastapi.responses import StreamingResponse
 from pydantic import BaseModel
 from openai import AsyncOpenAI
@@ -14,9 +14,6 @@
 
 router: APIRouter = APIRouter(prefix="/assistants/{assistant_id}/files", tags=["assistants_files"])
 
-# Initialize OpenAI client
-openai: AsyncOpenAI = AsyncOpenAI()
-
 # Pydantic model for DELETE request body
 class DeleteRequest(BaseModel):
     fileId: str
@@ -26,13 +23,13 @@ class FileParams(BaseModel):
     file_id: str
 
 # Helper function to get or create a vector store
-async def get_or_create_vector_store(assistantId: str) -> str:
-    assistant = await openai.beta.assistants.retrieve(assistantId)
+async def get_or_create_vector_store(assistant_id: str, client: AsyncOpenAI) -> str:
+    assistant = await client.beta.assistants.retrieve(assistant_id)
     if assistant.tool_resources and assistant.tool_resources.file_search and assistant.tool_resources.file_search.vector_store_ids:
         return assistant.tool_resources.file_search.vector_store_ids[0]
 
-    vector_store = await openai.beta.vectorStores.create(name="sample-assistant-vector-store")
-    await openai.beta.assistants.update(assistantId, {
+    vector_store = await client.beta.vector_stores.create(name="sample-assistant-vector-store")
+    await client.beta.assistants.update(assistant_id, **{
         "tool_resources": {
             "file_search": {
                 "vector_store_ids": [vector_store.id],
@@ -43,13 +40,13 @@ async def get_or_create_vector_store(assistantId: str) -> str:
 
 
 @router.get("/files/{file_id}")
-async def get_file(file_id: str):
+async def get_file(file_id: str, client: AsyncOpenAI = Depends(lambda: AsyncOpenAI())):
     """
     Endpoint to download a file by file ID.
     """
     try:
         # Retrieve the file metadata, then the file content
-        file, file_content = await openai.files.retrieve(file_id), await openai.files.content(file_id)
+        file, file_content = await client.files.retrieve(file_id), await client.files.content(file_id)
 
         # Return the file content as a streaming response
         return StreamingResponse(
@@ -62,28 +59,28 @@ async def get_file(file_id: str):
 
 
 @router.post("/upload")
-async def upload_file(file: UploadFile = File(...)):
+async def upload_file(assistant_id: str, file: UploadFile = File(...), client: AsyncOpenAI = Depends(lambda: AsyncOpenAI())):
     # Process file and upload to OpenAI
-    vector_store_id = await get_or_create_vector_store()
-    openai_file = await openai.files.create(
+    vector_store_id = await get_or_create_vector_store(assistant_id, client)
+    openai_file = await client.files.create(
         file=file.file,
         purpose="assistants"
     )
-    await openai.beta.vectorStores.files.create(vector_store_id, {
+    await client.beta.vector_stores.files.create(vector_store_id, **{
         "file_id": openai_file.id
     })
     return {"message": "File uploaded successfully"}
 
 @router.get("/files")
-async def list_files():
+async def list_files(assistant_id: str, client: AsyncOpenAI = Depends(lambda: AsyncOpenAI())):
     # List files in the vector store
-    vector_store_id = await get_or_create_vector_store()
-    file_list = await openai.beta.vectorStores.files.list(vector_store_id)
+    vector_store_id = await get_or_create_vector_store(assistant_id, client)
+    file_list = await client.beta.vector_stores.files.list(vector_store_id)
 
     files_array = []
     for file in file_list.data:
-        file_details = await openai.files.retrieve(file.id)
-        vector_file_details = await openai.beta.vectorStores.files.retrieve(vector_store_id, file.id)
+        file_details = await client.files.retrieve(file.id)
+        vector_file_details = await client.beta.vector_stores.files.retrieve(file.id, vector_store_id=vector_store_id)
         files_array.append({
             "file_id": file.id,
             "filename": file_details.filename,
@@ -93,10 +90,10 @@ async def list_files():
     return files_array
 
 @router.delete("/delete")
-async def delete_file(request: Request):
+async def delete_file(assistant_id: str, request: Request, client: AsyncOpenAI = Depends(lambda: AsyncOpenAI())):
     # Delete file from vector store
     body = await request.json()
     delete_request = DeleteRequest(**body)
-    vector_store_id = await get_or_create_vector_store()
-    await openai.beta.vectorStores.files.delete(vector_store_id, delete_request.fileId)
+    vector_store_id = await get_or_create_vector_store(assistant_id, client)
+    await client.beta.vector_stores.files.delete(delete_request.fileId, vector_store_id=vector_store_id)
     return {"message": "File deleted successfully"}
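The change repeats `Depends(lambda: AsyncOpenAI())` in every endpoint signature, which constructs a fresh client per request and cannot be overridden by name. A minimal sketch of one possible follow-up, assuming a named dependency `get_openai_client` (hypothetical, not part of this commit); the endpoint body and `get_or_create_vector_store` helper are the ones from the diff:

```python
from functools import lru_cache

from fastapi import APIRouter, Depends
from openai import AsyncOpenAI

router: APIRouter = APIRouter(prefix="/assistants/{assistant_id}/files", tags=["assistants_files"])

@lru_cache
def get_openai_client() -> AsyncOpenAI:
    # Hypothetical named dependency: builds the client once and caches it,
    # instead of constructing a new AsyncOpenAI on every request.
    return AsyncOpenAI()

@router.get("/files")
async def list_files(assistant_id: str, client: AsyncOpenAI = Depends(get_openai_client)):
    # Same shape as the commit's endpoint, with the inline lambda replaced
    # by the named dependency; get_or_create_vector_store is the diff's helper.
    vector_store_id = await get_or_create_vector_store(assistant_id, client)
    file_list = await client.beta.vector_stores.files.list(vector_store_id)
    return [f.id for f in file_list.data]
```

A named dependency can also be swapped out in tests via `app.dependency_overrides[get_openai_client] = lambda: fake_client`, which the inline lambda version does not allow, since the lambda object is never exposed.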
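For reference, a sketch of exercising the resulting routes with httpx against a local server; the host, port, and `asst_`/`file_` IDs are placeholders, and the doubled `files` segment falls out of mounting `/files/{file_id}` under the `/assistants/{assistant_id}/files` prefix:

```python
import httpx

BASE = "http://localhost:8000/assistants/asst_123/files"  # placeholder host and assistant ID

with httpx.Client() as http:
    # POST /upload: the multipart field must be named "file" to bind to UploadFile = File(...)
    with open("report.pdf", "rb") as f:  # placeholder file
        print(http.post(f"{BASE}/upload", files={"file": f}).json())

    # GET /files: list the vector store's files
    print(http.get(f"{BASE}/files").json())

    # GET /files/{file_id}: download one file (note the doubled "files" segment)
    content = http.get(f"{BASE}/files/file_abc").content

    # DELETE /delete: the JSON body is parsed into DeleteRequest; httpx's
    # delete() helper takes no json kwarg, so use request() directly
    print(http.request("DELETE", f"{BASE}/delete", json={"fileId": "file_abc"}).json())
```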