1
1
import os
2
2
import logging
3
3
from dotenv import load_dotenv
4
- from fastapi import APIRouter , Request , UploadFile , File , HTTPException , Depends
4
+ from fastapi import APIRouter , Request , UploadFile , File , HTTPException , Depends , Form
5
5
from fastapi .responses import StreamingResponse
6
- from pydantic import BaseModel
7
6
from openai import AsyncOpenAI
8
7
9
8
logger = logging .getLogger ("uvicorn.error" )
17
16
tags = ["assistants_files" ]
18
17
)
19
18
20
- # Pydantic model for DELETE request body
21
- class DeleteRequest (BaseModel ):
22
- fileId : str
23
-
24
- # Pydantic model for request parameters
25
- class FileParams (BaseModel ):
26
- file_id : str
27
-
28
19
# Helper function to get or create a vector store for the assistant's
# file_search tool.
async def get_or_create_vector_store(assistantId: str, client: AsyncOpenAI = None) -> str:
    """Return the id of the vector store attached to the assistant.

    If the assistant already has a file_search vector store attached, its id
    is returned. Otherwise a new vector store is created, attached to the
    assistant, and its id returned.

    Args:
        assistantId: id of the OpenAI assistant to inspect/update.
        client: optional AsyncOpenAI client; a fresh one is created when
            omitted.

    Returns:
        The vector store id as a string.
    """
    # NOTE: Depends() only resolves inside FastAPI route signatures; a plain
    # helper called directly must construct its own client when none is given.
    if client is None:
        client = AsyncOpenAI()

    assistant = await client.beta.assistants.retrieve(assistantId)
    tool_resources = assistant.tool_resources
    if tool_resources and tool_resources.file_search and tool_resources.file_search.vector_store_ids:
        return tool_resources.file_search.vector_store_ids[0]

    # No store attached yet: create one and attach it to the assistant.
    # The Python SDK uses snake_case namespaces (vector_stores, not the
    # JS SDK's vectorStores) and keyword arguments for update payloads.
    vector_store = await client.beta.vector_stores.create(name="sample-assistant-vector-store")
    await client.beta.assistants.update(
        assistantId,
        tool_resources={
            "file_search": {
                "vector_store_ids": [vector_store.id],
            },
        },
    )
    return vector_store.id
43
34
44
35
45
- @router .get ("/files/{file_id}" )
46
- async def get_file (file_id : str ):
47
- try :
48
- # Retrieve file metadata and content concurrently
49
- client = AsyncOpenAI ()
50
- file , file_content = await client .files .retrieve (file_id ), await client .files .content (file_id )
51
-
52
- # Return the file content as a streaming response
53
- return StreamingResponse (
54
- file_content .body ,
55
- headers = {"Content-Disposition" : f'attachment; filename="{ file .filename } "' }
56
- )
57
- except Exception as e :
58
- # Handle exceptions and return an HTTP error response
59
- raise HTTPException (status_code = 500 , detail = str (e ))
36
@router.get("/")
async def list_files(client: AsyncOpenAI = Depends(lambda: AsyncOpenAI())):
    """List the files attached to the assistant's vector store.

    Returns a list of dicts with ``file_id``, ``filename`` and the vector
    store ingestion ``status`` for each file.
    """
    vector_store_id = await get_or_create_vector_store(assistant_id, client)
    # Python SDK namespace is snake_case: vector_stores (vectorStores is the
    # JavaScript SDK spelling and raises AttributeError here).
    file_list = await client.beta.vector_stores.files.list(vector_store_id)

    files_array = []
    for vs_file in file_list.data:
        # Fetch the underlying file metadata (for the filename) and the
        # per-vector-store record (for the ingestion status).
        file_details = await client.files.retrieve(vs_file.id)
        vector_file_details = await client.beta.vector_stores.files.retrieve(
            vs_file.id,
            vector_store_id=vector_store_id,
        )
        files_array.append({
            "file_id": vs_file.id,
            "filename": file_details.filename,
            "status": vector_file_details.status,
        })

    return files_array
60
53
61
54
62
- @router .post ("/upload " )
55
+ @router .post ("/" )
63
56
async def upload_file (file : UploadFile = File (...)):
64
57
try :
65
58
client = AsyncOpenAI ()
@@ -81,29 +74,25 @@ async def upload_file(file: UploadFile = File(...)):
81
74
raise HTTPException (status_code = 500 , detail = str (e ))
82
75
83
76
84
- @router .get ("" )
85
- async def list_files (client : AsyncOpenAI = Depends (lambda : AsyncOpenAI ())):
86
- # List files in the vector store
87
- vector_store_id = await get_or_create_vector_store (assistant_id , client )
88
- file_list = await client .beta .vectorStores .files .list (vector_store_id )
89
-
90
- files_array = []
91
- for file in file_list .data :
92
- file_details = await client .files .retrieve (file .id )
93
- vector_file_details = await client .beta .vectorStores .files .retrieve (vector_store_id , file .id )
94
- files_array .append ({
95
- "file_id" : file .id ,
96
- "filename" : file_details .filename ,
97
- "status" : vector_file_details .status ,
98
- })
99
-
100
- return files_array
77
@router.get("/{file_id}")
async def get_file(file_id: str, client: AsyncOpenAI = Depends(lambda: AsyncOpenAI())):
    """Stream a stored file's content back as an attachment download.

    Args:
        file_id: OpenAI file id taken from the URL path.

    Raises:
        HTTPException: 500 with the underlying error message on any failure.
    """
    try:
        # Path parameters must be plain annotations: declaring this as
        # Form(...) would make FastAPI expect a form body on a GET request.
        # Retrieve file metadata, then its content (sequential awaits).
        file = await client.files.retrieve(file_id)
        file_content = await client.files.content(file_id)

        # Return the file content as a streaming response with a download
        # filename taken from the stored metadata.
        return StreamingResponse(
            file_content.body,
            headers={"Content-Disposition": f'attachment; filename="{file.filename}"'},
        )
    except Exception as e:
        # Surface SDK failures as a 500 with the underlying message.
        raise HTTPException(status_code=500, detail=str(e))
101
92
102
93
@router.delete("/delete")
async def delete_file(request: Request, fileId: str = Form(...), client: AsyncOpenAI = Depends(lambda: AsyncOpenAI())):
    """Remove a file from the assistant's vector store.

    Args:
        request: incoming request (kept for signature compatibility; unused
            now that the file id arrives as form data).
        fileId: id of the file to detach, supplied as a form field.

    Returns:
        A confirmation message dict.
    """
    vector_store_id = await get_or_create_vector_store(assistant_id, client)
    # Python SDK spelling is vector_stores (vectorStores is the JS SDK), and
    # delete() takes the file id first with vector_store_id as a keyword.
    await client.beta.vector_stores.files.delete(fileId, vector_store_id=vector_store_id)
    return {"message": "File deleted successfully"}
0 commit comments