This repository was archived by the owner on May 27, 2025. It is now read-only.

Commit 088ebdf

clean up http status code usage
1 parent a77806f commit 088ebdf

File tree: 5 files changed (+46, -31 lines)

backend/graphrag_app/api/data.py
Lines changed: 6 additions & 6 deletions

```diff
@@ -47,7 +47,7 @@
     "",
     summary="Get list of data containers.",
     response_model=StorageNameList,
-    responses={200: {"model": StorageNameList}},
+    responses={status.HTTP_200_OK: {"model": StorageNameList}},
 )
 async def get_all_data_containers():
     """
@@ -169,20 +169,20 @@ async def upload_files(
 
         if len(processing_errors) > 0:
             raise HTTPException(
-                status_code=400,
+                status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
                 detail=f"Error uploading files: {processing_errors}.",
             )
-        return BaseResponse(status="File upload successful.")
+        return BaseResponse(status="Success.")
     except Exception as e:
         logger = load_pipeline_logger()
         logger.error(
             message="Error uploading files.",
             cause=e,
             stack=traceback.format_exc(),
-            details={"files": [f.filename for f in files]},
+            details={"files": processing_errors},
         )
         raise HTTPException(
-            status_code=500,
+            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
             detail=f"Error uploading files to container '{container_name}'.",
         )
@@ -191,7 +191,7 @@ async def upload_files(
     "/{container_name}",
     summary="Delete a data storage container",
     response_model=BaseResponse,
-    responses={200: {"model": BaseResponse}},
+    responses={status.HTTP_200_OK: {"model": BaseResponse}},
 )
 async def delete_files(
     container_name: str, sanitized_container_name: str = Depends(sanitize_name)
```
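
For reference, a minimal sketch of the pattern this diff adopts: `fastapi.status` constants replace bare integers both in a route's `responses` documentation and in raised `HTTPException`s. The route, model, and validation logic below are illustrative only, not code from this repository.

```python
from fastapi import APIRouter, HTTPException, status
from pydantic import BaseModel

router = APIRouter()


class BaseResponse(BaseModel):
    status: str


@router.post(
    "/files",
    summary="Register uploaded file names.",
    response_model=BaseResponse,
    # Named constants document intent better than bare 200/422/500 integers
    # and are caught by linters and IDEs if misspelled.
    responses={status.HTTP_200_OK: {"model": BaseResponse}},
)
async def register_files(filenames: list[str]):
    # Blank names are a client-side problem, so reject them with 422
    # rather than a generic 400 or 500.
    processing_errors = [name for name in filenames if not name.strip()]
    if len(processing_errors) > 0:
        raise HTTPException(
            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
            detail=f"Error uploading files: {processing_errors}.",
        )
    return BaseResponse(status="Success.")
```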

backend/graphrag_app/api/index.py
Lines changed: 7 additions & 5 deletions

```diff
@@ -72,7 +72,7 @@ async def schedule_index_job(
         sanitized_storage_container_name
     ).exists():
         raise HTTPException(
-            status_code=500,
+            status_code=status.HTTP_412_PRECONDITION_FAILED,
             detail=f"Storage container '{storage_container_name}' does not exist",
         )
@@ -102,7 +102,7 @@ async def schedule_index_job(
         PipelineJobState(existing_job.status) == PipelineJobState.RUNNING
     ):
         raise HTTPException(
-            status_code=202,  # request has been accepted for processing but is not complete.
+            status_code=status.HTTP_425_TOO_EARLY,  # request has been accepted for processing but is not complete.
             detail=f"Index '{index_container_name}' already exists and has not finished building.",
         )
     # if indexing job is in a failed state, delete the associated K8s job and pod to allow for a new job to be scheduled
@@ -258,7 +258,8 @@ async def delete_index(
             details={"container": container_name},
         )
         raise HTTPException(
-            status_code=500, detail=f"Error deleting '{container_name}'."
+            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+            detail=f"Error deleting '{container_name}'.",
         )
 
     return BaseResponse(status="Success")
@@ -277,7 +278,7 @@ async def get_index_status(
     if pipelinejob.item_exist(sanitized_container_name):
         pipeline_job = pipelinejob.load_item(sanitized_container_name)
         return IndexStatusResponse(
-            status_code=200,
+            status_code=status.HTTP_200_OK,
             index_name=pipeline_job.human_readable_index_name,
             storage_name=pipeline_job.human_readable_storage_name,
             status=pipeline_job.status.value,
@@ -286,5 +287,6 @@ async def get_index_status(
         )
     else:
         raise HTTPException(
-            status_code=404, detail=f"'{container_name}' does not exist."
+            status_code=status.HTTP_404_NOT_FOUND,
+            detail=f"'{container_name}' does not exist.",
        )
```
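
Switching the missing-container error from 500 to 412 Precondition Failed distinguishes a caller-side precondition from a genuine server fault. A hedged sketch of that guard as a standalone helper: `ensure_container_exists` is hypothetical, while `get_blob_container_client` is the utility touched later in this commit.

```python
from fastapi import HTTPException, status

from graphrag_app.utils.common import get_blob_container_client


async def ensure_container_exists(sanitized_name: str, display_name: str) -> None:
    """Hypothetical helper: fail fast with 412 when a request precondition
    (the storage container's existence) is not met."""
    container_client = await get_blob_container_client(sanitized_name)
    # azure.storage.blob.aio.ContainerClient.exists() is a coroutine.
    if not await container_client.exists():
        raise HTTPException(
            status_code=status.HTTP_412_PRECONDITION_FAILED,
            detail=f"Storage container '{display_name}' does not exist",
        )
```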

backend/graphrag_app/api/query.py
Lines changed: 2 additions & 2 deletions

```diff
@@ -52,7 +52,7 @@ async def global_query(request: GraphRequest):
 
     if not _is_index_complete(sanitized_index_name):
         raise HTTPException(
-            status_code=500,
+            status_code=status.HTTP_425_TOO_EARLY,
             detail=f"{index_name} not ready for querying.",
         )
@@ -131,7 +131,7 @@ async def local_query(request: GraphRequest):
 
     if not _is_index_complete(sanitized_index_name):
         raise HTTPException(
-            status_code=500,
+            status_code=status.HTTP_425_TOO_EARLY,
             detail=f"{index_name} not ready for querying.",
         )
```
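
With this change, both query endpoints signal 425 Too Early while an index is still building instead of a generic 500, so clients can retry rather than treat the condition as a fault. A sketch of how a caller might react; the endpoint URL and payload shape are placeholders, and `requests` stands in for any HTTP client.

```python
import time

import requests


def query_when_ready(url: str, payload: dict, retry_s: float = 30.0, max_tries: int = 20) -> dict:
    """Poll a query endpoint, treating 425 Too Early as 'index still building'."""
    for _ in range(max_tries):
        resp = requests.post(url, json=payload)
        if resp.status_code != 425:
            # Any other error status is a real failure; raise immediately.
            resp.raise_for_status()
            return resp.json()
        time.sleep(retry_s)  # back off and try again once the index is ready
    raise TimeoutError("Index was still building after all retries.")
```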

backend/graphrag_app/main.py
Lines changed: 5 additions & 2 deletions

```diff
@@ -33,7 +33,7 @@
 
 
 async def catch_all_exceptions_middleware(request: Request, call_next):
-    """a function to globally catch all exceptions and return a 500 response with the exception message"""
+    """A global function to catch all exceptions and produce a standard error message"""
     try:
         return await call_next(request)
     except Exception as e:
@@ -44,7 +44,10 @@ async def catch_all_exceptions_middleware(request: Request, call_next):
             cause=e,
             stack=stack,
         )
-        return Response("Unexpected internal server error.", status_code=500)
+        return Response(
+            "Unexpected internal server error.",
+            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+        )
 
 
 # NOTE: this function is not currently used, but it is a placeholder for future use once RBAC issues have been resolved
```
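
For context, this middleware is the last line of defense: anything that escapes a route handler becomes a uniform 500 with no exception details leaked to the client. A minimal sketch of wiring such a middleware into an app (logging omitted; the real handler above also calls `load_pipeline_logger`):

```python
from fastapi import FastAPI, Request, Response, status

app = FastAPI()


@app.middleware("http")
async def catch_all_exceptions_middleware(request: Request, call_next):
    """Catch any exception that escapes a route handler and return a
    generic 500 response instead of leaking the exception message."""
    try:
        return await call_next(request)
    except Exception:
        return Response(
            "Unexpected internal server error.",
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
        )
```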

backend/graphrag_app/utils/common.py
Lines changed: 26 additions & 16 deletions

```diff
@@ -14,15 +14,18 @@
 from azure.cosmos import ContainerProxy, exceptions
 from azure.identity import DefaultAzureCredential
 from azure.storage.blob.aio import ContainerClient
-from fastapi import Header, HTTPException
+from fastapi import Header, HTTPException, status
 
 from graphrag_app.logger.load_logger import load_pipeline_logger
 from graphrag_app.utils.azure_clients import AzureClientManager
 
+FILE_UPLOAD_CACHE = "cache/uploaded_files.csv"
+
 
 def get_df(
     table_path: str,
 ) -> pd.DataFrame:
+    """Read a parquet file from Azure Storage and return it as a pandas DataFrame."""
     df = pd.read_parquet(
         table_path,
         storage_options=pandas_storage_options(),
@@ -126,7 +129,10 @@ def get_cosmos_container_store_client() -> ContainerProxy:
             cause=e,
             stack=traceback.format_exc(),
         )
-        raise HTTPException(status_code=500, detail="Error fetching cosmosdb client.")
+        raise HTTPException(
+            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+            detail="Error fetching cosmosdb client.",
+        )
@@ -144,7 +150,10 @@ async def get_blob_container_client(name: str) -> ContainerClient:
             cause=e,
             stack=traceback.format_exc(),
         )
-        raise HTTPException(status_code=500, detail="Error fetching storage client.")
+        raise HTTPException(
+            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+            detail="Error fetching storage client.",
+        )
 
 
 def sanitize_name(container_name: str) -> str:
@@ -191,22 +200,23 @@ def desanitize_name(sanitized_container_name: str) -> str | None:
         return None
     except Exception:
         raise HTTPException(
-            status_code=500, detail="Error retrieving original container name."
+            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+            detail="Error retrieving original container name.",
         )
 
 
 async def subscription_key_check(
     Ocp_Apim_Subscription_Key: Annotated[str, Header()],
 ):
     """
-    Verifies if user has passed the Ocp_Apim_Subscription_Key (APIM subscription key) in the request header.
-    If it is not present, an HTTPException with a 400 status code is raised.
-    Note: this check is unnecessary (APIM validates subscription keys automatically), but this will add the key
+    Verify if user has passed the Ocp_Apim_Subscription_Key (APIM subscription key) in the request header.
+    Note: this check is unnecessary (APIM validates subscription keys automatically), but it effectively adds the key
     as a required parameter in the swagger docs page, enabling users to send requests using the swagger docs "Try it out" feature.
     """
     if not Ocp_Apim_Subscription_Key:
         raise HTTPException(
-            status_code=400, detail="Ocp-Apim-Subscription-Key required"
+            status_code=status.HTTP_400_BAD_REQUEST,
+            detail="Ocp-Apim-Subscription-Key required",
         )
     return Ocp_Apim_Subscription_Key
@@ -216,7 +226,7 @@ async def create_cache(container_client: ContainerClient) -> None:
     Create a file cache (csv) to track uploaded files.
     """
     try:
-        cache_blob_client = container_client.get_blob_client("uploaded_files_cache.csv")
+        cache_blob_client = container_client.get_blob_client(FILE_UPLOAD_CACHE)
         if not await cache_blob_client.exists():
             # create the empty file cache csv
             headers = [["Filename", "Hash"]]
@@ -231,20 +241,20 @@ async def create_cache(container_client: ContainerClient) -> None:
             os.remove("uploaded_files_cache.csv")
     except Exception:
         raise HTTPException(
-            status_code=500,
+            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
             detail="Error creating file cache in Azure Blob Storage.",
         )
 
 
 async def check_cache(file_stream: BinaryIO, container_client: ContainerClient) -> bool:
     """
-    Check a cache file to determine if a file has previously been uploaded.
+    Check a file cache (csv) to determine if a file has previously been uploaded.
 
     Note: This function creates/checks a CSV file in azure storage to act as a cache of previously uploaded files.
     """
     try:
         # load the file cache
-        cache_blob_client = container_client.get_blob_client("uploaded_files_cache.csv")
+        cache_blob_client = container_client.get_blob_client(FILE_UPLOAD_CACHE)
         cache_download_stream = await cache_blob_client.download_blob()
         cache_bytes = await cache_download_stream.readall()
         cache_content = StringIO(cache_bytes.decode("utf-8"))
@@ -258,7 +268,7 @@ async def check_cache(file_stream: BinaryIO, container_client: ContainerClient)
             return False
     except Exception:
         raise HTTPException(
-            status_code=500,
+            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
             detail="Error checking file cache in Azure Blob Storage.",
         )
@@ -267,11 +277,11 @@ async def update_cache(
     filename: str, file_stream: BinaryIO, container_client: ContainerClient
 ) -> None:
     """
-    Update the file cache (csv) with a new file by adding a new row.
+    Update a file cache (csv) with a new file by adding a new row.
     """
     try:
         # Load the file cache
-        cache_blob_client = container_client.get_blob_client("uploaded_files_cache.csv")
+        cache_blob_client = container_client.get_blob_client(FILE_UPLOAD_CACHE)
         cache_download_stream = await cache_blob_client.download_blob()
         cache_bytes = await cache_download_stream.readall()
         cache_content = StringIO(cache_bytes.decode("utf-8"))
@@ -297,6 +307,6 @@ async def update_cache(
         )
     except Exception:
         raise HTTPException(
-            status_code=500,
+            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
             detail="Error updating file cache in Azure Blob Storage.",
         )
```
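
The new `FILE_UPLOAD_CACHE` constant replaces three copies of a hard-coded blob name and relocates the cache under a `cache/` prefix. A hedged sketch of the lookup side, assuming the `Filename,Hash` CSV layout created in `create_cache` and a SHA-256 content hash; the actual `check_cache` may hash and parse differently.

```python
import csv
import hashlib
from io import StringIO
from typing import BinaryIO

from azure.storage.blob.aio import ContainerClient

FILE_UPLOAD_CACHE = "cache/uploaded_files.csv"


async def is_previously_uploaded(
    file_stream: BinaryIO, container_client: ContainerClient
) -> bool:
    """Hypothetical lookup: return True if the file's content hash is
    already recorded in the CSV cache blob."""
    # Download the CSV cache and collect the known content hashes
    # (the header row's "Hash" cell lands in the set but is harmless).
    cache_blob_client = container_client.get_blob_client(FILE_UPLOAD_CACHE)
    cache_download_stream = await cache_blob_client.download_blob()
    cache_bytes = await cache_download_stream.readall()
    reader = csv.reader(StringIO(cache_bytes.decode("utf-8")))
    known_hashes = {row[1] for row in reader if len(row) > 1}

    # Hash the candidate file and rewind the stream for any later upload.
    file_hash = hashlib.sha256(file_stream.read()).hexdigest()
    file_stream.seek(0)
    return file_hash in known_hashes
```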
