Skip to content
This repository was archived by the owner on May 27, 2025. It is now read-only.

Commit ef1d7b7

Browse files
committed
update
1 parent 606f6f2 commit ef1d7b7

File tree

9 files changed

+63
-54
lines changed

9 files changed

+63
-54
lines changed

backend/src/api/common.py

Lines changed: 18 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,7 @@
55
import os
66
import re
77

8+
from azure.cosmos import exceptions
89
from azure.identity import DefaultAzureCredential
910
from fastapi import HTTPException
1011

@@ -37,6 +38,23 @@ def delete_blob_container(container_name: str):
3738
blob_service_client.delete_container(container_name)
3839

3940

41+
def delete_cosmos_container_item(container: str, item_id: str):
42+
"""
43+
Delete an item from a cosmosdb container. If it does not exist, do nothing.
44+
If exception is raised, the calling function should catch it.
45+
"""
46+
azure_client_manager = AzureClientManager()
47+
# cosmos_client = azure_client_manager.get_cosmos_client()
48+
try:
49+
azure_client_manager.get_cosmos_container_client(
50+
"graphrag", container
51+
).delete_item(item_id)
52+
# cosmos_client.get_database_client(database).get_container_client(container).delete_item(item_id)
53+
except exceptions.CosmosResourceNotFoundError:
54+
# If item does not exist, do nothing
55+
pass
56+
57+
4058
def validate_index_file_exist(index_name: str, file_name: str):
4159
"""
4260
Check if index exists and that the specified blob file exists.

backend/src/api/data.py

Lines changed: 7 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,6 @@
66
from math import ceil
77
from typing import List
88

9-
from azure.cosmos import exceptions
109
from azure.storage.blob import ContainerClient
1110
from fastapi import (
1211
APIRouter,
@@ -17,6 +16,7 @@
1716
from src.api.azure_clients import AzureClientManager
1817
from src.api.common import (
1918
delete_blob_container,
19+
delete_cosmos_container_item,
2020
sanitize_name,
2121
validate_blob_container_name,
2222
)
@@ -128,7 +128,6 @@ async def upload_files(
128128
Raises:
129129
HTTPException: If the container name is invalid or if any error occurs during the upload process.
130130
"""
131-
reporter = LoggerSingleton().get_instance()
132131
sanitized_storage_name = sanitize_name(storage_name)
133132
# ensure container name follows Azure Blob Storage naming conventions
134133
try:
@@ -171,7 +170,8 @@ async def upload_files(
171170
})
172171
return BaseResponse(status="File upload successful.")
173172
except Exception:
174-
reporter.on_error("Error uploading files.", details={"files": files})
173+
logger = LoggerSingleton().get_instance()
174+
logger.on_error("Error uploading files.", details={"files": files})
175175
raise HTTPException(
176176
status_code=500,
177177
detail=f"Error uploading files to container '{storage_name}'.",
@@ -188,25 +188,16 @@ async def delete_files(storage_name: str):
188188
"""
189189
Delete a specified data storage container.
190190
"""
191-
azure_client_manager = AzureClientManager()
191+
# azure_client_manager = AzureClientManager()
192192
sanitized_storage_name = sanitize_name(storage_name)
193193
try:
194194
# delete container in Azure Storage
195195
delete_blob_container(sanitized_storage_name)
196196
# delete entry from container-store in cosmosDB
197-
container_store_client = azure_client_manager.get_cosmos_container_client(
198-
database_name="graphrag", container_name="container-store"
199-
)
200-
try:
201-
container_store_client.delete_item(
202-
item=sanitized_storage_name,
203-
partition_key=sanitized_storage_name,
204-
)
205-
except exceptions.CosmosResourceNotFoundError:
206-
pass
197+
delete_cosmos_container_item("container-store", sanitized_storage_name)
207198
except Exception:
208-
reporter = LoggerSingleton().get_instance()
209-
reporter.on_error(
199+
logger = LoggerSingleton().get_instance()
200+
logger.on_error(
210201
f"Error deleting container {storage_name}.",
211202
details={"Container": storage_name},
212203
)

backend/src/api/graph.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -43,8 +43,8 @@ async def get_graphml_file(index_name: str):
4343
headers={"Content-Disposition": f"attachment; filename={graphml_filename}"},
4444
)
4545
except Exception:
46-
reporter = LoggerSingleton().get_instance()
47-
reporter.on_error("Could not retrieve graphml file")
46+
logger = LoggerSingleton().get_instance()
47+
logger.on_error("Could not retrieve graphml file")
4848
raise HTTPException(
4949
status_code=500,
5050
detail=f"Could not retrieve graphml file for index '{index_name}'.",

backend/src/api/index.py

Lines changed: 16 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -170,7 +170,7 @@ async def _start_indexing_pipeline(index_name: str):
170170
"type": "index",
171171
})
172172

173-
reporter = LoggerSingleton().get_instance()
173+
logger = LoggerSingleton().get_instance()
174174
pipelinejob = PipelineJob()
175175
pipeline_job = pipelinejob.load_item(sanitized_index_name)
176176
sanitized_storage_name = pipeline_job.sanitized_storage_name
@@ -229,19 +229,19 @@ async def _start_indexing_pipeline(index_name: str):
229229
for workflow in pipeline_config.workflows:
230230
pipeline_job.all_workflows.append(workflow.name)
231231

232-
# create new reporters/callbacks just for this job
233-
reporters = []
234-
reporter_names = os.getenv("REPORTERS", Reporters.CONSOLE.name.upper()).split(",")
235-
for reporter_name in reporter_names:
232+
# create new loggers/callbacks just for this job
233+
loggers = []
234+
logger_names = os.getenv("REPORTERS", Reporters.CONSOLE.name.upper()).split(",")
235+
for logger_name in logger_names:
236236
try:
237-
reporters.append(Reporters[reporter_name.upper()])
237+
loggers.append(Reporters[logger_name.upper()])
238238
except KeyError:
239-
raise ValueError(f"Unknown reporter type: {reporter_name}")
239+
raise ValueError(f"Unknown logger type: {logger_name}")
240240
workflow_callbacks = load_pipeline_logger(
241241
index_name=index_name,
242242
num_workflow_steps=len(pipeline_job.all_workflows),
243243
reporting_dir=sanitized_index_name,
244-
reporters=reporters,
244+
reporters=loggers,
245245
)
246246

247247
# add pipeline job callback to the callback manager
@@ -305,7 +305,7 @@ async def _start_indexing_pipeline(index_name: str):
305305
details=error_details,
306306
)
307307
# log error in global index directory logs
308-
reporter.on_error(
308+
logger.on_error(
309309
message=f"Indexing pipeline failed for index '{index_name}'.",
310310
cause=e,
311311
stack=traceback.format_exc(),
@@ -337,8 +337,8 @@ async def get_all_indexes():
337337
if item["type"] == "index":
338338
items.append(item["human_readable_name"])
339339
except Exception:
340-
reporter = LoggerSingleton().get_instance()
341-
reporter.on_error("Error retrieving index names")
340+
logger = LoggerSingleton().get_instance()
341+
logger.on_error("Error retrieving index names")
342342
return IndexNameList(index_name=items)
343343

344344

@@ -363,13 +363,13 @@ def _delete_k8s_job(job_name: str, namespace: str) -> None:
363363
# function should only work when running in AKS
364364
if not os.getenv("KUBERNETES_SERVICE_HOST"):
365365
return None
366-
reporter = LoggerSingleton().get_instance()
366+
logger = LoggerSingleton().get_instance()
367367
kubernetes_config.load_incluster_config()
368368
try:
369369
batch_v1 = kubernetes_client.BatchV1Api()
370370
batch_v1.delete_namespaced_job(name=job_name, namespace=namespace)
371371
except Exception:
372-
reporter.on_error(
372+
logger.on_error(
373373
message=f"Error deleting k8s job {job_name}.",
374374
details={"container": job_name},
375375
)
@@ -380,7 +380,7 @@ def _delete_k8s_job(job_name: str, namespace: str) -> None:
380380
if job_pod:
381381
core_v1.delete_namespaced_pod(job_pod, namespace=namespace)
382382
except Exception:
383-
reporter.on_error(
383+
logger.on_error(
384384
message=f"Error deleting k8s pod for job {job_name}.",
385385
details={"container": job_name},
386386
)
@@ -442,8 +442,8 @@ async def delete_index(index_name: str):
442442
index_client.delete_index(ai_search_index_name)
443443

444444
except Exception:
445-
reporter = LoggerSingleton().get_instance()
446-
reporter.on_error(
445+
logger = LoggerSingleton().get_instance()
446+
logger.on_error(
447447
message=f"Error encountered while deleting all data for index {index_name}.",
448448
stack=None,
449449
details={"container": index_name},

backend/src/api/index_configuration.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -69,11 +69,11 @@ async def generate_prompts(storage_name: str, limit: int = 5):
6969
output=f"{temp_dir}/prompts",
7070
)
7171
except Exception as e:
72-
reporter = LoggerSingleton().get_instance()
72+
logger = LoggerSingleton().get_instance()
7373
error_details = {
7474
"storage_name": storage_name,
7575
}
76-
reporter.on_error(
76+
logger.on_error(
7777
message="Auto-prompt generation failed.",
7878
cause=e,
7979
stack=traceback.format_exc(),

backend/src/api/query.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -197,8 +197,8 @@ async def global_query(request: GraphRequest):
197197

198198
return GraphResponse(result=result[0], context_data=context_data)
199199
except Exception as e:
200-
reporter = LoggerSingleton().get_instance()
201-
reporter.on_error(
200+
logger = LoggerSingleton().get_instance()
201+
logger.on_error(
202202
message="Could not perform global search.",
203203
cause=e,
204204
stack=traceback.format_exc(),

backend/src/api/query_streaming.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -186,8 +186,8 @@ async def global_search_streaming(request: GraphRequest):
186186
media_type="application/json",
187187
)
188188
except Exception as e:
189-
reporter = LoggerSingleton().get_instance()
190-
reporter.on_error(
189+
logger = LoggerSingleton().get_instance()
190+
logger.on_error(
191191
message="Error encountered while streaming global search response",
192192
cause=e,
193193
stack=traceback.format_exc(),
@@ -429,8 +429,8 @@ async def local_search_streaming(request: GraphRequest):
429429
media_type="application/json",
430430
)
431431
except Exception as e:
432-
reporter = LoggerSingleton().get_instance()
433-
reporter.on_error(
432+
logger = LoggerSingleton().get_instance()
433+
logger.on_error(
434434
message="Error encountered while streaming local search response",
435435
cause=e,
436436
stack=traceback.format_exc(),

backend/src/api/source.py

Lines changed: 10 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -55,8 +55,8 @@ async def get_report_info(index_name: str, report_id: str):
5555
][0]
5656
return ReportResponse(text=report_content)
5757
except Exception:
58-
reporter = LoggerSingleton().get_instance()
59-
reporter.on_error("Could not get report.")
58+
logger = LoggerSingleton().get_instance()
59+
logger.on_error("Could not get report.")
6060
raise HTTPException(
6161
status_code=500,
6262
detail=f"Error retrieving report '{report_id}' from index '{index_name}'.",
@@ -97,8 +97,8 @@ async def get_chunk_info(index_name: str, text_unit_id: str):
9797
text=row["chunk"].values[0], source_document=row["source_doc"].values[0]
9898
)
9999
except Exception:
100-
reporter = LoggerSingleton().get_instance()
101-
reporter.on_error("Could not get text chunk.")
100+
logger = LoggerSingleton().get_instance()
101+
logger.on_error("Could not get text chunk.")
102102
raise HTTPException(
103103
status_code=500,
104104
detail=f"Error retrieving text chunk '{text_unit_id}' from index '{index_name}'.",
@@ -127,8 +127,8 @@ async def get_entity_info(index_name: str, entity_id: int):
127127
text_units=row["text_unit_ids"].values[0].tolist(),
128128
)
129129
except Exception:
130-
reporter = LoggerSingleton().get_instance()
131-
reporter.on_error("Could not get entity")
130+
logger = LoggerSingleton().get_instance()
131+
logger.on_error("Could not get entity")
132132
raise HTTPException(
133133
status_code=500,
134134
detail=f"Error retrieving entity '{entity_id}' from index '{index_name}'.",
@@ -172,8 +172,8 @@ async def get_claim_info(index_name: str, claim_id: int):
172172
document_ids=row["document_ids"].values[0].tolist(),
173173
)
174174
except Exception:
175-
reporter = LoggerSingleton().get_instance()
176-
reporter.on_error("Could not get claim.")
175+
logger = LoggerSingleton().get_instance()
176+
logger.on_error("Could not get claim.")
177177
raise HTTPException(
178178
status_code=500,
179179
detail=f"Error retrieving claim '{claim_id}' from index '{index_name}'.",
@@ -218,8 +218,8 @@ async def get_relationship_info(index_name: str, relationship_id: int):
218218
], # extract text_unit_ids from a list of panda series
219219
)
220220
except Exception:
221-
reporter = LoggerSingleton().get_instance()
222-
reporter.on_error("Could not get relationship.")
221+
logger = LoggerSingleton().get_instance()
222+
logger.on_error("Could not get relationship.")
223223
raise HTTPException(
224224
status_code=500,
225225
detail=f"Error retrieving relationship '{relationship_id}' from index '{index_name}'.",

backend/tests/conftest.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -76,7 +76,7 @@ def container_with_graphml_file(blob_service_client, cosmos_client):
7676
yield container_name
7777
# cleanup
7878
blob_service_client.delete_container(sanitized_name)
79-
container_store_client.delete_item(sanitized_name, sanitized_name)
79+
# container_store_client.delete_item(sanitized_name, sanitized_name)
8080

8181

8282
@pytest.fixture(scope="session")
@@ -120,7 +120,7 @@ def container_with_index_files(blob_service_client, cosmos_client):
120120
yield container_name
121121
# cleanup
122122
blob_service_client.delete_container(sanitized_name)
123-
container_store_client.delete_item(sanitized_name, sanitized_name)
123+
# container_store_client.delete_item(sanitized_name, sanitized_name)
124124

125125

126126
@pytest.fixture(scope="session")

0 commit comments

Comments (0)