Skip to content

Commit 2ed534d

Browse files
committed
Add handling for ConflictException
1 parent 3e6e283 commit 2ed534d

File tree

1 file changed

+60
-19
lines changed
  • packages/syncKnowledgeBaseFunction

1 file changed

+60
-19
lines changed

packages/syncKnowledgeBaseFunction/app.py

Lines changed: 60 additions & 19 deletions
Original file line number | Diff line number | Diff line change
@@ -31,13 +31,19 @@ def handler(event, context):
3131
if not knowledge_base_id or not data_source_id:
3232
logger.error(
3333
"Missing required environment variables",
34-
extra={"knowledge_base_id": bool(knowledge_base_id), "data_source_id": bool(data_source_id)},
34+
extra={
35+
"knowledge_base_id": bool(knowledge_base_id),
36+
"data_source_id": bool(data_source_id),
37+
},
3538
)
3639
return {"statusCode": 500, "body": "Configuration error"}
3740

3841
logger.info(
3942
"Starting knowledge base sync process",
40-
extra={"knowledge_base_id": knowledge_base_id, "data_source_id": data_source_id},
43+
extra={
44+
"knowledge_base_id": knowledge_base_id,
45+
"data_source_id": data_source_id,
46+
},
4147
)
4248

4349
try:
@@ -100,8 +106,9 @@ def handler(event, context):
100106
"job_id": job_id,
101107
"job_status": job_status,
102108
"knowledge_base_id": knowledge_base_id,
103-
"file_key": key,
109+
"trigger_file": key,
104110
"ingestion_request_duration_ms": round(ingestion_request_time * 1000, 2),
111+
"note": "Job will process all files in data source, not just trigger file",
105112
},
106113
)
107114

@@ -111,45 +118,79 @@ def handler(event, context):
111118
else:
112119
logger.warning(
113120
"Skipping non-S3 event",
114-
extra={"event_source": record.get("eventSource"), "record_index": record_index + 1},
121+
extra={
122+
"event_source": record.get("eventSource"),
123+
"record_index": record_index + 1,
124+
},
115125
)
116126

117127
# Calculate total processing time
118128
total_duration = time.time() - start_time
119129

120130
# Log successful completion summary
121131
logger.info(
122-
"Knowledge base sync completed successfully",
132+
"Knowledge base sync process completed",
123133
extra={
124-
"total_files_processed": len(processed_files),
134+
"trigger_files_processed": len(processed_files),
135+
"ingestion_jobs_started": len(job_ids),
125136
"job_ids": job_ids,
126-
"processed_files": processed_files,
137+
"trigger_files": processed_files,
127138
"total_duration_ms": round(total_duration * 1000, 2),
128139
"knowledge_base_id": knowledge_base_id,
140+
"next_steps": "Monitor Bedrock console for ingestion job completion status",
129141
},
130142
)
131143

144+
# Log explicit success message for easy monitoring
145+
logger.info("Ingestion jobs triggered successfully - check Bedrock console for final results")
146+
132147
return {
133148
"statusCode": 200,
134-
"body": f"Successfully triggered {len(job_ids)} ingestion job(s) for {len(processed_files)} file(s)",
149+
"body": (
150+
f"Successfully triggered {len(job_ids)} ingestion job(s) for {len(processed_files)} trigger file(s)"
151+
),
135152
}
136153

137154
except ClientError as e:
138155
# Handle AWS service errors with detailed logging
139156
error_code = e.response.get("Error", {}).get("Code", "Unknown")
140157
error_message = e.response.get("Error", {}).get("Message", str(e))
141158

142-
logger.error(
143-
"AWS service error occurred",
144-
extra={
145-
"error_code": error_code,
146-
"error_message": error_message,
147-
"knowledge_base_id": knowledge_base_id,
148-
"data_source_id": data_source_id,
149-
"duration_ms": round((time.time() - start_time) * 1000, 2),
150-
},
151-
)
152-
return {"statusCode": 500, "body": f"AWS error: {error_code} - {error_message}"}
159+
# Handling for ConflictException
160+
if error_code == "ConflictException":
161+
logger.warning(
162+
"Ingestion job already in progress",
163+
extra={
164+
"error_code": error_code,
165+
"error_message": error_message,
166+
"knowledge_base_id": knowledge_base_id,
167+
"data_source_id": data_source_id,
168+
"duration_ms": round((time.time() - start_time) * 1000, 2),
169+
"recommendation": (
170+
"This is normal when multiple files are uploaded quickly. "
171+
"The running job will process all files."
172+
),
173+
},
174+
)
175+
return {
176+
"statusCode": 409,
177+
"body": "Ingestion job already in progress - files will be processed by existing job",
178+
}
179+
else:
180+
logger.error(
181+
"AWS service error occurred",
182+
extra={
183+
"error_code": error_code,
184+
"error_message": error_message,
185+
"knowledge_base_id": knowledge_base_id,
186+
"data_source_id": data_source_id,
187+
"duration_ms": round((time.time() - start_time) * 1000, 2),
188+
},
189+
)
190+
return {
191+
"statusCode": 500,
192+
"body": f"AWS error: {error_code} - {error_message}",
193+
}
153194

154195
except Exception as e:
155196
# Handle unexpected errors

0 commit comments

Comments (0)