@@ -155,7 +155,14 @@ async def create_source_knowledge_graph_url(
155155 'gcs_project_id' :gcs_project_id , 'logging_time' : formatted_time (datetime .now (timezone .utc ))}
156156 logger .log_struct (json_obj , "INFO" )
157157 result = {'elapsed_api_time' : f'{ elapsed_time :.2f} ' }
158- return create_api_response ("Success" ,message = message ,success_count = success_count ,failed_count = failed_count ,file_name = lst_file_name ,data = result )
158+ return create_api_response ("Success" ,message = message ,success_count = success_count ,failed_count = failed_count ,file_name = lst_file_name ,data = result )
159+ except LLMGraphBuilderException as e :
160+ error_message = str (e )
161+ message = f" Unable to create source node for source type: { source_type } and source: { source } "
162+ # Set the status "Success" because we treat these errors, already handled by the application, as custom errors.
163+ json_obj = {'error_message' :error_message , 'status' :'Success' ,'db_url' :uri , 'userName' :userName , 'database' :database ,'success_count' :1 , 'source_type' : source_type , 'source_url' :source_url , 'wiki_query' :wiki_query , 'logging_time' : formatted_time (datetime .now (timezone .utc ))}
164+ logger .log_struct (json_obj , "INFO" )
165+ return create_api_response ('Failed' ,message = message + error_message [:80 ],error = error_message ,file_source = source_type )
159166 except Exception as e :
160167 error_message = str (e )
161168 message = f" Unable to create source node for source type: { source_type } and source: { source } "
@@ -261,42 +268,32 @@ async def extract_knowledge_graph_from_file(
261268 result ['gcs_bucket_folder' ] = gcs_bucket_folder
262269 result ['gcs_blob_filename' ] = gcs_blob_filename
263270 result ['gcs_project_id' ] = gcs_project_id
264- result ['allowedNodes' ] = allowedNodes
265- result ['allowedRelationship' ] = allowedRelationship
266271 result ['language' ] = language
267272 result ['retry_condition' ] = retry_condition
268273 logger .log_struct (result , "INFO" )
269274 result .update (uri_latency )
270275 logging .info (f"extraction completed in { extract_api_time :.2f} seconds for file name { file_name } " )
271276 return create_api_response ('Success' , data = result , file_source = source_type )
272- except LLMGraphBuilderException as app_exp :
273- job_status = "Completed"
274- obj_source_node = sourceNode ()
275- obj_source_node .file_name = file_name
276- obj_source_node .status = job_status
277- obj_source_node .error_message = str (app_exp )
278- obj_source_node .retry_condition = retry_condition
279- graphDb_data_Access .update_source_node (obj_source_node )
280- return create_api_response ("Success" , data = {"message" : str (app_exp )}, file_name = file_name )
277+ except LLMGraphBuilderException as e :
278+ error_message = str (e )
279+ graphDb_data_Access .update_exception_db (file_name ,error_message , retry_condition )
280+ failed_file_process (uri ,file_name , merged_file_path , source_type )
281+ node_detail = graphDb_data_Access .get_current_status_document_node (file_name )
282+ # Set the status "Completed" in logging because we treat these errors, already handled by the application, as custom errors.
283+ json_obj = {'api_name' :'extract' ,'message' :error_message ,'file_created_at' :node_detail [0 ]['created_time' ],'error_message' :error_message , 'file_name' : file_name ,'status' :'Completed' ,
284+ 'db_url' :uri , 'userName' :userName , 'database' :database ,'success_count' :1 , 'source_type' : source_type , 'source_url' :source_url , 'wiki_query' :wiki_query , 'logging_time' : formatted_time (datetime .now (timezone .utc ))}
285+ logger .log_struct (json_obj , "INFO" )
286+ return create_api_response ("Failed" , message = error_message , error = error_message , file_name = file_name )
281287 except Exception as e :
282288 message = f"Failed To Process File:{ file_name } or LLM Unable To Parse Content "
283289 error_message = str (e )
284290 graphDb_data_Access .update_exception_db (file_name ,error_message , retry_condition )
291+ failed_file_process (uri ,file_name , merged_file_path , source_type )
285292 node_detail = graphDb_data_Access .get_current_status_document_node (file_name )
286- gcs_file_cache = os .environ .get ('GCS_FILE_CACHE' )
287- if source_type == 'local file' :
288- if gcs_file_cache == 'True' :
289- folder_name = create_gcs_bucket_folder_name_hashed (uri ,file_name )
290- copy_failed_file (BUCKET_UPLOAD , BUCKET_FAILED_FILE , folder_name , file_name )
291- time .sleep (5 )
292- delete_file_from_gcs (BUCKET_UPLOAD ,folder_name ,file_name )
293- else :
294- logging .info (f'Deleted File Path: { merged_file_path } and Deleted File Name : { file_name } ' )
295- delete_uploaded_local_file (merged_file_path ,file_name )
296- json_obj = {'message' :message ,'file_created_at' :node_detail [0 ]['created_time' ],'error_message' :error_message , 'file_name' : file_name ,'status' :'Failed' ,
293+
294+ json_obj = {'api_name' :'extract' ,'message' :message ,'file_created_at' :node_detail [0 ]['created_time' ],'error_message' :error_message , 'file_name' : file_name ,'status' :'Failed' ,
297295 'db_url' :uri , 'userName' :userName , 'database' :database ,'failed_count' :1 , 'source_type' : source_type , 'source_url' :source_url , 'wiki_query' :wiki_query , 'logging_time' : formatted_time (datetime .now (timezone .utc ))}
298296 logger .log_struct (json_obj , "ERROR" )
299- logging .exception (f'File Failed in extraction: { json_obj } ' )
300297 return create_api_response ('Failed' , message = message + error_message [:100 ], error = error_message , file_name = file_name )
301298 finally :
302299 gc .collect ()
@@ -514,7 +511,7 @@ async def connect(uri=Form(), userName=Form(), password=Form(), database=Form())
514511 gcs_file_cache = os .environ .get ('GCS_FILE_CACHE' )
515512 end = time .time ()
516513 elapsed_time = end - start
517- json_obj = {'api_name' :'connect' ,'db_url' :uri , 'userName' :userName , 'database' :database ,'status' : result , 'count' :1 , 'logging_time' : formatted_time (datetime .now (timezone .utc )), 'elapsed_api_time' :f'{ elapsed_time :.2f} ' }
514+ json_obj = {'api_name' :'connect' ,'db_url' :uri , 'userName' :userName , 'database' :database , 'count' :1 , 'logging_time' : formatted_time (datetime .now (timezone .utc )), 'elapsed_api_time' :f'{ elapsed_time :.2f} ' }
518515 logger .log_struct (json_obj , "INFO" )
519516 result ['elapsed_api_time' ] = f'{ elapsed_time :.2f} '
520517 result ['gcs_file_cache' ] = gcs_file_cache
0 commit comments