2424from src .make_relationships import *
2525from src .document_sources .web_pages import *
2626from src .graph_query import get_graphDB_driver
2728import re
2829from langchain_community .document_loaders import WikipediaLoader , WebBaseLoader
2930import warnings
@@ -401,6 +402,7 @@ async def processing_source(uri, userName, password, database, model, file_name,
401402 obj_source_node .processing_time = processed_time
402403 obj_source_node .processed_chunk = select_chunks_upto + select_chunks_with_retry
403404 if retry_condition == START_FROM_BEGINNING :
404406 result = execute_graph_query (graph ,QUERY_TO_GET_NODES_AND_RELATIONS_OF_A_DOCUMENT , params = {"filename" :file_name })
405407 obj_source_node .node_count = result [0 ]['nodes' ]
406408 obj_source_node .relationship_count = result [0 ]['rels' ]
@@ -504,6 +506,10 @@ async def processing_chunks(chunkId_chunkDoc_list,graph,uri, userName, password,
504506 logging .info (f'Time taken to create relationship between chunk and entities: { elapsed_relationship :.2f} seconds' )
505507 latency_processing_chunk ["relationship_between_chunk_entity" ] = f'{ elapsed_relationship :.2f} '
506508
507513 graphDb_data_Access = graphDBdataAccess (graph )
508514 count_response = graphDb_data_Access .update_node_relationship_count (file_name )
509515 node_count = count_response [file_name ].get ('nodeCount' ,"0" )
@@ -530,6 +536,7 @@ def get_chunkId_chunkDoc_list(graph, file_name, pages, token_chunk_size, chunk_o
530536 else :
531537 chunkId_chunkDoc_list = []
532538 chunks = execute_graph_query (graph ,QUERY_TO_GET_CHUNKS , params = {"filename" :file_name })
533540
534541 if chunks [0 ]['text' ] is None or chunks [0 ]['text' ]== "" or not chunks :
535542 raise LLMGraphBuilderException (f"Chunks are not created for { file_name } . Please re-upload file and try again." )
@@ -541,11 +548,13 @@ def get_chunkId_chunkDoc_list(graph, file_name, pages, token_chunk_size, chunk_o
541548 if retry_condition == START_FROM_LAST_PROCESSED_POSITION :
542549 logging .info (f"Retry : start_from_last_processed_position" )
543550 starting_chunk = execute_graph_query (graph ,QUERY_TO_GET_LAST_PROCESSED_CHUNK_POSITION , params = {"filename" :file_name })
544552
545553 if starting_chunk and starting_chunk [0 ]["position" ] < len (chunkId_chunkDoc_list ):
546554 return len (chunks ), chunkId_chunkDoc_list [starting_chunk [0 ]["position" ] - 1 :]
547555
548556 elif starting_chunk and starting_chunk [0 ]["position" ] == len (chunkId_chunkDoc_list ):
549558 starting_chunk = execute_graph_query (graph ,QUERY_TO_GET_LAST_PROCESSED_CHUNK_WITHOUT_ENTITY , params = {"filename" :file_name })
550559 return len (chunks ), chunkId_chunkDoc_list [starting_chunk [0 ]["position" ] - 1 :]
551560
@@ -725,6 +734,7 @@ def manually_cancelled_job(graph, filenames, source_types, merged_dir, uri):
725734 delete_uploaded_local_file (merged_file_path ,file_name )
726735 return "Cancelled the processing job successfully"
727736
737+ def populate_graph_schema_from_text (text , model , is_schema_description_checked , is_local_storage ):
728738def populate_graph_schema_from_text (text , model , is_schema_description_checked , is_local_storage ):
729739 """_summary_
730740
@@ -738,6 +748,8 @@ def populate_graph_schema_from_text(text, model, is_schema_description_checked,
738748 """
739749 result = schema_extraction_from_text (text , model , is_schema_description_checked , is_local_storage )
740750 return result
751+ result = schema_extraction_from_text (text , model , is_schema_description_checked , is_local_storage )
752+ return result
741753
742754def set_status_retry (graph , file_name , retry_condition ):
743755 graphDb_data_Access = graphDBdataAccess (graph )
@@ -750,6 +762,7 @@ def set_status_retry(graph, file_name, retry_condition):
750762 if retry_condition == DELETE_ENTITIES_AND_START_FROM_BEGINNING or retry_condition == START_FROM_BEGINNING :
751763 obj_source_node .processed_chunk = 0
752764 if retry_condition == DELETE_ENTITIES_AND_START_FROM_BEGINNING :
753766 execute_graph_query (graph ,QUERY_TO_DELETE_EXISTING_ENTITIES , params = {"filename" :file_name })
754767 obj_source_node .node_count = 0
755768 obj_source_node .relationship_count = 0
0 commit comments