@@ -105,7 +105,7 @@ def run(  # type: ignore
         output_filepath.parent.mkdir(parents=True, exist_ok=True)

         with open(output_filepath, "w") as file:
-            json.dump(_GraphData.from_nx(nx_graph).model_dump(), file, indent=4)
+            file.write(_GraphData.from_nx(nx_graph).model_dump_json())

         return output_filepath

@@ -196,7 +196,7 @@ def from_nx(cls, nx_graph: "MultiDiGraph") -> _GraphData:


 class _Node(BaseModel):
-    model_config = ConfigDict(use_enum_values=True)
+    model_config = ConfigDict()

     id_: str = Field(default_factory=lambda: str(uuid.uuid4()))
     labels: list[Label] = Field(default_factory=list)
@@ -207,20 +207,20 @@ def __hash__(self):


 class _Edge(BaseModel):
-    model_config = ConfigDict(use_enum_values=True)
+    model_config = ConfigDict()

     source_id: str
     destination_id: str
     relationship: Relationship


-class Label(str, Enum):
+class Label(Enum):
     UNSTRUCTURED_ELEMENT = "UnstructuredElement"
     CHUNK = "Chunk"
     DOCUMENT = "Document"


-class Relationship(str, Enum):
+class Relationship(Enum):
     PART_OF_DOCUMENT = "PART_OF_DOCUMENT"
     PART_OF_CHUNK = "PART_OF_CHUNK"
     NEXT_CHUNK = "NEXT_CHUNK"
@@ -263,23 +263,23 @@ async def run_async(self, path: Path, file_data: FileData, **kwargs) -> None:  #
     async def _create_uniqueness_constraints(self, client: AsyncDriver) -> None:
         for label in Label:
             logger.info(
-                f"Adding id uniqueness constraint for nodes labeled '{label}'"
+                f"Adding id uniqueness constraint for nodes labeled '{label.value}'"
                 " if it does not already exist."
             )
-            constraint_name = f"{label.lower()}_id"
+            constraint_name = f"{label.value.lower()}_id"
             await client.execute_query(
                 f"""
                 CREATE CONSTRAINT {constraint_name} IF NOT EXISTS
-                FOR (n: {label}) REQUIRE n.id IS UNIQUE
+                FOR (n: {label.value}) REQUIRE n.id IS UNIQUE
                 """
             )

     async def _delete_old_data_if_exists(self, file_data: FileData, client: AsyncDriver) -> None:
         logger.info(f"Deleting old data for the record '{file_data.identifier}' (if present).")
         _, summary, _ = await client.execute_query(
             f"""
-            MATCH (n: {Label.DOCUMENT} {{id: $identifier}})
-            MATCH (n)--(m: {Label.CHUNK}|{Label.UNSTRUCTURED_ELEMENT})
+            MATCH (n: {Label.DOCUMENT.value} {{id: $identifier}})
+            MATCH (n)--(m: {Label.CHUNK.value}|{Label.UNSTRUCTURED_ELEMENT.value})
             DETACH DELETE m""",
             identifier=file_data.identifier,
         )
@@ -349,7 +349,7 @@ async def _execute_queries(

     @staticmethod
     def _create_nodes_query(nodes: list[_Node], labels: tuple[Label, ...]) -> tuple[str, dict]:
-        labels_string = ", ".join(labels)
+        labels_string = ", ".join([label.value for label in labels])
         logger.info(f"Preparing MERGE query for {len(nodes)} nodes labeled '{labels_string}'.")
         query_string = f"""
         UNWIND $nodes AS node
@@ -366,7 +366,7 @@ def _create_edges_query(edges: list[_Edge], relationship: Relationship) -> tuple
         UNWIND $edges AS edge
         MATCH (u {{id: edge.source}})
         MATCH (v {{id: edge.destination}})
-        MERGE (u)-[:{relationship}]->(v)
+        MERGE (u)-[:{relationship.value}]->(v)
         """
         parameters = {
             "edges": [