@@ -503,7 +503,9 @@ def remove_oldest_memory(
503503 cursor .execute (delete_query , delete_params )
504504 deleted_count = cursor .rowcount
505505 logger .info (
506- f"Removed { deleted_count } oldest { memory_type } memories, keeping { keep_latest } latest for user { user_name } "
506+ f"Removed { deleted_count } oldest { memory_type } memories, "
507+ f"keeping { keep_latest } latest for user { user_name } , "
508+ f"removed ids: { ids_to_delete } "
507509 )
508510 except Exception as e :
509511 logger .error (f"[remove_oldest_memory] Failed: { e } " , exc_info = True )
@@ -2803,6 +2805,28 @@ def _parse_node(self, node_data: dict[str, Any]) -> dict[str, Any]:
28032805 if time_field in node and hasattr (node [time_field ], "isoformat" ):
28042806 node [time_field ] = node [time_field ].isoformat ()
28052807
2808+ # Deserialize sources from JSON strings back to dict objects
2809+ if "sources" in node and node .get ("sources" ):
2810+ sources = node ["sources" ]
2811+ if isinstance (sources , list ):
2812+ deserialized_sources = []
2813+ for source_item in sources :
2814+ if isinstance (source_item , str ):
2815+ # Try to parse JSON string
2816+ try :
2817+ parsed = json .loads (source_item )
2818+ deserialized_sources .append (parsed )
2819+ except (json .JSONDecodeError , TypeError ):
2820+ # If parsing fails, wrap the raw string in a simple doc dict
2821+ deserialized_sources .append ({"type" : "doc" , "content" : source_item })
2822+ elif isinstance (source_item , dict ):
2823+ # Already a dict, keep as is
2824+ deserialized_sources .append (source_item )
2825+ else :
2826+ # Unknown type, create a simple dict
2827+ deserialized_sources .append ({"type" : "doc" , "content" : str (source_item )})
2828+ node ["sources" ] = deserialized_sources
2829+
28062830 return {"id" : node .get ("id" ), "memory" : node .get ("memory" , "" ), "metadata" : node }
28072831
28082832 def _parse_node_new (self , node_data : dict [str , Any ]) -> dict [str , Any ]:
@@ -2835,6 +2859,28 @@ def _strip_wrapping_quotes(value: Any) -> Any:
28352859 if time_field in node and hasattr (node [time_field ], "isoformat" ):
28362860 node [time_field ] = node [time_field ].isoformat ()
28372861
2862+ # Deserialize sources from JSON strings back to dict objects
2863+ if "sources" in node and node .get ("sources" ):
2864+ sources = node ["sources" ]
2865+ if isinstance (sources , list ):
2866+ deserialized_sources = []
2867+ for source_item in sources :
2868+ if isinstance (source_item , str ):
2869+ # Try to parse JSON string
2870+ try :
2871+ parsed = json .loads (source_item )
2872+ deserialized_sources .append (parsed )
2873+ except (json .JSONDecodeError , TypeError ):
2874+ # If parsing fails, wrap the raw string in a simple doc dict
2875+ deserialized_sources .append ({"type" : "doc" , "content" : source_item })
2876+ elif isinstance (source_item , dict ):
2877+ # Already a dict, keep as is
2878+ deserialized_sources .append (source_item )
2879+ else :
2880+ # Unknown type, create a simple dict
2881+ deserialized_sources .append ({"type" : "doc" , "content" : str (source_item )})
2882+ node ["sources" ] = deserialized_sources
2883+
28382884 # Do not remove user_name; keep all fields
28392885
28402886 return {"id" : node .pop ("id" ), "memory" : node .pop ("memory" , "" ), "metadata" : node }
0 commit comments