@@ -127,8 +127,17 @@ def generate_memory_hash(memory: MemoryRecord) -> str:
     Returns:
         A stable hash string
     """
-    # Create a deterministic string representation of the key fields
-    return hashlib.sha256(memory.model_dump_json().encode()).hexdigest()
+    # Create a deterministic string representation of the key content fields only
+    # This ensures merged memories with same content have the same hash
+    content_fields = {
+        "text": memory.text,
+        "user_id": memory.user_id,
+        "session_id": memory.session_id,
+        "namespace": memory.namespace,
+        "memory_type": memory.memory_type,
+    }
+    content_json = json.dumps(content_fields, sort_keys=True)
+    return hashlib.sha256(content_json.encode()).hexdigest()
 
 
 async def merge_memories_with_llm(
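The hunk above narrows the hash to content-bearing fields, so records that differ only in ids or timestamps but carry the same content collapse to one hash. A minimal, self-contained sketch of the same idea follows; the field names come from the diff, but the dataclass stand-in, `content_hash` name, and example values are illustrative only, not the project's actual model:

```python
import hashlib
import json
from dataclasses import dataclass


@dataclass
class MemoryRecord:
    # Stand-in for the real model; only the fields that feed the hash are shown.
    text: str
    user_id: str | None = None
    session_id: str | None = None
    namespace: str | None = None
    memory_type: str = "semantic"


def content_hash(memory: MemoryRecord) -> str:
    # Hash only the content fields so merged copies collapse to a single hash.
    content_fields = {
        "text": memory.text,
        "user_id": memory.user_id,
        "session_id": memory.session_id,
        "namespace": memory.namespace,
        "memory_type": memory.memory_type,
    }
    # sort_keys makes the JSON, and therefore the digest, deterministic.
    return hashlib.sha256(
        json.dumps(content_fields, sort_keys=True).encode()
    ).hexdigest()


# Records that differ only in fields outside the hash (ids, timestamps, etc.)
# produce the same digest:
a = MemoryRecord(text="User prefers dark mode", user_id="u1")
b = MemoryRecord(text="User prefers dark mode", user_id="u1")
assert content_hash(a) == content_hash(b)
```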
@@ -382,14 +391,23 @@ async def compact_long_term_memories(
                         # and delete the rest
                         memories_to_delete = []
 
-                    for j in range(1, len(search_results), 2):
+                    # Each memory result has: key + 6 field-value pairs = 13 elements
+                    # Keys are at positions: 1, 14, 27, ... (1 + n * 13)
+                    elements_per_memory = 1 + 6 * 2  # key + 6 field-value pairs
+                    for n in range(num_duplicates):
+                        key_index = 1 + n * elements_per_memory
                             # Skip the last item (newest) which we'll keep
-                        if (
-                            j < (int(num_duplicates) - 1) * 2 + 1
-                            and search_results[j] is not None
+                        if n < num_duplicates - 1 and key_index < len(
+                            search_results
                         ):
-                            key = search_results[j].decode()
-                            memories_to_delete.append(key)
+                            key = search_results[key_index]
+                            if key is not None:
+                                key_str = (
+                                    key.decode()
+                                    if isinstance(key, bytes)
+                                    else key
+                                )
+                                memories_to_delete.append(key_str)
 
                         # Delete older duplicates
                         if memories_to_delete:
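The stride arithmetic above assumes the flat FT.SEARCH-style reply layout described in the new comments: element 0 is the match count, and each hit contributes its key followed by six field-value pairs, i.e. 13 elements per hit. A toy sketch of that indexing follows; the reply builder, function names, and keys are invented for illustration and are not part of the project's API:

```python
# Build a toy, flat reply in the shape the loop assumes:
# [total, key, f1, v1, ..., f6, v6, key, f1, v1, ...]
def fake_search_reply(num_docs: int, fields_per_doc: int = 6) -> list:
    reply: list = [num_docs]
    for n in range(num_docs):
        reply.append(f"memory:{n}".encode())  # document key (bytes, like redis-py)
        for f in range(fields_per_doc):
            reply.extend([f"field{f}".encode(), f"value{f}".encode()])
    return reply


def older_duplicate_keys(reply: list, num_duplicates: int) -> list[str]:
    """Collect every key except the last (newest) hit, using the 13-element stride."""
    elements_per_memory = 1 + 6 * 2  # key + 6 field-value pairs
    keys: list[str] = []
    for n in range(num_duplicates - 1):  # skip the final, newest entry
        key_index = 1 + n * elements_per_memory
        if key_index < len(reply) and reply[key_index] is not None:
            key = reply[key_index]
            keys.append(key.decode() if isinstance(key, bytes) else key)
    return keys


reply = fake_search_reply(3)
print(older_duplicate_keys(reply, num_duplicates=3))  # ['memory:0', 'memory:1']
```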
@@ -501,7 +519,7 @@ async def compact_long_term_memories(
                     discrete_memory_extracted=memory_result.discrete_memory_extracted,  # type: ignore
                 )
 
-                # Add this memory to processed list
+                # Add this memory to processed list BEFORE processing to prevent cycles
                 processed_ids.add(memory_id)
 
                 # Check for semantic duplicates
@@ -530,6 +548,8 @@ async def compact_long_term_memories(
                     redis_client=redis_client,
                     deduplicate=False,  # Already deduplicated
                 )
+                # Mark the merged memory as processed to prevent cycles
+                processed_ids.add(merged_memory.id)
                 logger.info(
                     f"Completed semantic deduplication. Merged {semantic_memories_merged} memories."
                 )
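The `processed_ids` bookkeeping above is meant to stop a memory, and the record produced by merging it, from being picked up again within the same compaction pass. A stripped-down sketch of that guard follows; exact-text matching stands in for the real vector search and LLM merge, and every name here is hypothetical:

```python
import uuid


def dedup_pass(memories: list[dict]) -> list[dict]:
    """Toy semantic-dedup pass: 'duplicates' are records with identical text."""
    processed_ids: set[str] = set()
    results: list[dict] = []
    for memory in memories:
        if memory["id"] in processed_ids:
            continue  # already folded into an earlier merge
        # Mark this memory as processed BEFORE merging, so it is never revisited.
        processed_ids.add(memory["id"])
        duplicates = [
            m
            for m in memories
            if m["id"] not in processed_ids and m["text"] == memory["text"]
        ]
        if duplicates:
            merged = {"id": str(uuid.uuid4()), "text": memory["text"]}
            # Mark the inputs and the merged record itself as processed; in the
            # real flow the merged record is re-indexed, so without this it
            # could be re-merged by a later pass.
            processed_ids.update(m["id"] for m in duplicates)
            processed_ids.add(merged["id"])
            results.append(merged)
        else:
            results.append(memory)
    return results


print(dedup_pass([
    {"id": "a", "text": "likes tea"},
    {"id": "b", "text": "likes tea"},
    {"id": "c", "text": "prefers coffee"},
]))
```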