11import json
22import os
33
4- from datetime import datetime
4+ from datetime import datetime , timezone
55
66import numpy as np
77
@@ -191,7 +191,9 @@ def example_shared_db(db_name: str = "shared-traval-group"):
191191 "user_name" : os .getenv ("NEBULAR_USER" , "root" ),
192192 "password" : os .getenv ("NEBULAR_PASSWORD" , "xxxxxx" ),
193193 "space" : db_name ,
194+ "auto_create" : True ,
194195 "embedding_dimension" : 3072 ,
196+ "use_multi_db" : False ,
195197 },
196198 )
197199 graph_alice = GraphStoreFactory .from_config (config_alice )
@@ -200,9 +202,171 @@ def example_shared_db(db_name: str = "shared-traval-group"):
200202 print (str (graph_alice .get_node (node ["id" ]))[:1000 ])
201203
202204
def run_user_session(
    user_name: str,
    db_name: str,
    topic_text: str,
    concept_texts: list[str],
    fact_texts: list[str],
):
    """Build, query, and mutate one user's memory graph in a shared space.

    Walks the full GraphStore lifecycle against a shared NebulaGraph space
    (``use_multi_db=False``): node/edge creation, embedding search, tag
    lookup, child traversal, subgraph export, grouped stats, updates,
    deletion, and whole-graph export/import.

    Args:
        user_name: Display name used only in progress output.
        db_name: NebulaGraph space shared by all users of this example.
        topic_text: Memory text for the single root "topic" node.
        concept_texts: Texts for concept nodes linked under the topic.
            Must be non-empty — facts are distributed round-robin over
            the created concepts.
        fact_texts: Texts for fact nodes attached to the concepts.

    Raises:
        ValueError: If ``concept_texts`` is empty (the round-robin
            assignment below would otherwise divide by zero).
    """
    if not concept_texts:
        # Facts are assigned via `i % len(concept_items)`; an empty concept
        # list would raise ZeroDivisionError deep in the loop, so fail fast.
        raise ValueError("concept_texts must contain at least one entry")

    print(f"\n=== {user_name} starts building their memory graph ===")

    # Manually initialize correct GraphDB class.
    # NOTE: the default for NEBULAR_HOSTS must be valid JSON — the previous
    # default "localhost" made json.loads() raise when the env var was unset.
    config = GraphDBConfigFactory(
        backend="nebular",
        config={
            "hosts": json.loads(os.getenv("NEBULAR_HOSTS", '["localhost"]')),
            "user_name": os.getenv("NEBULAR_USER", "root"),
            "password": os.getenv("NEBULAR_PASSWORD", "xxxxxx"),
            "space": db_name,
            "auto_create": True,
            "embedding_dimension": 3072,
            "use_multi_db": False,
        },
    )
    graph = GraphStoreFactory.from_config(config)

    # Start with a clean slate for this user
    graph.clear()

    # Timezone-aware timestamp shared by every node created in this session.
    now = datetime.now(timezone.utc).isoformat()

    # === Step 1: Create a root topic node (e.g., user's research focus) ===
    topic = TextualMemoryItem(
        memory=topic_text,
        metadata=TreeNodeTextualMemoryMetadata(
            memory_type="LongTermMemory",
            key="Research Topic",
            hierarchy_level="topic",
            type="fact",
            memory_time="2024-01-01",
            status="activated",
            visibility="public",
            updated_at=now,
            embedding=embed_memory_item(topic_text),
        ),
    )
    graph.add_node(topic.id, topic.memory, topic.metadata.model_dump(exclude_none=True))

    # === Step 2: Create concept nodes linked to the topic ===
    concept_items = []
    for i, text in enumerate(concept_texts):
        concept = TextualMemoryItem(
            memory=text,
            metadata=TreeNodeTextualMemoryMetadata(
                memory_type="LongTermMemory",
                key=f"Concept {i + 1}",
                hierarchy_level="concept",
                type="fact",
                memory_time="2024-01-01",
                status="activated",
                visibility="public",
                updated_at=now,
                embedding=embed_memory_item(text),
                tags=["concept"],
                confidence=90 + i,
            ),
        )
        graph.add_node(concept.id, concept.memory, concept.metadata.model_dump(exclude_none=True))
        graph.add_edge(topic.id, concept.id, type="PARENT")
        concept_items.append(concept)

    # === Step 3: Create supporting facts under each concept ===
    for i, text in enumerate(fact_texts):
        fact = TextualMemoryItem(
            memory=text,
            metadata=TreeNodeTextualMemoryMetadata(
                memory_type="WorkingMemory",
                key=f"Fact {i + 1}",
                hierarchy_level="fact",
                type="fact",
                memory_time="2024-01-01",
                status="activated",
                visibility="public",
                updated_at=now,
                embedding=embed_memory_item(text),
                confidence=85.0,
                tags=["fact"],
            ),
        )
        graph.add_node(fact.id, fact.memory, fact.metadata.model_dump(exclude_none=True))
        # Round-robin: distribute facts evenly across the concept nodes.
        graph.add_edge(concept_items[i % len(concept_items)].id, fact.id, type="PARENT")

    # === Step 4: Retrieve memory using semantic search ===
    vector = embed_memory_item("How is memory retrieved?")
    search_result = graph.search_by_embedding(vector, top_k=2)
    for r in search_result:
        node = graph.get_node(r["id"])
        print("🔍 Search result:", node["memory"])

    # === Step 5: Tag-based neighborhood discovery ===
    neighbors = graph.get_neighbors_by_tag(["concept"], exclude_ids=[], top_k=2)
    print("📎 Tag-related nodes:", [neighbor["memory"] for neighbor in neighbors])

    # === Step 6: Retrieve children (facts) of first concept ===
    children = graph.get_children_with_embeddings(concept_items[0].id)
    print("📍 Children of concept:", [child["memory"] for child in children])

    # === Step 7: Export a local subgraph and grouped statistics ===
    subgraph = graph.get_subgraph(topic.id, depth=2)
    print("📌 Subgraph node count:", len(subgraph["neighbors"]))

    stats = graph.get_grouped_counts(["memory_type", "status"])
    print("📊 Grouped counts:", stats)

    # === Step 8: Demonstrate updates and cleanup ===
    graph.update_node(
        concept_items[0].id, {"confidence": 99.0, "created_at": "2025-07-24T20:11:56.375687"}
    )
    graph.remove_oldest_memory("WorkingMemory", keep_latest=1)
    graph.delete_edge(topic.id, concept_items[0].id, type="PARENT")
    graph.delete_node(concept_items[1].id)

    # === Step 9: Export and re-import the entire graph structure ===
    exported = graph.export_graph()
    graph.import_graph(exported)
    print("📦 Graph exported and re-imported, total nodes:", len(exported["nodes"]))
330+
331+
def example_complex_shared_db(db_name: str = "shared-traval-group-complex"):
    """Run two independent user sessions against one shared graph space.

    Alice's session covers structured memory for LLMs; Bob's covers
    GNN-based reasoning. Both write into the same NebulaGraph space.
    """
    sessions = [
        # User 1: Alice explores structured memory for LLMs
        {
            "user_name": "alice",
            "topic_text": "Alice studies structured memory and long-term memory optimization in LLMs.",
            "concept_texts": [
                "Short-term memory can be simulated using WorkingMemory blocks.",
                "A structured memory graph improves retrieval precision for agents.",
            ],
            "fact_texts": [
                "Embedding search is used to find semantically similar memory items.",
                "User memories are stored as node-edge structures that support hierarchical reasoning.",
            ],
        },
        # User 2: Bob focuses on GNN-based reasoning
        {
            "user_name": "bob",
            "topic_text": "Bob investigates how graph neural networks can support knowledge reasoning.",
            "concept_texts": [
                "GNNs can learn high-order relations among entities.",
                "Attention mechanisms in graphs improve inference precision.",
            ],
            "fact_texts": [
                "GAT outperforms GCN in graph classification tasks.",
                "Multi-hop reasoning helps answer complex queries.",
            ],
        },
    ]
    for session in sessions:
        run_user_session(db_name=db_name, **session)
362+
363+
if __name__ == "__main__":
    # (label, example function, target NebulaGraph space) — run in order.
    examples = [
        ("Multi-DB", example_multi_db, "paper"),
        ("Single-DB", example_shared_db, "shared_traval_group"),
        ("Single-DB-Complex", example_complex_shared_db, "shared-traval-group-complex-new"),
    ]
    for label, example_fn, space in examples:
        print(f"\n=== Example: {label} ===")
        example_fn(db_name=space)
0 commit comments