"""
Example: Using Neo4j connection pooling to reduce connection overhead.

This example demonstrates how to use the neo4j-pooled backend to share
database connections across multiple users/memory instances.
"""

from memos.configs.mem_cube import GeneralMemCubeConfig
from memos.graph_dbs.connection_pool import connection_pool
from memos.mem_cube.general import GeneralMemCube

def create_user_cube(
    user_id: str,
    openai_api_key: str,
    *,
    neo4j_uri: str = "bolt://localhost:7687",
    neo4j_user: str = "neo4j",
    neo4j_password: str = "12345678",
    db_name: str = "shared_memos",
) -> GeneralMemCube:
    """Create a memory cube for a user using pooled Neo4j connections.

    Cubes built with the same connection parameters share one underlying
    driver via the ``neo4j-pooled`` backend, so creating many per-user
    cubes does not open one database connection per user.

    Args:
        user_id: Unique user identifier; used to namespace both the cube id
            and the user's nodes inside the shared database.
        openai_api_key: API key used by the extractor and dispatcher LLMs.
        neo4j_uri: Bolt URI of the Neo4j server.
        neo4j_user: Neo4j login user.
        neo4j_password: Neo4j login password. The default is a demo value —
            do not hard-code real credentials in production code.
        db_name: Name of the shared logical database (single-DB multi-tenant
            mode; see ``use_multi_db`` below).

    Returns:
        A configured ``GeneralMemCube`` backed by the pooled graph store.
    """
    config = GeneralMemCubeConfig(
        cube_id=f"user_{user_id}",
        text_mem={
            "backend": "tree_text",
            "config": {
                "extractor_llm": {
                    "backend": "openai",
                    "config": {
                        "api_key": openai_api_key,
                        "model_name": "gpt-4o-mini",
                    },
                },
                "dispatcher_llm": {
                    "backend": "openai",
                    "config": {
                        "api_key": openai_api_key,
                        "model_name": "gpt-4o-mini",
                    },
                },
                "graph_db": {
                    # The pooled backend shares one driver across every cube
                    # created with the same URI/credentials.
                    "backend": "neo4j-pooled",
                    "config": {
                        "uri": neo4j_uri,
                        "user": neo4j_user,
                        "password": neo4j_password,
                        "db_name": db_name,
                        # Single shared DB; tenants are separated by user_name.
                        "user_name": f"user_{user_id}",
                        "use_multi_db": False,
                        "auto_create": True,
                        # NOTE(review): all-mpnet-base-v2 produces 768-dim
                        # embeddings — confirm 3072 matches the embedder used.
                        "embedding_dimension": 3072,
                    },
                },
                "embedder": {
                    "backend": "sentence_transformer",
                    "config": {
                        "model_name_or_path": "sentence-transformers/all-mpnet-base-v2"
                    },
                },
                "reorganize": False,
            },
        },
    )

    return GeneralMemCube(config)
| 58 | + |
| 59 | + |
def main():
    """Demonstrate connection pooling with multiple users."""

    # Replace with your actual OpenAI API key
    api_key = "your-openai-api-key-here"

    print("=== Neo4j Connection Pooling Demo ===")
    print(f"Initial connections: {connection_pool.get_active_connections()}")

    user_ids = ["alice", "bob", "charlie"]
    user_cubes = {}

    # Build one cube per user; the pooled backend keeps the active
    # connection count flat as cubes are added.
    for uid in user_ids:
        print(f"\nCreating cube for user: {uid}")
        user_cubes[uid] = create_user_cube(uid, api_key)
        print(f"Active connections: {connection_pool.get_active_connections()}")

    sample_memories = {
        "alice": "Alice loves hiking in the mountains.",
        "bob": "Bob is a software engineer who enjoys cooking.",
        "charlie": "Charlie plays guitar and loves jazz music.",
    }

    print("\n=== Adding memories ===")
    for uid, text in sample_memories.items():
        cube = user_cubes[uid]
        if not cube.text_mem:
            continue
        cube.text_mem.add(text)
        print(f"Added memory for {uid}")

    print("\n=== Searching memories ===")
    for uid in user_ids:
        cube = user_cubes[uid]
        if not cube.text_mem:
            continue
        hits = cube.text_mem.search("hobbies", top_k=1)
        if hits:
            print(f"{uid}'s memory: {hits[0].memory}")

    print(f"\nFinal active connections: {connection_pool.get_active_connections()}")
    print("Note: All users share the same database connection!")


if __name__ == "__main__":
    main()