
Commit 6f66aef

Dev zdy 1205 (#623)
* update connection pool (lower maxconn from 2000 to 100)
* fix _convert_graph_edges returning the wrong shape when core_node is missing
* fix get_relevant_subgraph handling of subgraphs with no core node
* add logging (and replace stray print calls with logger.info)
1 parent: a52a9e8

File tree

2 files changed: +17 additions, -29 deletions

src/memos/graph_dbs/polardb.py

Lines changed: 13 additions & 4 deletions
@@ -151,7 +151,7 @@ def __init__(self, config: PolarDBGraphDBConfig):
         # Create connection pool
         self.connection_pool = psycopg2.pool.ThreadedConnectionPool(
             minconn=5,
-            maxconn=2000,
+            maxconn=100,
             host=host,
             port=port,
             user=user,
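The first hunk lowers the connection pool ceiling from 2000 to 100 concurrent PolarDB connections. As a rough sketch of how such a psycopg2 pool is acquired and released (the connection parameters below are placeholders, not the repository's configuration):

import psycopg2.pool

# Placeholder connection parameters for illustration only.
pool = psycopg2.pool.ThreadedConnectionPool(
    minconn=5,
    maxconn=100,  # hard cap: getconn() raises PoolError once 100 connections are checked out
    host="localhost",
    port=5432,
    user="postgres",
    password="secret",
    dbname="memos",
)

conn = pool.getconn()
try:
    with conn.cursor() as cursor:
        cursor.execute("SELECT 1")
        print(cursor.fetchone())
finally:
    pool.putconn(conn)  # always hand the connection back to the pool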
@@ -1338,6 +1338,7 @@ def get_subgraph(
                 "edges": [...]
             }
         """
+        logger.info(f"[get_subgraph] center_id: {center_id}")
         if not 1 <= depth <= 5:
             raise ValueError("depth must be 1-5")

@@ -1375,6 +1376,7 @@ def get_subgraph(
         $$ ) as (centers agtype, neighbors agtype, rels agtype);
         """
         conn = self._get_connection()
+        logger.info(f"[get_subgraph] Query: {query}")
         try:
             with conn.cursor() as cursor:
                 cursor.execute(query)
@@ -1746,6 +1748,7 @@ def search_by_embedding(

         # Build filter conditions using common method
         filter_conditions = self._build_filter_conditions_sql(filter)
+        logger.info(f"[search_by_embedding] filter_conditions: {filter_conditions}")
         where_clauses.extend(filter_conditions)

         where_clause = f"WHERE {' AND '.join(where_clauses)}" if where_clauses else ""
@@ -1918,7 +1921,7 @@ def get_by_metadata(
             knowledgebase_ids=knowledgebase_ids,
             default_user_name=self._get_config_value("user_name"),
         )
-        print(f"[111get_by_metadata] user_name_conditions: {user_name_conditions}")
+        logger.info(f"[get_by_metadata] user_name_conditions: {user_name_conditions}")

         # Add user_name WHERE clause
         if user_name_conditions:
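This hunk also swaps a stray print for logger.info, so the trace goes through whatever handlers and level the application configures instead of always landing on stdout. A small sketch of the difference, assuming a standard logging setup:

import logging

logging.basicConfig(level=logging.INFO, format="%(asctime)s %(name)s %(levelname)s %(message)s")
logger = logging.getLogger("memos.graph_dbs.polardb")

user_name_conditions = ["n.user_name = 'alice'"]  # hypothetical value for illustration

# print: unconditional, unformatted, always on stdout
print(f"[get_by_metadata] user_name_conditions: {user_name_conditions}")

# logger.info: filterable by level, routed and formatted by the configured handlers
logger.info(f"[get_by_metadata] user_name_conditions: {user_name_conditions}")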
@@ -1929,6 +1932,7 @@ def get_by_metadata(

         # Build filter conditions using common method
         filter_where_clause = self._build_filter_conditions_cypher(filter)
+        logger.info(f"[get_by_metadata] filter_where_clause: {filter_where_clause}")

         where_str = " AND ".join(where_conditions) + filter_where_clause

@@ -2393,6 +2397,7 @@ def get_all_memory_items(

         # Build filter conditions using common method
         filter_where_clause = self._build_filter_conditions_cypher(filter)
+        logger.info(f"[get_all_memory_items] filter_where_clause: {filter_where_clause}")

         # Use cypher query to retrieve memory items
         if include_embedding:
@@ -2426,6 +2431,7 @@ def get_all_memory_items(
         nodes = []
         node_ids = set()
         conn = self._get_connection()
+        logger.info(f"[get_all_memory_items] cypher_query: {cypher_query}")
         try:
             with conn.cursor() as cursor:
                 cursor.execute(cypher_query)
@@ -3456,7 +3462,11 @@ def _convert_graph_edges(self, core_node: dict) -> dict:
         id_map = {}
         core_node = data.get("core_node", {})
         if not core_node:
-            return core_node
+            return {
+                "core_node": None,
+                "neighbors": data.get("neighbors", []),
+                "edges": data.get("edges", []),
+            }
         core_meta = core_node.get("metadata", {})
         if "graph_id" in core_meta and "id" in core_node:
             id_map[core_meta["graph_id"]] = core_node["id"]
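Previously the early return handed back whatever falsy value core_node held (an empty dict), which can break callers that index the result by key. The fix keeps the result shape stable by returning the three keys the rest of the pipeline expects. A standalone sketch of that guard, with the remapping body elided:

def convert_graph_edges_sketch(data: dict) -> dict:
    """Illustrative stand-in for PolarDBGraphDB._convert_graph_edges."""
    core_node = data.get("core_node", {})
    if not core_node:
        # Keep the expected shape even when no core node was found.
        return {
            "core_node": None,
            "neighbors": data.get("neighbors", []),
            "edges": data.get("edges", []),
        }
    # ... remap metadata graph_id values onto node ids for the core node,
    # neighbors, and edges (elided) ...
    return data

result = convert_graph_edges_sketch({"neighbors": [], "edges": []})
assert result["core_node"] is None and result["edges"] == []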
@@ -3507,7 +3517,6 @@ def _build_user_name_and_kb_ids_conditions_cypher(
         """
         user_name_conditions = []
         effective_user_name = user_name if user_name else default_user_name
-        print(f"[delete_node_by_prams] effective_user_name: {effective_user_name}")

         if effective_user_name:
             escaped_user_name = effective_user_name.replace("'", "''")

src/memos/memories/textual/tree.py

Lines changed: 4 additions & 25 deletions
@@ -262,15 +262,16 @@ def get_relevant_subgraph(
             )

             if subgraph is None or not subgraph["core_node"]:
-                logger.info(f"Skipping node {core_id} (inactive or not found).")
-                continue
+                node = self.graph_store.get_node(core_id, user_name=user_name)
+                subgraph["neighbors"] = [node]

             core_node = subgraph["core_node"]
             neighbors = subgraph["neighbors"]
             edges = subgraph["edges"]

             # Collect nodes
-            all_nodes[core_node["id"]] = core_node
+            if core_node:
+                all_nodes[core_node["id"]] = core_node
             for n in neighbors:
                 all_nodes[n["id"]] = n

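tree.py stops skipping subgraphs that come back without a core node: the node is now fetched directly and surfaced as the only neighbor, and the core node is only collected when it is actually present. A hedged sketch of that flow; the get_subgraph and get_node signatures are assumed from the two diffs, and the extra None guards are added here only so the sketch stays safe on its own:

def collect_relevant_nodes(graph_store, core_ids, user_name, depth=2):
    all_nodes = {}
    for core_id in core_ids:
        subgraph = graph_store.get_subgraph(center_id=core_id, depth=depth)
        if subgraph is None:
            # Added for this sketch only: keep the expected shape when nothing is returned.
            subgraph = {"core_node": None, "neighbors": [], "edges": []}

        if not subgraph["core_node"]:
            # Fallback from the commit: look the node up directly and
            # treat it as the lone neighbor instead of skipping it.
            node = graph_store.get_node(core_id, user_name=user_name)
            subgraph["neighbors"] = [node] if node else []

        core_node = subgraph["core_node"]
        if core_node:
            all_nodes[core_node["id"]] = core_node
        for n in subgraph["neighbors"]:
            all_nodes[n["id"]] = n
    return all_nodes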
@@ -339,28 +340,6 @@ def delete_all(self) -> None:
             logger.error(f"An error occurred while deleting all memories: {e}")
             raise

-    def delete_by_filter(
-        self,
-        writable_cube_ids: list[str],
-        memory_ids: list[str] | None = None,
-        file_ids: list[str] | None = None,
-        filter: dict | None = None,
-    ) -> int:
-        """Delete memories by filter.
-        Returns:
-            int: Number of nodes deleted.
-        """
-        try:
-            return self.graph_store.delete_node_by_prams(
-                writable_cube_ids=writable_cube_ids,
-                memory_ids=memory_ids,
-                file_ids=file_ids,
-                filter=filter,
-            )
-        except Exception as e:
-            logger.error(f"An error occurred while deleting memories by filter: {e}")
-            raise
-
     def load(self, dir: str) -> None:
         try:
             memory_file = os.path.join(dir, self.config.memory_filename)
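The delete_by_filter wrapper is removed from tree.py; its body was a thin pass-through to the graph store. If equivalent behaviour is still needed at a call site, a hedged sketch of the direct call (parameter names are taken from the removed method; delete_node_by_prams is the graph-store method it delegated to):

import logging

logger = logging.getLogger(__name__)

def delete_memories_by_filter(
    graph_store,
    writable_cube_ids: list[str],
    memory_ids: list[str] | None = None,
    file_ids: list[str] | None = None,
    filter: dict | None = None,
) -> int:
    """Sketch of the removed wrapper: delegate deletion to the graph store."""
    try:
        return graph_store.delete_node_by_prams(
            writable_cube_ids=writable_cube_ids,
            memory_ids=memory_ids,
            file_ids=file_ids,
            filter=filter,
        )
    except Exception as e:
        logger.error(f"An error occurred while deleting memories by filter: {e}")
        raise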
