 from abc import ABC, abstractmethod
 from collections.abc import Callable
 from datetime import UTC, datetime
+from functools import reduce
 from typing import Any, TypeVar

 from langchain_core.documents import Document
 from langchain_core.embeddings import Embeddings
 from langchain_core.vectorstores import VectorStore
 from langchain_redis.vectorstores import RedisVectorStore
+from redisvl.query import RangeQuery, VectorQuery

 from agent_memory_server.filters import (
     CreatedAt,
@@ -415,6 +417,7 @@ def generate_memory_hash(self, memory: MemoryRecord) -> str:
             A stable hash string
         """
         # Use the same hash logic as long_term_memory.py for consistency
+        # Lazy import to avoid circular dependency
         from agent_memory_server.long_term_memory import generate_memory_hash

         return generate_memory_hash(memory)
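The new comment makes the intent of the deferred import explicit: importing agent_memory_server.long_term_memory at call time rather than at module load sidesteps a circular dependency between the two modules. A minimal sketch of the pattern, using hypothetical modules a and b:

```python
# a.py (hypothetical): imports b at module level
from b import double

BASE_VALUE = 10


def compute() -> int:
    return double() + 1


# b.py (hypothetical): importing a at the top of this file would close the
# cycle a -> b -> a while a is still half-initialized, so the import is
# deferred into the function body instead
def double() -> int:
    from a import BASE_VALUE  # lazy import, resolved only when double() runs

    return BASE_VALUE * 2
```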
@@ -435,11 +438,10 @@ def _apply_client_side_recency_reranking(
             return memory_results

         try:
-            from datetime import UTC as _UTC, datetime as _dt
-
+            # Lazy import to avoid circular dependency
            from agent_memory_server.long_term_memory import rerank_with_recency

-            now = _dt.now(_UTC)
+            now = datetime.now(UTC)
             params = {
                 "semantic_weight": float(recency_params.get("semantic_weight", 0.8))
                 if recency_params
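rerank_with_recency itself is not part of this diff, so the following is only an illustrative sketch of what a recency-weighted rerank score can look like. It reuses the semantic_weight parameter seen above (default 0.8); the exponential decay and the 30-day half-life are assumptions, not values from this codebase:

```python
from datetime import UTC, datetime


def recency_score(
    last_accessed: datetime, now: datetime, half_life_days: float = 30.0
) -> float:
    """Illustrative decay: 1.0 when fresh, halved every half_life_days."""
    age_days = max((now - last_accessed).total_seconds() / 86400.0, 0.0)
    return 0.5 ** (age_days / half_life_days)


def boosted_score(
    semantic: float, last_accessed: datetime, semantic_weight: float = 0.8
) -> float:
    """Blend semantic similarity with recency (sketch, not rerank_with_recency)."""
    now = datetime.now(UTC)
    return semantic_weight * semantic + (1.0 - semantic_weight) * recency_score(
        last_accessed, now
    )
```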
@@ -686,8 +688,6 @@ async def count_memories(
         """Count memories in the vector store using LangChain."""
         try:
             # Convert basic filters to our filter objects, then to backend format
-            from agent_memory_server.filters import Namespace, SessionId, UserId
-
             namespace_filter = Namespace(eq=namespace) if namespace else None
             user_id_filter = UserId(eq=user_id) if user_id else None
             session_id_filter = SessionId(eq=session_id) if session_id else None
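The comment describes the flow here: raw namespace/user_id/session_id strings become filter objects, which are then converted to whatever the backend expects. A small sketch of that wiring, under the assumption that each filter object exposes the same to_filter() conversion used by the Redis-specific count_memories() later in this file:

```python
from agent_memory_server.filters import Namespace, SessionId, UserId

# Hypothetical inputs for illustration only.
namespace, user_id, session_id = "support", "user-123", None

filter_objects = [
    Namespace(eq=namespace) if namespace else None,
    UserId(eq=user_id) if user_id else None,
    SessionId(eq=session_id) if session_id else None,
]

# Drop the unset filters, then convert each one to the backend's native form.
backend_filters = [f.to_filter() for f in filter_objects if f is not None]
```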
@@ -891,12 +891,6 @@ async def _search_with_redis_aggregation(
         Raises:
             Exception: If Redis aggregation fails (caller should handle fallback)
         """
-        from datetime import UTC as _UTC, datetime as _dt
-
-        from langchain_core.documents import Document
-        from redisvl.query import RangeQuery, VectorQuery
-
-        from agent_memory_server.utils.redis_query import RecencyAggregationQuery

         index = self._get_vectorstore_index()
         if index is None:
@@ -923,7 +917,10 @@
         )

         # Aggregate with APPLY/SORTBY boosted score via helper
-        now_ts = int(_dt.now(_UTC).timestamp())
+        # Lazy import to avoid circular dependency
+        from agent_memory_server.utils.redis_query import RecencyAggregationQuery
+
+        now_ts = int(datetime.now(UTC).timestamp())
         agg = (
             RecencyAggregationQuery.from_vector_query(
                 knn, filter_expression=redis_filter
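For context on the KNN query being handed to the aggregation helper, redisvl builds it with VectorQuery (or RangeQuery when results should be limited by a distance threshold), both of which are now imported at module level. A self-contained sketch; the field names, embedding size, and result count below are assumptions, not values taken from this codebase:

```python
from redisvl.query import RangeQuery, VectorQuery

# Placeholder embedding; in the real path this comes from the configured
# Embeddings model. Field names such as "vector" and "text" are assumed.
query_embedding = [0.1] * 1536

knn = VectorQuery(
    vector=query_embedding,
    vector_field_name="vector",
    return_fields=["id_", "text", "namespace", "user_id"],
    num_results=10,
)

# RangeQuery keeps only hits within a vector-distance radius instead of a
# fixed top-k.
range_q = RangeQuery(
    vector=query_embedding,
    vector_field_name="vector",
    distance_threshold=0.2,
)
```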
@@ -1035,8 +1032,6 @@ async def search_memories(
         if len(filters) == 1:
             redis_filter = filters[0]
         else:
-            from functools import reduce
-
             redis_filter = reduce(lambda x, y: x & y, filters)

         # If server-side recency is requested, attempt RedisVL query first (DB-level path)
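With functools.reduce now imported at the top of the module, the combined filter is simply the pairwise AND of every active filter expression. A self-contained sketch using redisvl tag filters (the tag field names are illustrative):

```python
from functools import reduce

from redisvl.query.filter import Tag

# Three independent tag filters; field names are examples only.
filters = [
    Tag("namespace") == "support",
    Tag("user_id") == "user-123",
    Tag("session_id") == "sess-42",
]

# A single filter is used directly; otherwise AND them pairwise, exactly as
# the search_memories() path above does.
redis_filter = (
    filters[0] if len(filters) == 1 else reduce(lambda x, y: x & y, filters)
)
```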
@@ -1179,18 +1174,12 @@ async def count_memories(
         filters = []

         if namespace:
-            from agent_memory_server.filters import Namespace
-
             namespace_filter = Namespace(eq=namespace).to_filter()
             filters.append(namespace_filter)
         if user_id:
-            from agent_memory_server.filters import UserId
-
             user_filter = UserId(eq=user_id).to_filter()
             filters.append(user_filter)
         if session_id:
-            from agent_memory_server.filters import SessionId
-
             session_filter = SessionId(eq=session_id).to_filter()
             filters.append(session_filter)

@@ -1200,8 +1189,6 @@ async def count_memories(
         if len(filters) == 1:
             redis_filter = filters[0]
         else:
-            from functools import reduce
-
             redis_filter = reduce(lambda x, y: x & y, filters)

         # Use the same search method as search_memories but for counting