11from fastapi import APIRouter , BackgroundTasks , Depends , HTTPException
22
3- from redis_memory_server import messages
3+ from redis_memory_server import long_term_memory , messages
44from redis_memory_server .config import settings
55from redis_memory_server .logging import get_logger
66from redis_memory_server .models import (
77 AckResponse ,
8+ CreateLongTermMemoryPayload ,
89 GetSessionsQuery ,
10+ LongTermMemoryResultsResponse ,
911 SearchPayload ,
10- SearchResults ,
12+ SessionListResponse ,
1113 SessionMemory ,
1214 SessionMemoryResponse ,
1315)
14- from redis_memory_server .utils import (
15- get_openai_client ,
16- get_redis_conn ,
17- )
16+ from redis_memory_server .utils import get_redis_conn
1817
1918
2019logger = get_logger (__name__ )
2120
2221router = APIRouter ()
2322
2423
25- @router .get ("/sessions/" , response_model = list [ str ] )
24+ @router .get ("/sessions/" , response_model = SessionListResponse )
2625async def list_sessions (
2726 options : GetSessionsQuery = Depends (),
2827):
@@ -35,19 +34,20 @@ async def list_sessions(
3534 Returns:
3635 List of session IDs
3736 """
38- # TODO: Pydantic should validate this
39- if options .page > 100 :
40- raise HTTPException (status_code = 400 , detail = "Page must not exceed 100" )
41-
4237 redis = get_redis_conn ()
4338
44- return await messages .list_sessions (
39+ total , session_ids = await messages .list_sessions (
4540 redis = redis ,
46- page = options .page ,
47- size = options .size ,
41+ limit = options .limit ,
42+ offset = options .offset ,
4843 namespace = options .namespace ,
4944 )
5045
46+ return SessionListResponse (
47+ sessions = session_ids ,
48+ total = total ,
49+ )
50+
5151
5252@router .get ("/sessions/{session_id}/memory" , response_model = SessionMemoryResponse )
5353async def get_session_memory (
@@ -133,12 +133,36 @@ async def delete_session_memory(
133133 return AckResponse (status = "ok" )
134134
135135
136- @router .post ("/messages/search" , response_model = SearchResults )
137- async def messages_search (payload : SearchPayload ):
@router.post("/long-term-memory", response_model=AckResponse)
async def create_long_term_memory(payload: CreateLongTermMemoryPayload):
    """
    Create long-term memories.

    Args:
        payload: Long-term memory payload containing the memories to index

    Returns:
        Acknowledgement response

    Raises:
        HTTPException: 400 if long-term memory is disabled in settings
    """
    # Fail fast if the feature is disabled — no need to acquire a Redis
    # connection first (the original checked settings only after connecting).
    if not settings.long_term_memory:
        raise HTTPException(status_code=400, detail="Long-term memory is disabled")

    redis = get_redis_conn()

    await long_term_memory.index_long_term_memories(
        redis=redis,
        memories=payload.memories,
    )
    return AckResponse(status="ok")
157+
158+
159+ @router .post ("/long-term-memory/search" , response_model = LongTermMemoryResultsResponse )
160+ async def search_long_term_memory (payload : SearchPayload ):
138161 """
139- Run a semantic search on messages
162+ Run a semantic search on long-term memory
140163
141- TODO: Infer topics for `text`
164+ TODO: Infer topics, entities for `text` and attempt to use them
165+ as boosts or filters in the search.
142166
143167 Args:
144168 payload: Search payload
@@ -149,13 +173,9 @@ async def messages_search(payload: SearchPayload):
149173 redis = get_redis_conn ()
150174
151175 if not settings .long_term_memory :
152- raise HTTPException (status_code = 400 , detail = "Long term memory is disabled" )
176+ raise HTTPException(status_code=400, detail="Long-term memory is disabled")
153177
154- # For embeddings, we always use OpenAI models since Anthropic doesn't support embeddings
155- client = await get_openai_client ()
156-
157- return await messages .search_messages (
158- client = client ,
159- redis_conn = redis ,
178+ return await long_term_memory .search_long_term_memories (
179+ redis = redis ,
160180 ** payload .model_dump (exclude_none = True ),
161181 )