-
Notifications
You must be signed in to change notification settings - Fork 45
fix: forward hybrid/keyword search params through all search paths and update docs #251
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.
Already on GitHub? Sign in to your account
base: main
Are you sure you want to change the base?
Changes from 5 commits
07a9ce1
9178c57
67ae703
e47412f
4ac1b79
6d175bf
eb2e92e
436b442
49008ec
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -1386,13 +1386,13 @@ async def handle_tool_calls(client, tool_calls): | |
| "type": "function", | ||
| "function": { | ||
| "name": "search_memory", | ||
| "description": "Search long-term memory for relevant information using semantic vector search. Use this when you need to find previously stored information about the user, such as their preferences, past conversations, or important facts. Examples: 'Find information about user food preferences', 'What did they say about their job?', 'Look for travel preferences'. This searches only long-term memory, not current working memory - use get_working_memory for current session info. IMPORTANT: The result includes 'memories' with an 'id' field; use these IDs when calling edit_long_term_memory or delete_long_term_memories.", | ||
| "description": "Search long-term memory for relevant information using semantic, keyword, or hybrid search. Use this when you need to find previously stored information about the user, such as their preferences, past conversations, or important facts. Examples: 'Find information about user food preferences', 'What did they say about their job?', 'Look for travel preferences'. This searches only long-term memory, not current working memory - use get_or_create_working_memory for current session info. IMPORTANT: The result includes 'memories' with an 'id' field; use these IDs when calling edit_long_term_memory or delete_long_term_memories.", | ||
| "parameters": { | ||
nkanu17 marked this conversation as resolved.
Show resolved
Hide resolved
|
||
| "type": "object", | ||
| "properties": { | ||
nkanu17 marked this conversation as resolved.
Show resolved
Hide resolved
|
||
| "query": { | ||
| "type": "string", | ||
| "description": "The query for vector search describing what information you're looking for", | ||
| "description": "The search query describing what information you're looking for", | ||
| }, | ||
| "search_mode": { | ||
| "type": "string", | ||
|
|
@@ -1875,7 +1875,7 @@ def get_update_memory_data_tool_schema(cls) -> ToolSchema: | |
| "type": "function", | ||
| "function": { | ||
| "name": "update_working_memory_data", | ||
| "description": "Store or update structured session data (JSON objects) in working memory. Use this for complex session-specific information that needs to be accessed and modified during the conversation. Examples: Travel itinerary {'destination': 'Paris', 'dates': ['2024-03-15', '2024-03-20']}, project details {'name': 'Website Redesign', 'deadline': '2024-04-01', 'status': 'in_progress'}. Different from add_memory_to_working_memory which stores simple text facts.", | ||
| "description": "Store or update structured session data (JSON objects) in working memory. Use this for complex session-specific information that needs to be accessed and modified during the conversation. Examples: Travel itinerary {'destination': 'Paris', 'dates': ['2024-03-15', '2024-03-20']}, project details {'name': 'Website Redesign', 'deadline': '2024-04-01', 'status': 'in_progress'}. Different from lazily_create_long_term_memory which stores simple text facts for later promotion to long-term storage.", | ||
| "parameters": { | ||
| "type": "object", | ||
| "properties": { | ||
|
|
@@ -2692,9 +2692,15 @@ async def _resolve_search_memory(self, args: dict[str, Any]) -> dict[str, Any]: | |
| max_results = args.get("max_results", 5) | ||
| min_relevance = args.get("min_relevance") | ||
| user_id = args.get("user_id") | ||
| search_mode = args.get("search_mode", "semantic") | ||
| hybrid_alpha = args.get("hybrid_alpha") | ||
| text_scorer = args.get("text_scorer") | ||
nkanu17 marked this conversation as resolved.
Show resolved
Hide resolved
nkanu17 marked this conversation as resolved.
Show resolved
Hide resolved
|
||
|
|
||
| return await self.search_memory_tool( | ||
| query=query, | ||
| search_mode=search_mode, | ||
| hybrid_alpha=hybrid_alpha, | ||
| text_scorer=text_scorer, | ||
nkanu17 marked this conversation as resolved.
Show resolved
Hide resolved
|
||
| topics=topics, | ||
nkanu17 marked this conversation as resolved.
Show resolved
Hide resolved
|
||
| entities=entities, | ||
| memory_type=memory_type, | ||
|
|
@@ -2706,7 +2712,7 @@ async def _resolve_search_memory(self, args: dict[str, Any]) -> dict[str, Any]: | |
| async def _resolve_get_working_memory( | ||
| self, session_id: str, namespace: str | None, user_id: str | None = None | ||
| ) -> dict[str, Any]: | ||
| """Resolve get_working_memory function call.""" | ||
| """Resolve get_working_memory (deprecated) function call.""" | ||
| return await self.get_working_memory_tool( | ||
| session_id=session_id, | ||
| namespace=namespace, | ||
|
|
@@ -2731,7 +2737,7 @@ async def _resolve_add_memory( | |
| namespace: str | None, | ||
| user_id: str | None = None, | ||
| ) -> dict[str, Any]: | ||
| """Resolve add_memory_to_working_memory function call.""" | ||
| """Resolve lazily_create_long_term_memory (formerly add_memory_to_working_memory) function call.""" | ||
| text = args.get("text", "") | ||
| if not text: | ||
| raise ValueError("Text parameter is required for adding memory") | ||
|
|
@@ -2790,11 +2796,11 @@ async def _resolve_get_long_term_memory( | |
| async def _resolve_create_long_term_memory( | ||
| self, args: dict[str, Any], namespace: str | None, user_id: str | None = None | ||
| ) -> dict[str, Any]: | ||
| """Resolve create_long_term_memory function call.""" | ||
| """Resolve eagerly_create_long_term_memory (and deprecated create_long_term_memory alias) function call.""" | ||
| memories_data = args.get("memories") | ||
| if not memories_data: | ||
| raise ValueError( | ||
| "memories parameter is required for create_long_term_memory" | ||
| "memories parameter is required for eagerly_create_long_term_memory" | ||
| ) | ||
|
||
|
|
||
| # Convert dict memories to ClientMemoryRecord objects | ||
|
|
@@ -2907,7 +2913,7 @@ async def resolve_function_calls( | |
| # Handle multiple function calls | ||
| calls = [ | ||
| {"name": "search_memory", "arguments": {"query": "user preferences"}}, | ||
| {"name": "get_working_memory", "arguments": {}}, | ||
| {"name": "get_or_create_working_memory", "arguments": {}}, | ||
| ] | ||
|
|
||
| results = await client.resolve_function_calls(calls, "session123") | ||
|
|
@@ -3392,6 +3398,9 @@ async def hydrate_memory_prompt( | |
| user_id: dict[str, Any] | None = None, | ||
| distance_threshold: float | None = None, | ||
| memory_type: dict[str, Any] | None = None, | ||
| search_mode: SearchModeEnum | str = SearchModeEnum.SEMANTIC, | ||
| hybrid_alpha: float | None = None, | ||
| text_scorer: str | None = None, | ||
| limit: int = 10, | ||
| offset: int = 0, | ||
| optimize_query: bool = False, | ||
|
|
@@ -3403,7 +3412,7 @@ async def hydrate_memory_prompt( | |
| long-term memory search with the specified filters. | ||
|
|
||
| Args: | ||
| query: The query for vector search to find relevant context for | ||
| query: The search query to find relevant context for | ||
| session_id: Optional session ID filter (as dict) | ||
| namespace: Optional namespace filter (as dict) | ||
| topics: Optional topics filter (as dict) | ||
|
|
@@ -3413,6 +3422,9 @@ async def hydrate_memory_prompt( | |
| user_id: Optional user ID filter (as dict) | ||
| distance_threshold: Optional distance threshold | ||
| memory_type: Optional memory type filter (as dict) | ||
| search_mode: Search strategy to use ("semantic", "keyword", or "hybrid") | ||
| hybrid_alpha: Optional weight for vector similarity in hybrid search (0.0-1.0) | ||
| text_scorer: Optional Redis full-text scoring algorithm for keyword and hybrid search | ||
| limit: Maximum number of long-term memories to include | ||
| offset: Offset for pagination (default: 0) | ||
| optimize_query: Whether to optimize the query for vector search using a fast model (default: False) | ||
nkanu17 marked this conversation as resolved.
Outdated
Show resolved
Hide resolved
|
||
|
|
@@ -3443,6 +3455,16 @@ async def hydrate_memory_prompt( | |
| long_term_search["distance_threshold"] = distance_threshold | ||
| if memory_type is not None: | ||
| long_term_search["memory_type"] = memory_type | ||
| normalized_search_mode = ( | ||
| search_mode.value | ||
| if isinstance(search_mode, SearchModeEnum) | ||
| else str(search_mode) | ||
| ) | ||
| long_term_search["search_mode"] = normalized_search_mode | ||
| if hybrid_alpha is not None: | ||
| long_term_search["hybrid_alpha"] = hybrid_alpha | ||
| if text_scorer is not None: | ||
| long_term_search["text_scorer"] = text_scorer | ||
|
|
||
| return await self.memory_prompt( | ||
| query=query, | ||
|
|
||
Uh oh!
There was an error while loading. Please reload this page.