Skip to content

Commit 603ae12

Browse files
authored
fix: proactive examples (#273)
1 parent 710f14d commit 603ae12

5 files changed

Lines changed: 62 additions & 54 deletions

File tree

examples/proactive/memory/config.py

Lines changed: 20 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,7 @@
99
"prompt": "# Task Objective\nYou will be given a conversation between a user and an coding agent. Your goal is to extract detailed records for what are planed to do, and what have been done.",
1010
},
1111
"workflow": {
12-
"ordinal": 10,
12+
"ordinal": 20,
1313
"prompt": "# Workflow\nRead through the conversation and extract records. You should expecially focus on:\n- What the user ask the agent to do\n- What plan does the agent suggest\n- What the agent has done",
1414
},
1515
"rules": {
@@ -37,14 +37,30 @@
3737
"prompt": "# Workflow\nRead through the existing markdown file and the new records. Then update the markdown file to reflect:\n- What existing tasks are completed\n- What new tasks are added\n- What tasks are still in progress",
3838
},
3939
"rules": {
40-
"ordinal": -1,
41-
"prompt": None,
40+
"ordinal": 30,
41+
"prompt": "# Rules\nFor each action-like record, explictly mark it as [Done] or [Todo].",
4242
},
4343
"examples": {
4444
"ordinal": 50,
45-
"prompt": "# Example\n## Output\n```markdown\n# Task\n## Task Objective\nThe user ask the agent to generate a code example for fastapi\n## Workflow\nThe agent suggest to use the code example from the document\nThe agent ask the user to specify the response type\n```",
45+
"prompt": "# Example\n## Output\n```markdown\n# Task\n## Task Objective\nThe user ask the agent to generate a code example for fastapi\n## Breakdown\n- [Done] The agent suggest to use the code example from the document\n- [Todo] The agent ask the user to specify the response type\n```",
4646
},
4747
},
4848
}
4949
],
5050
}
51+
52+
# Retrieval settings for the proactive examples: plain RAG over memory
# items only — intent routing, sufficiency checking, and category/resource
# retrieval are all switched off.  Imported alongside memorize_config by
# the example entry points.
retrieve_config = {
    "method": "rag",
    "route_intention": False,
    "sufficiency_check": False,
    # Only item-level retrieval is enabled; return at most 10 hits.
    "category": {"enabled": False},
    "item": {"enabled": True, "top_k": 10},
    "resource": {"enabled": False},
}
Lines changed: 31 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,31 @@
1+
import os
2+
3+
from memu.app import MemoryService
4+
5+
from ..config import memorize_config, retrieve_config
6+
7+
USER_ID = "claude_user"
8+
SHARED_MEMORY_SERVICE = None
9+
10+
11+
def get_memory_service() -> MemoryService:
    """Lazily build and cache the process-wide MemoryService.

    The first call constructs the service (reading the OpenAI key from the
    environment); every later call returns the same cached instance so all
    example modules share one service.

    Returns:
        The shared MemoryService instance.

    Raises:
        ValueError: if OPENAI_API_KEY is not set in the environment.
    """
    global SHARED_MEMORY_SERVICE

    if SHARED_MEMORY_SERVICE is None:
        api_key = os.getenv("OPENAI_API_KEY")
        if not api_key:
            msg = "Please set OPENAI_API_KEY environment variable"
            raise ValueError(msg)

        # Single "default" LLM profile; memorize/retrieve behavior comes
        # from the shared example config module.
        SHARED_MEMORY_SERVICE = MemoryService(
            llm_profiles={"default": {"api_key": api_key, "chat_model": "gpt-4o-mini"}},
            memorize_config=memorize_config,
            retrieve_config=retrieve_config,
        )

    return SHARED_MEMORY_SERVICE
Lines changed: 3 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -1,14 +1,11 @@
11
import json
2-
import os
32
from collections.abc import Awaitable
43
from pathlib import Path
54
from typing import Any
65

76
import pendulum
87

9-
from memu.app import MemoryService
10-
11-
from ..config import memorize_config
8+
from .common import get_memory_service
129

1310
USER_ID = "claude_user"
1411

@@ -28,26 +25,14 @@ def dump_conversation_resource(
2825
}
2926
time_string = pendulum.now().format("YYYYMMDD_HHmmss")
3027
resource_url = Path(__file__).parent / "data" / f"conv_{time_string}.json"
28+
resource_url.parent.mkdir(parents=True, exist_ok=True)
3129
with open(resource_url, "w") as f:
3230
json.dump(resource_data, f, indent=4, ensure_ascii=False)
3331
return resource_url.as_posix()
3432

3533

3634
def memorize(conversation_messages: list[dict[str, Any]]) -> Awaitable[dict[str, Any]]:
    """Persist a conversation through the shared memory service.

    The messages are first dumped to a JSON resource file on disk, then the
    resource is handed to MemoryService.memorize; the awaitable it produces
    is returned for the caller to await.

    Args:
        conversation_messages: Chat messages (role/content dicts) to store.

    Returns:
        The awaitable returned by MemoryService.memorize.
    """
    service = get_memory_service()
    resource_url = dump_conversation_resource(conversation_messages)

    return service.memorize(resource_url=resource_url, modality="conversation", user={"user_id": USER_ID})

examples/proactive/memory/local/tools.py

Lines changed: 3 additions & 31 deletions
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,8 @@
1-
import os
21
from typing import Any
32

43
from claude_agent_sdk import create_sdk_mcp_server, tool
54

6-
from memu.app import MemoryService
5+
from .common import get_memory_service
76

87
USER_ID = "claude_user"
98

@@ -13,42 +12,15 @@ async def get_memory(args: dict[str, Any]) -> dict[str, Any]:
1312
"""Retrieve memory from the memory API based on the provided query."""
1413
query = {"role": "user", "content": args["query"]}
1514

16-
api_key = os.getenv("OPENAI_API_KEY")
17-
if not api_key:
18-
msg = "Please set OPENAI_API_KEY environment variable"
19-
raise ValueError(msg)
20-
21-
memory_service = MemoryService(
22-
llm_profiles={
23-
"default": {
24-
"api_key": api_key,
25-
"chat_model": "gpt-4o-mini",
26-
},
27-
},
28-
retrieve_config={
29-
"method": "rag",
30-
"route_intention": False,
31-
"sufficiency_check": False,
32-
"category": {
33-
"enabled": False,
34-
},
35-
"item": {
36-
"enabled": True,
37-
"top_k": 10,
38-
},
39-
"resource": {
40-
"enabled": False,
41-
},
42-
},
43-
)
15+
memory_service = get_memory_service()
4416

4517
result = await memory_service.retrieve(query, where={"user_id": USER_ID})
4618

4719
return {"content": [{"type": "text", "text": str(result)}]}
4820

4921

5022
async def _get_todos() -> str:
51-
memory_service = MemoryService()
23+
memory_service = get_memory_service()
5224

5325
result = await memory_service.list_memory_categories(where={"user_id": USER_ID})
5426

examples/proactive/proactive.py

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -47,7 +47,11 @@ async def get_next_input(iteration: int) -> tuple[str | None, bool]:
4747
return await get_user_input()
4848

4949
todos = await _get_todos()
50-
if todos:
50+
51+
print(f">>> Todos:\n{todos}\n")
52+
print("-" * 40)
53+
54+
if todos and "[todo]" in todos.lower():
5155
return f"Please continue with the following todos:\n{todos}", False
5256

5357
return await get_user_input()

0 commit comments

Comments
 (0)