Commit c5631cc
Feat(Log): Add comprehensive diagnostic logs for /product/add flow and apply ruff formatting
Introduces detailed INFO-level diagnostic logs across the entire call chain for the /product/add API endpoint. These logs include relevant context, such as full request bodies, message items before scheduler submission, and messages before RabbitMQ publication, to aid in debugging deployment discrepancies and tracing data flow, especially concerning task_id propagation. Also applies automatic code formatting using ruff format to all modified files.

Logs added/enhanced in:

- src/memos/api/routers/product_router.py
- src/memos/api/handlers/add_handler.py
- src/memos/multi_mem_cube/single_cube.py
- src/memos/mem_os/core.py
- src/memos/mem_scheduler/general_scheduler.py
- src/memos/mem_scheduler/base_scheduler.py
- src/memos/mem_scheduler/webservice_modules/rabbitmq_service.py
1 parent: 82a95c4

6 files changed (+31, -10 lines)
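The diagnostic value of this commit hinges on following a single task_id through the files below. A minimal offline sketch of that correlation, not part of the commit: it assumes the service writes plain-text logs to a file ("memos.log" is a hypothetical path) and that task_id appears on the same line as the [DIAGNOSTIC] marker, which holds for the _process_memories_with_reader entries but not for multi-line JSON dumps.

# Sketch only, not part of the commit: group [DIAGNOSTIC] lines by task_id.
# Assumes single-line log records; "memos.log" is a hypothetical path.
import re
from collections import defaultdict

TASK_ID_RE = re.compile(r"task_id['\"]?\s*[:=]\s*['\"]?([\w-]+)")

def group_diagnostics_by_task(log_path: str) -> dict[str, list[str]]:
    """Collect [DIAGNOSTIC] log lines keyed by the task_id they mention."""
    groups: dict[str, list[str]] = defaultdict(list)
    with open(log_path, encoding="utf-8") as f:
        for line in f:
            if "[DIAGNOSTIC]" not in line:
                continue
            match = TASK_ID_RE.search(line)
            key = match.group(1) if match else "<no task_id on line>"
            groups[key].append(line.rstrip())
    return dict(groups)

for task, lines in group_diagnostics_by_task("memos.log").items():
    print(f"{task}: {len(lines)} diagnostic line(s)")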

src/memos/api/handlers/add_handler.py

Lines changed: 3 additions & 1 deletion
@@ -45,7 +45,9 @@ def handle_add_memories(self, add_req: APIADDRequest) -> MemoryResponse:
         Returns:
             MemoryResponse with added memory information
         """
-        self.logger.info(f"[DIAGNOSTIC] server_router -> add_handler.handle_add_memories called. Full request: {add_req.model_dump_json(indent=2)}")
+        self.logger.info(
+            f"[DIAGNOSTIC] server_router -> add_handler.handle_add_memories called. Full request: {add_req.model_dump_json(indent=2)}"
+        )

         if add_req.info:
             exclude_fields = list_all_fields()
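All of the new log statements serialize Pydantic models with model_dump_json(indent=2), a Pydantic v2 method. A stand-in model (APIADDRequest's real fields are defined elsewhere in memos and are not visible in this diff) illustrates the output shape these logs emit:

# DemoAddRequest is a stand-in; only the serialization call matches the diff.
from pydantic import BaseModel

class DemoAddRequest(BaseModel):
    user_id: str
    task_id: str | None = None

print(DemoAddRequest(user_id="u1", task_id="t-123").model_dump_json(indent=2))
# {
#   "user_id": "u1",
#   "task_id": "t-123"
# }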

src/memos/mem_os/core.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -788,7 +788,9 @@ def process_textual_memory():
788788
timestamp=datetime.utcnow(),
789789
task_id=task_id,
790790
)
791-
logger.info(f"[DIAGNOSTIC] core.add: Submitting message to scheduler: {message_item.model_dump_json(indent=2)}")
791+
logger.info(
792+
f"[DIAGNOSTIC] core.add: Submitting message to scheduler: {message_item.model_dump_json(indent=2)}"
793+
)
792794
self.mem_scheduler.memos_message_queue.submit_messages(
793795
messages=[message_item]
794796
)

src/memos/mem_scheduler/base_scheduler.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -596,7 +596,9 @@ def _submit_web_logs(
596596
"""
597597
messages_list = [messages] if isinstance(messages, ScheduleLogForWebItem) else messages
598598
for message in messages_list:
599-
logger.info(f"[DIAGNOSTIC] base_scheduler._submit_web_logs called. Message to publish: {message.model_dump_json(indent=2)}")
599+
logger.info(
600+
f"[DIAGNOSTIC] base_scheduler._submit_web_logs called. Message to publish: {message.model_dump_json(indent=2)}"
601+
)
600602
if self.rabbitmq_config is None:
601603
return
602604

src/memos/mem_scheduler/general_scheduler.py

Lines changed: 15 additions & 5 deletions
@@ -380,7 +380,9 @@ def _add_message_consumer(self, messages: list[ScheduleMessageItem]) -> None:
                     memcube_name=self._map_memcube_name(msg.mem_cube_id),
                 )
                 # 3. Assign log_content after creating the event
-                event.log_content = f"Knowledge Base Memory Update: {len(kb_log_content)} changes."
+                event.log_content = (
+                    f"Knowledge Base Memory Update: {len(kb_log_content)} changes."
+                )
                 event.task_id = msg.task_id
                 self._submit_web_logs([event])
             else:
@@ -475,7 +477,9 @@ def _add_message_consumer(self, messages: list[ScheduleMessageItem]) -> None:
             logger.error(f"Error: {e}", exc_info=True)

     def _mem_read_message_consumer(self, messages: list[ScheduleMessageItem]) -> None:
-        logger.info(f"[DIAGNOSTIC] general_scheduler._mem_read_message_consumer called. Received messages: {[msg.model_dump_json(indent=2) for msg in messages]}")
+        logger.info(
+            f"[DIAGNOSTIC] general_scheduler._mem_read_message_consumer called. Received messages: {[msg.model_dump_json(indent=2) for msg in messages]}"
+        )
         logger.info(f"Messages {messages} assigned to {MEM_READ_LABEL} handler.")

         def process_message(message: ScheduleMessageItem):
@@ -540,7 +544,9 @@ def _process_memories_with_reader(
         task_id: str | None = None,
         info: dict | None = None,
     ) -> None:
-        logger.info(f"[DIAGNOSTIC] general_scheduler._process_memories_with_reader called. mem_ids: {mem_ids}, user_id: {user_id}, mem_cube_id: {mem_cube_id}, task_id: {task_id}")
+        logger.info(
+            f"[DIAGNOSTIC] general_scheduler._process_memories_with_reader called. mem_ids: {mem_ids}, user_id: {user_id}, mem_cube_id: {mem_cube_id}, task_id: {task_id}"
+        )
         """
         Process memories using mem_reader for enhanced memory processing.

@@ -638,7 +644,9 @@ def _process_memories_with_reader(
                 }
             )
             if kb_log_content:
-                logger.info(f"[DIAGNOSTIC] general_scheduler._process_memories_with_reader: Creating event log for KB update. Label: knowledgeBaseUpdate, user_id: {user_id}, mem_cube_id: {mem_cube_id}, task_id: {task_id}. KB content: {json.dumps(kb_log_content, indent=2)}")
+                logger.info(
+                    f"[DIAGNOSTIC] general_scheduler._process_memories_with_reader: Creating event log for KB update. Label: knowledgeBaseUpdate, user_id: {user_id}, mem_cube_id: {mem_cube_id}, task_id: {task_id}. KB content: {json.dumps(kb_log_content, indent=2)}"
+                )
                 event = self.create_event_log(
                     label="knowledgeBaseUpdate",
                     from_memory_type=USER_INPUT_TYPE,
@@ -651,7 +659,9 @@ def _process_memories_with_reader(
                     memory_len=len(kb_log_content),
                     memcube_name=self._map_memcube_name(mem_cube_id),
                 )
-                event.log_content = f"Knowledge Base Memory Update: {len(kb_log_content)} changes."
+                event.log_content = (
+                    f"Knowledge Base Memory Update: {len(kb_log_content)} changes."
+                )
                 event.task_id = task_id
                 self._submit_web_logs([event])
             else:

src/memos/mem_scheduler/webservice_modules/rabbitmq_service.py

Lines changed: 4 additions & 1 deletion
@@ -5,6 +5,7 @@
 import time

 from pathlib import Path
+
 from memos.configs.mem_scheduler import AuthConfig, RabbitMQConfig
 from memos.context.context import ContextThread
 from memos.dependency import require_python_package
@@ -288,7 +289,9 @@ def rabbitmq_publish_message(self, message: dict):
             logger.error("Cannot publish - no active connection")
             return False

-        logger.info(f"[DIAGNOSTIC] rabbitmq_service.rabbitmq_publish_message: Attempting to publish message. Exchange: {self.rabbitmq_exchange_name}, Routing Key: {self.rabbit_queue_name}, Message Content: {json.dumps(message, indent=2)}")
+        logger.info(
+            f"[DIAGNOSTIC] rabbitmq_service.rabbitmq_publish_message: Attempting to publish message. Exchange: {self.rabbitmq_exchange_name}, Routing Key: {self.rabbit_queue_name}, Message Content: {json.dumps(message, indent=2)}"
+        )
         try:
             self.rabbitmq_channel.basic_publish(
                 exchange=self.rabbitmq_exchange_name,
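The basic_publish call in the surrounding context lines matches pika's BlockingChannel API. A self-contained sketch of an equivalent publish, assuming pika is the client in use (the diff does not confirm which AMQP library backs rabbitmq_channel) and with placeholder names standing in for self.rabbitmq_exchange_name and self.rabbit_queue_name:

# Sketch under stated assumptions: pika as the AMQP client, placeholder names.
import json

import pika

connection = pika.BlockingConnection(pika.ConnectionParameters(host="localhost"))
channel = connection.channel()
payload = {"label": "knowledgeBaseUpdate", "task_id": "t-123"}  # illustrative message
channel.basic_publish(
    exchange="memos_exchange",  # stands in for self.rabbitmq_exchange_name
    routing_key="memos_queue",  # stands in for self.rabbit_queue_name
    body=json.dumps(payload),
)
connection.close()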

src/memos/multi_mem_cube/single_cube.py

Lines changed: 3 additions & 1 deletion
@@ -56,7 +56,9 @@ def add_memories(self, add_req: APIADDRequest) -> list[dict[str, Any]]:
         but scoped to a single cube_id.
         """
         sync_mode = add_req.async_mode or self._get_sync_mode()
-        self.logger.info(f"[DIAGNOSTIC] single_cube.add_memories called for cube_id: {self.cube_id}. sync_mode: {sync_mode}. Request: {add_req.model_dump_json(indent=2)}")
+        self.logger.info(
+            f"[DIAGNOSTIC] single_cube.add_memories called for cube_id: {self.cube_id}. sync_mode: {sync_mode}. Request: {add_req.model_dump_json(indent=2)}"
+        )
         user_context = UserContext(
             user_id=add_req.user_id,
             mem_cube_id=self.cube_id,
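One behavior worth noting in the context line sync_mode = add_req.async_mode or self._get_sync_mode(): any falsy async_mode (None, or an empty string) silently falls back to the cube-level default, which matters when tracing why a request ran synchronously. A tiny illustration, with a hypothetical helper mirroring that expression:

# resolve_sync_mode is hypothetical; it mirrors the `or` fallback above.
def resolve_sync_mode(request_mode: str | None, cube_default: str) -> str:
    # Falsy request values (None, "") defer to the cube's configured default.
    return request_mode or cube_default

assert resolve_sync_mode(None, "sync") == "sync"
assert resolve_sync_mode("async", "sync") == "async"
assert resolve_sync_mode("", "sync") == "sync"  # empty string also falls back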
