Skip to content

Commit 82a95c4

Browse files
author
glin1993@outlook.com
committed
Feat(Log): Add comprehensive diagnostic logs for /product/add flow

Introduces detailed INFO-level diagnostic logs across the entire call chain for the /product/add API endpoint. These logs include relevant context, such as full request bodies, message items before scheduler submission, and messages before RabbitMQ publication, to aid in debugging deployment discrepancies and tracing data flow, especially concerning task_id propagation.

Logs added/enhanced in:
- src/memos/api/routers/product_router.py
- src/memos/api/handlers/add_handler.py
- src/memos/multi_mem_cube/single_cube.py
- src/memos/mem_os/core.py
- src/memos/mem_scheduler/general_scheduler.py
- src/memos/mem_scheduler/base_scheduler.py
- src/memos/mem_scheduler/webservice_modules/rabbitmq_service.py
1 parent bceaf68 commit 82a95c4

File tree

6 files changed

+11
-2
lines changed

6 files changed

+11
-2
lines changed

src/memos/api/handlers/add_handler.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -45,7 +45,7 @@ def handle_add_memories(self, add_req: APIADDRequest) -> MemoryResponse:
4545
Returns:
4646
MemoryResponse with added memory information
4747
"""
48-
self.logger.info(f"[AddHandler] Add Req is: {add_req}")
48+
self.logger.info(f"[DIAGNOSTIC] server_router -> add_handler.handle_add_memories called. Full request: {add_req.model_dump_json(indent=2)}")
4949

5050
if add_req.info:
5151
exclude_fields = list_all_fields()

src/memos/mem_os/core.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -788,6 +788,7 @@ def process_textual_memory():
788788
timestamp=datetime.utcnow(),
789789
task_id=task_id,
790790
)
791+
logger.info(f"[DIAGNOSTIC] core.add: Submitting message to scheduler: {message_item.model_dump_json(indent=2)}")
791792
self.mem_scheduler.memos_message_queue.submit_messages(
792793
messages=[message_item]
793794
)

src/memos/mem_scheduler/base_scheduler.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -594,6 +594,9 @@ def _submit_web_logs(
594594
Args:
595595
messages: Single log message or list of log messages
596596
"""
597+
messages_list = [messages] if isinstance(messages, ScheduleLogForWebItem) else messages
598+
for message in messages_list:
599+
logger.info(f"[DIAGNOSTIC] base_scheduler._submit_web_logs called. Message to publish: {message.model_dump_json(indent=2)}")
597600
if self.rabbitmq_config is None:
598601
return
599602

src/memos/mem_scheduler/general_scheduler.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -475,6 +475,7 @@ def _add_message_consumer(self, messages: list[ScheduleMessageItem]) -> None:
475475
logger.error(f"Error: {e}", exc_info=True)
476476

477477
def _mem_read_message_consumer(self, messages: list[ScheduleMessageItem]) -> None:
478+
logger.info(f"[DIAGNOSTIC] general_scheduler._mem_read_message_consumer called. Received messages: {[msg.model_dump_json(indent=2) for msg in messages]}")
478479
logger.info(f"Messages {messages} assigned to {MEM_READ_LABEL} handler.")
479480

480481
def process_message(message: ScheduleMessageItem):
@@ -539,6 +540,7 @@ def _process_memories_with_reader(
539540
task_id: str | None = None,
540541
info: dict | None = None,
541542
) -> None:
543+
logger.info(f"[DIAGNOSTIC] general_scheduler._process_memories_with_reader called. mem_ids: {mem_ids}, user_id: {user_id}, mem_cube_id: {mem_cube_id}, task_id: {task_id}")
542544
"""
543545
Process memories using mem_reader for enhanced memory processing.
544546
@@ -636,7 +638,7 @@ def _process_memories_with_reader(
636638
}
637639
)
638640
if kb_log_content:
639-
logger.info("DIAGNOSTIC: Preparing to create event log for KB update in _process_memories_with_reader.")
641+
logger.info(f"[DIAGNOSTIC] general_scheduler._process_memories_with_reader: Creating event log for KB update. Label: knowledgeBaseUpdate, user_id: {user_id}, mem_cube_id: {mem_cube_id}, task_id: {task_id}. KB content: {json.dumps(kb_log_content, indent=2)}")
640642
event = self.create_event_log(
641643
label="knowledgeBaseUpdate",
642644
from_memory_type=USER_INPUT_TYPE,

src/memos/mem_scheduler/webservice_modules/rabbitmq_service.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -288,6 +288,7 @@ def rabbitmq_publish_message(self, message: dict):
288288
logger.error("Cannot publish - no active connection")
289289
return False
290290

291+
logger.info(f"[DIAGNOSTIC] rabbitmq_service.rabbitmq_publish_message: Attempting to publish message. Exchange: {self.rabbitmq_exchange_name}, Routing Key: {self.rabbit_queue_name}, Message Content: {json.dumps(message, indent=2)}")
291292
try:
292293
self.rabbitmq_channel.basic_publish(
293294
exchange=self.rabbitmq_exchange_name,

src/memos/multi_mem_cube/single_cube.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -55,6 +55,8 @@ def add_memories(self, add_req: APIADDRequest) -> list[dict[str, Any]]:
5555
This is basically your current handle_add_memories logic,
5656
but scoped to a single cube_id.
5757
"""
58+
sync_mode = add_req.async_mode or self._get_sync_mode()
59+
self.logger.info(f"[DIAGNOSTIC] single_cube.add_memories called for cube_id: {self.cube_id}. sync_mode: {sync_mode}. Request: {add_req.model_dump_json(indent=2)}")
5860
user_context = UserContext(
5961
user_id=add_req.user_id,
6062
mem_cube_id=self.cube_id,

0 commit comments

Comments (0)