backend/routers/pusher.py (3 changes: 0 additions & 3 deletions)

@@ -447,9 +447,6 @@ async def receive_tasks():
         res = json.loads(bytes(data[4:]).decode("utf-8"))
         segments = res.get('segments')
         memory_id = res.get('memory_id')
-        # Update conversation_id from transcript if provided
-        if memory_id:
-            current_conversation_id = memory_id
         if len(transcript_queue) >= TRANSCRIPT_QUEUE_WARN_SIZE:
             logger.warning(f"Warning: transcript_queue size {len(transcript_queue)} {uid}")
         # Use memory_id if available, otherwise use current_conversation_id for conversations
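
The deletion stops the handler from reassigning current_conversation_id whenever a transcript carries a memory_id; the surviving comment shows the intended pattern is to prefer memory_id per message and fall back to the current id otherwise. A minimal sketch of that derive-instead-of-mutate pattern (the helper name pick_conversation_id is illustrative and not taken from the codebase; it assumes the same 4-byte header framing as the handler above):

    import json

    def pick_conversation_id(data: bytes, current_conversation_id: str) -> str:
        """Prefer the message's memory_id, else keep the current id.

        The caller's current_conversation_id is never reassigned; the
        chosen id is returned as a new value for this message only.
        """
        # Same framing as the handler: skip the 4-byte header, parse JSON.
        res = json.loads(bytes(data[4:]).decode("utf-8"))
        memory_id = res.get('memory_id')
        # `or` also falls back on an empty string, not just on None/missing.
        return memory_id or current_conversation_id

For example, pick_conversation_id(b'\x00\x00\x00\x00' + b'{"memory_id": "m1"}', 'c1') returns 'm1', while a payload without a memory_id returns 'c1' unchanged.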
backend/tests/unit/test_listen_pipeline.py (10 changes: 5 additions & 5 deletions)

@@ -633,15 +633,15 @@ def test_flaw_header_103_conversation_switch_flushes_private_cloud(self):
         assert len(private_cloud_queue[0]['data']) == 500
         assert len(private_cloud_sync_buffer) == 0
 
-    def test_flaw_header_102_memory_id_updates_conversation_id(self):
-        """FLAW TEST: memory_id in transcript should update current_conversation_id."""
+    def test_header_102_memory_id_does_not_update_conversation_id(self):
+        """Inline logic test: verifies current_conversation_id is not mutated."""
         current_conversation_id = 'old-conv'
         payload = {'segments': [], 'memory_id': 'new-conv-from-memory'}
         res = payload
         memory_id = res.get('memory_id')
-        if memory_id:
-            current_conversation_id = memory_id
-        assert current_conversation_id == 'new-conv-from-memory'
+        conversation_or_memory_id = memory_id or current_conversation_id
+        assert conversation_or_memory_id == 'new-conv-from-memory'
+        assert current_conversation_id == 'old-conv'


     # ===================================================================
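
The reworked test pins down two things: the derived id prefers memory_id, and the local current_conversation_id binding is left untouched. A standalone, pytest-runnable sketch of the same checks, plus the empty-string edge case that the `or` fallback implies (file and function names here are illustrative, not part of the PR):

    # test_conversation_id_selection.py (illustrative, not part of the PR)
    def select_conversation_id(payload: dict, current_conversation_id: str) -> str:
        # Mirrors the inline logic under test: derive a value, never mutate.
        return payload.get('memory_id') or current_conversation_id

    def test_memory_id_preferred_without_mutation():
        current_conversation_id = 'old-conv'
        payload = {'segments': [], 'memory_id': 'new-conv-from-memory'}
        assert select_conversation_id(payload, current_conversation_id) == 'new-conv-from-memory'
        assert current_conversation_id == 'old-conv'

    def test_falls_back_when_memory_id_missing_or_empty():
        # `or` treats '' like a missing key; both fall back to the current id.
        assert select_conversation_id({'memory_id': ''}, 'old-conv') == 'old-conv'
        assert select_conversation_id({}, 'old-conv') == 'old-conv'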