Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 2 additions & 1 deletion jupyter_rtc_core/rooms/yroom.py
Original file line number Diff line number Diff line change
Expand Up @@ -167,6 +167,7 @@ async def _process_message_queue(self) -> None:
# Determine message type & subtype from header
message_type = message[0]
sync_message_subtype = "*"
# message subtypes only exist on sync messages, hence this condition
if message_type == YMessageType.SYNC and len(message) >= 2:
sync_message_subtype = message[1]

Expand All @@ -184,7 +185,7 @@ async def _process_message_queue(self) -> None:
self.log.warning(
"Ignoring an unrecognized message with header "
f"'{message_type},{sync_message_subtype}' from client "
"'{client_id}'. Messages must have one of the following "
f"'{client_id}'. Messages must have one of the following "
"headers: '0,0' (SyncStep1), '0,1' (SyncStep2), "
"'0,2' (SyncUpdate), or '1,*' (AwarenessUpdate)."
)
Expand Down
4 changes: 2 additions & 2 deletions jupyter_rtc_core/rooms/yroom_file_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -208,8 +208,8 @@ async def _save_jupyter_ydoc(self):
path
))

# Mark 'dirty' as `False`. This hides the "unsaved changes" icon
# in the JupyterLab tab rendering this YDoc in the frontend.
# Setting `dirty` to `False` hides the "unsaved changes" icon in the
# JupyterLab tab for this YDoc in the frontend.
self.jupyter_ydoc.dirty = False
except Exception as e:
self.log.error("An exception occurred when saving JupyterYDoc.")
Expand Down
51 changes: 45 additions & 6 deletions jupyter_rtc_core/rooms/yroom_manager.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,9 +2,9 @@

from .yroom import YRoom
from typing import TYPE_CHECKING
import asyncio

if TYPE_CHECKING:
import asyncio
import logging
from jupyter_server_fileid.manager import BaseFileIdManager
from jupyter_server.services.contents.manager import AsyncContentsManager, ContentsManager
Expand Down Expand Up @@ -69,22 +69,61 @@ async def delete_room(self, room_id: str) -> None:
"""
Gracefully deletes a YRoom given a room ID. This stops the YRoom first,
which finishes applying all updates & saves the content automatically.

Returns `True` if the room was deleted successfully. Returns `False` if
an exception was raised.
"""
yroom = self._rooms_by_id.pop(room_id, None)
if not yroom:
return

self.log.info(f"Stopping YRoom '{room_id}'.")
await yroom.stop()
self.log.info(f"Stopped YRoom '{room_id}'.")
try:
await yroom.stop()
self.log.info(f"Stopped YRoom '{room_id}'.")
return True
except Exception as e:
self.log.error(f"Exception raised when stopping YRoom '{room_id}:")
self.log.exception(e)
return False


async def stop(self) -> None:
    """
    Gracefully deletes every managed `YRoom` in parallel. See
    `delete_room()` for how each individual room is stopped.
    """
    # Snapshot the IDs: `delete_room()` mutates `self._rooms_by_id`.
    room_ids = list(self._rooms_by_id.keys())
    room_count = len(room_ids)

    # Nothing to stop; avoid logging noise on an empty manager.
    if room_count == 0:
        return

    self.log.info(
        f"Stopping `YRoomManager` and deleting all {room_count} YRooms."
    )

    # Delete rooms in parallel.
    # Note that we do not use `asyncio.TaskGroup` here because that cancels
    # all other tasks when any task raises an exception.
    deletion_tasks = [
        asyncio.create_task(self.delete_room(room_id))
        for room_id in room_ids
    ]

    # `delete_room()` reports failure via its return value rather than
    # raising, so `gather()` will not short-circuit here.
    results: list[bool] = await asyncio.gather(*deletion_tasks)
    failures = results.count(False)

    if failures:
        self.log.error(
            "An exception occurred when stopping `YRoomManager`. "
            "Exceptions were raised when stopping "
            f"({failures}/{room_count}) `YRoom` instances, "
            "which are printed above."
        )
    else:
        self.log.info(
            "Successfully stopped `YRoomManager` and deleted all "
            f"{room_count} YRooms."
        )

Loading