diff --git a/api/v1.yaml b/api/v1.yaml
index 46f6722b..b4c1e42b 100644
--- a/api/v1.yaml
+++ b/api/v1.yaml
@@ -805,6 +805,171 @@ paths:
schema:
$ref: "#/components/schemas/MetricMatrix"
+ /collab/notebooks:
+ get:
+ summary: List collaborative notebooks
+ description: Retrieve a list of collaborative notebooks
+ operationId: listCollabNotebooks
+ security:
+ - BearerAuth: []
+ responses:
+ "200":
+ description: Successfully retrieved collaborative notebook list
+ content:
+ application/json:
+ schema:
+ type: array
+ items:
+ $ref: "#/components/schemas/CollabNotebook"
+ post:
+ summary: Create a collaborative notebook
+ description: Create a new collaborative notebook
+ operationId: createCollabNotebook
+ security:
+ - BearerAuth: []
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/CollabNotebookCreateRequest"
+ responses:
+ "201":
+ description: Successfully created collaborative notebook
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/CollabNotebook"
+
+ /collab/notebooks/{notebookID}:
+ get:
+ summary: Get collaborative notebook details
+ description: Retrieve details of a specific collaborative notebook
+ operationId: getCollabNotebook
+ security:
+ - BearerAuth: []
+ parameters:
+ - name: notebookID
+ in: path
+ required: true
+ schema:
+ type: string
+ responses:
+ "200":
+ description: Successfully retrieved collaborative notebook
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/CollabNotebook"
+
+ put:
+ summary: Update a collaborative notebook
+ description: Update details of a specific collaborative notebook
+ operationId: updateCollabNotebook
+ security:
+ - BearerAuth: []
+ parameters:
+ - name: notebookID
+ in: path
+ required: true
+ schema:
+ type: string
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/CollabNotebook"
+ responses:
+ "200":
+ description: Successfully updated collaborative notebook
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/CollabNotebook"
+
+ delete:
+ summary: Delete a collaborative notebook
+ description: Permanently delete a collaborative notebook
+ operationId: deleteCollabNotebook
+ security:
+ - BearerAuth: []
+ parameters:
+ - name: notebookID
+ in: path
+ required: true
+ schema:
+ type: string
+ responses:
+ "204":
+ description: Notebook deleted successfully
+ "403":
+ description: Operation not permitted
+ "404":
+ description: Notebook not found
+ /collab/notebooks/{notebookID}/transfer:
+ post:
+ summary: Transfer collaborative notebook scope
+ description: Move a collaborative notebook between personal and organization scopes.
+ operationId: transferCollabNotebook
+ security:
+ - BearerAuth: []
+ parameters:
+ - name: notebookID
+ in: path
+ required: true
+ schema:
+ type: string
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/CollabNotebookTransferRequest"
+ responses:
+ "200":
+ description: Successfully transferred collaborative notebook
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/CollabNotebook"
+ "400":
+ description: Invalid transfer request
+ "403":
+ description: Operation not permitted
+ "404":
+ description: Notebook not found
+
+ /collab/notebooks/{notebookID}/snapshot:
+ put:
+ summary: Upload a collaborative notebook snapshot
+ description: Persist the latest Yjs snapshot for a collaborative notebook.
+ operationId: uploadCollabNotebookSnapshot
+ security:
+ - BearerAuth: []
+ parameters:
+ - name: notebookID
+ in: path
+ required: true
+ schema:
+ type: string
+ description: Unique identifier of the collaborative notebook.
+ requestBody:
+ required: true
+ content:
+ application/octet-stream:
+ schema:
+ type: string
+ format: binary
+ examples:
+ snapshot:
+ summary: Encoded Yjs snapshot payload
+ value: ""
+ responses:
+ "204":
+ description: Snapshot stored successfully.
+
+
components:
securitySchemes:
BearerAuth:
@@ -1648,6 +1813,65 @@ components:
type: integer
format: int32
+ CollabNotebook:
+ type: object
+ required: [notebookID, title, createdAt, updatedAt]
+ properties:
+ notebookID:
+ type: string
+ description: Unique identifier of the collaborative notebook
+ title:
+ type: string
+ description: Title of the collaborative notebook
+ scope:
+ type: string
+ enum: [personal, organization]
+ description: Ownership scope of the notebook
+ organizationID:
+ type: integer
+ format: int32
+ description: Organization that currently owns the notebook
+ ownerUserID:
+ type: integer
+ format: int32
+ description: User that owns the notebook when scope is personal
+ createdByUserID:
+ type: integer
+ format: int32
+ description: User that originally created the notebook
+ createdAt:
+ type: string
+ format: date-time
+ description: Creation timestamp
+ updatedAt:
+ type: string
+ format: date-time
+ description: Last update timestamp
+
+ CollabNotebookCreateRequest:
+ type: object
+ required: [title]
+ properties:
+ title:
+ type: string
+ description: Title of the collaborative notebook
+ scope:
+ type: string
+ enum: [personal, organization]
+ description: Ownership scope of the notebook; defaults to personal when omitted
+
+ CollabNotebookTransferRequest:
+ type: object
+ required: [targetScope]
+ properties:
+ targetScope:
+ type: string
+ enum: [personal, organization]
+ description: Desired ownership scope
+ ownerUserID:
+ type: integer
+ format: int32
+ description: Target owner when moving to personal scope; defaults to the caller
PromdumpOpt:
type: object
required: [endpoint, start, end, step, query, gzip, parts, memoryRatio]
diff --git a/dev/anclax.yaml b/dev/anclax.yaml
index 11a6bdea..c8bcb66d 100644
--- a/dev/anclax.yaml
+++ b/dev/anclax.yaml
@@ -3,7 +3,8 @@ externals:
wire: v0.6.0
sqlc: v1.29.0
mockgen: v0.5.0
- anclax: v0.6.6
+ # Align with go.mod: dev-v0.7 resolves to v0.6.16 pseudo-version
+ anclax: dev-v0.7
oapi-codegen:
path: api/v1.yaml
@@ -46,3 +47,5 @@ mockgen:
- source: pkg/conn/http/http.go
destination: pkg/conn/http/mock/http_mock_gen.go
package: mock
+
+anclaxdef: dev/anclax
\ No newline at end of file
diff --git a/dev/anclax/sql/migrations/0001_init.down.sql b/dev/anclax/sql/migrations/0001_init.down.sql
new file mode 100644
index 00000000..77daf861
--- /dev/null
+++ b/dev/anclax/sql/migrations/0001_init.down.sql
@@ -0,0 +1,7 @@
+BEGIN;
+
+-- Revert 0001 entirely: the up migration creates schema anclax and every
+-- object inside it, so CASCADE-dropping the schema undoes all of it at once.
+DROP SCHEMA IF EXISTS anclax CASCADE;
+
+COMMIT;
diff --git a/dev/anclax/sql/migrations/0001_init.up.sql b/dev/anclax/sql/migrations/0001_init.up.sql
new file mode 100644
index 00000000..65d26e2a
--- /dev/null
+++ b/dev/anclax/sql/migrations/0001_init.up.sql
@@ -0,0 +1,120 @@
+BEGIN;
+
+CREATE SCHEMA IF NOT EXISTS anclax;
+
+CREATE TABLE IF NOT EXISTS anclax.orgs (
+ id SERIAL PRIMARY KEY,
+ name TEXT NOT NULL,
+ tz TEXT NOT NULL DEFAULT 'Asia/Shanghai',
+ created_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP NOT NULL,
+ updated_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP NOT NULL
+);
+
+CREATE TABLE IF NOT EXISTS anclax.users (
+ id SERIAL PRIMARY KEY,
+ name TEXT NOT NULL,
+ password_hash TEXT NOT NULL,
+ password_salt TEXT NOT NULL,
+ created_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP NOT NULL,
+ updated_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP NOT NULL,
+ deleted_at TIMESTAMPTZ
+);
+
+CREATE TABLE IF NOT EXISTS anclax.user_default_orgs (
+ user_id INTEGER NOT NULL REFERENCES anclax.users(id) ON UPDATE CASCADE ON DELETE CASCADE,
+ org_id INTEGER NOT NULL REFERENCES anclax.orgs(id) ON UPDATE CASCADE ON DELETE CASCADE,
+ created_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP NOT NULL,
+
+ PRIMARY KEY (user_id)
+);
+
+CREATE TABLE IF NOT EXISTS anclax.org_users (
+ org_id INTEGER NOT NULL REFERENCES anclax.orgs(id) ON UPDATE CASCADE ON DELETE CASCADE,
+ user_id INTEGER NOT NULL REFERENCES anclax.users(id) ON UPDATE CASCADE ON DELETE CASCADE,
+ created_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP NOT NULL,
+ updated_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP NOT NULL,
+
+ PRIMARY KEY (org_id, user_id)
+);
+
+CREATE TABLE IF NOT EXISTS anclax.org_owners (
+ org_id INTEGER NOT NULL REFERENCES anclax.orgs(id) ON UPDATE CASCADE ON DELETE CASCADE,
+ user_id INTEGER NOT NULL REFERENCES anclax.users(id) ON UPDATE CASCADE,
+ created_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP NOT NULL,
+
+ PRIMARY KEY (org_id)
+);
+
+CREATE TABLE IF NOT EXISTS anclax.opaque_keys (
+ id BIGSERIAL PRIMARY KEY,
+ key BYTEA NOT NULL,
+ user_id INT NOT NULL REFERENCES anclax.users(id) ON DELETE CASCADE ON UPDATE CASCADE,
+ created_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP NOT NULL,
+ updated_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP NOT NULL
+);
+
+CREATE TABLE IF NOT EXISTS anclax.access_key_pairs (
+ access_key VARCHAR(20) NOT NULL,
+ secret_key VARCHAR(40) NOT NULL,
+ created_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP NOT NULL,
+ updated_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP NOT NULL,
+
+ PRIMARY KEY (access_key)
+);
+
+CREATE TABLE IF NOT EXISTS anclax.access_rules (
+ name VARCHAR(255) NOT NULL,
+ description TEXT NOT NULL,
+ created_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP NOT NULL,
+ updated_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP NOT NULL,
+
+ PRIMARY KEY (name)
+);
+
+CREATE TABLE IF NOT EXISTS anclax.roles (
+ id SERIAL PRIMARY KEY,
+ org_id INTEGER NOT NULL REFERENCES anclax.orgs(id) ON UPDATE CASCADE,
+ name VARCHAR(255) NOT NULL,
+ description TEXT NOT NULL,
+ created_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP NOT NULL,
+ updated_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP NOT NULL
+);
+
+CREATE TABLE IF NOT EXISTS anclax.role_access_rules (
+ role_id INTEGER NOT NULL,
+ access_rule_name VARCHAR(255) NOT NULL REFERENCES anclax.access_rules(name) ON UPDATE CASCADE,
+ created_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP NOT NULL,
+ updated_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP NOT NULL,
+
+ PRIMARY KEY (role_id, access_rule_name)
+);
+
+CREATE TABLE IF NOT EXISTS anclax.users_roles (
+ user_id INTEGER NOT NULL,
+ role_id INTEGER NOT NULL,
+ created_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP NOT NULL,
+ updated_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP NOT NULL,
+
+ PRIMARY KEY (user_id, role_id)
+);
+
+CREATE TABLE IF NOT EXISTS anclax.tasks (
+ id SERIAL PRIMARY KEY,
+ attributes JSONB NOT NULL,
+ spec JSONB NOT NULL,
+ status VARCHAR(255) NOT NULL,
+ unique_tag VARCHAR(255), -- for unique task
+ started_at TIMESTAMPTZ,
+ created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
+
+ UNIQUE (unique_tag)
+);
+
+CREATE TABLE IF NOT EXISTS anclax.events (
+ id SERIAL PRIMARY KEY,
+ spec JSONB NOT NULL,
+ created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP
+);
+
+COMMIT;
diff --git a/dev/anclax/sql/migrations/0002_retry_count.down.sql b/dev/anclax/sql/migrations/0002_retry_count.down.sql
new file mode 100644
index 00000000..44b416b7
--- /dev/null
+++ b/dev/anclax/sql/migrations/0002_retry_count.down.sql
@@ -0,0 +1,6 @@
+BEGIN;
+
+ALTER TABLE anclax.tasks DROP COLUMN attempts;
+
+COMMIT;
+
diff --git a/dev/anclax/sql/migrations/0002_retry_count.up.sql b/dev/anclax/sql/migrations/0002_retry_count.up.sql
new file mode 100644
index 00000000..de08bfb4
--- /dev/null
+++ b/dev/anclax/sql/migrations/0002_retry_count.up.sql
@@ -0,0 +1,7 @@
+BEGIN;
+
+ALTER TABLE anclax.tasks ADD COLUMN attempts INTEGER NOT NULL DEFAULT 0;
+
+UPDATE anclax.tasks SET attributes = jsonb_set(attributes, '{retryPolicy, maxAttempts}', '-1') WHERE (attributes->'retryPolicy'->'always_retry_on_failure')::BOOLEAN;
+
+COMMIT;
diff --git a/dev/anclax/sql/migrations/0003_rename.down.sql b/dev/anclax/sql/migrations/0003_rename.down.sql
new file mode 100644
index 00000000..c4c4836d
--- /dev/null
+++ b/dev/anclax/sql/migrations/0003_rename.down.sql
@@ -0,0 +1,3 @@
+BEGIN;
+
+COMMIT;
diff --git a/dev/anclax/sql/migrations/0003_rename.up.sql b/dev/anclax/sql/migrations/0003_rename.up.sql
new file mode 100644
index 00000000..b2de0e84
--- /dev/null
+++ b/dev/anclax/sql/migrations/0003_rename.up.sql
@@ -0,0 +1,15 @@
+BEGIN;
+
+DO $$
+BEGIN
+ -- Rename only when the legacy 'anchor' schema exists and 'anclax' does not;
+ -- guarding the source schema keeps this a no-op on fresh databases instead
+ -- of erroring inside ALTER SCHEMA when 'anchor' was never created.
+ IF EXISTS (SELECT 1 FROM pg_namespace WHERE nspname = 'anchor')
+ AND NOT EXISTS (SELECT 1 FROM pg_namespace WHERE nspname = 'anclax')
+ THEN
+ EXECUTE 'ALTER SCHEMA anchor RENAME TO anclax';
+ END IF;
+END;
+$$;
+
+COMMIT;
diff --git a/go.mod b/go.mod
index e198d003..12f6aec2 100644
--- a/go.mod
+++ b/go.mod
@@ -11,6 +11,7 @@ require (
github.com/google/wire v0.6.0
github.com/jackc/pgx/v5 v5.7.5
github.com/oapi-codegen/runtime v1.1.1
+ github.com/oklog/ulid/v2 v2.1.1
github.com/pkg/errors v0.9.1
github.com/prometheus/client_golang v1.22.0
github.com/prometheus/common v0.63.0
diff --git a/go.sum b/go.sum
index 376503da..03f0be5b 100644
--- a/go.sum
+++ b/go.sum
@@ -124,10 +124,13 @@ github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f h1:KUppIJq7/+
github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
github.com/oapi-codegen/runtime v1.1.1 h1:EXLHh0DXIJnWhdRPN2w4MXAzFyE4CskzhNLUmtpMYro=
github.com/oapi-codegen/runtime v1.1.1/go.mod h1:SK9X900oXmPWilYR5/WKPzt3Kqxn/uS/+lbpREv+eCg=
+github.com/oklog/ulid/v2 v2.1.1 h1:suPZ4ARWLOJLegGFiZZ1dFAkqzhMjL3J1TzI+5wHz8s=
+github.com/oklog/ulid/v2 v2.1.1/go.mod h1:rcEKHmBBKfef9DhnvX7y1HZBYxjXb0cP5ExxNsTT1QQ=
github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U=
github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM=
github.com/opencontainers/image-spec v1.1.0 h1:8SG7/vwALn54lVB/0yZ/MMwhFrPYtpEHQb2IpWsCzug=
github.com/opencontainers/image-spec v1.1.0/go.mod h1:W4s4sFTMaBeK1BQLXbG4AdM2szdn85PY75RI83NrTrM=
+github.com/pborman/getopt v0.0.0-20170112200414-7148bc3a4c30/go.mod h1:85jBQOZwpVEaDAr341tbn15RS4fCAsIst0qp7i8ex1o=
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
diff --git a/pkg/controller/controller.go b/pkg/controller/controller.go
index 3ddc7409..f1212366 100644
--- a/pkg/controller/controller.go
+++ b/pkg/controller/controller.go
@@ -2,6 +2,7 @@ package controller
import (
"bufio"
+ "encoding/base64"
"errors"
"fmt"
"strings"
@@ -14,6 +15,7 @@ import (
"github.com/risingwavelabs/promdump/pkg/promdump"
"github.com/risingwavelabs/risingwave-console/pkg/conn/metricsstore"
"github.com/risingwavelabs/risingwave-console/pkg/service"
+ "github.com/risingwavelabs/risingwave-console/pkg/trd/lib0"
"github.com/risingwavelabs/risingwave-console/pkg/utils"
"github.com/risingwavelabs/risingwave-console/pkg/zgen/apigen"
)
@@ -312,6 +314,206 @@ func (controller *Controller) RestoreClusterSnapshot(c *fiber.Ctx, id int32, sna
return c.Status(fiber.StatusOK).SendString("Hello, World!")
}
+func (controller *Controller) CreateCollabNotebook(c *fiber.Ctx) error {
+ orgID, err := auth.GetOrgID(c)
+ if err != nil {
+ return c.Status(fiber.StatusUnauthorized).SendString("missing orgID in request context")
+ }
+
+ userID, err := auth.GetUserID(c)
+ if err != nil {
+ return c.Status(fiber.StatusUnauthorized).SendString("missing userID in request context")
+ }
+
+ var payload apigen.CollabNotebook
+ if err := c.BodyParser(&payload); err != nil {
+ return c.SendStatus(fiber.StatusBadRequest)
+ }
+
+ created, err := controller.svc.CreateCollabNotebook(c.Context(), payload, orgID, userID)
+ if err != nil {
+ switch {
+ case errors.Is(err, service.ErrNotebookIDEmpty), errors.Is(err, service.ErrNotebookTitleEmpty), errors.Is(err, service.ErrNotebookInvalidScope), errors.Is(err, service.ErrNotebookInvalidOwner):
+ return c.Status(fiber.StatusBadRequest).SendString(err.Error())
+ case errors.Is(err, service.ErrNotebookAlreadyExists):
+ return c.Status(fiber.StatusConflict).SendString(err.Error())
+ default:
+ return err
+ }
+ }
+
+ return c.Status(fiber.StatusCreated).JSON(created)
+}
+
+func (controller *Controller) ListCollabNotebooks(c *fiber.Ctx) error {
+ orgID, err := auth.GetOrgID(c)
+ if err != nil {
+ return c.Status(fiber.StatusUnauthorized).SendString("missing orgID in request context")
+ }
+
+ userID, err := auth.GetUserID(c)
+ if err != nil {
+ return c.Status(fiber.StatusUnauthorized).SendString("missing userID in request context")
+ }
+
+ notebooks, err := controller.svc.ListCollabNotebooks(c.Context(), orgID, userID)
+ if err != nil {
+ return err
+ }
+
+ return c.Status(fiber.StatusOK).JSON(notebooks)
+}
+
+func (controller *Controller) GetCollabNotebook(c *fiber.Ctx, notebookID string) error {
+ orgID, err := auth.GetOrgID(c)
+ if err != nil {
+ return c.Status(fiber.StatusUnauthorized).SendString("missing orgID in request context")
+ }
+
+ userID, err := auth.GetUserID(c)
+ if err != nil {
+ return c.Status(fiber.StatusUnauthorized).SendString("missing userID in request context")
+ }
+
+ notebook, err := controller.svc.GetCollabNotebook(c.Context(), notebookID, orgID, userID)
+ if err != nil {
+ if errors.Is(err, service.ErrNotebookNotFound) {
+ return c.SendStatus(fiber.StatusNotFound)
+ }
+ return err
+ }
+
+ return c.Status(fiber.StatusOK).JSON(notebook)
+}
+
+func (controller *Controller) UpdateCollabNotebook(c *fiber.Ctx, notebookID string) error {
+ orgID, err := auth.GetOrgID(c)
+ if err != nil {
+ return c.Status(fiber.StatusUnauthorized).SendString("missing orgID in request context")
+ }
+
+ userID, err := auth.GetUserID(c)
+ if err != nil {
+ return c.Status(fiber.StatusUnauthorized).SendString("missing userID in request context")
+ }
+
+ var payload apigen.CollabNotebook
+ if err := c.BodyParser(&payload); err != nil {
+ return c.SendStatus(fiber.StatusBadRequest)
+ }
+
+ if payload.NotebookID != "" && payload.NotebookID != notebookID {
+ return c.Status(fiber.StatusBadRequest).SendString("notebook id mismatch")
+ }
+
+ updated, err := controller.svc.UpdateCollabNotebook(c.Context(), notebookID, payload.Title, orgID, userID)
+ if err != nil {
+ switch {
+ case errors.Is(err, service.ErrNotebookNotFound):
+ return c.SendStatus(fiber.StatusNotFound)
+ case errors.Is(err, service.ErrNotebookTitleEmpty), errors.Is(err, service.ErrNotebookIDEmpty):
+ return c.Status(fiber.StatusBadRequest).SendString(err.Error())
+ default:
+ return err
+ }
+ }
+
+ return c.Status(fiber.StatusOK).JSON(updated)
+}
+
+func (controller *Controller) DeleteCollabNotebook(c *fiber.Ctx, notebookID string) error {
+ orgID, err := auth.GetOrgID(c)
+ if err != nil {
+ return c.Status(fiber.StatusUnauthorized).SendString("missing orgID in request context")
+ }
+
+ userID, err := auth.GetUserID(c)
+ if err != nil {
+ return c.Status(fiber.StatusUnauthorized).SendString("missing userID in request context")
+ }
+
+ if err := controller.svc.DeleteCollabNotebook(c.Context(), notebookID, orgID, userID); err != nil {
+ switch {
+ case errors.Is(err, service.ErrNotebookNotFound):
+ return c.SendStatus(fiber.StatusNotFound)
+ case errors.Is(err, service.ErrNotebookIDEmpty):
+ return c.Status(fiber.StatusBadRequest).SendString(err.Error())
+ case errors.Is(err, service.ErrNotebookDeleteNotAllowed):
+ return c.Status(fiber.StatusForbidden).SendString(err.Error())
+ default:
+ return err
+ }
+ }
+
+ return c.SendStatus(fiber.StatusNoContent)
+}
+
+func (controller *Controller) TransferCollabNotebook(c *fiber.Ctx, notebookID string) error {
+ orgID, err := auth.GetOrgID(c)
+ if err != nil {
+ return c.Status(fiber.StatusUnauthorized).SendString("missing orgID in request context")
+ }
+
+ userID, err := auth.GetUserID(c)
+ if err != nil {
+ return c.Status(fiber.StatusUnauthorized).SendString("missing userID in request context")
+ }
+
+ var payload apigen.CollabNotebookTransferRequest
+ if err := c.BodyParser(&payload); err != nil {
+ return c.SendStatus(fiber.StatusBadRequest)
+ }
+
+ updated, err := controller.svc.TransferCollabNotebook(c.Context(), notebookID, payload, orgID, userID)
+ if err != nil {
+ switch {
+ case errors.Is(err, service.ErrNotebookNotFound):
+ return c.SendStatus(fiber.StatusNotFound)
+ case errors.Is(err, service.ErrNotebookInvalidScope), errors.Is(err, service.ErrNotebookInvalidOwner), errors.Is(err, service.ErrNotebookIDEmpty):
+ return c.Status(fiber.StatusBadRequest).SendString(err.Error())
+ case errors.Is(err, service.ErrNotebookTransferNotAllowed):
+ return c.Status(fiber.StatusForbidden).SendString(err.Error())
+ default:
+ return err
+ }
+ }
+
+ return c.Status(fiber.StatusOK).JSON(updated)
+}
+
+func (controller *Controller) UploadCollabNotebookSnapshot(c *fiber.Ctx, notebookID string) error {
+ if _, err := auth.GetOrgID(c); err != nil {
+ return c.Status(fiber.StatusUnauthorized).SendString("missing orgID in request context")
+ }
+
+ var stateVector map[uint64]uint64
+ if header := c.Get("X-Yjs-State-Vector"); header != "" {
+ raw, err := base64.StdEncoding.DecodeString(header)
+ if err != nil {
+ return c.Status(fiber.StatusBadRequest).SendString("invalid X-Yjs-State-Vector header")
+ }
+ sv, err := lib0.DecodeYjsStateVector(raw)
+ if err != nil {
+ return c.Status(fiber.StatusBadRequest).SendString("failed to decode Yjs state vector")
+ }
+ stateVector = sv
+ }
+
+ payload := c.Body()
+ if len(payload) == 0 {
+ return c.Status(fiber.StatusBadRequest).SendString("snapshot payload must not be empty")
+ }
+
+ if err := controller.svc.UpsertCollabDocSnapshot(c.Context(), notebookID, payload, stateVector); err != nil {
+ if errors.Is(err, service.ErrSnapshotPayloadEmpty) {
+ return c.Status(fiber.StatusBadRequest).SendString(err.Error())
+ }
+ return err
+ }
+
+ return c.SendStatus(fiber.StatusNoContent)
+}
+
func (controller *Controller) ListClusterSnapshots(c *fiber.Ctx, id int32) error {
orgID, err := auth.GetOrgID(c)
if err != nil {
diff --git a/pkg/controller/ws_handler.go b/pkg/controller/ws_handler.go
index 0722c6c0..c8f5eb21 100644
--- a/pkg/controller/ws_handler.go
+++ b/pkg/controller/ws_handler.go
@@ -2,21 +2,22 @@ package controller
import (
"github.com/cloudcarver/anclax/pkg/ws"
+ "github.com/risingwavelabs/risingwave-console/pkg/service"
)
type WsController struct {
+ collab *service.CollaborativeService
}
-func NewWsController() *WsController {
- c := &WsController{}
-
- return c
+func NewWsController(collab *service.CollaborativeService) *WsController {
+ return &WsController{collab: collab}
}
func (w *WsController) OnSessionCreated(s *ws.Session) error {
- return nil
+ // Subscribe session to doc room if query param present
+ return w.collab.JoinDocRoomByQuery(s)
}
func (w *WsController) Handle(ctx *ws.Ctx, data []byte) error {
- return nil
+ return w.collab.Handle(ctx, data)
}
diff --git a/pkg/init.go b/pkg/init.go
index 039ad9e3..f2d0768e 100644
--- a/pkg/init.go
+++ b/pkg/init.go
@@ -15,7 +15,7 @@ import (
)
// This will run before the application starts.
-func Init(cfg *config.Config, anclaxApp *anclax_app.Application, initService *service.InitService, console anclax_app.Plugin) (*app.App, error) {
+func Init(cfg *config.Config, anclaxApp *anclax_app.Application, initService *service.InitService, console anclax_app.Plugin, wsc *controller.WsController) (*app.App, error) {
if err := anclaxApp.Plug(console); err != nil {
return nil, err
}
@@ -27,6 +27,9 @@ func Init(cfg *config.Config, anclaxApp *anclax_app.Application, initService *se
return nil, err
}
+ anclaxApp.GetServer().Websocket().SetOnSessionCreated(wsc.OnSessionCreated)
+ anclaxApp.GetServer().Websocket().SetMessageHandler(wsc.Handle)
+
return &app.App{
AnclaxApp: anclaxApp,
}, nil
@@ -34,7 +37,7 @@ func Init(cfg *config.Config, anclaxApp *anclax_app.Application, initService *se
// InitAnclaxApplication initializes the Anclax application with the provided configuration.
// You can modify this function to customize the initialization process,
-func InitAnclaxApplication(cfg *config.Config, wsc *controller.WsController) (*anclax_app.Application, error) {
+func InitAnclaxApplication(cfg *config.Config) (*anclax_app.Application, error) {
anclaxApp, err := anclax_wire.InitializeApplication(&cfg.Server, &anclax_config.LibConfig{
Pg: &anclax_config.PgCfg{
MaxConnections: 10,
@@ -48,9 +51,5 @@ func InitAnclaxApplication(cfg *config.Config, wsc *controller.WsController) (*a
if err != nil {
return nil, err
}
-
- anclaxApp.GetServer().Websocket().SetOnSessionCreated(wsc.OnSessionCreated)
- anclaxApp.GetServer().Websocket().SetMessageHandler(wsc.Handle)
-
return anclaxApp, nil
}
diff --git a/pkg/service/collaborative_service.go b/pkg/service/collaborative_service.go
new file mode 100644
index 00000000..3d99363d
--- /dev/null
+++ b/pkg/service/collaborative_service.go
@@ -0,0 +1,483 @@
+package service
+
+import (
+ "bytes"
+ "sync"
+ "time"
+
+ "github.com/cloudcarver/anclax/pkg/ws"
+ "github.com/jackc/pgx/v5"
+ "github.com/pkg/errors"
+ "github.com/risingwavelabs/risingwave-console/pkg/trd/lib0"
+ "github.com/risingwavelabs/risingwave-console/pkg/zcore/model"
+)
+
+// Query parameter key for document id; agreed with frontend.
+const DocIDKey = "doc"
+
+type SyncStep = uint64
+
+// Top-level websocket message types defined by y-websocket/y-protocols
+type MessageType = uint64
+
+const (
+ YwsMessageSync MessageType = 0
+ YwsMessageAwareness MessageType = 1
+ YwsMessageAuth MessageType = 2
+
+ // Custom message types
+ YwsMessageSnapshotRequest MessageType = 100
+ YwsMessageUpdateMeta MessageType = 101
+)
+
+const (
+ YjsSyncStep1 SyncStep = 0
+ YjsSyncStep2 SyncStep = 1
+ YjsUpdate SyncStep = 2
+)
+
+const (
+ snapshotDebounceInterval = 30 * time.Second
+ snapshotCheckInterval = 5 * time.Second
+ snapshotRequestRetryWindow = 15 * time.Second
+ pendingUpdateMaxAge = 10 * time.Minute
+)
+
+// CollaborativeSnapshotRecorder receives notifications when a document snapshot
+// has been persisted so the scheduler can clear pending state.
+type CollaborativeSnapshotRecorder interface {
+ RecordSnapshotSaved(docID string, savedAt time.Time, stateVector map[uint64]uint64)
+}
+
+type docUpdate struct {
+ at time.Time
+ payload []byte
+ clientID uint64
+ clock uint64
+ hasMeta bool
+}
+
+type updateMeta struct {
+ clientID uint64
+ clock uint64
+}
+
+type collabDocState struct {
+ sessions map[string]*ws.Session
+ lastUpdate time.Time
+ lastSave time.Time
+ hasUnsavedChanges bool
+ awaitingSnapshot bool
+ lastRequest time.Time
+ pendingUpdates []docUpdate
+ pendingMeta []updateMeta
+}
+
+// CollaborativeService relays Yjs payloads and handles snapshot persistence/write-back only.
+type CollaborativeService struct {
+ hub *ws.Hub
+ m model.ModelInterface
+
+ mu sync.Mutex
+ docs map[string]*collabDocState
+ now func() time.Time
+ checkInterval time.Duration
+ debounce time.Duration
+ requestBackoff time.Duration
+}
+
+// emptyYDocStateUpdate is a valid Yjs state update for an empty Y.Doc,
+// encoded via encodeStateAsUpdate(new Y.Doc()) in the JS implementation.
+// Hex representation was generated once via:
+// Buffer.from(Y.encodeStateAsUpdate(new Y.Doc())).toString("hex") -> "0000"
+var emptyYDocStateUpdate = []byte{0x00, 0x00}
+
+func encodeYSyncMessage(step SyncStep, payload []byte) ([]byte, error) {
+ buf := &bytes.Buffer{}
+ if err := lib0.WriteVarUint(buf, YwsMessageSync); err != nil {
+ return nil, err
+ }
+ if err := lib0.WriteVarUint(buf, step); err != nil {
+ return nil, err
+ }
+ if err := lib0.WriteVarUint(buf, uint64(len(payload))); err != nil {
+ return nil, err
+ }
+ if _, err := buf.Write(payload); err != nil {
+ return nil, err
+ }
+ return buf.Bytes(), nil
+}
+
+func encodeSnapshotRequestMessage(docID string) ([]byte, error) {
+ buf := &bytes.Buffer{}
+ if err := lib0.WriteVarUint(buf, YwsMessageSnapshotRequest); err != nil {
+ return nil, err
+ }
+ if err := lib0.WriteVarString(buf, docID); err != nil {
+ return nil, err
+ }
+ return buf.Bytes(), nil
+}
+
+func NewCollaborativeService(hub *ws.Hub, m model.ModelInterface) *CollaborativeService {
+ svc := &CollaborativeService{
+ hub: hub,
+ m: m,
+ docs: make(map[string]*collabDocState),
+ now: time.Now,
+ checkInterval: snapshotCheckInterval,
+ debounce: snapshotDebounceInterval,
+ requestBackoff: snapshotRequestRetryWindow,
+ }
+ go svc.run()
+ return svc
+}
+
+// Handle processes an incoming websocket binary message.
+// This service stays a thin relay that persists and replays snapshots without digging into CRDT semantics.
+func (s *CollaborativeService) Handle(ctx *ws.Ctx, data []byte) error {
+ r := bytes.NewReader(data)
+ // messageType follows lib0 varuint encoding
+ messageType, err := lib0.ReadVarUintFrom(r)
+ if err != nil {
+ return err
+ }
+ session := ctx.Session
+ if session == nil {
+ return nil
+ }
+ switch messageType {
+ case YwsMessageSync:
+ // Next varuint in payload is the sync-step (0|1|2)
+ step, err := lib0.ReadVarUintFrom(r)
+ if err != nil {
+ return err
+ }
+
+ docID := session.Conn().Query(DocIDKey)
+ if docID == "" {
+ return nil
+ }
+
+ // * YjsSyncStep1: Includes the State Set of the sending client. When received, the client should reply with YjsSyncStep2.
+ // * YjsSyncStep2: Includes all missing structs and the complete delete set. When received, the client is assured that it
+ // received all information from the remote client.
+ if step == YjsSyncStep1 {
+ snapshot, err := s.m.GetCollabDocSnapshot(ctx, docID)
+ if err != nil {
+ // For brand-new documents with no snapshot yet,
+ // empty Yjs state update so that clients
+ // can still complete the sync handshake (provider.synced=true)
+ // without hitting decode errors.
+ if errors.Is(err, pgx.ErrNoRows) {
+ snapshot = emptyYDocStateUpdate
+ } else {
+ return err
+ }
+ }
+
+ pending := s.collectPendingUpdates(docID)
+
+ step2, err := encodeYSyncMessage(YjsSyncStep2, snapshot)
+ if err != nil {
+ return err
+ }
+ if err := session.WriteBinaryMessage(step2); err != nil {
+ return err
+ }
+
+ for _, update := range pending {
+ if err := session.WriteBinaryMessage(update); err != nil {
+ return err
+ }
+ }
+
+ session.BroadcastBinary(docID, data)
+ }
+ if step == YjsSyncStep2 {
+ s.hub.BroadcastBinary(docID, data)
+ return nil
+ }
+ if step == YjsUpdate {
+ s.hub.BroadcastBinary(docID, data)
+ s.recordDocUpdate(docID, data)
+ }
+
+ return nil
+ case YwsMessageAwareness:
+ // Forward awareness payload as-is to the doc room
+ if docID := session.Conn().Query(DocIDKey); docID != "" {
+ s.hub.BroadcastBinary(docID, data)
+ }
+ return nil
+ case YwsMessageSnapshotRequest:
+ // Ignore requests initiated by clients
+ return nil
+ case YwsMessageUpdateMeta:
+ docID := session.Conn().Query(DocIDKey)
+ if docID == "" {
+ return nil
+ }
+ clientID, err := lib0.ReadVarUintFrom(r)
+ if err != nil {
+ return err
+ }
+ clock, err := lib0.ReadVarUintFrom(r)
+ if err != nil {
+ return err
+ }
+ s.recordUpdateMeta(docID, clientID, clock)
+ return nil
+ default:
+ return nil
+ }
+}
+
+func (s *CollaborativeService) JoinDocRoomByQuery(sess *ws.Session) error {
+ if sess == nil {
+ return nil
+ }
+ docID := sess.Conn().Query(DocIDKey)
+ if docID == "" {
+ return nil
+ }
+
+ if err := s.hub.AddTopic(docID); err != nil && !errors.Is(err, ws.ErrTopicAlreadyExists) {
+ return err
+ }
+ if err := s.hub.Subscribe(docID, sess); err != nil {
+ return err
+ }
+ s.registerSession(docID, sess)
+ sess.RegisterOnClose(func() error {
+ s.unregisterSession(docID, sess.ID())
+ return s.hub.Unsubscribe(docID, sess)
+ })
+ return nil
+}
+
+func (s *CollaborativeService) run() {
+ ticker := time.NewTicker(s.checkInterval)
+ defer ticker.Stop()
+ for range ticker.C {
+ s.enqueueSnapshotRequests()
+ }
+}
+
+// enqueueSnapshotRequests scans every tracked doc and, for docs with unsaved
+// changes that have been quiet for at least the debounce window, asks one
+// connected client to produce a snapshot by sending it a snapshot-request
+// message.
+//
+// Target selection happens under s.mu; the websocket writes happen after the
+// lock is released so a slow client cannot stall the whole scan.
+func (s *CollaborativeService) enqueueSnapshotRequests() {
+	now := s.now()
+	type target struct {
+		docID   string
+		session *ws.Session
+	}
+
+	targets := make([]target, 0)
+
+	s.mu.Lock()
+	for docID, state := range s.docs {
+		if state == nil {
+			continue
+		}
+		if !state.hasUnsavedChanges {
+			// Fully saved and no listeners: garbage-collect the entry.
+			if len(state.sessions) == 0 {
+				delete(s.docs, docID)
+			}
+			continue
+		}
+		if len(state.sessions) == 0 {
+			// No active clients; wait until someone joins again.
+			state.awaitingSnapshot = false
+			continue
+		}
+		if state.lastUpdate.IsZero() {
+			continue
+		}
+
+		// Debounce relative to the latest mutation we have seen.
+		if now.Sub(state.lastUpdate) < s.debounce {
+			continue
+		}
+
+		// An outstanding request is retried only after the backoff elapses.
+		if state.awaitingSnapshot && now.Sub(state.lastRequest) < s.requestBackoff {
+			continue
+		}
+		state.awaitingSnapshot = false
+
+		// Pick an arbitrary live session to serve the snapshot; map
+		// iteration order is random, which spreads the load across clients.
+		var pick *ws.Session
+		for _, session := range state.sessions {
+			pick = session
+			if pick != nil {
+				break
+			}
+		}
+		if pick == nil {
+			continue
+		}
+		state.awaitingSnapshot = true
+		state.lastRequest = now
+		targets = append(targets, target{docID: docID, session: pick})
+	}
+	s.mu.Unlock()
+
+	// Network writes happen outside the lock.
+	for _, t := range targets {
+		payload, err := encodeSnapshotRequestMessage(t.docID)
+		if err != nil {
+			continue
+		}
+		if err := t.session.WriteBinaryMessage(payload); err != nil {
+			// Delivery failed: clear the awaiting flag so the next tick can
+			// retry, possibly via a different session.
+			s.mu.Lock()
+			if state, ok := s.docs[t.docID]; ok {
+				state.awaitingSnapshot = false
+			}
+			s.mu.Unlock()
+		}
+	}
+}
+
+// recordDocUpdate buffers a Yjs update for docID and marks the doc dirty.
+//
+// The payload is defensively copied. If an update-meta message (clientID,
+// clock) arrived before its update, the oldest queued meta is paired FIFO
+// with this update so RecordSnapshotSaved can later match it against a
+// snapshot's state vector.
+func (s *CollaborativeService) recordDocUpdate(docID string, payload []byte) {
+	if docID == "" {
+		return
+	}
+	now := s.now()
+	s.mu.Lock()
+	state := s.ensureStateLocked(docID)
+	state.lastUpdate = now
+	state.hasUnsavedChanges = true
+	// A new mutation invalidates any in-flight snapshot request.
+	state.awaitingSnapshot = false
+	state.pendingUpdates = append(state.pendingUpdates, docUpdate{at: now, payload: cloneBytes(payload)})
+	if len(state.pendingMeta) > 0 {
+		// Consume metadata that was recorded ahead of its update.
+		meta := state.pendingMeta[0]
+		state.pendingMeta = state.pendingMeta[1:]
+		lastIdx := len(state.pendingUpdates) - 1
+		state.pendingUpdates[lastIdx].clientID = meta.clientID
+		state.pendingUpdates[lastIdx].clock = meta.clock
+		state.pendingUpdates[lastIdx].hasMeta = true
+	}
+	s.mu.Unlock()
+}
+
+// recordUpdateMeta attaches (clientID, clock) metadata to the oldest
+// buffered update that lacks it. If every buffered update already has
+// metadata — i.e. the meta message arrived before its update — the pair is
+// queued in pendingMeta for recordDocUpdate to consume later. Unknown docs
+// are ignored (no state is created here).
+func (s *CollaborativeService) recordUpdateMeta(docID string, clientID, clock uint64) {
+	if docID == "" {
+		return
+	}
+	s.mu.Lock()
+	defer s.mu.Unlock()
+	state, ok := s.docs[docID]
+	if !ok || state == nil {
+		return
+	}
+	for i := range state.pendingUpdates {
+		if state.pendingUpdates[i].hasMeta {
+			continue
+		}
+		state.pendingUpdates[i].clientID = clientID
+		state.pendingUpdates[i].clock = clock
+		state.pendingUpdates[i].hasMeta = true
+		return
+	}
+	// All buffered updates already tagged: remember the pair for the next one.
+	state.pendingMeta = append(state.pendingMeta, updateMeta{clientID: clientID, clock: clock})
+}
+
+// registerSession tracks sess as an active participant of docID so snapshot
+// requests can be routed to it. No-op for an empty docID or nil session.
+func (s *CollaborativeService) registerSession(docID string, sess *ws.Session) {
+	if docID == "" || sess == nil {
+		return
+	}
+	s.mu.Lock()
+	state := s.ensureStateLocked(docID)
+	// NOTE(review): ensureStateLocked already initializes the sessions map,
+	// so this nil check appears redundant; kept as defensive code.
+	if state.sessions == nil {
+		state.sessions = make(map[string]*ws.Session)
+	}
+	state.sessions[sess.ID()] = sess
+	s.mu.Unlock()
+}
+
+// unregisterSession removes a session from docID's participant set and
+// garbage-collects the doc state once it has neither participants nor
+// unsaved changes (dirty docs are kept so a later snapshot can drain them).
+func (s *CollaborativeService) unregisterSession(docID, sessionID string) {
+	if docID == "" || sessionID == "" {
+		return
+	}
+	s.mu.Lock()
+	defer s.mu.Unlock()
+	if state, ok := s.docs[docID]; ok {
+		delete(state.sessions, sessionID)
+		if len(state.sessions) == 0 && !state.hasUnsavedChanges {
+			delete(s.docs, docID)
+		}
+	}
+}
+
+// ensureStateLocked returns the tracking state for docID, creating and
+// registering a fresh one if absent (or if a nil entry was stored).
+// Callers must hold s.mu.
+func (s *CollaborativeService) ensureStateLocked(docID string) *collabDocState {
+	if state, ok := s.docs[docID]; ok && state != nil {
+		return state
+	}
+	state := &collabDocState{
+		sessions: make(map[string]*ws.Session),
+	}
+	s.docs[docID] = state
+	return state
+}
+
+// collectPendingUpdates returns deep copies of all buffered update payloads
+// for docID, or nil when the doc is unknown or has nothing buffered. Copies
+// keep callers isolated from concurrent mutation of the buffer.
+func (s *CollaborativeService) collectPendingUpdates(docID string) [][]byte {
+	s.mu.Lock()
+	defer s.mu.Unlock()
+	state, ok := s.docs[docID]
+	if !ok || state == nil || len(state.pendingUpdates) == 0 {
+		return nil
+	}
+	updates := make([][]byte, 0, len(state.pendingUpdates))
+	for _, update := range state.pendingUpdates {
+		updates = append(updates, cloneBytes(update.payload))
+	}
+	return updates
+}
+
+// cloneBytes returns an independent copy of data. Empty or nil input yields
+// nil, so callers never retain a reference to the argument's backing array.
+func cloneBytes(data []byte) []byte {
+	if len(data) == 0 {
+		return nil
+	}
+	dup := make([]byte, len(data))
+	copy(dup, data)
+	return dup
+}
+
+// RecordSnapshotSaved updates in-memory doc bookkeeping after persisting a snapshot.
+//
+// Buffered updates precisely covered by the snapshot's state vector
+// (snapshot clock >= update clock for that client) are dropped. Updates
+// without usable metadata fall back to a time-based window: entries at or
+// before savedAt - pendingUpdateMaxAge are discarded to bound memory.
+// hasUnsavedChanges is recomputed from what survives. A zero savedAt is
+// replaced with the current time.
+func (s *CollaborativeService) RecordSnapshotSaved(docID string, savedAt time.Time, stateVector map[uint64]uint64) {
+	if docID == "" {
+		return
+	}
+	if savedAt.IsZero() {
+		savedAt = s.now()
+	}
+	s.mu.Lock()
+	defer s.mu.Unlock()
+	state := s.ensureStateLocked(docID)
+	state.lastSave = savedAt
+	state.awaitingSnapshot = false
+	state.lastRequest = time.Time{}
+	if len(state.pendingUpdates) == 0 {
+		state.hasUnsavedChanges = false
+		return
+	}
+	cutoff := savedAt.Add(-pendingUpdateMaxAge)
+	// In-place filter over the same backing array; pendingUpdates is
+	// reassigned below so no stale tail survives.
+	filtered := state.pendingUpdates[:0]
+	for _, update := range state.pendingUpdates {
+		snapshotClock, ok := stateVector[update.clientID]
+		if ok && update.hasMeta && snapshotClock >= update.clock {
+			// Precisely covered by snapshot via state vector semantics.
+			continue
+		}
+		// NOTE(review): cutoff.IsZero() can only hold for a pathological
+		// savedAt, so the guard is effectively always true — confirm intent.
+		if (!update.hasMeta || !ok) && !cutoff.IsZero() && (update.at.Before(cutoff) || update.at.Equal(cutoff)) {
+			// Fallback: legacy or malformed clients that never send metadata can
+			// otherwise cause unbounded growth of pendingUpdates. For such updates,
+			// we apply a conservative time-based window and eventually drop very old
+			// entries to avoid memory leaks.
+			continue
+		}
+		filtered = append(filtered, update)
+	}
+	state.pendingUpdates = nil
+	if len(filtered) > 0 {
+		state.pendingUpdates = filtered
+	}
+	state.hasUnsavedChanges = len(state.pendingUpdates) > 0
+}
diff --git a/pkg/service/init_service.go b/pkg/service/init_service.go
index 0d1de65e..09524510 100644
--- a/pkg/service/init_service.go
+++ b/pkg/service/init_service.go
@@ -6,6 +6,10 @@ import (
"net/http"
"os"
+ "github.com/cloudcarver/anclax/core"
+ anclax_app "github.com/cloudcarver/anclax/pkg/app"
+ "github.com/cloudcarver/anclax/pkg/logger"
+ anclax_svc "github.com/cloudcarver/anclax/pkg/service"
"github.com/go-playground/validator/v10"
"github.com/gofiber/fiber/v2"
"github.com/gofiber/fiber/v2/middleware/filesystem"
@@ -18,11 +22,6 @@ import (
"github.com/risingwavelabs/risingwave-console/pkg/zgen/querier"
"go.uber.org/zap"
"gopkg.in/yaml.v3"
-
- "github.com/cloudcarver/anclax/core"
- anclax_app "github.com/cloudcarver/anclax/pkg/app"
- "github.com/cloudcarver/anclax/pkg/logger"
- anclax_svc "github.com/cloudcarver/anclax/pkg/service"
)
var initLog = logger.NewLogAgent("init")
diff --git a/pkg/service/notebook_service.go b/pkg/service/notebook_service.go
new file mode 100644
index 00000000..a23fb5ec
--- /dev/null
+++ b/pkg/service/notebook_service.go
@@ -0,0 +1,426 @@
+package service
+
+import (
+ "context"
+ "strings"
+ "time"
+
+ "github.com/jackc/pgx/v5"
+ "github.com/jackc/pgx/v5/pgconn"
+ "github.com/oklog/ulid/v2"
+ "github.com/pkg/errors"
+ "github.com/risingwavelabs/risingwave-console/pkg/zgen/apigen"
+ "github.com/risingwavelabs/risingwave-console/pkg/zgen/querier"
+)
+
+// CreateCollabNotebook creates a notebook inside orgID and returns its API
+// representation.
+//
+// The title is trimmed and must be non-empty. Scope defaults to personal
+// when params.Scope is nil. For personal notebooks the owner is
+// params.OwnerUserID when set and non-zero, otherwise the calling userID,
+// and the owner must belong to orgID. A unique-constraint violation
+// (Postgres error 23505) maps to ErrNotebookAlreadyExists.
+func (s *Service) CreateCollabNotebook(ctx context.Context, params apigen.CollabNotebook, orgID int32, userID int32) (*apigen.CollabNotebook, error) {
+	title := strings.TrimSpace(params.Title)
+	if title == "" {
+		return nil, ErrNotebookTitleEmpty
+	}
+
+	// ULIDs are lexicographically sortable by creation time.
+	notebookID := ulid.Make().String()
+
+	var scope querier.NotebookScope
+	if params.Scope == nil {
+		scope = querier.NotebookScopePersonal
+	} else {
+		switch *params.Scope {
+		case apigen.CollabNotebookScopePersonal:
+			scope = querier.NotebookScopePersonal
+		case apigen.CollabNotebookScopeOrganization:
+			scope = querier.NotebookScopeOrganization
+		default:
+			return nil, ErrNotebookInvalidScope
+		}
+	}
+
+	var ownerUserID *int32
+	if scope == querier.NotebookScopePersonal {
+		ownerCandidate := userID
+		if params.OwnerUserID != nil && *params.OwnerUserID != 0 {
+			ownerCandidate = *params.OwnerUserID
+		}
+
+		if ownerCandidate == 0 {
+			return nil, ErrNotebookInvalidOwner
+		}
+
+		// Verify the target owner belongs to the caller's organization.
+		// NOTE(review): this path resolves the owner's org via GetOrgSettings
+		// while TransferCollabNotebook uses GetUserOrganization for the same
+		// check — confirm the two queries agree.
+		ownerOrg, err := s.m.GetOrgSettings(ctx, ownerCandidate)
+		if err != nil {
+			if errors.Is(err, pgx.ErrNoRows) {
+				return nil, ErrNotebookInvalidOwner
+			}
+			return nil, errors.Wrap(err, "failed to lookup target owner organization")
+		}
+		if ownerOrg.OrgID != orgID {
+			return nil, ErrNotebookInvalidOwner
+		}
+
+		owner := ownerCandidate
+		ownerUserID = &owner
+	}
+
+	var createdBy *int32
+	if userID != 0 {
+		creator := userID
+		createdBy = &creator
+	}
+
+	created, err := s.m.CreateNotebook(ctx, querier.CreateNotebookParams{
+		ID:          notebookID,
+		Scope:       scope,
+		OrgID:       orgID,
+		OwnerUserID: ownerUserID,
+		Title:       title,
+		CreatedBy:   createdBy,
+	})
+	if err != nil {
+		var pgErr *pgconn.PgError
+		// 23505 = unique_violation.
+		if errors.As(err, &pgErr) && pgErr.Code == "23505" {
+			return nil, ErrNotebookAlreadyExists
+		}
+		return nil, errors.Wrap(err, "failed to create collaborative notebook")
+	}
+
+	return mapNotebookToAPI(
+		created.ID,
+		created.Title,
+		created.Scope,
+		created.OrganizationID,
+		created.OwnerUserID,
+		created.CreatedBy,
+		created.CreatedAt,
+		created.UpdatedAt,
+	), nil
+}
+
+// ListCollabNotebooks returns the notebooks in orgID visible to userID.
+// The owner filter is omitted when userID is zero; the exact visibility
+// rule (presumably org-scoped notebooks plus the caller's personal ones)
+// lives in the ListAccessibleNotebooks query — verify there.
+func (s *Service) ListCollabNotebooks(ctx context.Context, orgID int32, userID int32) ([]*apigen.CollabNotebook, error) {
+	var ownerUserID *int32
+	if userID != 0 {
+		ownerUserID = &userID
+	}
+
+	notebooks, err := s.m.ListAccessibleNotebooks(ctx, querier.ListAccessibleNotebooksParams{
+		OrgID:       orgID,
+		OwnerUserID: ownerUserID,
+	})
+	if err != nil {
+		return nil, errors.Wrap(err, "failed to list collaborative notebooks")
+	}
+
+	// Always return a non-nil slice, even when there are no notebooks.
+	result := make([]*apigen.CollabNotebook, 0, len(notebooks))
+	for _, nb := range notebooks {
+		result = append(result, mapNotebookToAPI(
+			nb.ID,
+			nb.Title,
+			nb.Scope,
+			nb.OrganizationID,
+			nb.OwnerUserID,
+			nb.CreatedBy,
+			nb.CreatedAt,
+			nb.UpdatedAt,
+		))
+	}
+	return result, nil
+}
+
+// GetCollabNotebook fetches a notebook and enforces visibility: it must
+// belong to orgID, and a personal notebook is only visible to its owner.
+// All access failures are reported as ErrNotebookNotFound so existence is
+// not leaked to unauthorized callers.
+func (s *Service) GetCollabNotebook(ctx context.Context, notebookID string, orgID int32, userID int32) (*apigen.CollabNotebook, error) {
+	nb, err := s.m.GetNotebook(ctx, notebookID)
+	if err != nil {
+		if errors.Is(err, pgx.ErrNoRows) {
+			return nil, ErrNotebookNotFound
+		}
+		return nil, errors.Wrap(err, "failed to get collaborative notebook")
+	}
+
+	if nb.OrganizationID != orgID {
+		return nil, ErrNotebookNotFound
+	}
+
+	if nb.Scope == querier.NotebookScopePersonal {
+		if nb.OwnerUserID == nil || userID == 0 || *nb.OwnerUserID != userID {
+			return nil, ErrNotebookNotFound
+		}
+	}
+
+	return mapNotebookToAPI(
+		nb.ID,
+		nb.Title,
+		nb.Scope,
+		nb.OrganizationID,
+		nb.OwnerUserID,
+		nb.CreatedBy,
+		nb.CreatedAt,
+		nb.UpdatedAt,
+	), nil
+}
+
+// UpsertCollabDocSnapshot persists the latest snapshot for notebookID and,
+// when a collaborative service is wired in, notifies it so buffered updates
+// covered by stateVector can be pruned. The notebook ID and snapshot must be
+// non-empty.
+func (s *Service) UpsertCollabDocSnapshot(ctx context.Context, notebookID string, snapshot []byte, stateVector map[uint64]uint64) error {
+	if strings.TrimSpace(notebookID) == "" {
+		return ErrNotebookIDEmpty
+	}
+	if len(snapshot) == 0 {
+		return ErrSnapshotPayloadEmpty
+	}
+
+	if err := s.m.UpsertCollabDocSnapshot(ctx, querier.UpsertCollabDocSnapshotParams{
+		NotebookID: notebookID,
+		Snapshot:   snapshot,
+	}); err != nil {
+		return errors.Wrap(err, "failed to upsert collab doc snapshot")
+	}
+	// The collab recorder is optional; skip bookkeeping when absent.
+	if s.collab != nil {
+		s.collab.RecordSnapshotSaved(notebookID, s.now(), stateVector)
+	}
+	return nil
+}
+
+// UpdateCollabNotebook renames a notebook after enforcing the same
+// visibility rules as GetCollabNotebook: the notebook must belong to orgID
+// and a personal notebook may only be renamed by its owner. The title is
+// trimmed before validation AND persistence, matching CreateCollabNotebook
+// (previously the untrimmed input was stored, so " x " and "x" produced
+// different titles depending on the entry point).
+func (s *Service) UpdateCollabNotebook(ctx context.Context, notebookID string, title string, orgID int32, userID int32) (*apigen.CollabNotebook, error) {
+	if strings.TrimSpace(notebookID) == "" {
+		return nil, ErrNotebookIDEmpty
+	}
+
+	// Normalize once up front; validating before the DB round-trip also
+	// avoids a pointless fetch for an obviously invalid title.
+	title = strings.TrimSpace(title)
+	if title == "" {
+		return nil, ErrNotebookTitleEmpty
+	}
+
+	nb, err := s.m.GetNotebook(ctx, notebookID)
+	if err != nil {
+		if errors.Is(err, pgx.ErrNoRows) {
+			return nil, ErrNotebookNotFound
+		}
+		return nil, errors.Wrap(err, "failed to get collaborative notebook")
+	}
+
+	// Report authorization failures as not-found to avoid leaking existence.
+	if nb.OrganizationID != orgID {
+		return nil, ErrNotebookNotFound
+	}
+
+	if nb.Scope == querier.NotebookScopePersonal {
+		if nb.OwnerUserID == nil || userID == 0 || *nb.OwnerUserID != userID {
+			return nil, ErrNotebookNotFound
+		}
+	}
+
+	updated, err := s.m.UpdateNotebookTitle(ctx, querier.UpdateNotebookTitleParams{
+		ID:    notebookID,
+		Title: title,
+	})
+	if err != nil {
+		return nil, errors.Wrap(err, "failed to update collaborative notebook")
+	}
+
+	return mapNotebookToAPI(
+		updated.ID,
+		updated.Title,
+		updated.Scope,
+		updated.OrganizationID,
+		updated.OwnerUserID,
+		updated.CreatedBy,
+		updated.CreatedAt,
+		updated.UpdatedAt,
+	), nil
+}
+
+// TransferCollabNotebook moves a notebook between personal and organization
+// scope within orgID.
+//
+// Rules visible in this code:
+//   - a zero userID is rejected outright;
+//   - transferring to the scope the notebook already has is a no-op that
+//     returns the current state;
+//   - personal -> organization requires the caller to own the notebook or
+//     be an org owner;
+//   - organization -> personal requires the caller to be the creator or an
+//     org owner, and the target owner (payload.OwnerUserID, defaulting to
+//     the caller) must belong to orgID.
+func (s *Service) TransferCollabNotebook(ctx context.Context, notebookID string, payload apigen.CollabNotebookTransferRequest, orgID int32, userID int32) (*apigen.CollabNotebook, error) {
+	if strings.TrimSpace(notebookID) == "" {
+		return nil, ErrNotebookIDEmpty
+	}
+	if userID == 0 {
+		return nil, ErrNotebookTransferNotAllowed
+	}
+
+	nb, err := s.m.GetNotebook(ctx, notebookID)
+	if err != nil {
+		if errors.Is(err, pgx.ErrNoRows) {
+			return nil, ErrNotebookNotFound
+		}
+		return nil, errors.Wrap(err, "failed to get collaborative notebook")
+	}
+
+	if nb.OrganizationID != orgID {
+		return nil, ErrNotebookNotFound
+	}
+
+	var targetScope querier.NotebookScope
+	switch payload.TargetScope {
+	case apigen.CollabNotebookTransferRequestTargetScope(apigen.CollabNotebookScopePersonal):
+		targetScope = querier.NotebookScopePersonal
+	case apigen.CollabNotebookTransferRequestTargetScope(apigen.CollabNotebookScopeOrganization):
+		targetScope = querier.NotebookScopeOrganization
+	default:
+		return nil, ErrNotebookInvalidScope
+	}
+
+	// Idempotent: already in the requested scope.
+	if targetScope == nb.Scope {
+		return mapNotebookToAPI(
+			nb.ID,
+			nb.Title,
+			nb.Scope,
+			nb.OrganizationID,
+			nb.OwnerUserID,
+			nb.CreatedBy,
+			nb.CreatedAt,
+			nb.UpdatedAt,
+		), nil
+	}
+
+	isOrgOwner := false
+	// NOTE(review): userID was already checked non-zero above, so this guard
+	// cannot be false; kept as written.
+	if userID != 0 {
+		isOrgOwner, err = s.m.IsOrgOwner(ctx, querier.IsOrgOwnerParams{UserID: userID, OrgID: orgID})
+		if err != nil {
+			return nil, errors.Wrap(err, "failed to check organization owner")
+		}
+	}
+
+	switch targetScope {
+	case querier.NotebookScopeOrganization:
+		if nb.Scope != querier.NotebookScopePersonal {
+			return nil, ErrNotebookTransferNotAllowed
+		}
+
+		ownsNotebook := nb.OwnerUserID != nil && *nb.OwnerUserID == userID
+		if !ownsNotebook && !isOrgOwner {
+			return nil, ErrNotebookTransferNotAllowed
+		}
+
+		// Organization scope clears the per-user owner.
+		updated, err := s.m.UpdateNotebookScope(ctx, querier.UpdateNotebookScopeParams{
+			ID:          notebookID,
+			Scope:       querier.NotebookScopeOrganization,
+			OwnerUserID: nil,
+		})
+		if err != nil {
+			return nil, errors.Wrap(err, "failed to update notebook scope")
+		}
+
+		return mapNotebookToAPI(
+			updated.ID,
+			updated.Title,
+			updated.Scope,
+			updated.OrganizationID,
+			updated.OwnerUserID,
+			updated.CreatedBy,
+			updated.CreatedAt,
+			updated.UpdatedAt,
+		), nil
+
+	case querier.NotebookScopePersonal:
+		if nb.Scope != querier.NotebookScopeOrganization {
+			return nil, ErrNotebookTransferNotAllowed
+		}
+
+		var ownerCandidate int32
+		if payload.OwnerUserID != nil {
+			ownerCandidate = *payload.OwnerUserID
+		} else {
+			ownerCandidate = userID
+		}
+
+		if ownerCandidate == 0 {
+			return nil, ErrNotebookInvalidOwner
+		}
+
+		// Ensure the target owner belongs to the organization
+		ownerOrgID, err := s.m.GetUserOrganization(ctx, ownerCandidate)
+		if err != nil {
+			if errors.Is(err, pgx.ErrNoRows) {
+				return nil, ErrNotebookInvalidOwner
+			}
+			return nil, errors.Wrap(err, "failed to lookup target owner organization")
+		}
+		if ownerOrgID != orgID {
+			return nil, ErrNotebookInvalidOwner
+		}
+
+		isCreator := nb.CreatedBy != nil && *nb.CreatedBy == userID
+		if !isCreator && !isOrgOwner {
+			return nil, ErrNotebookTransferNotAllowed
+		}
+
+		ownerPtr := ownerCandidate
+		updated, err := s.m.UpdateNotebookScope(ctx, querier.UpdateNotebookScopeParams{
+			ID:          notebookID,
+			Scope:       querier.NotebookScopePersonal,
+			OwnerUserID: &ownerPtr,
+		})
+		if err != nil {
+			return nil, errors.Wrap(err, "failed to update notebook scope")
+		}
+
+		return mapNotebookToAPI(
+			updated.ID,
+			updated.Title,
+			updated.Scope,
+			updated.OrganizationID,
+			updated.OwnerUserID,
+			updated.CreatedBy,
+			updated.CreatedAt,
+			updated.UpdatedAt,
+		), nil
+	}
+
+	// Unreachable: targetScope was validated above. Kept as a safety net.
+	return nil, ErrNotebookInvalidScope
+}
+// DeleteCollabNotebook removes a notebook in orgID. Personal notebooks may
+// be deleted by their owner or an org owner; organization notebooks by
+// their creator or an org owner. Authorization failures on an existing
+// notebook surface as ErrNotebookDeleteNotAllowed.
+func (s *Service) DeleteCollabNotebook(ctx context.Context, notebookID string, orgID int32, userID int32) error {
+	if strings.TrimSpace(notebookID) == "" {
+		return ErrNotebookIDEmpty
+	}
+
+	nb, err := s.m.GetNotebook(ctx, notebookID)
+	if err != nil {
+		if errors.Is(err, pgx.ErrNoRows) {
+			return ErrNotebookNotFound
+		}
+		return errors.Wrap(err, "failed to get collaborative notebook")
+	}
+
+	// Cross-org access is reported as not-found to avoid leaking existence.
+	if nb.OrganizationID != orgID {
+		return ErrNotebookNotFound
+	}
+
+	isOrgOwner := false
+	if userID != 0 {
+		isOrgOwner, err = s.m.IsOrgOwner(ctx, querier.IsOrgOwnerParams{UserID: userID, OrgID: orgID})
+		if err != nil {
+			return errors.Wrap(err, "failed to check organization owner")
+		}
+	}
+
+	switch nb.Scope {
+	case querier.NotebookScopePersonal:
+		ownsNotebook := nb.OwnerUserID != nil && userID != 0 && *nb.OwnerUserID == userID
+		if !ownsNotebook && !isOrgOwner {
+			return ErrNotebookDeleteNotAllowed
+		}
+	case querier.NotebookScopeOrganization:
+		isCreator := nb.CreatedBy != nil && userID != 0 && *nb.CreatedBy == userID
+		if !isCreator && !isOrgOwner {
+			return ErrNotebookDeleteNotAllowed
+		}
+	default:
+		return ErrNotebookInvalidScope
+	}
+
+	if err := s.m.DeleteNotebook(ctx, notebookID); err != nil {
+		return errors.Wrap(err, "failed to delete collaborative notebook")
+	}
+
+	return nil
+}
+
+// mapNotebookToAPI converts a persisted notebook row into its API
+// representation. Pointer fields take the addresses of local copies so the
+// result never aliases caller storage.
+func mapNotebookToAPI(id string, title string, scope querier.NotebookScope, orgID int32, ownerUserID *int32, createdBy *int32, createdAt time.Time, updatedAt time.Time) *apigen.CollabNotebook {
+	apiScope := apigen.CollabNotebookScope(scope)
+	org := orgID
+
+	nb := apigen.CollabNotebook{
+		NotebookID: id,
+		Title:      title,
+		CreatedAt:  createdAt,
+		UpdatedAt:  updatedAt,
+	}
+	nb.Scope = &apiScope
+	nb.OrganizationID = &org
+	nb.OwnerUserID = ownerUserID
+	nb.CreatedByUserID = createdBy
+	return &nb
+}
diff --git a/pkg/service/service.go b/pkg/service/service.go
index d6708837..6a6a024c 100644
--- a/pkg/service/service.go
+++ b/pkg/service/service.go
@@ -40,6 +40,15 @@ var (
ErrClusterNotFound = errors.New("cluster not found")
ErrClusterHasDatabaseConnections = errors.New("cluster has database connections")
ErrDiagnosticNotFound = errors.New("diagnostic not found")
+ ErrNotebookNotFound = errors.New("notebook not found")
+ ErrNotebookIDEmpty = errors.New("notebook id must not be empty")
+ ErrNotebookTitleEmpty = errors.New("notebook title must not be empty")
+ ErrSnapshotPayloadEmpty = errors.New("snapshot payload is empty")
+ ErrNotebookInvalidScope = errors.New("invalid notebook scope")
+ ErrNotebookTransferNotAllowed = errors.New("notebook transfer not allowed")
+ ErrNotebookInvalidOwner = errors.New("invalid notebook owner")
+ ErrNotebookAlreadyExists = errors.New("notebook already exists")
+ ErrNotebookDeleteNotAllowed = errors.New("notebook delete not allowed")
)
const (
@@ -152,6 +161,26 @@ type ServiceInterface interface {
// ListClustersByMetricsStoreID lists all clusters by metrics store ID
ListClustersByMetricsStoreID(ctx context.Context, id int32) ([]*apigen.Cluster, error)
+ // CreateCollabNotebook creates a new collaborative notebook
+ CreateCollabNotebook(ctx context.Context, params apigen.CollabNotebook, orgID int32, userID int32) (*apigen.CollabNotebook, error)
+
+ // ListCollabNotebooks lists all collaborative notebooks for a user in an organization
+ ListCollabNotebooks(ctx context.Context, orgID int32, userID int32) ([]*apigen.CollabNotebook, error)
+
+ // GetCollabNotebook gets a collaborative notebook by its ID
+ GetCollabNotebook(ctx context.Context, notebookID string, orgID int32, userID int32) (*apigen.CollabNotebook, error)
+
+ // UpdateCollabNotebook updates a collaborative notebook's title
+ UpdateCollabNotebook(ctx context.Context, notebookID string, title string, orgID int32, userID int32) (*apigen.CollabNotebook, error)
+
+ // TransferCollabNotebook transfers ownership of a collaborative notebook to another user
+ TransferCollabNotebook(ctx context.Context, notebookID string, params apigen.CollabNotebookTransferRequest, orgID int32, userID int32) (*apigen.CollabNotebook, error)
+
+ // DeleteCollabNotebook deletes a collaborative notebook
+ DeleteCollabNotebook(ctx context.Context, notebookID string, orgID int32, userID int32) error
+
+ // UpsertCollabDocSnapshot persists the latest collaborative notebook snapshot
+ UpsertCollabDocSnapshot(ctx context.Context, notebookID string, snapshot []byte, stateVector map[uint64]uint64) error
//
PromDump(ctx context.Context, w *bufio.Writer, cfg *promdump.DumpMultipartCfg)
}
@@ -166,6 +195,7 @@ type Service struct {
taskRunner taskgen.TaskRunner
taskstore taskcore.TaskStoreInterface
anclaxSvc anclax_svc.ServiceInterface
+ collab CollaborativeSnapshotRecorder
now func() time.Time
generateHashAndSalt func(password string) (string, string, error)
@@ -182,6 +212,7 @@ func NewService(
taskRunner taskgen.TaskRunner,
taskstore taskcore.TaskStoreInterface,
anclaxSvc anclax_svc.ServiceInterface,
+ collab CollaborativeSnapshotRecorder,
) (ServiceInterface, error) {
s := &Service{
m: m,
@@ -195,6 +226,7 @@ func NewService(
taskRunner: taskRunner,
taskstore: taskstore,
anclaxSvc: anclaxSvc,
+ collab: collab,
}
return s, nil
}
diff --git a/pkg/service/service_mock_gen.go b/pkg/service/service_mock_gen.go
index c4a4c76b..fdc75806 100644
--- a/pkg/service/service_mock_gen.go
+++ b/pkg/service/service_mock_gen.go
@@ -88,6 +88,21 @@ func (mr *MockServiceInterfaceMockRecorder) CreateClusterSnapshot(ctx, id, name,
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CreateClusterSnapshot", reflect.TypeOf((*MockServiceInterface)(nil).CreateClusterSnapshot), ctx, id, name, orgID)
}
+// CreateCollabNotebook mocks base method.
+func (m *MockServiceInterface) CreateCollabNotebook(ctx context.Context, params apigen.CollabNotebook, orgID, userID int32) (*apigen.CollabNotebook, error) {
+ m.ctrl.T.Helper()
+ ret := m.ctrl.Call(m, "CreateCollabNotebook", ctx, params, orgID, userID)
+ ret0, _ := ret[0].(*apigen.CollabNotebook)
+ ret1, _ := ret[1].(error)
+ return ret0, ret1
+}
+
+// CreateCollabNotebook indicates an expected call of CreateCollabNotebook.
+func (mr *MockServiceInterfaceMockRecorder) CreateCollabNotebook(ctx, params, orgID, userID any) *gomock.Call {
+ mr.mock.ctrl.T.Helper()
+ return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CreateCollabNotebook", reflect.TypeOf((*MockServiceInterface)(nil).CreateCollabNotebook), ctx, params, orgID, userID)
+}
+
// DeleteCluster mocks base method.
func (m *MockServiceInterface) DeleteCluster(ctx context.Context, id int32, cascade bool, orgID int32) error {
m.ctrl.T.Helper()
@@ -116,6 +131,20 @@ func (mr *MockServiceInterfaceMockRecorder) DeleteClusterSnapshot(ctx, id, snaps
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteClusterSnapshot", reflect.TypeOf((*MockServiceInterface)(nil).DeleteClusterSnapshot), ctx, id, snapshotID, orgID)
}
+// DeleteCollabNotebook mocks base method.
+func (m *MockServiceInterface) DeleteCollabNotebook(ctx context.Context, notebookID string, orgID, userID int32) error {
+ m.ctrl.T.Helper()
+ ret := m.ctrl.Call(m, "DeleteCollabNotebook", ctx, notebookID, orgID, userID)
+ ret0, _ := ret[0].(error)
+ return ret0
+}
+
+// DeleteCollabNotebook indicates an expected call of DeleteCollabNotebook.
+func (mr *MockServiceInterfaceMockRecorder) DeleteCollabNotebook(ctx, notebookID, orgID, userID any) *gomock.Call {
+ mr.mock.ctrl.T.Helper()
+ return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteCollabNotebook", reflect.TypeOf((*MockServiceInterface)(nil).DeleteCollabNotebook), ctx, notebookID, orgID, userID)
+}
+
// DeleteDatabase mocks base method.
func (m *MockServiceInterface) DeleteDatabase(ctx context.Context, id, orgID int32) error {
m.ctrl.T.Helper()
@@ -204,6 +233,21 @@ func (mr *MockServiceInterfaceMockRecorder) GetClusterDiagnostic(ctx, id, diagno
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetClusterDiagnostic", reflect.TypeOf((*MockServiceInterface)(nil).GetClusterDiagnostic), ctx, id, diagnosticID, orgID)
}
+// GetCollabNotebook mocks base method.
+func (m *MockServiceInterface) GetCollabNotebook(ctx context.Context, notebookID string, orgID, userID int32) (*apigen.CollabNotebook, error) {
+ m.ctrl.T.Helper()
+ ret := m.ctrl.Call(m, "GetCollabNotebook", ctx, notebookID, orgID, userID)
+ ret0, _ := ret[0].(*apigen.CollabNotebook)
+ ret1, _ := ret[1].(error)
+ return ret0, ret1
+}
+
+// GetCollabNotebook indicates an expected call of GetCollabNotebook.
+func (mr *MockServiceInterfaceMockRecorder) GetCollabNotebook(ctx, notebookID, orgID, userID any) *gomock.Call {
+ mr.mock.ctrl.T.Helper()
+ return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetCollabNotebook", reflect.TypeOf((*MockServiceInterface)(nil).GetCollabNotebook), ctx, notebookID, orgID, userID)
+}
+
// GetDDLProgress mocks base method.
func (m *MockServiceInterface) GetDDLProgress(ctx context.Context, id, orgID int32) ([]apigen.DDLProgress, error) {
m.ctrl.T.Helper()
@@ -384,6 +428,21 @@ func (mr *MockServiceInterfaceMockRecorder) ListClustersByMetricsStoreID(ctx, id
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListClustersByMetricsStoreID", reflect.TypeOf((*MockServiceInterface)(nil).ListClustersByMetricsStoreID), ctx, id)
}
+// ListCollabNotebooks mocks base method.
+func (m *MockServiceInterface) ListCollabNotebooks(ctx context.Context, orgID, userID int32) ([]*apigen.CollabNotebook, error) {
+ m.ctrl.T.Helper()
+ ret := m.ctrl.Call(m, "ListCollabNotebooks", ctx, orgID, userID)
+ ret0, _ := ret[0].([]*apigen.CollabNotebook)
+ ret1, _ := ret[1].(error)
+ return ret0, ret1
+}
+
+// ListCollabNotebooks indicates an expected call of ListCollabNotebooks.
+func (mr *MockServiceInterfaceMockRecorder) ListCollabNotebooks(ctx, orgID, userID any) *gomock.Call {
+ mr.mock.ctrl.T.Helper()
+ return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListCollabNotebooks", reflect.TypeOf((*MockServiceInterface)(nil).ListCollabNotebooks), ctx, orgID, userID)
+}
+
// ListDatabases mocks base method.
func (m *MockServiceInterface) ListDatabases(ctx context.Context, orgID int32) ([]apigen.Database, error) {
m.ctrl.T.Helper()
@@ -486,6 +545,21 @@ func (mr *MockServiceInterfaceMockRecorder) TestDatabaseConnection(ctx, params,
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "TestDatabaseConnection", reflect.TypeOf((*MockServiceInterface)(nil).TestDatabaseConnection), ctx, params, orgID)
}
+// TransferCollabNotebook mocks base method.
+func (m *MockServiceInterface) TransferCollabNotebook(ctx context.Context, notebookID string, params apigen.CollabNotebookTransferRequest, orgID, userID int32) (*apigen.CollabNotebook, error) {
+ m.ctrl.T.Helper()
+ ret := m.ctrl.Call(m, "TransferCollabNotebook", ctx, notebookID, params, orgID, userID)
+ ret0, _ := ret[0].(*apigen.CollabNotebook)
+ ret1, _ := ret[1].(error)
+ return ret0, ret1
+}
+
+// TransferCollabNotebook indicates an expected call of TransferCollabNotebook.
+func (mr *MockServiceInterfaceMockRecorder) TransferCollabNotebook(ctx, notebookID, params, orgID, userID any) *gomock.Call {
+ mr.mock.ctrl.T.Helper()
+ return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "TransferCollabNotebook", reflect.TypeOf((*MockServiceInterface)(nil).TransferCollabNotebook), ctx, notebookID, params, orgID, userID)
+}
+
// UpdateCluster mocks base method.
func (m *MockServiceInterface) UpdateCluster(ctx context.Context, id int32, params apigen.ClusterImport, orgID int32) (*apigen.Cluster, error) {
m.ctrl.T.Helper()
@@ -529,6 +603,21 @@ func (mr *MockServiceInterfaceMockRecorder) UpdateClusterAutoDiagnosticConfig(ct
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateClusterAutoDiagnosticConfig", reflect.TypeOf((*MockServiceInterface)(nil).UpdateClusterAutoDiagnosticConfig), ctx, id, params, orgID)
}
+// UpdateCollabNotebook mocks base method.
+func (m *MockServiceInterface) UpdateCollabNotebook(ctx context.Context, notebookID, title string, orgID, userID int32) (*apigen.CollabNotebook, error) {
+ m.ctrl.T.Helper()
+ ret := m.ctrl.Call(m, "UpdateCollabNotebook", ctx, notebookID, title, orgID, userID)
+ ret0, _ := ret[0].(*apigen.CollabNotebook)
+ ret1, _ := ret[1].(error)
+ return ret0, ret1
+}
+
+// UpdateCollabNotebook indicates an expected call of UpdateCollabNotebook.
+func (mr *MockServiceInterfaceMockRecorder) UpdateCollabNotebook(ctx, notebookID, title, orgID, userID any) *gomock.Call {
+ mr.mock.ctrl.T.Helper()
+ return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateCollabNotebook", reflect.TypeOf((*MockServiceInterface)(nil).UpdateCollabNotebook), ctx, notebookID, title, orgID, userID)
+}
+
// UpdateDatabase mocks base method.
func (m *MockServiceInterface) UpdateDatabase(ctx context.Context, id int32, params apigen.DatabaseConnectInfo, orgID int32) (*apigen.Database, error) {
m.ctrl.T.Helper()
@@ -558,3 +647,17 @@ func (mr *MockServiceInterfaceMockRecorder) UpdateMetricsStore(ctx, id, req, Org
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateMetricsStore", reflect.TypeOf((*MockServiceInterface)(nil).UpdateMetricsStore), ctx, id, req, OrgID)
}
+
+// UpsertCollabDocSnapshot mocks base method.
+func (m *MockServiceInterface) UpsertCollabDocSnapshot(ctx context.Context, notebookID string, snapshot []byte, stateVector map[uint64]uint64) error {
+ m.ctrl.T.Helper()
+ ret := m.ctrl.Call(m, "UpsertCollabDocSnapshot", ctx, notebookID, snapshot, stateVector)
+ ret0, _ := ret[0].(error)
+ return ret0
+}
+
+// UpsertCollabDocSnapshot indicates an expected call of UpsertCollabDocSnapshot.
+func (mr *MockServiceInterfaceMockRecorder) UpsertCollabDocSnapshot(ctx, notebookID, snapshot, stateVector any) *gomock.Call {
+ mr.mock.ctrl.T.Helper()
+ return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpsertCollabDocSnapshot", reflect.TypeOf((*MockServiceInterface)(nil).UpsertCollabDocSnapshot), ctx, notebookID, snapshot, stateVector)
+}
diff --git a/pkg/trd/lib0/binary.go b/pkg/trd/lib0/binary.go
new file mode 100644
index 00000000..688e0424
--- /dev/null
+++ b/pkg/trd/lib0/binary.go
@@ -0,0 +1,74 @@
+package lib0
+
+/**
+ * Bit constants
+ * @reference https://github.com/dmonad/lib0/blob/cc6612569e17a1ec37ca76dba8593aa3d9cc1753/binary.js
+ */
+const (
+	Bit1  uint = 1 << 0
+	Bit2  uint = 1 << 1
+	Bit3  uint = 1 << 2
+	Bit4  uint = 1 << 3
+	Bit5  uint = 1 << 4
+	Bit6  uint = 1 << 5
+	Bit7  uint = 1 << 6
+	Bit8  uint = 1 << 7
+	Bit9  uint = 1 << 8
+	Bit10 uint = 1 << 9
+	Bit11 uint = 1 << 10
+	Bit12 uint = 1 << 11
+	Bit13 uint = 1 << 12
+	Bit14 uint = 1 << 13
+	Bit15 uint = 1 << 14
+	Bit16 uint = 1 << 15
+	Bit17 uint = 1 << 16
+	Bit18 uint = 1 << 17
+	Bit19 uint = 1 << 18
+	Bit20 uint = 1 << 19
+	Bit21 uint = 1 << 20
+	Bit22 uint = 1 << 21
+	Bit23 uint = 1 << 22
+	Bit24 uint = 1 << 23
+	Bit25 uint = 1 << 24
+	Bit26 uint = 1 << 25
+	Bit27 uint = 1 << 26
+	Bit28 uint = 1 << 27
+	Bit29 uint = 1 << 28
+	Bit30 uint = 1 << 29
+	Bit31 uint = 1 << 30
+	// Bit32 is uint like its siblings (it was previously typed int, which
+	// overflows a 32-bit signed int on 32-bit platforms and fails to compile
+	// there; 1<<31 fits a 32-bit uint).
+	Bit32 uint = 1 << 31
+
+	Bits0  uint = (1 << 0) - 1
+	Bits1  uint = (1 << 1) - 1
+	Bits2  uint = (1 << 2) - 1
+	Bits3  uint = (1 << 3) - 1
+	Bits4  uint = (1 << 4) - 1
+	Bits5  uint = (1 << 5) - 1
+	Bits6  uint = (1 << 6) - 1
+	Bits7  uint = (1 << 7) - 1
+	Bits8  uint = (1 << 8) - 1
+	Bits9  uint = (1 << 9) - 1
+	Bits10 uint = (1 << 10) - 1
+	Bits11 uint = (1 << 11) - 1
+	Bits12 uint = (1 << 12) - 1
+	Bits13 uint = (1 << 13) - 1
+	Bits14 uint = (1 << 14) - 1
+	Bits15 uint = (1 << 15) - 1
+	Bits16 uint = (1 << 16) - 1
+	Bits17 uint = (1 << 17) - 1
+	Bits18 uint = (1 << 18) - 1
+	Bits19 uint = (1 << 19) - 1
+	Bits20 uint = (1 << 20) - 1
+	Bits21 uint = (1 << 21) - 1
+	Bits22 uint = (1 << 22) - 1
+	Bits23 uint = (1 << 23) - 1
+	Bits24 uint = (1 << 24) - 1
+	Bits25 uint = (1 << 25) - 1
+	Bits26 uint = (1 << 26) - 1
+	Bits27 uint = (1 << 27) - 1
+	Bits28 uint = (1 << 28) - 1
+	Bits29 uint = (1 << 29) - 1
+	Bits30 uint = (1 << 30) - 1
+	// Bits31/Bits32 are written as hex literals; both fit a 32-bit uint.
+	Bits31 uint = 0x7FFFFFFF
+	Bits32 uint = 0xFFFFFFFF
+)
diff --git a/pkg/trd/lib0/reader.go b/pkg/trd/lib0/reader.go
new file mode 100644
index 00000000..9c3e569f
--- /dev/null
+++ b/pkg/trd/lib0/reader.go
@@ -0,0 +1,47 @@
+package lib0
+
+import (
+ "bufio"
+ "bytes"
+ "encoding/binary"
+ "io"
+)
+
+// @reference https://github.com/dmonad/lib0/blob/cc6612569e17a1ec37ca76dba8593aa3d9cc1753/decoding.js
+
+// ReadVarUint reads a variable-length unsigned integer using Go's native
+// Uvarint format (LEB128). This matches JS lib0's varuint layout.
+func ReadVarUint(r io.ByteReader) (uint64, error) {
+ return binary.ReadUvarint(r)
+}
+
+// ReadVarUintFrom is a convenience wrapper that accepts any io.Reader.
+// It upgrades to a buffered ByteReader when needed.
+func ReadVarUintFrom(r io.Reader) (uint64, error) {
+ if br, ok := r.(io.ByteReader); ok {
+ return ReadVarUint(br)
+ }
+ return ReadVarUint(bufio.NewReader(r))
+}
+
+// DecodeYjsStateVector parses Yjs state vector encoding (varuint count followed by clientID/clock pairs).
+func DecodeYjsStateVector(payload []byte) (map[uint64]uint64, error) {
+ r := bytes.NewReader(payload)
+ count, err := ReadVarUintFrom(r)
+ if err != nil {
+ return nil, err
+ }
+ result := make(map[uint64]uint64)
+ for i := uint64(0); i < count; i++ {
+ clientID, err := ReadVarUintFrom(r)
+ if err != nil {
+ return nil, err
+ }
+ clock, err := ReadVarUintFrom(r)
+ if err != nil {
+ return nil, err
+ }
+ result[clientID] = clock
+ }
+ return result, nil
+}
diff --git a/pkg/trd/lib0/writer.go b/pkg/trd/lib0/writer.go
new file mode 100644
index 00000000..f5d19cf6
--- /dev/null
+++ b/pkg/trd/lib0/writer.go
@@ -0,0 +1,75 @@
+package lib0
+
+import (
+ "encoding/binary"
+ "io"
+)
+
+// @reference https://github.com/dmonad/lib0/blob/cc6612569e17a1ec37ca76dba8593aa3d9cc1753/encoding.js
+
+// WriteUint16 writes v as little-endian uint16 using a small stack buffer.
+func WriteUint16(w io.Writer, v uint16) error {
+ var buf [2]byte
+ binary.LittleEndian.PutUint16(buf[:], v)
+ _, err := w.Write(buf[:])
+ return err
+}
+
+// WriteUint8 writes v as a single byte.
+// Uses io.ByteWriter fast-path where available.
+func WriteUint8(w io.Writer, v uint8) error {
+ if bw, ok := w.(io.ByteWriter); ok {
+ return bw.WriteByte(v)
+ }
+ var buf [1]byte
+ buf[0] = v
+ _, err := w.Write(buf[:])
+ return err
+}
+
+// WriteUint32 writes v as little-endian uint32 using a small stack buffer.
+func WriteUint32(w io.Writer, v uint32) error {
+ var buf [4]byte
+ binary.LittleEndian.PutUint32(buf[:], v)
+ _, err := w.Write(buf[:])
+ return err
+}
+
+// [sync with JS]
+// WriteVarUint writes v as a variable-length unsigned integer (LEB128/Uvarint).
+// Encodes 7 bits per byte with MSB as continuation bit. Matches JS lib0 varuint
+// and encoding/binary.Uvarint. Prefer this for wire-compat with JS.
+func WriteVarUint(w io.Writer, v uint64) error {
+ var buf [10]byte
+ n := binary.PutUvarint(buf[:], v)
+ _, err := w.Write(buf[:n])
+ return err
+}
+
+// In Go, callers usually compose WriteVarUint + Write(b) directly for slices.
+
+// [sync with JS]
+// WriteVarString writes a variable-length string using JS lib0's layout:
+// varuint byte-length prefix followed by UTF-8 bytes. In Go, strings are
+// UTF-8 already; io.WriteString avoids extra []byte allocation for writers
+// like *bufio.Writer. Prefer this for wire-compat with JS.
+func WriteVarString(w io.Writer, s string) error {
+ if err := WriteVarUint(w, uint64(len(s))); err != nil {
+ return err
+ }
+ if len(s) == 0 {
+ return nil
+ }
+ _, err := io.WriteString(w, s)
+ return err
+}
+
+// WriteVarInt writes a signed varint using Go's native encoding
+// (encoding/binary.PutVarint). Prefer this Go scheme unless specific
+// cross-language compatibility is required.
+func WriteVarInt(w io.Writer, v int64) error {
+ var buf [10]byte
+ n := binary.PutVarint(buf[:], v)
+ _, err := w.Write(buf[:n])
+ return err
+}
diff --git a/pkg/zcore/injection/injection.go b/pkg/zcore/injection/injection.go
index 9fc4b17d..8636f661 100644
--- a/pkg/zcore/injection/injection.go
+++ b/pkg/zcore/injection/injection.go
@@ -8,6 +8,7 @@ import (
"github.com/cloudcarver/anclax/pkg/hooks"
"github.com/cloudcarver/anclax/pkg/service"
"github.com/cloudcarver/anclax/pkg/taskcore"
+ "github.com/cloudcarver/anclax/pkg/ws"
)
func InjectAuth(anclaxApp *anclax_app.Application) auth.AuthInterface {
@@ -26,6 +27,10 @@ func InjectAnclaxHooks(anclaxApp *anclax_app.Application) hooks.AnclaxHookInterf
return anclaxApp.GetHooks()
}
+func InjectHub(anclaxApp *anclax_app.Application) *ws.Hub {
+ return anclaxApp.GetServer().Websocket().Hub()
+}
+
func InjectCloserManager(anclaxApp *anclax_app.Application) *closer.CloserManager {
return anclaxApp.GetCloserManager()
}
diff --git a/pkg/zcore/model/mock_gen.go b/pkg/zcore/model/mock_gen.go
index 51425c91..7d530db2 100644
--- a/pkg/zcore/model/mock_gen.go
+++ b/pkg/zcore/model/mock_gen.go
@@ -144,6 +144,21 @@ func (mr *MockModelInterfaceMockRecorder) CreateMetricsStore(ctx, arg any) *gomo
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CreateMetricsStore", reflect.TypeOf((*MockModelInterface)(nil).CreateMetricsStore), ctx, arg)
}
+// CreateNotebook mocks base method.
+func (m *MockModelInterface) CreateNotebook(ctx context.Context, arg querier.CreateNotebookParams) (*querier.CreateNotebookRow, error) {
+ m.ctrl.T.Helper()
+ ret := m.ctrl.Call(m, "CreateNotebook", ctx, arg)
+ ret0, _ := ret[0].(*querier.CreateNotebookRow)
+ ret1, _ := ret[1].(error)
+ return ret0, ret1
+}
+
+// CreateNotebook indicates an expected call of CreateNotebook.
+func (mr *MockModelInterfaceMockRecorder) CreateNotebook(ctx, arg any) *gomock.Call {
+ mr.mock.ctrl.T.Helper()
+ return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CreateNotebook", reflect.TypeOf((*MockModelInterface)(nil).CreateNotebook), ctx, arg)
+}
+
// CreateOrgSettings mocks base method.
func (m *MockModelInterface) CreateOrgSettings(ctx context.Context, arg querier.CreateOrgSettingsParams) error {
m.ctrl.T.Helper()
@@ -214,6 +229,20 @@ func (mr *MockModelInterfaceMockRecorder) DeleteMetricsStore(ctx, arg any) *gomo
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteMetricsStore", reflect.TypeOf((*MockModelInterface)(nil).DeleteMetricsStore), ctx, arg)
}
+// DeleteNotebook mocks base method.
+func (m *MockModelInterface) DeleteNotebook(ctx context.Context, id string) error {
+ m.ctrl.T.Helper()
+ ret := m.ctrl.Call(m, "DeleteNotebook", ctx, id)
+ ret0, _ := ret[0].(error)
+ return ret0
+}
+
+// DeleteNotebook indicates an expected call of DeleteNotebook.
+func (mr *MockModelInterfaceMockRecorder) DeleteNotebook(ctx, id any) *gomock.Call {
+ mr.mock.ctrl.T.Helper()
+ return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteNotebook", reflect.TypeOf((*MockModelInterface)(nil).DeleteNotebook), ctx, id)
+}
+
// DeleteOrgCluster mocks base method.
func (m *MockModelInterface) DeleteOrgCluster(ctx context.Context, arg querier.DeleteOrgClusterParams) error {
m.ctrl.T.Helper()
@@ -317,6 +346,21 @@ func (mr *MockModelInterfaceMockRecorder) GetClusterDiagnostic(ctx, id any) *gom
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetClusterDiagnostic", reflect.TypeOf((*MockModelInterface)(nil).GetClusterDiagnostic), ctx, id)
}
+// GetCollabDocSnapshot mocks base method.
+func (m *MockModelInterface) GetCollabDocSnapshot(ctx context.Context, notebookID string) ([]byte, error) {
+ m.ctrl.T.Helper()
+ ret := m.ctrl.Call(m, "GetCollabDocSnapshot", ctx, notebookID)
+ ret0, _ := ret[0].([]byte)
+ ret1, _ := ret[1].(error)
+ return ret0, ret1
+}
+
+// GetCollabDocSnapshot indicates an expected call of GetCollabDocSnapshot.
+func (mr *MockModelInterfaceMockRecorder) GetCollabDocSnapshot(ctx, notebookID any) *gomock.Call {
+ mr.mock.ctrl.T.Helper()
+ return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetCollabDocSnapshot", reflect.TypeOf((*MockModelInterface)(nil).GetCollabDocSnapshot), ctx, notebookID)
+}
+
// GetDatabaseConnectionByID mocks base method.
func (m *MockModelInterface) GetDatabaseConnectionByID(ctx context.Context, id int32) (*querier.DatabaseConnection, error) {
m.ctrl.T.Helper()
@@ -362,6 +406,21 @@ func (mr *MockModelInterfaceMockRecorder) GetMetricsStoreByIDAndOrgID(ctx, arg a
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetMetricsStoreByIDAndOrgID", reflect.TypeOf((*MockModelInterface)(nil).GetMetricsStoreByIDAndOrgID), ctx, arg)
}
+// GetNotebook mocks base method.
+func (m *MockModelInterface) GetNotebook(ctx context.Context, id string) (*querier.GetNotebookRow, error) {
+ m.ctrl.T.Helper()
+ ret := m.ctrl.Call(m, "GetNotebook", ctx, id)
+ ret0, _ := ret[0].(*querier.GetNotebookRow)
+ ret1, _ := ret[1].(error)
+ return ret0, ret1
+}
+
+// GetNotebook indicates an expected call of GetNotebook.
+func (mr *MockModelInterfaceMockRecorder) GetNotebook(ctx, id any) *gomock.Call {
+ mr.mock.ctrl.T.Helper()
+ return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetNotebook", reflect.TypeOf((*MockModelInterface)(nil).GetNotebook), ctx, id)
+}
+
// GetOrgCluster mocks base method.
func (m *MockModelInterface) GetOrgCluster(ctx context.Context, arg querier.GetOrgClusterParams) (*querier.Cluster, error) {
m.ctrl.T.Helper()
@@ -422,6 +481,21 @@ func (mr *MockModelInterfaceMockRecorder) GetOrgSettings(ctx, orgID any) *gomock
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetOrgSettings", reflect.TypeOf((*MockModelInterface)(nil).GetOrgSettings), ctx, orgID)
}
+// GetUserOrganization mocks base method.
+func (m *MockModelInterface) GetUserOrganization(ctx context.Context, userID int32) (int32, error) {
+ m.ctrl.T.Helper()
+ ret := m.ctrl.Call(m, "GetUserOrganization", ctx, userID)
+ ret0, _ := ret[0].(int32)
+ ret1, _ := ret[1].(error)
+ return ret0, ret1
+}
+
+// GetUserOrganization indicates an expected call of GetUserOrganization.
+func (mr *MockModelInterfaceMockRecorder) GetUserOrganization(ctx, userID any) *gomock.Call {
+ mr.mock.ctrl.T.Helper()
+ return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetUserOrganization", reflect.TypeOf((*MockModelInterface)(nil).GetUserOrganization), ctx, userID)
+}
+
// InTransaction mocks base method.
func (m *MockModelInterface) InTransaction() bool {
m.ctrl.T.Helper()
@@ -481,6 +555,36 @@ func (mr *MockModelInterfaceMockRecorder) InitMetricsStore(ctx, arg any) *gomock
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "InitMetricsStore", reflect.TypeOf((*MockModelInterface)(nil).InitMetricsStore), ctx, arg)
}
+// IsOrgOwner mocks base method.
+func (m *MockModelInterface) IsOrgOwner(ctx context.Context, arg querier.IsOrgOwnerParams) (bool, error) {
+ m.ctrl.T.Helper()
+ ret := m.ctrl.Call(m, "IsOrgOwner", ctx, arg)
+ ret0, _ := ret[0].(bool)
+ ret1, _ := ret[1].(error)
+ return ret0, ret1
+}
+
+// IsOrgOwner indicates an expected call of IsOrgOwner.
+func (mr *MockModelInterfaceMockRecorder) IsOrgOwner(ctx, arg any) *gomock.Call {
+ mr.mock.ctrl.T.Helper()
+ return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "IsOrgOwner", reflect.TypeOf((*MockModelInterface)(nil).IsOrgOwner), ctx, arg)
+}
+
+// ListAccessibleNotebooks mocks base method.
+func (m *MockModelInterface) ListAccessibleNotebooks(ctx context.Context, arg querier.ListAccessibleNotebooksParams) ([]*querier.ListAccessibleNotebooksRow, error) {
+ m.ctrl.T.Helper()
+ ret := m.ctrl.Call(m, "ListAccessibleNotebooks", ctx, arg)
+ ret0, _ := ret[0].([]*querier.ListAccessibleNotebooksRow)
+ ret1, _ := ret[1].(error)
+ return ret0, ret1
+}
+
+// ListAccessibleNotebooks indicates an expected call of ListAccessibleNotebooks.
+func (mr *MockModelInterfaceMockRecorder) ListAccessibleNotebooks(ctx, arg any) *gomock.Call {
+ mr.mock.ctrl.T.Helper()
+ return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListAccessibleNotebooks", reflect.TypeOf((*MockModelInterface)(nil).ListAccessibleNotebooks), ctx, arg)
+}
+
// ListClusterDiagnostics mocks base method.
func (m *MockModelInterface) ListClusterDiagnostics(ctx context.Context, clusterID int32) ([]*querier.ListClusterDiagnosticsRow, error) {
m.ctrl.T.Helper()
@@ -670,6 +774,36 @@ func (mr *MockModelInterfaceMockRecorder) UpdateMetricsStore(ctx, arg any) *gomo
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateMetricsStore", reflect.TypeOf((*MockModelInterface)(nil).UpdateMetricsStore), ctx, arg)
}
+// UpdateNotebookScope mocks base method.
+func (m *MockModelInterface) UpdateNotebookScope(ctx context.Context, arg querier.UpdateNotebookScopeParams) (*querier.UpdateNotebookScopeRow, error) {
+ m.ctrl.T.Helper()
+ ret := m.ctrl.Call(m, "UpdateNotebookScope", ctx, arg)
+ ret0, _ := ret[0].(*querier.UpdateNotebookScopeRow)
+ ret1, _ := ret[1].(error)
+ return ret0, ret1
+}
+
+// UpdateNotebookScope indicates an expected call of UpdateNotebookScope.
+func (mr *MockModelInterfaceMockRecorder) UpdateNotebookScope(ctx, arg any) *gomock.Call {
+ mr.mock.ctrl.T.Helper()
+ return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateNotebookScope", reflect.TypeOf((*MockModelInterface)(nil).UpdateNotebookScope), ctx, arg)
+}
+
+// UpdateNotebookTitle mocks base method.
+func (m *MockModelInterface) UpdateNotebookTitle(ctx context.Context, arg querier.UpdateNotebookTitleParams) (*querier.UpdateNotebookTitleRow, error) {
+ m.ctrl.T.Helper()
+ ret := m.ctrl.Call(m, "UpdateNotebookTitle", ctx, arg)
+ ret0, _ := ret[0].(*querier.UpdateNotebookTitleRow)
+ ret1, _ := ret[1].(error)
+ return ret0, ret1
+}
+
+// UpdateNotebookTitle indicates an expected call of UpdateNotebookTitle.
+func (mr *MockModelInterfaceMockRecorder) UpdateNotebookTitle(ctx, arg any) *gomock.Call {
+ mr.mock.ctrl.T.Helper()
+ return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateNotebookTitle", reflect.TypeOf((*MockModelInterface)(nil).UpdateNotebookTitle), ctx, arg)
+}
+
// UpdateOrgCluster mocks base method.
func (m *MockModelInterface) UpdateOrgCluster(ctx context.Context, arg querier.UpdateOrgClusterParams) (*querier.Cluster, error) {
m.ctrl.T.Helper()
@@ -699,3 +833,17 @@ func (mr *MockModelInterfaceMockRecorder) UpdateOrgDatabaseConnection(ctx, arg a
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateOrgDatabaseConnection", reflect.TypeOf((*MockModelInterface)(nil).UpdateOrgDatabaseConnection), ctx, arg)
}
+
+// UpsertCollabDocSnapshot mocks base method.
+func (m *MockModelInterface) UpsertCollabDocSnapshot(ctx context.Context, arg querier.UpsertCollabDocSnapshotParams) error {
+ m.ctrl.T.Helper()
+ ret := m.ctrl.Call(m, "UpsertCollabDocSnapshot", ctx, arg)
+ ret0, _ := ret[0].(error)
+ return ret0
+}
+
+// UpsertCollabDocSnapshot indicates an expected call of UpsertCollabDocSnapshot.
+func (mr *MockModelInterfaceMockRecorder) UpsertCollabDocSnapshot(ctx, arg any) *gomock.Call {
+ mr.mock.ctrl.T.Helper()
+ return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpsertCollabDocSnapshot", reflect.TypeOf((*MockModelInterface)(nil).UpsertCollabDocSnapshot), ctx, arg)
+}
diff --git a/pkg/zgen/apigen/scopes_extend_gen.go b/pkg/zgen/apigen/scopes_extend_gen.go
index 2983cdc3..08d89b2f 100644
--- a/pkg/zgen/apigen/scopes_extend_gen.go
+++ b/pkg/zgen/apigen/scopes_extend_gen.go
@@ -302,6 +302,111 @@ func (x *XMiddleware) RestoreClusterSnapshot(c *fiber.Ctx, id int32, snapshotId
}
return x.ServerInterface.RestoreClusterSnapshot(c, id, snapshotId)
}
+// List collaborative notebooks
+// (GET /collab/notebooks)
+func (x *XMiddleware) ListCollabNotebooks(c *fiber.Ctx) error {
+ if err := x.AuthFunc(c); err != nil {
+ return c.Status(fiber.StatusUnauthorized).SendString(err.Error())
+ }
+ if err := x.PreValidate(c); err != nil {
+ return c.Status(fiber.StatusForbidden).SendString(err.Error())
+ }
+
+ if err := x.PostValidate(c); err != nil {
+ return c.Status(fiber.StatusForbidden).SendString(err.Error())
+ }
+ return x.ServerInterface.ListCollabNotebooks(c)
+}
+// Create a collaborative notebook
+// (POST /collab/notebooks)
+func (x *XMiddleware) CreateCollabNotebook(c *fiber.Ctx) error {
+ if err := x.AuthFunc(c); err != nil {
+ return c.Status(fiber.StatusUnauthorized).SendString(err.Error())
+ }
+ if err := x.PreValidate(c); err != nil {
+ return c.Status(fiber.StatusForbidden).SendString(err.Error())
+ }
+
+ if err := x.PostValidate(c); err != nil {
+ return c.Status(fiber.StatusForbidden).SendString(err.Error())
+ }
+ return x.ServerInterface.CreateCollabNotebook(c)
+}
+// Delete a collaborative notebook
+// (DELETE /collab/notebooks/{notebookID})
+func (x *XMiddleware) DeleteCollabNotebook(c *fiber.Ctx, notebookID string) error {
+ if err := x.AuthFunc(c); err != nil {
+ return c.Status(fiber.StatusUnauthorized).SendString(err.Error())
+ }
+ if err := x.PreValidate(c); err != nil {
+ return c.Status(fiber.StatusForbidden).SendString(err.Error())
+ }
+
+ if err := x.PostValidate(c); err != nil {
+ return c.Status(fiber.StatusForbidden).SendString(err.Error())
+ }
+ return x.ServerInterface.DeleteCollabNotebook(c, notebookID)
+}
+// Get collaborative notebook details
+// (GET /collab/notebooks/{notebookID})
+func (x *XMiddleware) GetCollabNotebook(c *fiber.Ctx, notebookID string) error {
+ if err := x.AuthFunc(c); err != nil {
+ return c.Status(fiber.StatusUnauthorized).SendString(err.Error())
+ }
+ if err := x.PreValidate(c); err != nil {
+ return c.Status(fiber.StatusForbidden).SendString(err.Error())
+ }
+
+ if err := x.PostValidate(c); err != nil {
+ return c.Status(fiber.StatusForbidden).SendString(err.Error())
+ }
+ return x.ServerInterface.GetCollabNotebook(c, notebookID)
+}
+// Update a collaborative notebook
+// (PUT /collab/notebooks/{notebookID})
+func (x *XMiddleware) UpdateCollabNotebook(c *fiber.Ctx, notebookID string) error {
+ if err := x.AuthFunc(c); err != nil {
+ return c.Status(fiber.StatusUnauthorized).SendString(err.Error())
+ }
+ if err := x.PreValidate(c); err != nil {
+ return c.Status(fiber.StatusForbidden).SendString(err.Error())
+ }
+
+ if err := x.PostValidate(c); err != nil {
+ return c.Status(fiber.StatusForbidden).SendString(err.Error())
+ }
+ return x.ServerInterface.UpdateCollabNotebook(c, notebookID)
+}
+// Upload a collaborative notebook snapshot
+// (PUT /collab/notebooks/{notebookID}/snapshot)
+func (x *XMiddleware) UploadCollabNotebookSnapshot(c *fiber.Ctx, notebookID string) error {
+ if err := x.AuthFunc(c); err != nil {
+ return c.Status(fiber.StatusUnauthorized).SendString(err.Error())
+ }
+ if err := x.PreValidate(c); err != nil {
+ return c.Status(fiber.StatusForbidden).SendString(err.Error())
+ }
+
+ if err := x.PostValidate(c); err != nil {
+ return c.Status(fiber.StatusForbidden).SendString(err.Error())
+ }
+ return x.ServerInterface.UploadCollabNotebookSnapshot(c, notebookID)
+}
+// Transfer collaborative notebook scope
+// (POST /collab/notebooks/{notebookID}/transfer)
+func (x *XMiddleware) TransferCollabNotebook(c *fiber.Ctx, notebookID string) error {
+ if err := x.AuthFunc(c); err != nil {
+ return c.Status(fiber.StatusUnauthorized).SendString(err.Error())
+ }
+ if err := x.PreValidate(c); err != nil {
+ return c.Status(fiber.StatusForbidden).SendString(err.Error())
+ }
+
+ if err := x.PostValidate(c); err != nil {
+ return c.Status(fiber.StatusForbidden).SendString(err.Error())
+ }
+ return x.ServerInterface.TransferCollabNotebook(c, notebookID)
+}
// List all databases
// (GET /databases)
func (x *XMiddleware) ListDatabases(c *fiber.Ctx) error {
diff --git a/pkg/zgen/apigen/spec_gen.go b/pkg/zgen/apigen/spec_gen.go
index b15ff693..b23410a3 100644
--- a/pkg/zgen/apigen/spec_gen.go
+++ b/pkg/zgen/apigen/spec_gen.go
@@ -22,6 +22,24 @@ const (
BearerAuthScopes = "BearerAuth.Scopes"
)
+// Defines values for CollabNotebookScope.
+const (
+ CollabNotebookScopeOrganization CollabNotebookScope = "organization"
+ CollabNotebookScopePersonal CollabNotebookScope = "personal"
+)
+
+// Defines values for CollabNotebookCreateRequestScope.
+const (
+ CollabNotebookCreateRequestScopeOrganization CollabNotebookCreateRequestScope = "organization"
+ CollabNotebookCreateRequestScopePersonal CollabNotebookCreateRequestScope = "personal"
+)
+
+// Defines values for CollabNotebookTransferRequestTargetScope.
+const (
+ Organization CollabNotebookTransferRequestTargetScope = "organization"
+ Personal CollabNotebookTransferRequestTargetScope = "personal"
+)
+
// Defines values for EventSpecType.
const (
TaskCompleted EventSpecType = "TaskCompleted"
@@ -139,6 +157,60 @@ type ClusterImport struct {
Version string `json:"version"`
}
+// CollabNotebook defines model for CollabNotebook.
+type CollabNotebook struct {
+ // CreatedAt Creation timestamp
+ CreatedAt time.Time `json:"createdAt"`
+
+ // CreatedByUserID User that originally created the notebook
+ CreatedByUserID *int32 `json:"createdByUserID,omitempty"`
+
+ // NotebookID Unique identifier of the collaborative notebook
+ NotebookID string `json:"notebookID"`
+
+ // OrganizationID Organization that currently owns the notebook
+ OrganizationID *int32 `json:"organizationID,omitempty"`
+
+ // OwnerUserID User that owns the notebook when scope is personal
+ OwnerUserID *int32 `json:"ownerUserID,omitempty"`
+
+ // Scope Ownership scope of the notebook
+ Scope *CollabNotebookScope `json:"scope,omitempty"`
+
+ // Title Title of the collaborative notebook
+ Title string `json:"title"`
+
+ // UpdatedAt Last update timestamp
+ UpdatedAt time.Time `json:"updatedAt"`
+}
+
+// CollabNotebookScope Ownership scope of the notebook
+type CollabNotebookScope string
+
+// CollabNotebookCreateRequest defines model for CollabNotebookCreateRequest.
+type CollabNotebookCreateRequest struct {
+ // Scope Ownership scope of the notebook; defaults to personal when omitted
+ Scope *CollabNotebookCreateRequestScope `json:"scope,omitempty"`
+
+ // Title Title of the collaborative notebook
+ Title string `json:"title"`
+}
+
+// CollabNotebookCreateRequestScope Ownership scope of the notebook; defaults to personal when omitted
+type CollabNotebookCreateRequestScope string
+
+// CollabNotebookTransferRequest defines model for CollabNotebookTransferRequest.
+type CollabNotebookTransferRequest struct {
+ // OwnerUserID Target owner when moving to personal scope; defaults to the caller
+ OwnerUserID *int32 `json:"ownerUserID,omitempty"`
+
+ // TargetScope Desired ownership scope
+ TargetScope CollabNotebookTransferRequestTargetScope `json:"targetScope"`
+}
+
+// CollabNotebookTransferRequestTargetScope Desired ownership scope
+type CollabNotebookTransferRequestTargetScope string
+
// Column defines model for Column.
type Column struct {
// IsHidden Whether the column is hidden
@@ -624,6 +696,15 @@ type RunRisectlCommandJSONRequestBody = RisectlCommand
// CreateClusterSnapshotJSONRequestBody defines body for CreateClusterSnapshot for application/json ContentType.
type CreateClusterSnapshotJSONRequestBody = SnapshotCreate
+// CreateCollabNotebookJSONRequestBody defines body for CreateCollabNotebook for application/json ContentType.
+type CreateCollabNotebookJSONRequestBody = CollabNotebookCreateRequest
+
+// UpdateCollabNotebookJSONRequestBody defines body for UpdateCollabNotebook for application/json ContentType.
+type UpdateCollabNotebookJSONRequestBody = CollabNotebook
+
+// TransferCollabNotebookJSONRequestBody defines body for TransferCollabNotebook for application/json ContentType.
+type TransferCollabNotebookJSONRequestBody = CollabNotebookTransferRequest
+
// ImportDatabaseJSONRequestBody defines body for ImportDatabase for application/json ContentType.
type ImportDatabaseJSONRequestBody = DatabaseConnectInfo
@@ -794,6 +875,33 @@ type ClientInterface interface {
// RestoreClusterSnapshot request
RestoreClusterSnapshot(ctx context.Context, id int32, snapshotId int64, reqEditors ...RequestEditorFn) (*http.Response, error)
+ // ListCollabNotebooks request
+ ListCollabNotebooks(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error)
+
+ // CreateCollabNotebookWithBody request with any body
+ CreateCollabNotebookWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error)
+
+ CreateCollabNotebook(ctx context.Context, body CreateCollabNotebookJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error)
+
+ // DeleteCollabNotebook request
+ DeleteCollabNotebook(ctx context.Context, notebookID string, reqEditors ...RequestEditorFn) (*http.Response, error)
+
+ // GetCollabNotebook request
+ GetCollabNotebook(ctx context.Context, notebookID string, reqEditors ...RequestEditorFn) (*http.Response, error)
+
+ // UpdateCollabNotebookWithBody request with any body
+ UpdateCollabNotebookWithBody(ctx context.Context, notebookID string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error)
+
+ UpdateCollabNotebook(ctx context.Context, notebookID string, body UpdateCollabNotebookJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error)
+
+ // UploadCollabNotebookSnapshotWithBody request with any body
+ UploadCollabNotebookSnapshotWithBody(ctx context.Context, notebookID string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error)
+
+ // TransferCollabNotebookWithBody request with any body
+ TransferCollabNotebookWithBody(ctx context.Context, notebookID string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error)
+
+ TransferCollabNotebook(ctx context.Context, notebookID string, body TransferCollabNotebookJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error)
+
// ListDatabases request
ListDatabases(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error)
@@ -1192,6 +1300,126 @@ func (c *Client) RestoreClusterSnapshot(ctx context.Context, id int32, snapshotI
return c.Client.Do(req)
}
+func (c *Client) ListCollabNotebooks(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) {
+ req, err := NewListCollabNotebooksRequest(c.Server)
+ if err != nil {
+ return nil, err
+ }
+ req = req.WithContext(ctx)
+ if err := c.applyEditors(ctx, req, reqEditors); err != nil {
+ return nil, err
+ }
+ return c.Client.Do(req)
+}
+
+func (c *Client) CreateCollabNotebookWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) {
+ req, err := NewCreateCollabNotebookRequestWithBody(c.Server, contentType, body)
+ if err != nil {
+ return nil, err
+ }
+ req = req.WithContext(ctx)
+ if err := c.applyEditors(ctx, req, reqEditors); err != nil {
+ return nil, err
+ }
+ return c.Client.Do(req)
+}
+
+func (c *Client) CreateCollabNotebook(ctx context.Context, body CreateCollabNotebookJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) {
+ req, err := NewCreateCollabNotebookRequest(c.Server, body)
+ if err != nil {
+ return nil, err
+ }
+ req = req.WithContext(ctx)
+ if err := c.applyEditors(ctx, req, reqEditors); err != nil {
+ return nil, err
+ }
+ return c.Client.Do(req)
+}
+
+func (c *Client) DeleteCollabNotebook(ctx context.Context, notebookID string, reqEditors ...RequestEditorFn) (*http.Response, error) {
+ req, err := NewDeleteCollabNotebookRequest(c.Server, notebookID)
+ if err != nil {
+ return nil, err
+ }
+ req = req.WithContext(ctx)
+ if err := c.applyEditors(ctx, req, reqEditors); err != nil {
+ return nil, err
+ }
+ return c.Client.Do(req)
+}
+
+func (c *Client) GetCollabNotebook(ctx context.Context, notebookID string, reqEditors ...RequestEditorFn) (*http.Response, error) {
+ req, err := NewGetCollabNotebookRequest(c.Server, notebookID)
+ if err != nil {
+ return nil, err
+ }
+ req = req.WithContext(ctx)
+ if err := c.applyEditors(ctx, req, reqEditors); err != nil {
+ return nil, err
+ }
+ return c.Client.Do(req)
+}
+
+func (c *Client) UpdateCollabNotebookWithBody(ctx context.Context, notebookID string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) {
+ req, err := NewUpdateCollabNotebookRequestWithBody(c.Server, notebookID, contentType, body)
+ if err != nil {
+ return nil, err
+ }
+ req = req.WithContext(ctx)
+ if err := c.applyEditors(ctx, req, reqEditors); err != nil {
+ return nil, err
+ }
+ return c.Client.Do(req)
+}
+
+func (c *Client) UpdateCollabNotebook(ctx context.Context, notebookID string, body UpdateCollabNotebookJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) {
+ req, err := NewUpdateCollabNotebookRequest(c.Server, notebookID, body)
+ if err != nil {
+ return nil, err
+ }
+ req = req.WithContext(ctx)
+ if err := c.applyEditors(ctx, req, reqEditors); err != nil {
+ return nil, err
+ }
+ return c.Client.Do(req)
+}
+
+func (c *Client) UploadCollabNotebookSnapshotWithBody(ctx context.Context, notebookID string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) {
+ req, err := NewUploadCollabNotebookSnapshotRequestWithBody(c.Server, notebookID, contentType, body)
+ if err != nil {
+ return nil, err
+ }
+ req = req.WithContext(ctx)
+ if err := c.applyEditors(ctx, req, reqEditors); err != nil {
+ return nil, err
+ }
+ return c.Client.Do(req)
+}
+
+func (c *Client) TransferCollabNotebookWithBody(ctx context.Context, notebookID string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) {
+ req, err := NewTransferCollabNotebookRequestWithBody(c.Server, notebookID, contentType, body)
+ if err != nil {
+ return nil, err
+ }
+ req = req.WithContext(ctx)
+ if err := c.applyEditors(ctx, req, reqEditors); err != nil {
+ return nil, err
+ }
+ return c.Client.Do(req)
+}
+
+func (c *Client) TransferCollabNotebook(ctx context.Context, notebookID string, body TransferCollabNotebookJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) {
+ req, err := NewTransferCollabNotebookRequest(c.Server, notebookID, body)
+ if err != nil {
+ return nil, err
+ }
+ req = req.WithContext(ctx)
+ if err := c.applyEditors(ctx, req, reqEditors); err != nil {
+ return nil, err
+ }
+ return c.Client.Do(req)
+}
+
func (c *Client) ListDatabases(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) {
req, err := NewListDatabasesRequest(c.Server)
if err != nil {
@@ -2351,8 +2579,8 @@ func NewRestoreClusterSnapshotRequest(server string, id int32, snapshotId int64)
return req, nil
}
-// NewListDatabasesRequest generates requests for ListDatabases
-func NewListDatabasesRequest(server string) (*http.Request, error) {
+// NewListCollabNotebooksRequest generates requests for ListCollabNotebooks
+func NewListCollabNotebooksRequest(server string) (*http.Request, error) {
var err error
serverURL, err := url.Parse(server)
@@ -2360,7 +2588,7 @@ func NewListDatabasesRequest(server string) (*http.Request, error) {
return nil, err
}
- operationPath := fmt.Sprintf("/databases")
+ operationPath := fmt.Sprintf("/collab/notebooks")
if operationPath[0] == '/' {
operationPath = "." + operationPath
}
@@ -2378,19 +2606,19 @@ func NewListDatabasesRequest(server string) (*http.Request, error) {
return req, nil
}
-// NewImportDatabaseRequest calls the generic ImportDatabase builder with application/json body
-func NewImportDatabaseRequest(server string, body ImportDatabaseJSONRequestBody) (*http.Request, error) {
+// NewCreateCollabNotebookRequest calls the generic CreateCollabNotebook builder with application/json body
+func NewCreateCollabNotebookRequest(server string, body CreateCollabNotebookJSONRequestBody) (*http.Request, error) {
var bodyReader io.Reader
buf, err := json.Marshal(body)
if err != nil {
return nil, err
}
bodyReader = bytes.NewReader(buf)
- return NewImportDatabaseRequestWithBody(server, "application/json", bodyReader)
+ return NewCreateCollabNotebookRequestWithBody(server, "application/json", bodyReader)
}
-// NewImportDatabaseRequestWithBody generates requests for ImportDatabase with any type of body
-func NewImportDatabaseRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) {
+// NewCreateCollabNotebookRequestWithBody generates requests for CreateCollabNotebook with any type of body
+func NewCreateCollabNotebookRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) {
var err error
serverURL, err := url.Parse(server)
@@ -2398,7 +2626,7 @@ func NewImportDatabaseRequestWithBody(server string, contentType string, body io
return nil, err
}
- operationPath := fmt.Sprintf("/databases/import")
+ operationPath := fmt.Sprintf("/collab/notebooks")
if operationPath[0] == '/' {
operationPath = "." + operationPath
}
@@ -2418,27 +2646,23 @@ func NewImportDatabaseRequestWithBody(server string, contentType string, body io
return req, nil
}
-// NewTestDatabaseConnectionRequest calls the generic TestDatabaseConnection builder with application/json body
-func NewTestDatabaseConnectionRequest(server string, body TestDatabaseConnectionJSONRequestBody) (*http.Request, error) {
- var bodyReader io.Reader
- buf, err := json.Marshal(body)
+// NewDeleteCollabNotebookRequest generates requests for DeleteCollabNotebook
+func NewDeleteCollabNotebookRequest(server string, notebookID string) (*http.Request, error) {
+ var err error
+
+ var pathParam0 string
+
+ pathParam0, err = runtime.StyleParamWithLocation("simple", false, "notebookID", runtime.ParamLocationPath, notebookID)
if err != nil {
return nil, err
}
- bodyReader = bytes.NewReader(buf)
- return NewTestDatabaseConnectionRequestWithBody(server, "application/json", bodyReader)
-}
-
-// NewTestDatabaseConnectionRequestWithBody generates requests for TestDatabaseConnection with any type of body
-func NewTestDatabaseConnectionRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) {
- var err error
serverURL, err := url.Parse(server)
if err != nil {
return nil, err
}
- operationPath := fmt.Sprintf("/databases/test-connection")
+ operationPath := fmt.Sprintf("/collab/notebooks/%s", pathParam0)
if operationPath[0] == '/' {
operationPath = "." + operationPath
}
@@ -2448,23 +2672,21 @@ func NewTestDatabaseConnectionRequestWithBody(server string, contentType string,
return nil, err
}
- req, err := http.NewRequest("POST", queryURL.String(), body)
+ req, err := http.NewRequest("DELETE", queryURL.String(), nil)
if err != nil {
return nil, err
}
- req.Header.Add("Content-Type", contentType)
-
return req, nil
}
-// NewDeleteDatabaseRequest generates requests for DeleteDatabase
-func NewDeleteDatabaseRequest(server string, id int32) (*http.Request, error) {
+// NewGetCollabNotebookRequest generates requests for GetCollabNotebook
+func NewGetCollabNotebookRequest(server string, notebookID string) (*http.Request, error) {
var err error
var pathParam0 string
- pathParam0, err = runtime.StyleParamWithLocation("simple", false, "ID", runtime.ParamLocationPath, id)
+ pathParam0, err = runtime.StyleParamWithLocation("simple", false, "notebookID", runtime.ParamLocationPath, notebookID)
if err != nil {
return nil, err
}
@@ -2474,7 +2696,7 @@ func NewDeleteDatabaseRequest(server string, id int32) (*http.Request, error) {
return nil, err
}
- operationPath := fmt.Sprintf("/databases/%s", pathParam0)
+ operationPath := fmt.Sprintf("/collab/notebooks/%s", pathParam0)
if operationPath[0] == '/' {
operationPath = "." + operationPath
}
@@ -2484,7 +2706,7 @@ func NewDeleteDatabaseRequest(server string, id int32) (*http.Request, error) {
return nil, err
}
- req, err := http.NewRequest("DELETE", queryURL.String(), nil)
+ req, err := http.NewRequest("GET", queryURL.String(), nil)
if err != nil {
return nil, err
}
@@ -2492,13 +2714,24 @@ func NewDeleteDatabaseRequest(server string, id int32) (*http.Request, error) {
return req, nil
}
-// NewGetDatabaseRequest generates requests for GetDatabase
-func NewGetDatabaseRequest(server string, id int32) (*http.Request, error) {
+// NewUpdateCollabNotebookRequest calls the generic UpdateCollabNotebook builder with application/json body
+func NewUpdateCollabNotebookRequest(server string, notebookID string, body UpdateCollabNotebookJSONRequestBody) (*http.Request, error) {
+ var bodyReader io.Reader
+ buf, err := json.Marshal(body)
+ if err != nil {
+ return nil, err
+ }
+ bodyReader = bytes.NewReader(buf)
+ return NewUpdateCollabNotebookRequestWithBody(server, notebookID, "application/json", bodyReader)
+}
+
+// NewUpdateCollabNotebookRequestWithBody generates requests for UpdateCollabNotebook with any type of body
+func NewUpdateCollabNotebookRequestWithBody(server string, notebookID string, contentType string, body io.Reader) (*http.Request, error) {
var err error
var pathParam0 string
- pathParam0, err = runtime.StyleParamWithLocation("simple", false, "ID", runtime.ParamLocationPath, id)
+ pathParam0, err = runtime.StyleParamWithLocation("simple", false, "notebookID", runtime.ParamLocationPath, notebookID)
if err != nil {
return nil, err
}
@@ -2508,7 +2741,7 @@ func NewGetDatabaseRequest(server string, id int32) (*http.Request, error) {
return nil, err
}
- operationPath := fmt.Sprintf("/databases/%s", pathParam0)
+ operationPath := fmt.Sprintf("/collab/notebooks/%s", pathParam0)
if operationPath[0] == '/' {
operationPath = "." + operationPath
}
@@ -2518,32 +2751,23 @@ func NewGetDatabaseRequest(server string, id int32) (*http.Request, error) {
return nil, err
}
- req, err := http.NewRequest("GET", queryURL.String(), nil)
+ req, err := http.NewRequest("PUT", queryURL.String(), body)
if err != nil {
return nil, err
}
- return req, nil
-}
+ req.Header.Add("Content-Type", contentType)
-// NewUpdateDatabaseRequest calls the generic UpdateDatabase builder with application/json body
-func NewUpdateDatabaseRequest(server string, id int32, body UpdateDatabaseJSONRequestBody) (*http.Request, error) {
- var bodyReader io.Reader
- buf, err := json.Marshal(body)
- if err != nil {
- return nil, err
- }
- bodyReader = bytes.NewReader(buf)
- return NewUpdateDatabaseRequestWithBody(server, id, "application/json", bodyReader)
+ return req, nil
}
-// NewUpdateDatabaseRequestWithBody generates requests for UpdateDatabase with any type of body
-func NewUpdateDatabaseRequestWithBody(server string, id int32, contentType string, body io.Reader) (*http.Request, error) {
+// NewUploadCollabNotebookSnapshotRequestWithBody generates requests for UploadCollabNotebookSnapshot with any type of body
+func NewUploadCollabNotebookSnapshotRequestWithBody(server string, notebookID string, contentType string, body io.Reader) (*http.Request, error) {
var err error
var pathParam0 string
- pathParam0, err = runtime.StyleParamWithLocation("simple", false, "ID", runtime.ParamLocationPath, id)
+ pathParam0, err = runtime.StyleParamWithLocation("simple", false, "notebookID", runtime.ParamLocationPath, notebookID)
if err != nil {
return nil, err
}
@@ -2553,7 +2777,7 @@ func NewUpdateDatabaseRequestWithBody(server string, id int32, contentType strin
return nil, err
}
- operationPath := fmt.Sprintf("/databases/%s", pathParam0)
+ operationPath := fmt.Sprintf("/collab/notebooks/%s/snapshot", pathParam0)
if operationPath[0] == '/' {
operationPath = "." + operationPath
}
@@ -2573,13 +2797,24 @@ func NewUpdateDatabaseRequestWithBody(server string, id int32, contentType strin
return req, nil
}
-// NewGetDDLProgressRequest generates requests for GetDDLProgress
-func NewGetDDLProgressRequest(server string, id int32) (*http.Request, error) {
+// NewTransferCollabNotebookRequest calls the generic TransferCollabNotebook builder with application/json body
+func NewTransferCollabNotebookRequest(server string, notebookID string, body TransferCollabNotebookJSONRequestBody) (*http.Request, error) {
+ var bodyReader io.Reader
+ buf, err := json.Marshal(body)
+ if err != nil {
+ return nil, err
+ }
+ bodyReader = bytes.NewReader(buf)
+ return NewTransferCollabNotebookRequestWithBody(server, notebookID, "application/json", bodyReader)
+}
+
+// NewTransferCollabNotebookRequestWithBody generates requests for TransferCollabNotebook with any type of body
+func NewTransferCollabNotebookRequestWithBody(server string, notebookID string, contentType string, body io.Reader) (*http.Request, error) {
var err error
var pathParam0 string
- pathParam0, err = runtime.StyleParamWithLocation("simple", false, "ID", runtime.ParamLocationPath, id)
+ pathParam0, err = runtime.StyleParamWithLocation("simple", false, "notebookID", runtime.ParamLocationPath, notebookID)
if err != nil {
return nil, err
}
@@ -2589,7 +2824,7 @@ func NewGetDDLProgressRequest(server string, id int32) (*http.Request, error) {
return nil, err
}
- operationPath := fmt.Sprintf("/databases/%s/ddl-progress", pathParam0)
+ operationPath := fmt.Sprintf("/collab/notebooks/%s/transfer", pathParam0)
if operationPath[0] == '/' {
operationPath = "." + operationPath
}
@@ -2599,38 +2834,26 @@ func NewGetDDLProgressRequest(server string, id int32) (*http.Request, error) {
return nil, err
}
- req, err := http.NewRequest("GET", queryURL.String(), nil)
+ req, err := http.NewRequest("POST", queryURL.String(), body)
if err != nil {
return nil, err
}
+ req.Header.Add("Content-Type", contentType)
+
return req, nil
}
-// NewCancelDDLProgressRequest generates requests for CancelDDLProgress
-func NewCancelDDLProgressRequest(server string, id int32, ddlID int64) (*http.Request, error) {
+// NewListDatabasesRequest generates requests for ListDatabases
+func NewListDatabasesRequest(server string) (*http.Request, error) {
var err error
- var pathParam0 string
-
- pathParam0, err = runtime.StyleParamWithLocation("simple", false, "ID", runtime.ParamLocationPath, id)
- if err != nil {
- return nil, err
- }
-
- var pathParam1 string
-
- pathParam1, err = runtime.StyleParamWithLocation("simple", false, "ddlID", runtime.ParamLocationPath, ddlID)
- if err != nil {
- return nil, err
- }
-
serverURL, err := url.Parse(server)
if err != nil {
return nil, err
}
- operationPath := fmt.Sprintf("/databases/%s/ddl-progress/%s/cancel", pathParam0, pathParam1)
+ operationPath := fmt.Sprintf("/databases")
if operationPath[0] == '/' {
operationPath = "." + operationPath
}
@@ -2640,7 +2863,7 @@ func NewCancelDDLProgressRequest(server string, id int32, ddlID int64) (*http.Re
return nil, err
}
- req, err := http.NewRequest("POST", queryURL.String(), nil)
+ req, err := http.NewRequest("GET", queryURL.String(), nil)
if err != nil {
return nil, err
}
@@ -2648,19 +2871,289 @@ func NewCancelDDLProgressRequest(server string, id int32, ddlID int64) (*http.Re
return req, nil
}
-// NewQueryDatabaseRequest calls the generic QueryDatabase builder with application/json body
-func NewQueryDatabaseRequest(server string, id int32, body QueryDatabaseJSONRequestBody) (*http.Request, error) {
+// NewImportDatabaseRequest calls the generic ImportDatabase builder with application/json body
+func NewImportDatabaseRequest(server string, body ImportDatabaseJSONRequestBody) (*http.Request, error) {
var bodyReader io.Reader
buf, err := json.Marshal(body)
if err != nil {
return nil, err
}
bodyReader = bytes.NewReader(buf)
- return NewQueryDatabaseRequestWithBody(server, id, "application/json", bodyReader)
+ return NewImportDatabaseRequestWithBody(server, "application/json", bodyReader)
}
-// NewQueryDatabaseRequestWithBody generates requests for QueryDatabase with any type of body
-func NewQueryDatabaseRequestWithBody(server string, id int32, contentType string, body io.Reader) (*http.Request, error) {
+// NewImportDatabaseRequestWithBody generates requests for ImportDatabase with any type of body
+func NewImportDatabaseRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) {
+ var err error
+
+ serverURL, err := url.Parse(server)
+ if err != nil {
+ return nil, err
+ }
+
+ operationPath := fmt.Sprintf("/databases/import")
+ if operationPath[0] == '/' {
+ operationPath = "." + operationPath
+ }
+
+ queryURL, err := serverURL.Parse(operationPath)
+ if err != nil {
+ return nil, err
+ }
+
+ req, err := http.NewRequest("POST", queryURL.String(), body)
+ if err != nil {
+ return nil, err
+ }
+
+ req.Header.Add("Content-Type", contentType)
+
+ return req, nil
+}
+
+// NewTestDatabaseConnectionRequest calls the generic TestDatabaseConnection builder with application/json body
+func NewTestDatabaseConnectionRequest(server string, body TestDatabaseConnectionJSONRequestBody) (*http.Request, error) {
+ var bodyReader io.Reader
+ buf, err := json.Marshal(body)
+ if err != nil {
+ return nil, err
+ }
+ bodyReader = bytes.NewReader(buf)
+ return NewTestDatabaseConnectionRequestWithBody(server, "application/json", bodyReader)
+}
+
+// NewTestDatabaseConnectionRequestWithBody generates requests for TestDatabaseConnection with any type of body
+func NewTestDatabaseConnectionRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) {
+ var err error
+
+ serverURL, err := url.Parse(server)
+ if err != nil {
+ return nil, err
+ }
+
+ operationPath := fmt.Sprintf("/databases/test-connection")
+ if operationPath[0] == '/' {
+ operationPath = "." + operationPath
+ }
+
+ queryURL, err := serverURL.Parse(operationPath)
+ if err != nil {
+ return nil, err
+ }
+
+ req, err := http.NewRequest("POST", queryURL.String(), body)
+ if err != nil {
+ return nil, err
+ }
+
+ req.Header.Add("Content-Type", contentType)
+
+ return req, nil
+}
+
+// NewDeleteDatabaseRequest generates requests for DeleteDatabase
+func NewDeleteDatabaseRequest(server string, id int32) (*http.Request, error) {
+ var err error
+
+ var pathParam0 string
+
+ pathParam0, err = runtime.StyleParamWithLocation("simple", false, "ID", runtime.ParamLocationPath, id)
+ if err != nil {
+ return nil, err
+ }
+
+ serverURL, err := url.Parse(server)
+ if err != nil {
+ return nil, err
+ }
+
+ operationPath := fmt.Sprintf("/databases/%s", pathParam0)
+ if operationPath[0] == '/' {
+ operationPath = "." + operationPath
+ }
+
+ queryURL, err := serverURL.Parse(operationPath)
+ if err != nil {
+ return nil, err
+ }
+
+ req, err := http.NewRequest("DELETE", queryURL.String(), nil)
+ if err != nil {
+ return nil, err
+ }
+
+ return req, nil
+}
+
+// NewGetDatabaseRequest generates requests for GetDatabase
+func NewGetDatabaseRequest(server string, id int32) (*http.Request, error) {
+ var err error
+
+ var pathParam0 string
+
+ pathParam0, err = runtime.StyleParamWithLocation("simple", false, "ID", runtime.ParamLocationPath, id)
+ if err != nil {
+ return nil, err
+ }
+
+ serverURL, err := url.Parse(server)
+ if err != nil {
+ return nil, err
+ }
+
+ operationPath := fmt.Sprintf("/databases/%s", pathParam0)
+ if operationPath[0] == '/' {
+ operationPath = "." + operationPath
+ }
+
+ queryURL, err := serverURL.Parse(operationPath)
+ if err != nil {
+ return nil, err
+ }
+
+ req, err := http.NewRequest("GET", queryURL.String(), nil)
+ if err != nil {
+ return nil, err
+ }
+
+ return req, nil
+}
+
+// NewUpdateDatabaseRequest calls the generic UpdateDatabase builder with application/json body
+func NewUpdateDatabaseRequest(server string, id int32, body UpdateDatabaseJSONRequestBody) (*http.Request, error) {
+ var bodyReader io.Reader
+ buf, err := json.Marshal(body)
+ if err != nil {
+ return nil, err
+ }
+ bodyReader = bytes.NewReader(buf)
+ return NewUpdateDatabaseRequestWithBody(server, id, "application/json", bodyReader)
+}
+
+// NewUpdateDatabaseRequestWithBody generates requests for UpdateDatabase with any type of body
+func NewUpdateDatabaseRequestWithBody(server string, id int32, contentType string, body io.Reader) (*http.Request, error) {
+ var err error
+
+ var pathParam0 string
+
+ pathParam0, err = runtime.StyleParamWithLocation("simple", false, "ID", runtime.ParamLocationPath, id)
+ if err != nil {
+ return nil, err
+ }
+
+ serverURL, err := url.Parse(server)
+ if err != nil {
+ return nil, err
+ }
+
+ operationPath := fmt.Sprintf("/databases/%s", pathParam0)
+ if operationPath[0] == '/' {
+ operationPath = "." + operationPath
+ }
+
+ queryURL, err := serverURL.Parse(operationPath)
+ if err != nil {
+ return nil, err
+ }
+
+ req, err := http.NewRequest("PUT", queryURL.String(), body)
+ if err != nil {
+ return nil, err
+ }
+
+ req.Header.Add("Content-Type", contentType)
+
+ return req, nil
+}
+
+// NewGetDDLProgressRequest generates requests for GetDDLProgress
+func NewGetDDLProgressRequest(server string, id int32) (*http.Request, error) {
+ var err error
+
+ var pathParam0 string
+
+ pathParam0, err = runtime.StyleParamWithLocation("simple", false, "ID", runtime.ParamLocationPath, id)
+ if err != nil {
+ return nil, err
+ }
+
+ serverURL, err := url.Parse(server)
+ if err != nil {
+ return nil, err
+ }
+
+ operationPath := fmt.Sprintf("/databases/%s/ddl-progress", pathParam0)
+ if operationPath[0] == '/' {
+ operationPath = "." + operationPath
+ }
+
+ queryURL, err := serverURL.Parse(operationPath)
+ if err != nil {
+ return nil, err
+ }
+
+ req, err := http.NewRequest("GET", queryURL.String(), nil)
+ if err != nil {
+ return nil, err
+ }
+
+ return req, nil
+}
+
+// NewCancelDDLProgressRequest generates requests for CancelDDLProgress
+func NewCancelDDLProgressRequest(server string, id int32, ddlID int64) (*http.Request, error) {
+ var err error
+
+ var pathParam0 string
+
+ pathParam0, err = runtime.StyleParamWithLocation("simple", false, "ID", runtime.ParamLocationPath, id)
+ if err != nil {
+ return nil, err
+ }
+
+ var pathParam1 string
+
+ pathParam1, err = runtime.StyleParamWithLocation("simple", false, "ddlID", runtime.ParamLocationPath, ddlID)
+ if err != nil {
+ return nil, err
+ }
+
+ serverURL, err := url.Parse(server)
+ if err != nil {
+ return nil, err
+ }
+
+ operationPath := fmt.Sprintf("/databases/%s/ddl-progress/%s/cancel", pathParam0, pathParam1)
+ if operationPath[0] == '/' {
+ operationPath = "." + operationPath
+ }
+
+ queryURL, err := serverURL.Parse(operationPath)
+ if err != nil {
+ return nil, err
+ }
+
+ req, err := http.NewRequest("POST", queryURL.String(), nil)
+ if err != nil {
+ return nil, err
+ }
+
+ return req, nil
+}
+
+// NewQueryDatabaseRequest calls the generic QueryDatabase builder with application/json body
+func NewQueryDatabaseRequest(server string, id int32, body QueryDatabaseJSONRequestBody) (*http.Request, error) {
+ var bodyReader io.Reader
+ buf, err := json.Marshal(body)
+ if err != nil {
+ return nil, err
+ }
+ bodyReader = bytes.NewReader(buf)
+ return NewQueryDatabaseRequestWithBody(server, id, "application/json", bodyReader)
+}
+
+// NewQueryDatabaseRequestWithBody generates requests for QueryDatabase with any type of body
+func NewQueryDatabaseRequestWithBody(server string, id int32, contentType string, body io.Reader) (*http.Request, error) {
var err error
var pathParam0 string
@@ -3179,6 +3672,33 @@ type ClientWithResponsesInterface interface {
// RestoreClusterSnapshotWithResponse request
RestoreClusterSnapshotWithResponse(ctx context.Context, id int32, snapshotId int64, reqEditors ...RequestEditorFn) (*RestoreClusterSnapshotResponse, error)
+ // ListCollabNotebooksWithResponse request
+ ListCollabNotebooksWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*ListCollabNotebooksResponse, error)
+
+ // CreateCollabNotebookWithBodyWithResponse request with any body
+ CreateCollabNotebookWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateCollabNotebookResponse, error)
+
+ CreateCollabNotebookWithResponse(ctx context.Context, body CreateCollabNotebookJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateCollabNotebookResponse, error)
+
+ // DeleteCollabNotebookWithResponse request
+ DeleteCollabNotebookWithResponse(ctx context.Context, notebookID string, reqEditors ...RequestEditorFn) (*DeleteCollabNotebookResponse, error)
+
+ // GetCollabNotebookWithResponse request
+ GetCollabNotebookWithResponse(ctx context.Context, notebookID string, reqEditors ...RequestEditorFn) (*GetCollabNotebookResponse, error)
+
+ // UpdateCollabNotebookWithBodyWithResponse request with any body
+ UpdateCollabNotebookWithBodyWithResponse(ctx context.Context, notebookID string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpdateCollabNotebookResponse, error)
+
+ UpdateCollabNotebookWithResponse(ctx context.Context, notebookID string, body UpdateCollabNotebookJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdateCollabNotebookResponse, error)
+
+ // UploadCollabNotebookSnapshotWithBodyWithResponse request with any body
+ UploadCollabNotebookSnapshotWithBodyWithResponse(ctx context.Context, notebookID string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UploadCollabNotebookSnapshotResponse, error)
+
+ // TransferCollabNotebookWithBodyWithResponse request with any body
+ TransferCollabNotebookWithBodyWithResponse(ctx context.Context, notebookID string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*TransferCollabNotebookResponse, error)
+
+ TransferCollabNotebookWithResponse(ctx context.Context, notebookID string, body TransferCollabNotebookJSONRequestBody, reqEditors ...RequestEditorFn) (*TransferCollabNotebookResponse, error)
+
// ListDatabasesWithResponse request
ListDatabasesWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*ListDatabasesResponse, error)
@@ -3665,6 +4185,158 @@ func (r RestoreClusterSnapshotResponse) StatusCode() int {
return 0
}
+type ListCollabNotebooksResponse struct {
+ Body []byte
+ HTTPResponse *http.Response
+ JSON200 *[]CollabNotebook
+}
+
+// Status returns HTTPResponse.Status
+func (r ListCollabNotebooksResponse) Status() string {
+ if r.HTTPResponse != nil {
+ return r.HTTPResponse.Status
+ }
+ return http.StatusText(0)
+}
+
+// StatusCode returns HTTPResponse.StatusCode
+func (r ListCollabNotebooksResponse) StatusCode() int {
+ if r.HTTPResponse != nil {
+ return r.HTTPResponse.StatusCode
+ }
+ return 0
+}
+
+type CreateCollabNotebookResponse struct {
+ Body []byte
+ HTTPResponse *http.Response
+ JSON201 *CollabNotebook
+}
+
+// Status returns HTTPResponse.Status
+func (r CreateCollabNotebookResponse) Status() string {
+ if r.HTTPResponse != nil {
+ return r.HTTPResponse.Status
+ }
+ return http.StatusText(0)
+}
+
+// StatusCode returns HTTPResponse.StatusCode
+func (r CreateCollabNotebookResponse) StatusCode() int {
+ if r.HTTPResponse != nil {
+ return r.HTTPResponse.StatusCode
+ }
+ return 0
+}
+
+type DeleteCollabNotebookResponse struct {
+ Body []byte
+ HTTPResponse *http.Response
+}
+
+// Status returns HTTPResponse.Status
+func (r DeleteCollabNotebookResponse) Status() string {
+ if r.HTTPResponse != nil {
+ return r.HTTPResponse.Status
+ }
+ return http.StatusText(0)
+}
+
+// StatusCode returns HTTPResponse.StatusCode
+func (r DeleteCollabNotebookResponse) StatusCode() int {
+ if r.HTTPResponse != nil {
+ return r.HTTPResponse.StatusCode
+ }
+ return 0
+}
+
+type GetCollabNotebookResponse struct {
+ Body []byte
+ HTTPResponse *http.Response
+ JSON200 *CollabNotebook
+}
+
+// Status returns HTTPResponse.Status
+func (r GetCollabNotebookResponse) Status() string {
+ if r.HTTPResponse != nil {
+ return r.HTTPResponse.Status
+ }
+ return http.StatusText(0)
+}
+
+// StatusCode returns HTTPResponse.StatusCode
+func (r GetCollabNotebookResponse) StatusCode() int {
+ if r.HTTPResponse != nil {
+ return r.HTTPResponse.StatusCode
+ }
+ return 0
+}
+
+type UpdateCollabNotebookResponse struct {
+ Body []byte
+ HTTPResponse *http.Response
+ JSON200 *CollabNotebook
+}
+
+// Status returns HTTPResponse.Status
+func (r UpdateCollabNotebookResponse) Status() string {
+ if r.HTTPResponse != nil {
+ return r.HTTPResponse.Status
+ }
+ return http.StatusText(0)
+}
+
+// StatusCode returns HTTPResponse.StatusCode
+func (r UpdateCollabNotebookResponse) StatusCode() int {
+ if r.HTTPResponse != nil {
+ return r.HTTPResponse.StatusCode
+ }
+ return 0
+}
+
+type UploadCollabNotebookSnapshotResponse struct {
+ Body []byte
+ HTTPResponse *http.Response
+}
+
+// Status returns HTTPResponse.Status
+func (r UploadCollabNotebookSnapshotResponse) Status() string {
+ if r.HTTPResponse != nil {
+ return r.HTTPResponse.Status
+ }
+ return http.StatusText(0)
+}
+
+// StatusCode returns HTTPResponse.StatusCode
+func (r UploadCollabNotebookSnapshotResponse) StatusCode() int {
+ if r.HTTPResponse != nil {
+ return r.HTTPResponse.StatusCode
+ }
+ return 0
+}
+
+type TransferCollabNotebookResponse struct {
+ Body []byte
+ HTTPResponse *http.Response
+ JSON200 *CollabNotebook
+}
+
+// Status returns HTTPResponse.Status
+func (r TransferCollabNotebookResponse) Status() string {
+ if r.HTTPResponse != nil {
+ return r.HTTPResponse.Status
+ }
+ return http.StatusText(0)
+}
+
+// StatusCode returns HTTPResponse.StatusCode
+func (r TransferCollabNotebookResponse) StatusCode() int {
+ if r.HTTPResponse != nil {
+ return r.HTTPResponse.StatusCode
+ }
+ return 0
+}
+
type ListDatabasesResponse struct {
Body []byte
HTTPResponse *http.Response
@@ -4214,103 +4886,190 @@ func (c *ClientWithResponses) CreateClusterDiagnosticWithResponse(ctx context.Co
if err != nil {
return nil, err
}
- return ParseCreateClusterDiagnosticResponse(rsp)
+ return ParseCreateClusterDiagnosticResponse(rsp)
+}
+
+// GetClusterAutoDiagnosticConfigWithResponse request returning *GetClusterAutoDiagnosticConfigResponse
+func (c *ClientWithResponses) GetClusterAutoDiagnosticConfigWithResponse(ctx context.Context, id int32, reqEditors ...RequestEditorFn) (*GetClusterAutoDiagnosticConfigResponse, error) {
+ rsp, err := c.GetClusterAutoDiagnosticConfig(ctx, id, reqEditors...)
+ if err != nil {
+ return nil, err
+ }
+ return ParseGetClusterAutoDiagnosticConfigResponse(rsp)
+}
+
+// UpdateClusterAutoDiagnosticConfigWithBodyWithResponse request with arbitrary body returning *UpdateClusterAutoDiagnosticConfigResponse
+func (c *ClientWithResponses) UpdateClusterAutoDiagnosticConfigWithBodyWithResponse(ctx context.Context, id int32, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpdateClusterAutoDiagnosticConfigResponse, error) {
+ rsp, err := c.UpdateClusterAutoDiagnosticConfigWithBody(ctx, id, contentType, body, reqEditors...)
+ if err != nil {
+ return nil, err
+ }
+ return ParseUpdateClusterAutoDiagnosticConfigResponse(rsp)
+}
+
+func (c *ClientWithResponses) UpdateClusterAutoDiagnosticConfigWithResponse(ctx context.Context, id int32, body UpdateClusterAutoDiagnosticConfigJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdateClusterAutoDiagnosticConfigResponse, error) {
+ rsp, err := c.UpdateClusterAutoDiagnosticConfig(ctx, id, body, reqEditors...)
+ if err != nil {
+ return nil, err
+ }
+ return ParseUpdateClusterAutoDiagnosticConfigResponse(rsp)
+}
+
+// GetClusterDiagnosticWithResponse request returning *GetClusterDiagnosticResponse
+func (c *ClientWithResponses) GetClusterDiagnosticWithResponse(ctx context.Context, id int32, diagnosticId int32, reqEditors ...RequestEditorFn) (*GetClusterDiagnosticResponse, error) {
+ rsp, err := c.GetClusterDiagnostic(ctx, id, diagnosticId, reqEditors...)
+ if err != nil {
+ return nil, err
+ }
+ return ParseGetClusterDiagnosticResponse(rsp)
+}
+
+// RunRisectlCommandWithBodyWithResponse request with arbitrary body returning *RunRisectlCommandResponse
+func (c *ClientWithResponses) RunRisectlCommandWithBodyWithResponse(ctx context.Context, id int32, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*RunRisectlCommandResponse, error) {
+ rsp, err := c.RunRisectlCommandWithBody(ctx, id, contentType, body, reqEditors...)
+ if err != nil {
+ return nil, err
+ }
+ return ParseRunRisectlCommandResponse(rsp)
+}
+
+func (c *ClientWithResponses) RunRisectlCommandWithResponse(ctx context.Context, id int32, body RunRisectlCommandJSONRequestBody, reqEditors ...RequestEditorFn) (*RunRisectlCommandResponse, error) {
+ rsp, err := c.RunRisectlCommand(ctx, id, body, reqEditors...)
+ if err != nil {
+ return nil, err
+ }
+ return ParseRunRisectlCommandResponse(rsp)
+}
+
+// ListClusterSnapshotsWithResponse request returning *ListClusterSnapshotsResponse
+func (c *ClientWithResponses) ListClusterSnapshotsWithResponse(ctx context.Context, id int32, reqEditors ...RequestEditorFn) (*ListClusterSnapshotsResponse, error) {
+ rsp, err := c.ListClusterSnapshots(ctx, id, reqEditors...)
+ if err != nil {
+ return nil, err
+ }
+ return ParseListClusterSnapshotsResponse(rsp)
+}
+
+// CreateClusterSnapshotWithBodyWithResponse request with arbitrary body returning *CreateClusterSnapshotResponse
+func (c *ClientWithResponses) CreateClusterSnapshotWithBodyWithResponse(ctx context.Context, id int32, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateClusterSnapshotResponse, error) {
+ rsp, err := c.CreateClusterSnapshotWithBody(ctx, id, contentType, body, reqEditors...)
+ if err != nil {
+ return nil, err
+ }
+ return ParseCreateClusterSnapshotResponse(rsp)
+}
+
+func (c *ClientWithResponses) CreateClusterSnapshotWithResponse(ctx context.Context, id int32, body CreateClusterSnapshotJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateClusterSnapshotResponse, error) {
+ rsp, err := c.CreateClusterSnapshot(ctx, id, body, reqEditors...)
+ if err != nil {
+ return nil, err
+ }
+ return ParseCreateClusterSnapshotResponse(rsp)
+}
+
+// DeleteClusterSnapshotWithResponse request returning *DeleteClusterSnapshotResponse
+func (c *ClientWithResponses) DeleteClusterSnapshotWithResponse(ctx context.Context, id int32, snapshotId int64, reqEditors ...RequestEditorFn) (*DeleteClusterSnapshotResponse, error) {
+ rsp, err := c.DeleteClusterSnapshot(ctx, id, snapshotId, reqEditors...)
+ if err != nil {
+ return nil, err
+ }
+ return ParseDeleteClusterSnapshotResponse(rsp)
}
-// GetClusterAutoDiagnosticConfigWithResponse request returning *GetClusterAutoDiagnosticConfigResponse
-func (c *ClientWithResponses) GetClusterAutoDiagnosticConfigWithResponse(ctx context.Context, id int32, reqEditors ...RequestEditorFn) (*GetClusterAutoDiagnosticConfigResponse, error) {
- rsp, err := c.GetClusterAutoDiagnosticConfig(ctx, id, reqEditors...)
+// RestoreClusterSnapshotWithResponse request returning *RestoreClusterSnapshotResponse
+func (c *ClientWithResponses) RestoreClusterSnapshotWithResponse(ctx context.Context, id int32, snapshotId int64, reqEditors ...RequestEditorFn) (*RestoreClusterSnapshotResponse, error) {
+ rsp, err := c.RestoreClusterSnapshot(ctx, id, snapshotId, reqEditors...)
if err != nil {
return nil, err
}
- return ParseGetClusterAutoDiagnosticConfigResponse(rsp)
+ return ParseRestoreClusterSnapshotResponse(rsp)
}
-// UpdateClusterAutoDiagnosticConfigWithBodyWithResponse request with arbitrary body returning *UpdateClusterAutoDiagnosticConfigResponse
-func (c *ClientWithResponses) UpdateClusterAutoDiagnosticConfigWithBodyWithResponse(ctx context.Context, id int32, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpdateClusterAutoDiagnosticConfigResponse, error) {
- rsp, err := c.UpdateClusterAutoDiagnosticConfigWithBody(ctx, id, contentType, body, reqEditors...)
+// ListCollabNotebooksWithResponse request returning *ListCollabNotebooksResponse
+func (c *ClientWithResponses) ListCollabNotebooksWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*ListCollabNotebooksResponse, error) {
+ rsp, err := c.ListCollabNotebooks(ctx, reqEditors...)
if err != nil {
return nil, err
}
- return ParseUpdateClusterAutoDiagnosticConfigResponse(rsp)
+ return ParseListCollabNotebooksResponse(rsp)
}
-func (c *ClientWithResponses) UpdateClusterAutoDiagnosticConfigWithResponse(ctx context.Context, id int32, body UpdateClusterAutoDiagnosticConfigJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdateClusterAutoDiagnosticConfigResponse, error) {
- rsp, err := c.UpdateClusterAutoDiagnosticConfig(ctx, id, body, reqEditors...)
+// CreateCollabNotebookWithBodyWithResponse request with arbitrary body returning *CreateCollabNotebookResponse
+func (c *ClientWithResponses) CreateCollabNotebookWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateCollabNotebookResponse, error) {
+ rsp, err := c.CreateCollabNotebookWithBody(ctx, contentType, body, reqEditors...)
if err != nil {
return nil, err
}
- return ParseUpdateClusterAutoDiagnosticConfigResponse(rsp)
+ return ParseCreateCollabNotebookResponse(rsp)
}
-// GetClusterDiagnosticWithResponse request returning *GetClusterDiagnosticResponse
-func (c *ClientWithResponses) GetClusterDiagnosticWithResponse(ctx context.Context, id int32, diagnosticId int32, reqEditors ...RequestEditorFn) (*GetClusterDiagnosticResponse, error) {
- rsp, err := c.GetClusterDiagnostic(ctx, id, diagnosticId, reqEditors...)
+func (c *ClientWithResponses) CreateCollabNotebookWithResponse(ctx context.Context, body CreateCollabNotebookJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateCollabNotebookResponse, error) {
+ rsp, err := c.CreateCollabNotebook(ctx, body, reqEditors...)
if err != nil {
return nil, err
}
- return ParseGetClusterDiagnosticResponse(rsp)
+ return ParseCreateCollabNotebookResponse(rsp)
}
-// RunRisectlCommandWithBodyWithResponse request with arbitrary body returning *RunRisectlCommandResponse
-func (c *ClientWithResponses) RunRisectlCommandWithBodyWithResponse(ctx context.Context, id int32, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*RunRisectlCommandResponse, error) {
- rsp, err := c.RunRisectlCommandWithBody(ctx, id, contentType, body, reqEditors...)
+// DeleteCollabNotebookWithResponse request returning *DeleteCollabNotebookResponse
+func (c *ClientWithResponses) DeleteCollabNotebookWithResponse(ctx context.Context, notebookID string, reqEditors ...RequestEditorFn) (*DeleteCollabNotebookResponse, error) {
+ rsp, err := c.DeleteCollabNotebook(ctx, notebookID, reqEditors...)
if err != nil {
return nil, err
}
- return ParseRunRisectlCommandResponse(rsp)
+ return ParseDeleteCollabNotebookResponse(rsp)
}
-func (c *ClientWithResponses) RunRisectlCommandWithResponse(ctx context.Context, id int32, body RunRisectlCommandJSONRequestBody, reqEditors ...RequestEditorFn) (*RunRisectlCommandResponse, error) {
- rsp, err := c.RunRisectlCommand(ctx, id, body, reqEditors...)
+// GetCollabNotebookWithResponse request returning *GetCollabNotebookResponse
+func (c *ClientWithResponses) GetCollabNotebookWithResponse(ctx context.Context, notebookID string, reqEditors ...RequestEditorFn) (*GetCollabNotebookResponse, error) {
+ rsp, err := c.GetCollabNotebook(ctx, notebookID, reqEditors...)
if err != nil {
return nil, err
}
- return ParseRunRisectlCommandResponse(rsp)
+ return ParseGetCollabNotebookResponse(rsp)
}
-// ListClusterSnapshotsWithResponse request returning *ListClusterSnapshotsResponse
-func (c *ClientWithResponses) ListClusterSnapshotsWithResponse(ctx context.Context, id int32, reqEditors ...RequestEditorFn) (*ListClusterSnapshotsResponse, error) {
- rsp, err := c.ListClusterSnapshots(ctx, id, reqEditors...)
+// UpdateCollabNotebookWithBodyWithResponse request with arbitrary body returning *UpdateCollabNotebookResponse
+func (c *ClientWithResponses) UpdateCollabNotebookWithBodyWithResponse(ctx context.Context, notebookID string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpdateCollabNotebookResponse, error) {
+ rsp, err := c.UpdateCollabNotebookWithBody(ctx, notebookID, contentType, body, reqEditors...)
if err != nil {
return nil, err
}
- return ParseListClusterSnapshotsResponse(rsp)
+ return ParseUpdateCollabNotebookResponse(rsp)
}
-// CreateClusterSnapshotWithBodyWithResponse request with arbitrary body returning *CreateClusterSnapshotResponse
-func (c *ClientWithResponses) CreateClusterSnapshotWithBodyWithResponse(ctx context.Context, id int32, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateClusterSnapshotResponse, error) {
- rsp, err := c.CreateClusterSnapshotWithBody(ctx, id, contentType, body, reqEditors...)
+func (c *ClientWithResponses) UpdateCollabNotebookWithResponse(ctx context.Context, notebookID string, body UpdateCollabNotebookJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdateCollabNotebookResponse, error) {
+ rsp, err := c.UpdateCollabNotebook(ctx, notebookID, body, reqEditors...)
if err != nil {
return nil, err
}
- return ParseCreateClusterSnapshotResponse(rsp)
+ return ParseUpdateCollabNotebookResponse(rsp)
}
-func (c *ClientWithResponses) CreateClusterSnapshotWithResponse(ctx context.Context, id int32, body CreateClusterSnapshotJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateClusterSnapshotResponse, error) {
- rsp, err := c.CreateClusterSnapshot(ctx, id, body, reqEditors...)
+// UploadCollabNotebookSnapshotWithBodyWithResponse request with arbitrary body returning *UploadCollabNotebookSnapshotResponse
+func (c *ClientWithResponses) UploadCollabNotebookSnapshotWithBodyWithResponse(ctx context.Context, notebookID string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UploadCollabNotebookSnapshotResponse, error) {
+ rsp, err := c.UploadCollabNotebookSnapshotWithBody(ctx, notebookID, contentType, body, reqEditors...)
if err != nil {
return nil, err
}
- return ParseCreateClusterSnapshotResponse(rsp)
+ return ParseUploadCollabNotebookSnapshotResponse(rsp)
}
-// DeleteClusterSnapshotWithResponse request returning *DeleteClusterSnapshotResponse
-func (c *ClientWithResponses) DeleteClusterSnapshotWithResponse(ctx context.Context, id int32, snapshotId int64, reqEditors ...RequestEditorFn) (*DeleteClusterSnapshotResponse, error) {
- rsp, err := c.DeleteClusterSnapshot(ctx, id, snapshotId, reqEditors...)
+// TransferCollabNotebookWithBodyWithResponse request with arbitrary body returning *TransferCollabNotebookResponse
+func (c *ClientWithResponses) TransferCollabNotebookWithBodyWithResponse(ctx context.Context, notebookID string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*TransferCollabNotebookResponse, error) {
+ rsp, err := c.TransferCollabNotebookWithBody(ctx, notebookID, contentType, body, reqEditors...)
if err != nil {
return nil, err
}
- return ParseDeleteClusterSnapshotResponse(rsp)
+ return ParseTransferCollabNotebookResponse(rsp)
}
-// RestoreClusterSnapshotWithResponse request returning *RestoreClusterSnapshotResponse
-func (c *ClientWithResponses) RestoreClusterSnapshotWithResponse(ctx context.Context, id int32, snapshotId int64, reqEditors ...RequestEditorFn) (*RestoreClusterSnapshotResponse, error) {
- rsp, err := c.RestoreClusterSnapshot(ctx, id, snapshotId, reqEditors...)
+func (c *ClientWithResponses) TransferCollabNotebookWithResponse(ctx context.Context, notebookID string, body TransferCollabNotebookJSONRequestBody, reqEditors ...RequestEditorFn) (*TransferCollabNotebookResponse, error) {
+ rsp, err := c.TransferCollabNotebook(ctx, notebookID, body, reqEditors...)
if err != nil {
return nil, err
}
- return ParseRestoreClusterSnapshotResponse(rsp)
+ return ParseTransferCollabNotebookResponse(rsp)
}
// ListDatabasesWithResponse request returning *ListDatabasesResponse
@@ -4982,6 +5741,168 @@ func ParseRestoreClusterSnapshotResponse(rsp *http.Response) (*RestoreClusterSna
return response, nil
}
+// ParseListCollabNotebooksResponse parses an HTTP response from a ListCollabNotebooksWithResponse call
+func ParseListCollabNotebooksResponse(rsp *http.Response) (*ListCollabNotebooksResponse, error) {
+ bodyBytes, err := io.ReadAll(rsp.Body)
+ defer func() { _ = rsp.Body.Close() }()
+ if err != nil {
+ return nil, err
+ }
+
+ response := &ListCollabNotebooksResponse{
+ Body: bodyBytes,
+ HTTPResponse: rsp,
+ }
+
+ switch {
+ case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200:
+ var dest []CollabNotebook
+ if err := json.Unmarshal(bodyBytes, &dest); err != nil {
+ return nil, err
+ }
+ response.JSON200 = &dest
+
+ }
+
+ return response, nil
+}
+
+// ParseCreateCollabNotebookResponse parses an HTTP response from a CreateCollabNotebookWithResponse call
+func ParseCreateCollabNotebookResponse(rsp *http.Response) (*CreateCollabNotebookResponse, error) {
+ bodyBytes, err := io.ReadAll(rsp.Body)
+ defer func() { _ = rsp.Body.Close() }()
+ if err != nil {
+ return nil, err
+ }
+
+ response := &CreateCollabNotebookResponse{
+ Body: bodyBytes,
+ HTTPResponse: rsp,
+ }
+
+ switch {
+ case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 201:
+ var dest CollabNotebook
+ if err := json.Unmarshal(bodyBytes, &dest); err != nil {
+ return nil, err
+ }
+ response.JSON201 = &dest
+
+ }
+
+ return response, nil
+}
+
+// ParseDeleteCollabNotebookResponse parses an HTTP response from a DeleteCollabNotebookWithResponse call
+func ParseDeleteCollabNotebookResponse(rsp *http.Response) (*DeleteCollabNotebookResponse, error) {
+ bodyBytes, err := io.ReadAll(rsp.Body)
+ defer func() { _ = rsp.Body.Close() }()
+ if err != nil {
+ return nil, err
+ }
+
+ response := &DeleteCollabNotebookResponse{
+ Body: bodyBytes,
+ HTTPResponse: rsp,
+ }
+
+ return response, nil
+}
+
+// ParseGetCollabNotebookResponse parses an HTTP response from a GetCollabNotebookWithResponse call
+func ParseGetCollabNotebookResponse(rsp *http.Response) (*GetCollabNotebookResponse, error) {
+ bodyBytes, err := io.ReadAll(rsp.Body)
+ defer func() { _ = rsp.Body.Close() }()
+ if err != nil {
+ return nil, err
+ }
+
+ response := &GetCollabNotebookResponse{
+ Body: bodyBytes,
+ HTTPResponse: rsp,
+ }
+
+ switch {
+ case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200:
+ var dest CollabNotebook
+ if err := json.Unmarshal(bodyBytes, &dest); err != nil {
+ return nil, err
+ }
+ response.JSON200 = &dest
+
+ }
+
+ return response, nil
+}
+
+// ParseUpdateCollabNotebookResponse parses an HTTP response from a UpdateCollabNotebookWithResponse call
+func ParseUpdateCollabNotebookResponse(rsp *http.Response) (*UpdateCollabNotebookResponse, error) {
+ bodyBytes, err := io.ReadAll(rsp.Body)
+ defer func() { _ = rsp.Body.Close() }()
+ if err != nil {
+ return nil, err
+ }
+
+ response := &UpdateCollabNotebookResponse{
+ Body: bodyBytes,
+ HTTPResponse: rsp,
+ }
+
+ switch {
+ case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200:
+ var dest CollabNotebook
+ if err := json.Unmarshal(bodyBytes, &dest); err != nil {
+ return nil, err
+ }
+ response.JSON200 = &dest
+
+ }
+
+ return response, nil
+}
+
+// ParseUploadCollabNotebookSnapshotResponse parses an HTTP response from a UploadCollabNotebookSnapshotWithResponse call
+func ParseUploadCollabNotebookSnapshotResponse(rsp *http.Response) (*UploadCollabNotebookSnapshotResponse, error) {
+ bodyBytes, err := io.ReadAll(rsp.Body)
+ defer func() { _ = rsp.Body.Close() }()
+ if err != nil {
+ return nil, err
+ }
+
+ response := &UploadCollabNotebookSnapshotResponse{
+ Body: bodyBytes,
+ HTTPResponse: rsp,
+ }
+
+ return response, nil
+}
+
+// ParseTransferCollabNotebookResponse parses an HTTP response from a TransferCollabNotebookWithResponse call
+func ParseTransferCollabNotebookResponse(rsp *http.Response) (*TransferCollabNotebookResponse, error) {
+ bodyBytes, err := io.ReadAll(rsp.Body)
+ defer func() { _ = rsp.Body.Close() }()
+ if err != nil {
+ return nil, err
+ }
+
+ response := &TransferCollabNotebookResponse{
+ Body: bodyBytes,
+ HTTPResponse: rsp,
+ }
+
+ switch {
+ case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200:
+ var dest CollabNotebook
+ if err := json.Unmarshal(bodyBytes, &dest); err != nil {
+ return nil, err
+ }
+ response.JSON200 = &dest
+
+ }
+
+ return response, nil
+}
+
// ParseListDatabasesResponse parses an HTTP response from a ListDatabasesWithResponse call
func ParseListDatabasesResponse(rsp *http.Response) (*ListDatabasesResponse, error) {
bodyBytes, err := io.ReadAll(rsp.Body)
@@ -5485,6 +6406,27 @@ type ServerInterface interface {
// Restore snapshot
// (POST /clusters/{ID}/snapshots/{snapshotId})
RestoreClusterSnapshot(c *fiber.Ctx, id int32, snapshotId int64) error
+ // List collaborative notebooks
+ // (GET /collab/notebooks)
+ ListCollabNotebooks(c *fiber.Ctx) error
+ // Create a collaborative notebook
+ // (POST /collab/notebooks)
+ CreateCollabNotebook(c *fiber.Ctx) error
+ // Delete a collaborative notebook
+ // (DELETE /collab/notebooks/{notebookID})
+ DeleteCollabNotebook(c *fiber.Ctx, notebookID string) error
+ // Get collaborative notebook details
+ // (GET /collab/notebooks/{notebookID})
+ GetCollabNotebook(c *fiber.Ctx, notebookID string) error
+ // Update a collaborative notebook
+ // (PUT /collab/notebooks/{notebookID})
+ UpdateCollabNotebook(c *fiber.Ctx, notebookID string) error
+ // Upload a collaborative notebook snapshot
+ // (PUT /collab/notebooks/{notebookID}/snapshot)
+ UploadCollabNotebookSnapshot(c *fiber.Ctx, notebookID string) error
+ // Transfer collaborative notebook scope
+ // (POST /collab/notebooks/{notebookID}/transfer)
+ TransferCollabNotebook(c *fiber.Ctx, notebookID string) error
// List all databases
// (GET /databases)
ListDatabases(c *fiber.Ctx) error
@@ -5928,6 +6870,112 @@ func (siw *ServerInterfaceWrapper) RestoreClusterSnapshot(c *fiber.Ctx) error {
return siw.Handler.RestoreClusterSnapshot(c, id, snapshotId)
}
+// ListCollabNotebooks operation middleware
+func (siw *ServerInterfaceWrapper) ListCollabNotebooks(c *fiber.Ctx) error {
+
+ c.Context().SetUserValue(BearerAuthScopes, []string{})
+
+ return siw.Handler.ListCollabNotebooks(c)
+}
+
+// CreateCollabNotebook operation middleware
+func (siw *ServerInterfaceWrapper) CreateCollabNotebook(c *fiber.Ctx) error {
+
+ c.Context().SetUserValue(BearerAuthScopes, []string{})
+
+ return siw.Handler.CreateCollabNotebook(c)
+}
+
+// DeleteCollabNotebook operation middleware
+func (siw *ServerInterfaceWrapper) DeleteCollabNotebook(c *fiber.Ctx) error {
+
+ var err error
+
+ // ------------- Path parameter "notebookID" -------------
+ var notebookID string
+
+ err = runtime.BindStyledParameterWithOptions("simple", "notebookID", c.Params("notebookID"), ¬ebookID, runtime.BindStyledParameterOptions{Explode: false, Required: true})
+ if err != nil {
+ return fiber.NewError(fiber.StatusBadRequest, fmt.Errorf("Invalid format for parameter notebookID: %w", err).Error())
+ }
+
+ c.Context().SetUserValue(BearerAuthScopes, []string{})
+
+ return siw.Handler.DeleteCollabNotebook(c, notebookID)
+}
+
+// GetCollabNotebook operation middleware
+func (siw *ServerInterfaceWrapper) GetCollabNotebook(c *fiber.Ctx) error {
+
+ var err error
+
+ // ------------- Path parameter "notebookID" -------------
+ var notebookID string
+
+ err = runtime.BindStyledParameterWithOptions("simple", "notebookID", c.Params("notebookID"), ¬ebookID, runtime.BindStyledParameterOptions{Explode: false, Required: true})
+ if err != nil {
+ return fiber.NewError(fiber.StatusBadRequest, fmt.Errorf("Invalid format for parameter notebookID: %w", err).Error())
+ }
+
+ c.Context().SetUserValue(BearerAuthScopes, []string{})
+
+ return siw.Handler.GetCollabNotebook(c, notebookID)
+}
+
+// UpdateCollabNotebook operation middleware
+func (siw *ServerInterfaceWrapper) UpdateCollabNotebook(c *fiber.Ctx) error {
+
+ var err error
+
+ // ------------- Path parameter "notebookID" -------------
+ var notebookID string
+
+ err = runtime.BindStyledParameterWithOptions("simple", "notebookID", c.Params("notebookID"), ¬ebookID, runtime.BindStyledParameterOptions{Explode: false, Required: true})
+ if err != nil {
+ return fiber.NewError(fiber.StatusBadRequest, fmt.Errorf("Invalid format for parameter notebookID: %w", err).Error())
+ }
+
+ c.Context().SetUserValue(BearerAuthScopes, []string{})
+
+ return siw.Handler.UpdateCollabNotebook(c, notebookID)
+}
+
+// UploadCollabNotebookSnapshot operation middleware
+func (siw *ServerInterfaceWrapper) UploadCollabNotebookSnapshot(c *fiber.Ctx) error {
+
+ var err error
+
+ // ------------- Path parameter "notebookID" -------------
+ var notebookID string
+
+ err = runtime.BindStyledParameterWithOptions("simple", "notebookID", c.Params("notebookID"), ¬ebookID, runtime.BindStyledParameterOptions{Explode: false, Required: true})
+ if err != nil {
+ return fiber.NewError(fiber.StatusBadRequest, fmt.Errorf("Invalid format for parameter notebookID: %w", err).Error())
+ }
+
+ c.Context().SetUserValue(BearerAuthScopes, []string{})
+
+ return siw.Handler.UploadCollabNotebookSnapshot(c, notebookID)
+}
+
+// TransferCollabNotebook operation middleware
+func (siw *ServerInterfaceWrapper) TransferCollabNotebook(c *fiber.Ctx) error {
+
+ var err error
+
+ // ------------- Path parameter "notebookID" -------------
+ var notebookID string
+
+ err = runtime.BindStyledParameterWithOptions("simple", "notebookID", c.Params("notebookID"), ¬ebookID, runtime.BindStyledParameterOptions{Explode: false, Required: true})
+ if err != nil {
+ return fiber.NewError(fiber.StatusBadRequest, fmt.Errorf("Invalid format for parameter notebookID: %w", err).Error())
+ }
+
+ c.Context().SetUserValue(BearerAuthScopes, []string{})
+
+ return siw.Handler.TransferCollabNotebook(c, notebookID)
+}
+
// ListDatabases operation middleware
func (siw *ServerInterfaceWrapper) ListDatabases(c *fiber.Ctx) error {
@@ -6271,6 +7319,20 @@ func RegisterHandlersWithOptions(router fiber.Router, si ServerInterface, option
router.Post(options.BaseURL+"/clusters/:ID/snapshots/:snapshotId", wrapper.RestoreClusterSnapshot)
+ router.Get(options.BaseURL+"/collab/notebooks", wrapper.ListCollabNotebooks)
+
+ router.Post(options.BaseURL+"/collab/notebooks", wrapper.CreateCollabNotebook)
+
+ router.Delete(options.BaseURL+"/collab/notebooks/:notebookID", wrapper.DeleteCollabNotebook)
+
+ router.Get(options.BaseURL+"/collab/notebooks/:notebookID", wrapper.GetCollabNotebook)
+
+ router.Put(options.BaseURL+"/collab/notebooks/:notebookID", wrapper.UpdateCollabNotebook)
+
+ router.Put(options.BaseURL+"/collab/notebooks/:notebookID/snapshot", wrapper.UploadCollabNotebookSnapshot)
+
+ router.Post(options.BaseURL+"/collab/notebooks/:notebookID/transfer", wrapper.TransferCollabNotebook)
+
router.Get(options.BaseURL+"/databases", wrapper.ListDatabases)
router.Post(options.BaseURL+"/databases/import", wrapper.ImportDatabase)
diff --git a/pkg/zgen/querier/collab.sql.gen.go b/pkg/zgen/querier/collab.sql.gen.go
new file mode 100644
index 00000000..442d0598
--- /dev/null
+++ b/pkg/zgen/querier/collab.sql.gen.go
@@ -0,0 +1,301 @@
+// Code generated by sqlc. DO NOT EDIT.
+// versions:
+// sqlc v1.29.0
+// source: collab.sql
+
+package querier
+
+import (
+ "context"
+ "time"
+)
+
+const createNotebook = `-- name: CreateNotebook :one
+INSERT INTO notebooks (id, scope, org_id, owner_user_id, title, created_by)
+VALUES ($1, $2, $3, $4, $5, $6)
+RETURNING id, scope, org_id AS organization_id, owner_user_id, title, created_by, created_at, updated_at
+`
+
+type CreateNotebookParams struct {
+ ID string
+ Scope NotebookScope
+ OrgID int32
+ OwnerUserID *int32
+ Title string
+ CreatedBy *int32
+}
+
+type CreateNotebookRow struct {
+ ID string
+ Scope NotebookScope
+ OrganizationID int32
+ OwnerUserID *int32
+ Title string
+ CreatedBy *int32
+ CreatedAt time.Time
+ UpdatedAt time.Time
+}
+
+func (q *Queries) CreateNotebook(ctx context.Context, arg CreateNotebookParams) (*CreateNotebookRow, error) {
+ row := q.db.QueryRow(ctx, createNotebook,
+ arg.ID,
+ arg.Scope,
+ arg.OrgID,
+ arg.OwnerUserID,
+ arg.Title,
+ arg.CreatedBy,
+ )
+ var i CreateNotebookRow
+ err := row.Scan(
+ &i.ID,
+ &i.Scope,
+ &i.OrganizationID,
+ &i.OwnerUserID,
+ &i.Title,
+ &i.CreatedBy,
+ &i.CreatedAt,
+ &i.UpdatedAt,
+ )
+ return &i, err
+}
+
+const deleteNotebook = `-- name: DeleteNotebook :exec
+DELETE FROM notebooks
+WHERE id = $1
+`
+
+func (q *Queries) DeleteNotebook(ctx context.Context, id string) error {
+ _, err := q.db.Exec(ctx, deleteNotebook, id)
+ return err
+}
+
+const getCollabDocSnapshot = `-- name: GetCollabDocSnapshot :one
+SELECT snapshot FROM collab_doc_snapshots WHERE notebook_id = $1
+`
+
+func (q *Queries) GetCollabDocSnapshot(ctx context.Context, notebookID string) ([]byte, error) {
+ row := q.db.QueryRow(ctx, getCollabDocSnapshot, notebookID)
+ var snapshot []byte
+ err := row.Scan(&snapshot)
+ return snapshot, err
+}
+
+const getNotebook = `-- name: GetNotebook :one
+SELECT id, scope, org_id AS organization_id, owner_user_id, title, created_by, created_at, updated_at
+FROM notebooks
+WHERE id = $1
+`
+
+type GetNotebookRow struct {
+ ID string
+ Scope NotebookScope
+ OrganizationID int32
+ OwnerUserID *int32
+ Title string
+ CreatedBy *int32
+ CreatedAt time.Time
+ UpdatedAt time.Time
+}
+
+func (q *Queries) GetNotebook(ctx context.Context, id string) (*GetNotebookRow, error) {
+ row := q.db.QueryRow(ctx, getNotebook, id)
+ var i GetNotebookRow
+ err := row.Scan(
+ &i.ID,
+ &i.Scope,
+ &i.OrganizationID,
+ &i.OwnerUserID,
+ &i.Title,
+ &i.CreatedBy,
+ &i.CreatedAt,
+ &i.UpdatedAt,
+ )
+ return &i, err
+}
+
+const getUserOrganization = `-- name: GetUserOrganization :one
+SELECT org_id AS organization_id
+FROM anclax.user_default_orgs
+WHERE user_id = $1
+`
+
+func (q *Queries) GetUserOrganization(ctx context.Context, userID int32) (int32, error) {
+ row := q.db.QueryRow(ctx, getUserOrganization, userID)
+ var organization_id int32
+ err := row.Scan(&organization_id)
+ return organization_id, err
+}
+
+const isOrgOwner = `-- name: IsOrgOwner :one
+SELECT EXISTS(
+ SELECT 1 FROM anclax.org_owners WHERE user_id = $1 AND org_id = $2
+)
+`
+
+type IsOrgOwnerParams struct {
+ UserID int32
+ OrgID int32
+}
+
+func (q *Queries) IsOrgOwner(ctx context.Context, arg IsOrgOwnerParams) (bool, error) {
+ row := q.db.QueryRow(ctx, isOrgOwner, arg.UserID, arg.OrgID)
+ var exists bool
+ err := row.Scan(&exists)
+ return exists, err
+}
+
+const listAccessibleNotebooks = `-- name: ListAccessibleNotebooks :many
+SELECT id, scope, org_id AS organization_id, owner_user_id, title, created_by, created_at, updated_at
+FROM notebooks
+WHERE org_id = $1
+ AND (
+ scope = 'organization'::notebook_scope OR
+ (scope = 'personal'::notebook_scope AND owner_user_id = $2)
+ )
+ORDER BY updated_at DESC
+`
+
+type ListAccessibleNotebooksParams struct {
+ OrgID int32
+ OwnerUserID *int32
+}
+
+type ListAccessibleNotebooksRow struct {
+ ID string
+ Scope NotebookScope
+ OrganizationID int32
+ OwnerUserID *int32
+ Title string
+ CreatedBy *int32
+ CreatedAt time.Time
+ UpdatedAt time.Time
+}
+
+func (q *Queries) ListAccessibleNotebooks(ctx context.Context, arg ListAccessibleNotebooksParams) ([]*ListAccessibleNotebooksRow, error) {
+ rows, err := q.db.Query(ctx, listAccessibleNotebooks, arg.OrgID, arg.OwnerUserID)
+ if err != nil {
+ return nil, err
+ }
+ defer rows.Close()
+ var items []*ListAccessibleNotebooksRow
+ for rows.Next() {
+ var i ListAccessibleNotebooksRow
+ if err := rows.Scan(
+ &i.ID,
+ &i.Scope,
+ &i.OrganizationID,
+ &i.OwnerUserID,
+ &i.Title,
+ &i.CreatedBy,
+ &i.CreatedAt,
+ &i.UpdatedAt,
+ ); err != nil {
+ return nil, err
+ }
+ items = append(items, &i)
+ }
+ if err := rows.Err(); err != nil {
+ return nil, err
+ }
+ return items, nil
+}
+
+const updateNotebookScope = `-- name: UpdateNotebookScope :one
+UPDATE notebooks
+SET scope = $2,
+ owner_user_id = $3,
+ updated_at = CURRENT_TIMESTAMP
+WHERE id = $1
+RETURNING id, scope, org_id AS organization_id, owner_user_id, title, created_by, created_at, updated_at
+`
+
+type UpdateNotebookScopeParams struct {
+ ID string
+ Scope NotebookScope
+ OwnerUserID *int32
+}
+
+type UpdateNotebookScopeRow struct {
+ ID string
+ Scope NotebookScope
+ OrganizationID int32
+ OwnerUserID *int32
+ Title string
+ CreatedBy *int32
+ CreatedAt time.Time
+ UpdatedAt time.Time
+}
+
+func (q *Queries) UpdateNotebookScope(ctx context.Context, arg UpdateNotebookScopeParams) (*UpdateNotebookScopeRow, error) {
+ row := q.db.QueryRow(ctx, updateNotebookScope, arg.ID, arg.Scope, arg.OwnerUserID)
+ var i UpdateNotebookScopeRow
+ err := row.Scan(
+ &i.ID,
+ &i.Scope,
+ &i.OrganizationID,
+ &i.OwnerUserID,
+ &i.Title,
+ &i.CreatedBy,
+ &i.CreatedAt,
+ &i.UpdatedAt,
+ )
+ return &i, err
+}
+
+const updateNotebookTitle = `-- name: UpdateNotebookTitle :one
+UPDATE notebooks
+SET title = $2,
+ updated_at = CURRENT_TIMESTAMP
+WHERE id = $1
+RETURNING id, scope, org_id AS organization_id, owner_user_id, title, created_by, created_at, updated_at
+`
+
+type UpdateNotebookTitleParams struct {
+ ID string
+ Title string
+}
+
+type UpdateNotebookTitleRow struct {
+ ID string
+ Scope NotebookScope
+ OrganizationID int32
+ OwnerUserID *int32
+ Title string
+ CreatedBy *int32
+ CreatedAt time.Time
+ UpdatedAt time.Time
+}
+
+func (q *Queries) UpdateNotebookTitle(ctx context.Context, arg UpdateNotebookTitleParams) (*UpdateNotebookTitleRow, error) {
+ row := q.db.QueryRow(ctx, updateNotebookTitle, arg.ID, arg.Title)
+ var i UpdateNotebookTitleRow
+ err := row.Scan(
+ &i.ID,
+ &i.Scope,
+ &i.OrganizationID,
+ &i.OwnerUserID,
+ &i.Title,
+ &i.CreatedBy,
+ &i.CreatedAt,
+ &i.UpdatedAt,
+ )
+ return &i, err
+}
+
+const upsertCollabDocSnapshot = `-- name: UpsertCollabDocSnapshot :exec
+INSERT INTO collab_doc_snapshots (notebook_id, snapshot)
+VALUES ($1, $2)
+ON CONFLICT (notebook_id)
+DO UPDATE SET snapshot = EXCLUDED.snapshot,
+ updated_at = CURRENT_TIMESTAMP
+`
+
+type UpsertCollabDocSnapshotParams struct {
+ NotebookID string
+ Snapshot []byte
+}
+
+func (q *Queries) UpsertCollabDocSnapshot(ctx context.Context, arg UpsertCollabDocSnapshotParams) error {
+ _, err := q.db.Exec(ctx, upsertCollabDocSnapshot, arg.NotebookID, arg.Snapshot)
+ return err
+}
diff --git a/pkg/zgen/querier/models_gen.go b/pkg/zgen/querier/models_gen.go
index 1e261f9b..f0b19a36 100644
--- a/pkg/zgen/querier/models_gen.go
+++ b/pkg/zgen/querier/models_gen.go
@@ -5,12 +5,157 @@
package querier
import (
+ "database/sql/driver"
+ "encoding/json"
+ "fmt"
"time"
"github.com/jackc/pgx/v5/pgtype"
"github.com/risingwavelabs/risingwave-console/pkg/zgen/apigen"
)
+type NotebookScope string
+
+const (
+ NotebookScopePersonal NotebookScope = "personal"
+ NotebookScopeOrganization NotebookScope = "organization"
+)
+
+func (e *NotebookScope) Scan(src interface{}) error {
+ switch s := src.(type) {
+ case []byte:
+ *e = NotebookScope(s)
+ case string:
+ *e = NotebookScope(s)
+ default:
+ return fmt.Errorf("unsupported scan type for NotebookScope: %T", src)
+ }
+ return nil
+}
+
+type NullNotebookScope struct {
+ NotebookScope NotebookScope
+ Valid bool // Valid is true if NotebookScope is not NULL
+}
+
+// Scan implements the Scanner interface.
+func (ns *NullNotebookScope) Scan(value interface{}) error {
+ if value == nil {
+ ns.NotebookScope, ns.Valid = "", false
+ return nil
+ }
+ ns.Valid = true
+ return ns.NotebookScope.Scan(value)
+}
+
+// Value implements the driver Valuer interface.
+func (ns NullNotebookScope) Value() (driver.Value, error) {
+ if !ns.Valid {
+ return nil, nil
+ }
+ return string(ns.NotebookScope), nil
+}
+
+type AnclaxAccessKeyPair struct {
+ AccessKey string
+ SecretKey string
+ CreatedAt time.Time
+ UpdatedAt time.Time
+}
+
+type AnclaxAccessRule struct {
+ Name string
+ Description string
+ CreatedAt time.Time
+ UpdatedAt time.Time
+}
+
+type AnclaxEvent struct {
+ ID int32
+ Spec json.RawMessage
+ CreatedAt time.Time
+}
+
+type AnclaxOpaqueKey struct {
+ ID int64
+ Key []byte
+ UserID int32
+ CreatedAt time.Time
+ UpdatedAt time.Time
+}
+
+type AnclaxOrg struct {
+ ID int32
+ Name string
+ Tz string
+ CreatedAt time.Time
+ UpdatedAt time.Time
+}
+
+type AnclaxOrgOwner struct {
+ OrgID int32
+ UserID int32
+ CreatedAt time.Time
+}
+
+type AnclaxOrgUser struct {
+ OrgID int32
+ UserID int32
+ CreatedAt time.Time
+ UpdatedAt time.Time
+}
+
+type AnclaxRole struct {
+ ID int32
+ OrgID int32
+ Name string
+ Description string
+ CreatedAt time.Time
+ UpdatedAt time.Time
+}
+
+type AnclaxRoleAccessRule struct {
+ RoleID int32
+ AccessRuleName string
+ CreatedAt time.Time
+ UpdatedAt time.Time
+}
+
+type AnclaxTask struct {
+ ID int32
+ Attributes json.RawMessage
+ Spec json.RawMessage
+ Status string
+ UniqueTag *string
+ StartedAt *time.Time
+ CreatedAt time.Time
+ UpdatedAt time.Time
+ Attempts int32
+}
+
+type AnclaxUser struct {
+ ID int32
+ Name string
+ PasswordHash string
+ PasswordSalt string
+ CreatedAt time.Time
+ UpdatedAt time.Time
+ DeletedAt *time.Time
+}
+
+type AnclaxUserDefaultOrg struct {
+ UserID int32
+ OrgID int32
+ CreatedAt time.Time
+}
+
+type AnclaxUsersRole struct {
+ UserID int32
+ RoleID int32
+ CreatedAt time.Time
+ UpdatedAt time.Time
+}
+
type AutoBackupConfig struct {
ClusterID int32
Enabled bool
@@ -60,6 +205,13 @@ type ClusterSnapshot struct {
UpdatedAt time.Time
}
+type CollabDocSnapshot struct {
+ NotebookID string
+ Snapshot []byte
+ CreatedAt time.Time
+ UpdatedAt time.Time
+}
+
type DatabaseConnection struct {
ID int32
OrgID int32
@@ -82,6 +234,17 @@ type MetricsStore struct {
UpdatedAt pgtype.Timestamp
}
+type Notebook struct {
+ ID string
+ Scope NotebookScope
+ OrgID int32
+ OwnerUserID *int32
+ CreatedBy *int32
+ Title string
+ CreatedAt time.Time
+ UpdatedAt time.Time
+}
+
type OpaqueKey struct {
ID int64
Key []byte
diff --git a/pkg/zgen/querier/querier_gen.go b/pkg/zgen/querier/querier_gen.go
index 378506fe..1f8cb8cc 100644
--- a/pkg/zgen/querier/querier_gen.go
+++ b/pkg/zgen/querier/querier_gen.go
@@ -16,11 +16,13 @@ type Querier interface {
CreateClusterSnapshot(ctx context.Context, arg CreateClusterSnapshotParams) error
CreateDatabaseConnection(ctx context.Context, arg CreateDatabaseConnectionParams) (*DatabaseConnection, error)
CreateMetricsStore(ctx context.Context, arg CreateMetricsStoreParams) (*MetricsStore, error)
+ CreateNotebook(ctx context.Context, arg CreateNotebookParams) (*CreateNotebookRow, error)
CreateOrgSettings(ctx context.Context, arg CreateOrgSettingsParams) error
DeleteAllOrgDatabaseConnectionsByClusterID(ctx context.Context, arg DeleteAllOrgDatabaseConnectionsByClusterIDParams) error
DeleteClusterDiagnostic(ctx context.Context, id int32) error
DeleteClusterSnapshot(ctx context.Context, arg DeleteClusterSnapshotParams) error
DeleteMetricsStore(ctx context.Context, arg DeleteMetricsStoreParams) error
+ DeleteNotebook(ctx context.Context, id string) error
DeleteOrgCluster(ctx context.Context, arg DeleteOrgClusterParams) error
DeleteOrgDatabaseConnection(ctx context.Context, arg DeleteOrgDatabaseConnectionParams) error
GetAllOrgDatabseConnectionsByClusterID(ctx context.Context, arg GetAllOrgDatabseConnectionsByClusterIDParams) ([]*DatabaseConnection, error)
@@ -28,16 +30,21 @@ type Querier interface {
GetAutoDiagnosticsConfig(ctx context.Context, clusterID int32) (*AutoDiagnosticsConfig, error)
GetClusterByID(ctx context.Context, id int32) (*Cluster, error)
GetClusterDiagnostic(ctx context.Context, id int32) (*ClusterDiagnostic, error)
+ GetCollabDocSnapshot(ctx context.Context, notebookID string) ([]byte, error)
GetDatabaseConnectionByID(ctx context.Context, id int32) (*DatabaseConnection, error)
GetMetricsStore(ctx context.Context, id int32) (*MetricsStore, error)
GetMetricsStoreByIDAndOrgID(ctx context.Context, arg GetMetricsStoreByIDAndOrgIDParams) (*MetricsStore, error)
+ GetNotebook(ctx context.Context, id string) (*GetNotebookRow, error)
GetOrgCluster(ctx context.Context, arg GetOrgClusterParams) (*Cluster, error)
GetOrgDatabaseByID(ctx context.Context, arg GetOrgDatabaseByIDParams) (*DatabaseConnection, error)
GetOrgDatabaseConnection(ctx context.Context, arg GetOrgDatabaseConnectionParams) (*DatabaseConnection, error)
GetOrgSettings(ctx context.Context, orgID int32) (*OrgSetting, error)
+ GetUserOrganization(ctx context.Context, userID int32) (int32, error)
InitCluster(ctx context.Context, arg InitClusterParams) (*Cluster, error)
InitDatabaseConnection(ctx context.Context, arg InitDatabaseConnectionParams) (*DatabaseConnection, error)
InitMetricsStore(ctx context.Context, arg InitMetricsStoreParams) (*MetricsStore, error)
+ IsOrgOwner(ctx context.Context, arg IsOrgOwnerParams) (bool, error)
+ ListAccessibleNotebooks(ctx context.Context, arg ListAccessibleNotebooksParams) ([]*ListAccessibleNotebooksRow, error)
ListClusterDiagnostics(ctx context.Context, clusterID int32) ([]*ListClusterDiagnosticsRow, error)
ListClusterSnapshots(ctx context.Context, clusterID int32) ([]*ClusterSnapshot, error)
ListClustersByMetricsStoreID(ctx context.Context, metricsStoreID *int32) ([]*Cluster, error)
@@ -48,8 +55,11 @@ type Querier interface {
UpdateAutoBackupConfig(ctx context.Context, arg UpdateAutoBackupConfigParams) error
UpdateAutoDiagnosticsConfig(ctx context.Context, arg UpdateAutoDiagnosticsConfigParams) error
UpdateMetricsStore(ctx context.Context, arg UpdateMetricsStoreParams) (*MetricsStore, error)
+ UpdateNotebookScope(ctx context.Context, arg UpdateNotebookScopeParams) (*UpdateNotebookScopeRow, error)
+ UpdateNotebookTitle(ctx context.Context, arg UpdateNotebookTitleParams) (*UpdateNotebookTitleRow, error)
UpdateOrgCluster(ctx context.Context, arg UpdateOrgClusterParams) (*Cluster, error)
UpdateOrgDatabaseConnection(ctx context.Context, arg UpdateOrgDatabaseConnectionParams) (*DatabaseConnection, error)
+ UpsertCollabDocSnapshot(ctx context.Context, arg UpsertCollabDocSnapshotParams) error
}
var _ Querier = (*Queries)(nil)
diff --git a/pkg/zgen/taskgen/taskgen_gen.go b/pkg/zgen/taskgen/taskgen_gen.go
index 530df3a8..4952a5c6 100644
--- a/pkg/zgen/taskgen/taskgen_gen.go
+++ b/pkg/zgen/taskgen/taskgen_gen.go
@@ -260,10 +260,10 @@ type DeleteClusterDiagnosticParameters struct {
type DeleteSnapshotParameters struct {
//
- SnapshotID int64 `json:"snapshotID" yaml:"snapshotID"`
+ ClusterID int32 `json:"clusterID" yaml:"clusterID"`
//
- ClusterID int32 `json:"clusterID" yaml:"clusterID"`
+ SnapshotID int64 `json:"snapshotID" yaml:"snapshotID"`
}
func (r *AutoBackupParameters) Parse(spec json.RawMessage) error {
diff --git a/sql/migrations/0008_collab_doc_snapshots.down.sql b/sql/migrations/0008_collab_doc_snapshots.down.sql
new file mode 100644
index 00000000..6409104a
--- /dev/null
+++ b/sql/migrations/0008_collab_doc_snapshots.down.sql
@@ -0,0 +1,7 @@
+BEGIN;
+
+DROP TABLE IF EXISTS collab_doc_snapshots;
+DROP TABLE IF EXISTS notebooks;
+DROP TYPE IF EXISTS notebook_scope;
+
+COMMIT;
diff --git a/sql/migrations/0008_collab_doc_snapshots.up.sql b/sql/migrations/0008_collab_doc_snapshots.up.sql
new file mode 100644
index 00000000..a269729d
--- /dev/null
+++ b/sql/migrations/0008_collab_doc_snapshots.up.sql
@@ -0,0 +1,33 @@
+BEGIN;
+
+CREATE TYPE notebook_scope AS ENUM ('personal', 'organization');
+
+CREATE TABLE notebooks (
+ id TEXT NOT NULL,
+ scope notebook_scope NOT NULL,
+ org_id INTEGER NOT NULL REFERENCES anclax.orgs(id) ON DELETE CASCADE,
+ owner_user_id INTEGER REFERENCES anclax.users(id) ON DELETE CASCADE,
+ created_by INTEGER REFERENCES anclax.users(id) ON DELETE SET NULL,
+ title TEXT NOT NULL DEFAULT '',
+ created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
+
+ PRIMARY KEY (id),
+ CHECK (
+ (scope = 'personal' AND owner_user_id IS NOT NULL) OR
+ (scope = 'organization' AND owner_user_id IS NULL)
+ )
+);
+
+-- Stores the latest Yjs document snapshot for each collaborative notebook.
+-- The snapshot is the raw binary produced by the client (e.g. Y.encodeStateAsUpdate(doc)).
+CREATE TABLE collab_doc_snapshots (
+ notebook_id TEXT NOT NULL REFERENCES notebooks(id) ON DELETE CASCADE,
+ snapshot BYTEA NOT NULL,
+ created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
+
+ PRIMARY KEY (notebook_id)
+);
+
+COMMIT;
diff --git a/sql/queries/collab.sql b/sql/queries/collab.sql
new file mode 100644
index 00000000..b0e85e4d
--- /dev/null
+++ b/sql/queries/collab.sql
@@ -0,0 +1,58 @@
+-- name: CreateNotebook :one
+INSERT INTO notebooks (id, scope, org_id, owner_user_id, title, created_by)
+VALUES ($1, $2, $3, $4, $5, $6)
+RETURNING id, scope, org_id AS organization_id, owner_user_id, title, created_by, created_at, updated_at;
+
+-- name: GetNotebook :one
+SELECT id, scope, org_id AS organization_id, owner_user_id, title, created_by, created_at, updated_at
+FROM notebooks
+WHERE id = $1;
+
+-- name: ListAccessibleNotebooks :many
+SELECT id, scope, org_id AS organization_id, owner_user_id, title, created_by, created_at, updated_at
+FROM notebooks
+WHERE org_id = $1
+ AND (
+ scope = 'organization'::notebook_scope OR
+ (scope = 'personal'::notebook_scope AND owner_user_id = $2)
+ )
+ORDER BY updated_at DESC;
+
+-- name: UpdateNotebookTitle :one
+UPDATE notebooks
+SET title = $2,
+ updated_at = CURRENT_TIMESTAMP
+WHERE id = $1
+RETURNING id, scope, org_id AS organization_id, owner_user_id, title, created_by, created_at, updated_at;
+
+-- name: UpdateNotebookScope :one
+UPDATE notebooks
+SET scope = $2,
+ owner_user_id = $3,
+ updated_at = CURRENT_TIMESTAMP
+WHERE id = $1
+RETURNING id, scope, org_id AS organization_id, owner_user_id, title, created_by, created_at, updated_at;
+
+-- name: DeleteNotebook :exec
+DELETE FROM notebooks
+WHERE id = $1;
+
+-- name: UpsertCollabDocSnapshot :exec
+INSERT INTO collab_doc_snapshots (notebook_id, snapshot)
+VALUES ($1, $2)
+ON CONFLICT (notebook_id)
+DO UPDATE SET snapshot = EXCLUDED.snapshot,
+ updated_at = CURRENT_TIMESTAMP;
+
+-- name: GetCollabDocSnapshot :one
+SELECT snapshot FROM collab_doc_snapshots WHERE notebook_id = $1;
+
+-- name: IsOrgOwner :one
+SELECT EXISTS(
+ SELECT 1 FROM anclax.org_owners WHERE user_id = $1 AND org_id = $2
+);
+
+-- name: GetUserOrganization :one
+SELECT org_id AS organization_id
+FROM anclax.user_default_orgs
+WHERE user_id = $1;
diff --git a/sql/sqlc.yaml b/sql/sqlc.yaml
index 445c14e1..83c6930d 100644
--- a/sql/sqlc.yaml
+++ b/sql/sqlc.yaml
@@ -1,6 +1,8 @@
version: "2"
sql:
- - schema: "migrations"
+ - schema:
+ - "migrations"
+ - "../dev/anclax/sql/migrations"
queries: "queries"
engine: "postgresql"
gen:
diff --git a/web/.cursor/rules/styling.mdc b/web/.cursor/rules/styling.mdc
index 95499c6a..8deb4431 100644
--- a/web/.cursor/rules/styling.mdc
+++ b/web/.cursor/rules/styling.mdc
@@ -38,7 +38,7 @@ const className = sizeClasses[size]
## Class Utility Function
-Use the `cn()` utility from `src/lib/cn.ts` for combining classes:
+Use the `cn()` utility from `lib/cn.ts` for combining classes:
```typescript
import { cn } from '@/lib/cn'
diff --git a/web/.gitignore b/web/.gitignore
index 45e4bc10..13a53247 100644
--- a/web/.gitignore
+++ b/web/.gitignore
@@ -40,3 +40,6 @@ yarn-error.log*
next-env.d.ts
out/*
!out/.gitkeep
+
+# monaco editor (copied from node_modules)
+/public/monaco-editor
diff --git a/web/CLAUDE.md b/web/CLAUDE.md
new file mode 100644
index 00000000..b629704f
--- /dev/null
+++ b/web/CLAUDE.md
@@ -0,0 +1,284 @@
+# CLAUDE.md
+
+This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.
+
+## Development Commands
+
+**Start Development Server:**
+```bash
+npm run dev
+```
+Uses Next.js with Turbopack for fast development.
+
+**Build Project:**
+```bash
+npm run build
+```
+Builds static export with out/.gitkeep file.
+
+**Lint Code:**
+```bash
+npm run lint
+```
+Runs Next.js ESLint configuration.
+
+**Generate API Clients:**
+```bash
+npm run gen
+```
+Generates API clients from OpenAPI specs for both local API (../api/v1.yaml) and Anchor API.
+
+## Architecture Overview
+
+This is a **Next.js 15** application with **static export** configuration built for RisingWave Console management. The application uses a modular architecture with strict component organization patterns.
+
+### Key Technologies
+- **Next.js 15** with App Router and static export
+- **React 19** with TypeScript
+- **Jotai** for state management with custom utilities
+- **TailwindCSS** for styling with strict static class requirements
+- **Framer Motion** with LazyMotion optimization (using `m.` prefix)
+- **Radix UI** primitives for accessible components
+- **React Query** (@tanstack/react-query) for data fetching
+- **Monaco Editor** for code editing capabilities
+
+### Directory Structure
+
+**`/app/`** - Next.js App Router pages
+- Route-based pages (clusters, login, sqlconsole, settings, etc.)
+- Main layout with RootProviders wrapper
+
+**`/modules/`** - Feature-specific components organized by domain
+- `clusters/` - Cluster management UI
+- `database/` - Database operations and insights
+- `sqlconsole/` - SQL editor and query execution
+- `tasks/` - Task execution history
+- `notebook/` - Notebook functionality (new module)
+
+**`/components/`** - Shared component library
+- `ui/` - Universal Radix-based primitives (Button, Input, Dialog, etc.)
+- `common/` - App-specific shared components (AppSidebar, AuthGuard, etc.)
+
+**`/lib/`** - Core utilities and configuration
+- `jotai.ts` - Custom Jotai store and hook utilities
+- `utils.ts` - TailwindCSS class merging utility (`cn`)
+- `query-client.ts` - React Query configuration
+- `framer-lazy-feature.ts` - LazyMotion feature loading
+
+**`/atoms/`** - Jotai state atoms
+- `auth.ts` - Authentication state
+- `context-menu.ts` - UI context menu state
+- `viewport.ts` - Viewport/layout state
+
+**`/providers/`** - React context providers
+- `root-providers.tsx` - Main provider composition
+- `auth-provider.tsx` - Authentication context
+
+## Component Organization Rules
+
+**Universal Components** → `components/ui/`
+- Reusable across any React application
+- Based on Radix UI primitives
+- No business logic
+
+**Feature Components** → `modules/{domain}/`
+- Domain-specific business logic
+- Examples: `FeedTimeline`, `ClusterList`, `SqlEditor`
+
+**App-Specific Shared** → `components/common/`
+- Used across features but specific to this app
+- Examples: `AppSidebar`, `AuthGuard`
+
+Always use `@/` path alias for imports (configured in tsconfig.json).
+
+## State Management with Jotai
+
+Use the custom utilities from `lib/jotai.ts`:
+
+```typescript
+// Create atom with hooks
+const [useMyAtom, useSetMyAtom, useMyAtomValue, myAtom] = createAtomHooks(atom(null))
+
+// Use global store
+import { jotaiStore } from '@/lib/jotai'
+```
+
+Store atoms in `/atoms/` directory and use the global `jotaiStore` instance configured in RootProviders.
+
+## Styling Guidelines
+
+**Critical:** All TailwindCSS classes must be **statically defined** - no dynamic className construction.
+
+```typescript
+// ❌ Wrong - Dynamic classes won't work
+const className = `text-${size}`
+
+// ✅ Correct - Static conditional classes
+const className = clsx({
+ 'text-base': size === 'small',
+ 'text-lg': size === 'large',
+})
+```
+
+Always use the `cn()` utility from `@/lib/utils` for class composition:
+
+```typescript
+import { cn } from '@/lib/utils'
+
+const className = cn('p-2', isActive && 'bg-accent')
+```
+
+## Animation Guidelines
+
+**Critical:** Always use `m.` prefix instead of `motion.` for LazyMotion optimization:
+
+```typescript
+// ✅ Correct
+import { m } from 'framer-motion'
+
+<m.div animate={{ opacity: 1 }} />
+// ❌ Wrong - Breaks LazyMotion
+import { motion } from 'framer-motion'
+<motion.div /> // Don't use this
+```
+
+## API Integration
+
+The project generates API clients from OpenAPI specs:
+- Local API: `../api/v1.yaml` → `./api-gen/`
+- Anchor API: Remote spec → `./api-anchor/`
+
+Use React Query for data fetching with the configured query client from `lib/query-client.ts`.
+
+## Build Configuration
+
+- **Static Export:** Next.js configured for static file generation
+- **Image Optimization:** Disabled for static export compatibility
+- **Trailing Slashes:** Enabled for static hosting
+- **TypeScript:** Strict mode enabled with path aliases
+
+## Development Guidelines
+
+### Component Placement Rules
+
+**Universal Components** → `components/ui/`
+- If the component could be used in any React app
+- Pure UI components without business logic
+- Reusable across different domains
+
+**Feature Components** → `modules/{domain}/`
+- If the component is specific to a business domain/feature
+- Contains domain-specific logic or data handling
+- Examples: `FeedTimeline`, `UserProfile`, `AuthForm`
+
+**App-Specific Shared** → `components/common/`
+- If the component is used across features but specific to this app
+- Contains app-specific logic but used in multiple places
+
+### Jotai State Management Patterns
+
+**Global Store Setup:**
+```typescript
+import { jotaiStore } from '@/lib/jotai'
+```
+
+**Custom Hook Utility:**
+```typescript
+import { createAtomHooks } from '@/lib/jotai'
+const [useMyAtom, useSetMyAtom, useMyAtomValue, myAtom] = createAtomHooks(atom(null))
+```
+
+**Atom Organization:**
+- Store atoms in `atoms/` directory
+- Use descriptive names ending with `Atom`
+- Group related atoms in the same file
+- Always use the configured `jotaiStore` instance
+
+**Usage Patterns:**
+```typescript
+// Reading state
+import { useAtomValue } from 'jotai'
+import { userAtom } from '@/atoms/user'
+
+function UserProfile() {
+ const user = useAtomValue(userAtom)
+  return <div>{user?.name}</div>
+}
+
+// Writing state
+import { useSetAtom } from 'jotai'
+const setUser = useSetAtom(userAtom)
+
+// Reading and writing
+import { useAtom } from 'jotai'
+const [user, setUser] = useAtom(userAtom)
+```
+
+### Animation with Framer Motion + LazyMotion
+
+**Critical Rule: Always use `m.` prefix instead of `motion.`**
+
+```typescript
+// ✅ Correct
+import { m } from 'framer-motion'
+
+<m.div animate={{ opacity: 1 }} />
+// ❌ Wrong - Breaks LazyMotion optimization
+import { motion } from 'framer-motion'
+<motion.div /> // Never use this
+```
+
+Available with any HTML element: `m.div`, `m.span`, `m.button`, etc.
+
+### TailwindCSS Styling Rules
+
+**Critical: No Dynamic Classes**
+All Tailwind classes must be statically defined - no dynamic className construction:
+
+```typescript
+// ❌ Wrong - Dynamic class construction
+const size = 'large'
+const className = `text-${size}` // Won't work with Tailwind purging
+
+// ✅ Correct - Static classes with conditional logic
+const className = clsx({
+ 'text-base': size === 'small',
+ 'text-lg': size === 'medium',
+ 'text-xl': size === 'large',
+})
+```
+
+**Class Composition:**
+Always use the `cn()` utility from `@/lib/utils`:
+
+```typescript
+import { cn } from '@/lib/utils'
+
+function Button({ className, variant = 'primary', ...props }) {
+ return (
+    <button className={cn('rounded px-3 py-2', className)} {...props} />
+ )
+}
+```
+
+**Component Styling Pattern:**
+Use object-based variant patterns for reusable components with size and color variants.
+
+## Development Notes
+
+- The application expects a `config.js` file in the public directory (loaded before React hydration)
+- Authentication is handled through the AuthProvider with route guarding
+- The sidebar navigation is implemented as a collapsible component using Radix UI
+- Monaco Editor is integrated for SQL editing capabilities
+- The application supports dark mode through next-themes integration
+- Provider setup includes Jotai Provider with global store in `providers/root-providers.tsx`
\ No newline at end of file
diff --git a/web/api-gen/index.ts b/web/api-gen/index.ts
index 5bffcde2..7a1dfe69 100644
--- a/web/api-gen/index.ts
+++ b/web/api-gen/index.ts
@@ -12,6 +12,9 @@ export type { AutoDiagnosticConfig } from './models/AutoDiagnosticConfig';
export type { Cluster } from './models/Cluster';
export type { ClusterCreate } from './models/ClusterCreate';
export type { ClusterImport } from './models/ClusterImport';
+export { CollabNotebook } from './models/CollabNotebook';
+export { CollabNotebookCreateRequest } from './models/CollabNotebookCreateRequest';
+export { CollabNotebookTransferRequest } from './models/CollabNotebookTransferRequest';
export type { Column } from './models/Column';
export { Credentials } from './models/Credentials';
export type { Database } from './models/Database';
diff --git a/web/api-gen/models/CollabNotebook.ts b/web/api-gen/models/CollabNotebook.ts
new file mode 100644
index 00000000..9ce4b948
--- /dev/null
+++ b/web/api-gen/models/CollabNotebook.ts
@@ -0,0 +1,48 @@
+/* generated using openapi-typescript-codegen -- do not edit */
+/* istanbul ignore file */
+/* tslint:disable */
+/* eslint-disable */
+export type CollabNotebook = {
+ /**
+ * Unique identifier of the collaborative notebook
+ */
+ notebookID: string;
+ /**
+ * Title of the collaborative notebook
+ */
+ title: string;
+ /**
+ * Ownership scope of the notebook
+ */
+ scope?: CollabNotebook.scope;
+ /**
+ * Organization that currently owns the notebook
+ */
+ organizationID?: number;
+ /**
+ * User that owns the notebook when scope is personal
+ */
+ ownerUserID?: number;
+ /**
+ * User that originally created the notebook
+ */
+ createdByUserID?: number;
+ /**
+ * Creation timestamp
+ */
+ createdAt: string;
+ /**
+ * Last update timestamp
+ */
+ updatedAt: string;
+};
+export namespace CollabNotebook {
+ /**
+ * Ownership scope of the notebook
+ */
+ export enum scope {
+ PERSONAL = 'personal',
+ ORGANIZATION = 'organization',
+ }
+}
+
diff --git a/web/api-gen/models/CollabNotebookCreateRequest.ts b/web/api-gen/models/CollabNotebookCreateRequest.ts
new file mode 100644
index 00000000..43f17146
--- /dev/null
+++ b/web/api-gen/models/CollabNotebookCreateRequest.ts
@@ -0,0 +1,24 @@
+/* generated using openapi-typescript-codegen -- do not edit */
+/* istanbul ignore file */
+/* tslint:disable */
+/* eslint-disable */
+export type CollabNotebookCreateRequest = {
+ /**
+ * Title of the collaborative notebook
+ */
+ title: string;
+ /**
+ * Ownership scope of the notebook; defaults to personal when omitted
+ */
+ scope?: CollabNotebookCreateRequest.scope;
+};
+export namespace CollabNotebookCreateRequest {
+ /**
+ * Ownership scope of the notebook; defaults to personal when omitted
+ */
+ export enum scope {
+ PERSONAL = 'personal',
+ ORGANIZATION = 'organization',
+ }
+}
+
diff --git a/web/api-gen/models/CollabNotebookTransferRequest.ts b/web/api-gen/models/CollabNotebookTransferRequest.ts
new file mode 100644
index 00000000..308bf670
--- /dev/null
+++ b/web/api-gen/models/CollabNotebookTransferRequest.ts
@@ -0,0 +1,24 @@
+/* generated using openapi-typescript-codegen -- do not edit */
+/* istanbul ignore file */
+/* tslint:disable */
+/* eslint-disable */
+export type CollabNotebookTransferRequest = {
+ /**
+ * Desired ownership scope
+ */
+ targetScope: CollabNotebookTransferRequest.targetScope;
+ /**
+ * Target owner when moving to personal scope; defaults to the caller
+ */
+ ownerUserID?: number;
+};
+export namespace CollabNotebookTransferRequest {
+ /**
+ * Desired ownership scope
+ */
+ export enum targetScope {
+ PERSONAL = 'personal',
+ ORGANIZATION = 'organization',
+ }
+}
+
diff --git a/web/api-gen/services/DefaultService.ts b/web/api-gen/services/DefaultService.ts
index f3b6c515..3c24c39a 100644
--- a/web/api-gen/services/DefaultService.ts
+++ b/web/api-gen/services/DefaultService.ts
@@ -7,6 +7,9 @@ import type { AutoDiagnosticConfig } from '../models/AutoDiagnosticConfig';
import type { Cluster } from '../models/Cluster';
import type { ClusterCreate } from '../models/ClusterCreate';
import type { ClusterImport } from '../models/ClusterImport';
+import type { CollabNotebook } from '../models/CollabNotebook';
+import type { CollabNotebookCreateRequest } from '../models/CollabNotebookCreateRequest';
+import type { CollabNotebookTransferRequest } from '../models/CollabNotebookTransferRequest';
import type { Database } from '../models/Database';
import type { DatabaseConnectInfo } from '../models/DatabaseConnectInfo';
import type { DDLProgress } from '../models/DDLProgress';
@@ -743,4 +746,146 @@ export class DefaultService {
},
});
}
+ /**
+ * List collaborative notebooks
+ * Retrieve a list of collaborative notebooks
+ * @returns CollabNotebook Successfully retrieved collaborative notebook list
+ * @throws ApiError
+ */
+ public static listCollabNotebooks(): CancelablePromise<Array<CollabNotebook>> {
+ return __request(OpenAPI, {
+ method: 'GET',
+ url: '/collab/notebooks',
+ });
+ }
+ /**
+ * Create a collaborative notebook
+ * Create a new collaborative notebook
+ * @param requestBody
+ * @returns CollabNotebook Successfully created collaborative notebook
+ * @throws ApiError
+ */
+ public static createCollabNotebook(
+ requestBody: CollabNotebookCreateRequest,
+ ): CancelablePromise<CollabNotebook> {
+ return __request(OpenAPI, {
+ method: 'POST',
+ url: '/collab/notebooks',
+ body: requestBody,
+ mediaType: 'application/json',
+ });
+ }
+ /**
+ * Get collaborative notebook details
+ * Retrieve details of a specific collaborative notebook
+ * @param notebookId
+ * @returns CollabNotebook Successfully retrieved collaborative notebook
+ * @throws ApiError
+ */
+ public static getCollabNotebook(
+ notebookId: string,
+ ): CancelablePromise<CollabNotebook> {
+ return __request(OpenAPI, {
+ method: 'GET',
+ url: '/collab/notebooks/{notebookID}',
+ path: {
+ 'notebookID': notebookId,
+ },
+ });
+ }
+ /**
+ * Update a collaborative notebook
+ * Update details of a specific collaborative notebook
+ * @param notebookId
+ * @param requestBody
+ * @returns CollabNotebook Successfully updated collaborative notebook
+ * @throws ApiError
+ */
+ public static updateCollabNotebook(
+ notebookId: string,
+ requestBody: CollabNotebook,
+ ): CancelablePromise<CollabNotebook> {
+ return __request(OpenAPI, {
+ method: 'PUT',
+ url: '/collab/notebooks/{notebookID}',
+ path: {
+ 'notebookID': notebookId,
+ },
+ body: requestBody,
+ mediaType: 'application/json',
+ });
+ }
+ /**
+ * Delete a collaborative notebook
+ * Permanently delete a collaborative notebook
+ * @param notebookId
+ * @returns void
+ * @throws ApiError
+ */
+ public static deleteCollabNotebook(
+ notebookId: string,
+ ): CancelablePromise<void> {
+ return __request(OpenAPI, {
+ method: 'DELETE',
+ url: '/collab/notebooks/{notebookID}',
+ path: {
+ 'notebookID': notebookId,
+ },
+ errors: {
+ 403: `Operation not permitted`,
+ 404: `Notebook not found`,
+ },
+ });
+ }
+ /**
+ * Transfer collaborative notebook scope
+ * Move a collaborative notebook between personal and organization scopes.
+ * @param notebookId
+ * @param requestBody
+ * @returns CollabNotebook Successfully transferred collaborative notebook
+ * @throws ApiError
+ */
+ public static transferCollabNotebook(
+ notebookId: string,
+ requestBody: CollabNotebookTransferRequest,
+ ): CancelablePromise<CollabNotebook> {
+ return __request(OpenAPI, {
+ method: 'POST',
+ url: '/collab/notebooks/{notebookID}/transfer',
+ path: {
+ 'notebookID': notebookId,
+ },
+ body: requestBody,
+ mediaType: 'application/json',
+ errors: {
+ 400: `Invalid transfer request`,
+ 403: `Operation not permitted`,
+ 404: `Notebook not found`,
+ },
+ });
+ }
+ /**
+ * Upload a collaborative notebook snapshot
+ * Persist the latest Yjs snapshot for a collaborative notebook.
+ * @param notebookId Unique identifier of the collaborative notebook.
+ * @param requestBody
+ * @returns void
+ * @throws ApiError
+ */
+ public static uploadCollabNotebookSnapshot(
+ notebookId: string,
+ requestBody: Blob,
+ options?: { headers?: Record<string, string> },
+ ): CancelablePromise<void> {
+ return __request(OpenAPI, {
+ method: 'PUT',
+ url: '/collab/notebooks/{notebookID}/snapshot',
+ path: {
+ 'notebookID': notebookId,
+ },
+ headers: options?.headers,
+ body: requestBody,
+ mediaType: 'application/octet-stream',
+ });
+ }
}
diff --git a/web/app/globals.css b/web/app/globals.css
index 58143879..ecbdb3a8 100644
--- a/web/app/globals.css
+++ b/web/app/globals.css
@@ -90,3 +90,35 @@
@apply bg-background text-foreground;
}
}
+
+/* Notebook collaboration – Monaco remote selections */
+.yRemoteSelection {
+ position: relative;
+ border-radius: 2px;
+ background-color: var(--y-remote-color-bg, rgba(37, 99, 235, 0.18));
+ border-bottom: 1px solid var(--y-remote-color-border, rgba(37, 99, 235, 0.45));
+}
+
+.yRemoteSelectionHead::after,
+.yRemoteSelectionHead::before {
+ content: '';
+ position: absolute;
+ top: 0;
+ bottom: 0;
+ width: 2px;
+ border-radius: 9999px;
+ background-color: var(--y-remote-color-head, #2563eb);
+}
+
+.yRemoteSelectionHead::after {
+ right: -1px;
+}
+
+.yRemoteSelectionHead::before {
+ left: -1px;
+}
+
+
+.monaco-editor .line-numbers {
+ user-select: none;
+}
\ No newline at end of file
diff --git a/web/app/layout.tsx b/web/app/layout.tsx
index 1d64e8d8..9bbf0f63 100644
--- a/web/app/layout.tsx
+++ b/web/app/layout.tsx
@@ -4,6 +4,11 @@ import Script from "next/script";
import "./globals.css";
import { RootProviders } from "@/providers/root-providers";
+if (process.env.NODE_ENV === "development") {
+ const { start } = await import("react-scan");
+ start();
+}
+
export default function Layout({ children }: { children: React.ReactNode }) {
return (
diff --git a/web/app/notebook/NotebookPageClient.tsx b/web/app/notebook/NotebookPageClient.tsx
new file mode 100644
index 00000000..b0e86c08
--- /dev/null
+++ b/web/app/notebook/NotebookPageClient.tsx
@@ -0,0 +1,75 @@
+'use client'
+
+import { useEffect, useRef } from 'react'
+import { loader } from '@monaco-editor/react'
+import { NotebookEditor } from '@/modules/notebook/components/NotebookEditor'
+import { NotebookToolbar } from '@/modules/notebook/components/NotebookToolbar'
+import { NotebookSidebar } from '@/modules/notebook/components/NotebookSidebar'
+import { NotebookBottomPanel } from '@/modules/notebook/components/NotebookBottomPanel'
+import { KeyboardShortcuts } from '@/modules/notebook/components/KeyboardShortcuts'
+import { useNotebookRouteController } from '@/modules/notebook/hooks/useNotebookRouteController'
+import { SqlConsoleMigrationBanner } from '@/modules/notebook/components/SqlConsoleMigrationBanner'
+import { NotebookProvider } from '@/modules/notebook/providers/NotebookProvider'
+import { DebugPanel } from '@/modules/notebook/components/debug'
+import { OutlineFloating } from '@/modules/notebook/components/OutlineFloating'
+
+export default function NotebookPageClient() {
+ const { notebookId: activeNotebookId } = useNotebookRouteController()
+ const scrollContainerRef = useRef<HTMLDivElement>(null)
+
+ useEffect(() => {
+ // loader.config({ paths: { vs: 'https://cdn.jsdelivr.net/npm/monaco-editor@0.49.0/min/vs' } })
+ loader.init()
+ }, [])
+
+ // Show placeholder if no notebook is selected
+ if (!activeNotebookId) {
+ return (
+
+
+
+
+
+
+
+
+
+
+
Select or create a notebook to get started
+
+
+
+ )
+ }
+
+ return (
+
+
+
+
+ )
+}
diff --git a/web/app/notebook/page.tsx b/web/app/notebook/page.tsx
new file mode 100644
index 00000000..c2ee7eeb
--- /dev/null
+++ b/web/app/notebook/page.tsx
@@ -0,0 +1,13 @@
+"use client";
+
+import dynamic from 'next/dynamic'
+
+// Keep any Yjs consumers out of the SSR build
+// https://github.com/yjs/yjs/issues/438
+const NotebookPageClient = dynamic(() => import('./NotebookPageClient'), {
+ ssr: false,
+})
+
+export default function NotebookPage() {
+ return
+}
diff --git a/web/app/sqlconsole/page.tsx b/web/app/sqlconsole/page.tsx
index 52c209e3..efafb1f4 100644
--- a/web/app/sqlconsole/page.tsx
+++ b/web/app/sqlconsole/page.tsx
@@ -6,6 +6,7 @@ import { Button } from "@/components/ui/button"
import { Settings, RefreshCw } from 'lucide-react'
import { DatabaseManagement } from "@/modules/database/database-management"
import { SQLEditor, type SQLEditorHandle } from "@/modules/sqlconsole/sql-editor"
+import { SqlConsoleMigrationBanner } from "@/modules/notebook/components/SqlConsoleMigrationBanner";
import { DefaultService } from "@/api-gen"
import { toast } from "react-hot-toast"
import { RisingWaveNodeData, NodeType } from "@/components/common/streaming-graph"
@@ -479,7 +480,10 @@ export default function SQLConsole() {
onMouseDown={handleMouseDown}
/>
-
+
+
+
+
+
+export type { CellEditorMode } from './ui'
+
+export const useCellEditorModes = (notebookId: string | null | undefined) => {
+ const { cellEditorModes } = useNotebookUIState()
+ if (!notebookId) return EMPTY_MODES
+ return cellEditorModes[notebookId] ?? EMPTY_MODES
+}
+
+export const useCellEditorModeForCell = (
+ notebookId: string,
+ cellId: string,
+): CellEditorMode => {
+ const { cellEditorModes } = useNotebookUIState()
+ if (!notebookId) return 'preview'
+ return cellEditorModes[notebookId]?.[cellId] ?? 'preview'
+}
+
+export const setCellEditorMode = (
+ notebookId: string,
+ cellId: string,
+ mode: CellEditorMode,
+) => {
+ notebookUIActions.setCellEditorMode(notebookId, cellId, mode)
+}
+
+export const toggleCellEditorMode = (notebookId: string, cellId: string) => {
+ notebookUIActions.toggleCellEditorMode(notebookId, cellId)
+}
+
+export const removeCellEditorMode = (notebookId: string, cellId: string) => {
+ notebookUIActions.removeCellEditorMode(notebookId, cellId)
+}
+
+export const resetCellEditorModes = (notebookId: string | null | undefined) => {
+ if (!notebookId) return
+ notebookUIActions.resetCellEditorModes(notebookId)
+}
diff --git a/web/atoms/notebook/cell-runtime.ts b/web/atoms/notebook/cell-runtime.ts
new file mode 100644
index 00000000..5f8d2395
--- /dev/null
+++ b/web/atoms/notebook/cell-runtime.ts
@@ -0,0 +1,28 @@
+import { proxy } from "valtio";
+import { useSnapshot } from "valtio";
+
+const cellCollapsedState = proxy<Record<string, boolean>>({});
+
+export const useCellCollapsedValue = () => useSnapshot(cellCollapsedState);
+
+export const toggleCellCollapsedAction = (cellId: string) => {
+ const current = cellCollapsedState[cellId] ?? false;
+ if (current) {
+ delete cellCollapsedState[cellId];
+ return;
+ }
+ cellCollapsedState[cellId] = true;
+};
+
+export const setCellCollapsedAction = (cellId: string, collapsed: boolean) => {
+ if (!collapsed) {
+ delete cellCollapsedState[cellId];
+ return;
+ }
+ cellCollapsedState[cellId] = collapsed;
+};
+
+export const useCellCollapsedForCell = (cellId: string): boolean => {
+ const snapshot = useSnapshot(cellCollapsedState);
+ return snapshot[cellId] === true;
+};
diff --git a/web/atoms/notebook/constants.ts b/web/atoms/notebook/constants.ts
new file mode 100644
index 00000000..036f0c75
--- /dev/null
+++ b/web/atoms/notebook/constants.ts
@@ -0,0 +1,7 @@
+export const NOTEBOOK_HISTORY_ENTRY_ORIGIN = {
+ INITIAL: "initial" as const,
+ USER: "user" as const,
+};
+
+export type NotebookHistoryEntryOrigin =
+ (typeof NOTEBOOK_HISTORY_ENTRY_ORIGIN)[keyof typeof NOTEBOOK_HISTORY_ENTRY_ORIGIN];
diff --git a/web/atoms/notebook/errors.ts b/web/atoms/notebook/errors.ts
new file mode 100644
index 00000000..5b21086f
--- /dev/null
+++ b/web/atoms/notebook/errors.ts
@@ -0,0 +1,53 @@
+// Standardized error types for notebook domain operations.
+
+export class NotebookError extends Error {
+ readonly code: string;
+ constructor(message: string, code = 'NOTEBOOK_ERROR') {
+ super(message);
+ this.name = this.constructor.name;
+ this.code = code;
+ }
+}
+
+export class NotebookNotReadyError extends NotebookError {
+ constructor(message = 'Notebook is not ready') {
+ super(message, 'NOTEBOOK_NOT_READY');
+ }
+}
+
+export class NotebookStateError extends NotebookError {
+ constructor(message = 'Invalid notebook state') {
+ super(message, 'NOTEBOOK_STATE');
+ }
+}
+
+export class CellNotFoundError extends NotebookError {
+ readonly cellId: string;
+ constructor(cellId: string, message = 'Cell not found') {
+ super(`${message}: ${cellId}`, 'CELL_NOT_FOUND');
+ this.cellId = cellId;
+ }
+}
+
+export class NotebookBoundaryError extends NotebookError {
+ readonly cellId: string;
+ readonly direction: 'up' | 'down';
+ constructor(cellId: string, direction: 'up' | 'down', message = 'Cell at boundary') {
+ super(`${message}: ${cellId} (${direction})`, 'NOTEBOOK_BOUNDARY');
+ this.cellId = cellId;
+ this.direction = direction;
+ }
+}
+
+// Small helper to prefix console messages consistently
+export function logNotebookError(context: string, error: unknown) {
+ const prefix = '[Notebook]';
+ if (error instanceof Error) {
+
+ console.error(`${prefix} ${context}:`, error.name, error.message);
+ } else {
+
+ console.error(`${prefix} ${context}:`, error);
+ }
+}
+
diff --git a/web/atoms/notebook/hooks/index.ts b/web/atoms/notebook/hooks/index.ts
new file mode 100644
index 00000000..21007f5d
--- /dev/null
+++ b/web/atoms/notebook/hooks/index.ts
@@ -0,0 +1,3 @@
+export * from './useNotebookState';
+export * from './useNotebookOperations';
+export * from './safe';
diff --git a/web/atoms/notebook/hooks/safe.ts b/web/atoms/notebook/hooks/safe.ts
new file mode 100644
index 00000000..1c5aa5ab
--- /dev/null
+++ b/web/atoms/notebook/hooks/safe.ts
@@ -0,0 +1,6 @@
+import { useNotebookRuntime } from "@/modules/notebook/providers/notebook-runtime-context";
+
+export function useNotebookReady(): boolean {
+ const { ready } = useNotebookRuntime();
+ return ready;
+}
diff --git a/web/atoms/notebook/hooks/useNotebookOperations.ts b/web/atoms/notebook/hooks/useNotebookOperations.ts
new file mode 100644
index 00000000..5e07d85d
--- /dev/null
+++ b/web/atoms/notebook/hooks/useNotebookOperations.ts
@@ -0,0 +1,111 @@
+import { useCallback, useMemo } from 'react';
+import toast from 'react-hot-toast';
+import * as execOps from '../operations/execution-ops';
+import { resolveErrorMessage } from '@/lib/errors';
+import { useNotebookRuntime } from '@/modules/notebook/providers/notebook-runtime-context';
+import { getNotebookRoot } from '@/modules/notebook/collab/yjs/schema/access/root';
+import { NB_DATABASE_ID } from '@/modules/notebook/collab/yjs/schema/core/keys';
+import { useSnapshot } from 'valtio';
+
+export function useNotebookOperations() {
+  const { resource, store } = useNotebookRuntime();
+  const root = useMemo(() => getNotebookRoot(resource.doc), [resource.doc]);
+  const snapshot = useSnapshot(store.state);
+  const databaseId = (snapshot as any)[NB_DATABASE_ID] ?? null;
+  // Awareness clientID identifies which client triggered an execution.
+  const executedBy = resource.provider.awareness.clientID;
+
+  /**
+   * Execute a single cell.
+   * Shows an error toast on failure; returns true on success.
+   */
+  const executeCell = useCallback(
+    async (cellId: string): Promise<boolean> => {
+      if (!root) {
+        toast.error('Notebook not loaded');
+        return false;
+      }
+
+      const dbId = databaseId ? Number(databaseId) : null;
+      if (!dbId) {
+        toast.error('Select a database before executing cells');
+        return false;
+      }
+
+      try {
+        await execOps.executeCell(root, cellId, { databaseId: dbId, executedBy });
+        return true;
+      } catch (err) {
+        const message = resolveErrorMessage(err, 'Failed to execute cell');
+        toast.error(message);
+        return false;
+      }
+    },
+    // executedBy is read inside the callback, so it must be a dependency.
+    [root, databaseId, executedBy]
+  );
+
+  /**
+   * Execute multiple cells sequentially and return success/failure counts.
+   * Shows an error toast when the batch cannot start or fails wholesale.
+   */
+  const executeCells = useCallback(
+    async (
+      cellIds: string[],
+      options?: { stopOnError?: boolean }
+    ): Promise<{ success: number; failed: number }> => {
+      if (!root) {
+        toast.error('Notebook not loaded');
+        return { success: 0, failed: 0 };
+      }
+
+      const dbId = databaseId ? Number(databaseId) : null;
+      if (!dbId) {
+        toast.error('Select a database before executing cells');
+        return { success: 0, failed: 0 };
+      }
+
+      try {
+        const results = await execOps.executeCells(root, cellIds, {
+          databaseId: dbId,
+          executedBy,
+          stopOnError: options?.stopOnError ?? false,
+        });
+
+        const successCount = results.filter((r) => r.success).length;
+        const failedCount = results.filter((r) => !r.success).length;
+        return { success: successCount, failed: failedCount };
+      } catch (err) {
+        const message = resolveErrorMessage(err, 'Failed to execute cells');
+        toast.error(message);
+        return { success: 0, failed: cellIds.length };
+      }
+    },
+    [root, databaseId, executedBy]
+  );
+
+  /**
+   * Clear all cell outputs.
+   * Shows an error toast on failure.
+   */
+  const clearOutputs = useCallback(() => {
+    if (!root) {
+      toast.error('Notebook not loaded');
+      return;
+    }
+
+    try {
+      execOps.clearOutputs(root);
+    } catch (err) {
+      const message = resolveErrorMessage(err, 'Failed to clear outputs');
+      toast.error(message);
+    }
+  }, [root]);
+
+  return {
+    executeCell,
+    executeCells,
+    clearOutputs,
+  };
+}
diff --git a/web/atoms/notebook/hooks/useNotebookState.ts b/web/atoms/notebook/hooks/useNotebookState.ts
new file mode 100644
index 00000000..a3a8c29d
--- /dev/null
+++ b/web/atoms/notebook/hooks/useNotebookState.ts
@@ -0,0 +1,125 @@
+import { useCallback, useMemo } from 'react';
+import { useSnapshot } from 'valtio';
+import * as cellOps from '../operations/cell-ops';
+import * as metadataOps from '../operations/metadata-ops';
+import type { CellModel, CellMetadataModel } from '@/modules/notebook/collab/yjs/schema/core/types';
+import { NotebookBoundaryError, NotebookNotReadyError, logNotebookError } from '../errors';
+import { useNotebookRuntime } from '@/modules/notebook/providers/notebook-runtime-context';
+import { getNotebookRoot } from '@/modules/notebook/collab/yjs/schema/access/root';
+import { NB_DATABASE_ID, NB_TITLE, NB_CELL_ORDER } from '@/modules/notebook/collab/yjs/schema/core/keys';
+
+export function useNotebookState() {
+  const { resource, store } = useNotebookRuntime();
+  const snapshot = useSnapshot(store.state);
+  const root = useMemo(() => getNotebookRoot(resource.doc), [resource.doc]);
+
+  // Derived reactive state (snapshot keys come from the Yjs-backed store).
+  const title = (snapshot as any)[NB_TITLE] ?? 'Untitled Notebook';
+  const databaseId = (snapshot as any)[NB_DATABASE_ID] ?? null;
+  const cellOrder = (snapshot as any)[NB_CELL_ORDER] ?? [];
+
+  // Write the notebook title into the shared store.
+  const setTitle = useCallback(
+    (nextTitle: string) => {
+      (store.state as any)[NB_TITLE] = nextTitle;
+    },
+    [store.state],
+  );
+
+  // Database ids are stored as strings (or null to clear the selection).
+  const setDatabaseId = useCallback(
+    (nextId: number | string | null) => {
+      (store.state as any)[NB_DATABASE_ID] = nextId === null ? null : String(nextId);
+    },
+    [store.state],
+  );
+
+  // Operation: Add cell. Returns the new cell id, or null when the notebook
+  // is not loaded or the underlying operation fails.
+  const addCell = useCallback(
+    (kind: CellModel['kind'], source?: string, index?: number): string | null => {
+      if (!root) {
+        // Not fatal; return fallback and log consistently
+        logNotebookError('Cannot add cell: notebook not loaded', new NotebookNotReadyError());
+        return null;
+      }
+
+      try {
+        return cellOps.addCell(root, { kind, source, index });
+      } catch (err) {
+        logNotebookError('Failed to add cell', err);
+        return null;
+      }
+    },
+    [root]
+  );
+
+  // Operation: Delete cell (soft delete; errors are logged, not thrown).
+  const deleteCell = useCallback(
+    (cellId: string): void => {
+      if (!root) {
+        logNotebookError('Cannot delete cell: notebook not loaded', new NotebookNotReadyError());
+        return;
+      }
+
+      try {
+        cellOps.deleteCell(root, cellId);
+      } catch (err) {
+        logNotebookError('Failed to delete cell', err);
+      }
+    },
+    [root]
+  );
+
+  // Operation: Move cell up or down by one position.
+  const moveCell = useCallback(
+    (cellId: string, direction: 'up' | 'down'): void => {
+      if (!root) {
+        logNotebookError('Cannot move cell: notebook not loaded', new NotebookNotReadyError());
+        return;
+      }
+
+      try {
+        const moved = cellOps.moveCell(root, cellId, direction);
+        if (!moved) {
+          // Boundary no-op: not an error, but keep consistent, low-noise log for debugging.
+          // Avoid throwing; UI already disables boundary moves.
+          console.debug('[Notebook] Move cell no-op (boundary):', { cellId, direction });
+        }
+      } catch (err) {
+        // Defensive: cell-ops signals a boundary with `false`, but tolerate a
+        // thrown NotebookBoundaryError from older implementations.
+        if (err instanceof NotebookBoundaryError) return;
+        logNotebookError('Failed to move cell', err);
+      }
+    },
+    [root]
+  );
+
+  // Operation: Merge a partial metadata patch into a cell's metadata map.
+  const updateCellMetadata = useCallback(
+    (cellId: string, metadata: Partial<CellMetadataModel>): void => {
+      if (!root) {
+        logNotebookError('Cannot update cell metadata: notebook not loaded', new NotebookNotReadyError());
+        return;
+      }
+
+      try {
+        metadataOps.updateCellMetadata(root, cellId, metadata);
+      } catch (err) {
+        logNotebookError('Failed to update cell metadata', err);
+      }
+    },
+    [root]
+  );
+
+  return {
+    // State
+    title,
+    databaseId,
+    cellOrder,
+    // Setters
+    setTitle,
+    setDatabaseId,
+    // Operations
+    addCell,
+    deleteCell,
+    moveCell,
+    updateCellMetadata,
+  };
+}
diff --git a/web/atoms/notebook/index.ts b/web/atoms/notebook/index.ts
new file mode 100644
index 00000000..268360f8
--- /dev/null
+++ b/web/atoms/notebook/index.ts
@@ -0,0 +1,3 @@
+export * from './ui'
+export * from './cell-editor-modes'
+export * from './save-status'
diff --git a/web/atoms/notebook/operations/cell-ops.ts b/web/atoms/notebook/operations/cell-ops.ts
new file mode 100644
index 00000000..68299599
--- /dev/null
+++ b/web/atoms/notebook/operations/cell-ops.ts
@@ -0,0 +1,69 @@
+import * as Y from 'yjs';
+import type { YNotebook, CellModel } from '@/modules/notebook/collab/yjs/schema/core/types';
+import {
+ createCell,
+ insertCell as rawInsertCell,
+ softDeleteCell,
+ moveCell as rawMoveCell,
+} from '@/modules/notebook/collab/yjs/schema';
+import { NB_CELL_ORDER, CELL_ID } from '@/modules/notebook/collab/yjs/schema/core/keys';
+import { CellNotFoundError, NotebookStateError } from '@/atoms/notebook/errors';
+
+/**
+ * Create a cell of the given kind and insert it into the notebook.
+ * Returns the new cell's id; throws NotebookStateError when the created
+ * cell has no string id.
+ */
+export function addCell(
+ root: YNotebook,
+ options: {
+ kind: CellModel['kind'];
+ source?: string;
+ index?: number;
+ }
+): string {
+ const { kind, source = '', index } = options;
+
+ // Create new cell
+ const newCell = createCell({ kind, source });
+
+ // Insert into notebook
+ rawInsertCell(root, newCell, index);
+
+ // Get and validate cell ID
+ const cellId = newCell.get(CELL_ID);
+ if (typeof cellId !== 'string') {
+ throw new NotebookStateError('Failed to create cell: missing cell ID');
+ }
+
+ return cellId;
+}
+
+/** Soft-delete a cell. Idempotent: the lower-level op no-ops when the cell is missing. */
+export function deleteCell(root: YNotebook, cellId: string): void {
+ // soft delete is idempotent; rely on lower-level op to no-op if missing
+ softDeleteCell(root, cellId);
+}
+
+/**
+ * Move a cell one position up or down in the notebook order.
+ * Returns true when the move happened, false when the cell is already at
+ * the boundary (deliberate no-op). Throws NotebookStateError when the order
+ * array is missing and CellNotFoundError when the cell id is absent.
+ */
+export function moveCell(
+  root: YNotebook,
+  cellId: string,
+  direction: 'up' | 'down'
+): boolean {
+  const orderArray = root.get(NB_CELL_ORDER) as Y.Array<string> | undefined;
+  if (!orderArray) {
+    throw new NotebookStateError('Cell order array not found in notebook');
+  }
+
+  const currentOrder = orderArray.toArray();
+  const fromIndex = currentOrder.indexOf(cellId);
+  if (fromIndex === -1) {
+    throw new CellNotFoundError(cellId, 'Cell not found in order array');
+  }
+
+  const toIndex = direction === 'up' ? fromIndex - 1 : fromIndex + 1;
+
+  // Check boundaries - return explicit false to denote no-op
+  if (toIndex < 0 || toIndex >= currentOrder.length) {
+    return false;
+  }
+
+  // Perform move
+  rawMoveCell(root, cellId, toIndex);
+  return true;
+}
diff --git a/web/atoms/notebook/operations/execution-ops.ts b/web/atoms/notebook/operations/execution-ops.ts
new file mode 100644
index 00000000..b95669a9
--- /dev/null
+++ b/web/atoms/notebook/operations/execution-ops.ts
@@ -0,0 +1,144 @@
+import * as Y from 'yjs';
+import type { YNotebook, YCell, CellModel } from '@/modules/notebook/collab/yjs/schema/core/types';
+import {
+ startExecuteCell,
+ applyExecuteResultForCurrentRun,
+ getOutputEntry,
+} from '@/modules/notebook/collab/yjs/schema';
+import {
+ NB_CELL_MAP,
+ NB_OUTPUTS,
+ CELL_KIND,
+ CELL_SOURCE,
+ CELL_META,
+} from '@/modules/notebook/collab/yjs/schema/core/keys';
+import { EXECUTION_ORIGIN } from '@/modules/notebook/collab/yjs/schema';
+import { DefaultService } from '@/api-gen/services/DefaultService';
+import { type QueryResponse } from '@/api-gen';
+import { resolveErrorMessage } from '@/lib/errors';
+
+// Build an empty QueryResponse carrying only an error message.
+function errorResponse(message: string): QueryResponse {
+ return {
+ columns: [],
+ rows: [],
+ rowsAffected: 0,
+ error: message,
+ };
+}
+
+/**
+ * Execute a single SQL cell against the given database and record the
+ * result (or error) in the shared outputs map.
+ *
+ * Throws when the notebook/cell is missing, the cell is not SQL, the cell
+ * is already running, the query is empty, or the query request fails
+ * (after recording the failure in the output entry).
+ */
+export async function executeCell(
+  root: YNotebook,
+  cellId: string,
+  options: { databaseId: number; executedBy?: number }
+): Promise<void> {
+  const cellMap = root.get(NB_CELL_MAP) as Y.Map<YCell> | undefined;
+  if (!cellMap) {
+    throw new Error('Cell map not found in notebook');
+  }
+
+  const cell = cellMap.get(cellId);
+  if (!cell) {
+    throw new Error(`Cell ${cellId} not found`);
+  }
+
+  // Only SQL cells are executable.
+  const kind = cell.get(CELL_KIND) as CellModel['kind'] | undefined;
+  if (kind !== 'sql') {
+    throw new Error('Only SQL cells can be executed');
+  }
+
+  // Reject concurrent runs of the same cell.
+  const existing = getOutputEntry(root, cellId);
+  if (existing?.get('running') === true) {
+    throw new Error('Cell is already running');
+  }
+
+  // Read the SQL source from the collaborative text.
+  const sourceText = cell.get(CELL_SOURCE) as Y.Text | undefined;
+  const source = sourceText?.toString() ?? '';
+  const trimmed = source.trim();
+
+  if (trimmed.length === 0) {
+    // Empty query - mark the run as failed so the UI shows a result.
+    startExecuteCell(root, cellId, { executedBy: options.executedBy });
+    applyExecuteResultForCurrentRun(root, cellId, errorResponse('Query is empty'));
+    throw new Error('Query is empty');
+  }
+
+  // Per-cell metadata flag: run DDL statements in the background.
+  const meta = cell.get(CELL_META) as Y.Map<unknown> | undefined;
+  const backgroundDDL = meta?.get('backgroundDDL') === true;
+
+  // Mark the run as started before issuing the request.
+  startExecuteCell(root, cellId, { executedBy: options.executedBy });
+
+  try {
+    // Execute query
+    const result = await DefaultService.queryDatabase(options.databaseId, {
+      query: trimmed,
+      backgroundDDL,
+    });
+    applyExecuteResultForCurrentRun(root, cellId, result);
+  } catch (error) {
+    // Record the failure on the cell, then re-throw for the caller.
+    const message = resolveErrorMessage(error, 'Failed to execute query');
+    applyExecuteResultForCurrentRun(root, cellId, errorResponse(message));
+    throw error;
+  }
+}
+
+export async function executeCells(
+ root: YNotebook,
+ cellIds: string[],
+ options: { databaseId: number; stopOnError?: boolean; executedBy?: number }
+): Promise> {
+ const { databaseId, stopOnError = false, executedBy } = options;
+ const results: Array<{ cellId: string; success: boolean; error?: string }> = [];
+
+ for (const cellId of cellIds) {
+ try {
+ await executeCell(root, cellId, { databaseId, executedBy });
+ results.push({ cellId, success: true });
+ } catch (error) {
+ const message = resolveErrorMessage(error, 'Execution failed');
+ results.push({ cellId, success: false, error: message });
+
+ if (stopOnError) {
+ break;
+ }
+ }
+ }
+
+ return results;
+}
+
+/**
+ * Remove completed cell outputs. Entries for still-running cells are kept,
+ * but their previous result/completion data is dropped and the stale flag
+ * reset. All mutations happen in one transaction when a doc is attached.
+ */
+export function clearOutputs(root: YNotebook): void {
+  const doc = root.doc as Y.Doc | undefined;
+
+  const apply = () => {
+    const outputs = root.get(NB_OUTPUTS) as Y.Map<Y.Map<unknown>> | undefined;
+    if (!outputs) return;
+
+    // Snapshot keys first: deleting entries while iterating the live map
+    // risks skipping entries.
+    for (const key of Array.from(outputs.keys())) {
+      const entry = outputs.get(key);
+      if (!entry) continue;
+      if (entry.get('running') === true) {
+        // Preserve active runs; drop any prior results and stale flags
+        entry.delete('result');
+        entry.delete('completedAt');
+        entry.set('stale', false);
+      } else {
+        // Remove completed outputs
+        outputs.delete(key);
+      }
+    }
+  };
+
+  // Batch under the execution origin so observers can attribute the change.
+  if (doc) {
+    doc.transact(apply, EXECUTION_ORIGIN);
+  } else {
+    apply();
+  }
+}
diff --git a/web/atoms/notebook/operations/index.ts b/web/atoms/notebook/operations/index.ts
new file mode 100644
index 00000000..a3bccd00
--- /dev/null
+++ b/web/atoms/notebook/operations/index.ts
@@ -0,0 +1,3 @@
+export * from './cell-ops';
+export * from './execution-ops';
+export * from './metadata-ops';
diff --git a/web/atoms/notebook/operations/metadata-ops.ts b/web/atoms/notebook/operations/metadata-ops.ts
new file mode 100644
index 00000000..d38a2d21
--- /dev/null
+++ b/web/atoms/notebook/operations/metadata-ops.ts
@@ -0,0 +1,68 @@
+import * as Y from 'yjs';
+import type { YNotebook, YCell } from '@/modules/notebook/collab/yjs/schema/core/types';
+import { NB_CELL_MAP, NB_METADATA, CELL_META } from '@/modules/notebook/collab/yjs/schema/core/keys';
+
+/**
+ * Shallow-merge `metadata` into a cell's metadata map, creating the map if
+ * absent. Runs in a single transaction when the cell is attached to a doc.
+ * Throws when the cell map or the cell itself cannot be found.
+ */
+export function updateCellMetadata(
+  root: YNotebook,
+  cellId: string,
+  metadata: Record<string, unknown>
+): void {
+  const cellMap = root.get(NB_CELL_MAP) as Y.Map<YCell> | undefined;
+  if (!cellMap) {
+    throw new Error('Cell map not found in notebook');
+  }
+
+  const cell = cellMap.get(cellId);
+  if (!cell) {
+    throw new Error(`Cell ${cellId} not found`);
+  }
+
+  const doc = cell.doc as Y.Doc | undefined;
+
+  const apply = () => {
+    let metaMap = cell.get(CELL_META) as Y.Map<unknown> | undefined;
+    if (!metaMap) {
+      metaMap = new Y.Map<unknown>();
+      cell.set(CELL_META, metaMap);
+    }
+
+    // Merge metadata (shallow; incoming keys overwrite existing ones)
+    for (const [key, value] of Object.entries(metadata)) {
+      metaMap.set(key, value);
+    }
+  };
+
+  // Use transaction if doc available
+  if (doc) {
+    doc.transact(apply);
+  } else {
+    apply();
+  }
+}
+
+/**
+ * Shallow-merge `metadata` into the notebook-level metadata map, creating
+ * it if absent. Runs in a single transaction when a doc is attached.
+ */
+export function updateNotebookMetadata(
+  root: YNotebook,
+  metadata: Record<string, unknown>
+): void {
+  const doc = root.doc as Y.Doc | undefined;
+
+  const apply = () => {
+    let metaMap = root.get(NB_METADATA) as Y.Map<unknown> | undefined;
+    if (!metaMap) {
+      metaMap = new Y.Map<unknown>();
+      root.set(NB_METADATA, metaMap);
+    }
+
+    // Merge metadata (shallow; incoming keys overwrite existing ones)
+    for (const [key, value] of Object.entries(metadata)) {
+      metaMap.set(key, value);
+    }
+  };
+
+  // Use transaction if doc available
+  if (doc) {
+    doc.transact(apply);
+  } else {
+    apply();
+  }
+}
diff --git a/web/atoms/notebook/save-status.ts b/web/atoms/notebook/save-status.ts
new file mode 100644
index 00000000..6af18016
--- /dev/null
+++ b/web/atoms/notebook/save-status.ts
@@ -0,0 +1,58 @@
+import { proxy, useSnapshot } from 'valtio'
+
+export type SaveStatus = 'saved' | 'saving' | 'unsaved'
+
+export interface NotebookSaveState {
+  /** Current save status per notebook */
+  status: Record<string, SaveStatus>
+  /** Timestamp (ms since epoch) of last successful save per notebook */
+  lastSavedAt: Record<string, number>
+}
+
+const defaultSaveState: NotebookSaveState = {
+  status: {},
+  lastSavedAt: {},
+}
+
+// Global valtio proxy tracking save state for all open notebooks.
+export const notebookSaveState = proxy(defaultSaveState)
+
+export const notebookSaveActions = {
+  setStatus(notebookId: string, status: SaveStatus) {
+    notebookSaveState.status[notebookId] = status
+    if (status === 'saved') {
+      notebookSaveState.lastSavedAt[notebookId] = Date.now()
+    }
+  },
+  // Never downgrade an in-flight save to 'unsaved'.
+  markUnsaved(notebookId: string) {
+    if (notebookSaveState.status[notebookId] !== 'saving') {
+      notebookSaveState.status[notebookId] = 'unsaved'
+    }
+  },
+  markSaving(notebookId: string) {
+    notebookSaveState.status[notebookId] = 'saving'
+  },
+  markSaved(notebookId: string) {
+    notebookSaveState.status[notebookId] = 'saved'
+    notebookSaveState.lastSavedAt[notebookId] = Date.now()
+  },
+  // Drop per-notebook entries when a notebook is closed or deleted.
+  cleanup(notebookId: string) {
+    delete notebookSaveState.status[notebookId]
+    delete notebookSaveState.lastSavedAt[notebookId]
+  },
+} as const
+
+export const useNotebookSaveState = () => useSnapshot(notebookSaveState)
+
+// Hook: status for one notebook; untracked notebooks default to 'saved'.
+export const useNotebookSaveStatus = (notebookId: string): SaveStatus => {
+  const state = useNotebookSaveState()
+  return state.status[notebookId] ?? 'saved'
+}
+
+export const useNotebookLastSavedAt = (notebookId: string): number | null => {
+  const state = useNotebookSaveState()
+  return state.lastSavedAt[notebookId] ?? null
+}
+
+// Non-hook accessor for use outside React components.
+export const getNotebookSaveStatus = (notebookId: string): SaveStatus => {
+  return notebookSaveState.status[notebookId] ?? 'saved'
+}
diff --git a/web/atoms/notebook/ui.ts b/web/atoms/notebook/ui.ts
new file mode 100644
index 00000000..3aac57e2
--- /dev/null
+++ b/web/atoms/notebook/ui.ts
@@ -0,0 +1,137 @@
+import { useSnapshot } from 'valtio'
+
+import { proxyWithLocalStorage } from '@/lib/state/proxy-with-local-storage'
+
+export type NotebookBottomPanelTab =
+  | 'streaming-graph'
+  | 'history'
+  | 'export'
+  | 'progress'
+
+export type NotebookSidebarTab = 'notebooks' | 'schema'
+
+export type CellEditorMode = 'editor' | 'preview'
+
+export interface NotebookBottomPanelState {
+  isOpen: boolean
+  activeTab: NotebookBottomPanelTab
+}
+
+export interface NotebookSidebarState {
+  activeTab: NotebookSidebarTab
+  // Expansion state keyed by schema tree node id
+  schemaExpandedNodes: Record<string, boolean>
+  schemaSearchQuery: string
+}
+
+export interface NotebookUIState {
+  bottomPanel: NotebookBottomPanelState
+  sidebar: NotebookSidebarState
+  // Editor mode keyed by notebook id, then cell id
+  cellEditorModes: Record<string, Record<string, CellEditorMode>>
+}
+
+const NOTEBOOK_UI_STORAGE_KEY = 'notebook-ui-state'
+
+const defaultNotebookUIState: NotebookUIState = {
+ bottomPanel: {
+ isOpen: false,
+ activeTab: 'history',
+ },
+ sidebar: {
+ activeTab: 'notebooks',
+ schemaExpandedNodes: {},
+ schemaSearchQuery: '',
+ },
+ cellEditorModes: {},
+}
+
+// Valtio proxy persisted to localStorage. Only bottomPanel and sidebar are
+// persisted (see persistKeys); cellEditorModes is session-only.
+export const notebookUIState = proxyWithLocalStorage({
+ key: NOTEBOOK_UI_STORAGE_KEY,
+ defaultState: defaultNotebookUIState,
+ persistKeys: ['bottomPanel', 'sidebar'],
+})
+
+// Lazily create the per-notebook editor-mode map; returns null for an empty id.
+const ensureNotebookEditorModes = (notebookId: string) => {
+ if (!notebookId) return null
+ if (!notebookUIState.cellEditorModes[notebookId]) {
+ notebookUIState.cellEditorModes[notebookId] = {}
+ }
+ return notebookUIState.cellEditorModes[notebookId]
+}
+
+// Imperative mutations on the shared notebook UI proxy.
+export const notebookUIActions = {
+  // Bottom panel actions
+  setBottomPanel(updates: Partial<NotebookBottomPanelState>) {
+    Object.assign(notebookUIState.bottomPanel, updates)
+  },
+  toggleBottomPanel() {
+    notebookUIState.bottomPanel.isOpen = !notebookUIState.bottomPanel.isOpen
+  },
+  openBottomPanel(tab?: NotebookBottomPanelTab) {
+    notebookUIState.bottomPanel.isOpen = true
+    if (tab) {
+      notebookUIState.bottomPanel.activeTab = tab
+    }
+  },
+  closeBottomPanel() {
+    notebookUIState.bottomPanel.isOpen = false
+  },
+  setActiveBottomPanelTab(tab: NotebookBottomPanelTab) {
+    notebookUIState.bottomPanel.activeTab = tab
+  },
+  // Cell editor mode actions (per notebook, per cell)
+  setCellEditorMode(notebookId: string, cellId: string, mode: CellEditorMode) {
+    const modes = ensureNotebookEditorModes(notebookId)
+    if (!modes) return
+    modes[cellId] = mode
+  },
+  toggleCellEditorMode(notebookId: string, cellId: string) {
+    const modes = ensureNotebookEditorModes(notebookId)
+    if (!modes) return
+    // Cells default to 'preview' until a mode is explicitly set.
+    const current = modes[cellId] ?? 'preview'
+    modes[cellId] = current === 'editor' ? 'preview' : 'editor'
+  },
+  removeCellEditorMode(notebookId: string, cellId: string) {
+    const notebookModes = notebookUIState.cellEditorModes[notebookId]
+    if (!notebookModes || !(cellId in notebookModes)) return
+    delete notebookModes[cellId]
+  },
+  resetCellEditorModes(notebookId: string) {
+    if (!notebookId) return
+    delete notebookUIState.cellEditorModes[notebookId]
+  },
+  // Sidebar actions
+  setSidebarTab(tab: NotebookSidebarTab) {
+    notebookUIState.sidebar.activeTab = tab
+  },
+  toggleSchemaNode(nodeId: string) {
+    const current = notebookUIState.sidebar.schemaExpandedNodes[nodeId] ?? false
+    notebookUIState.sidebar.schemaExpandedNodes[nodeId] = !current
+  },
+  setSchemaNodeExpanded(nodeId: string, expanded: boolean) {
+    notebookUIState.sidebar.schemaExpandedNodes[nodeId] = expanded
+  },
+  setSchemaSearchQuery(query: string) {
+    notebookUIState.sidebar.schemaSearchQuery = query
+  },
+  expandAllSchemaNodes(nodeIds: string[]) {
+    nodeIds.forEach((id) => {
+      notebookUIState.sidebar.schemaExpandedNodes[id] = true
+    })
+  },
+  collapseAllSchemaNodes() {
+    notebookUIState.sidebar.schemaExpandedNodes = {}
+  },
+} as const
+
+// React hooks over the UI proxy (reactive snapshots).
+export const useNotebookUIState = () => useSnapshot(notebookUIState)
+
+export const useNotebookBottomPanelState = () =>
+ useNotebookUIState().bottomPanel
+
+export const useNotebookSidebarState = () => useNotebookUIState().sidebar
+
+// Non-hook accessors for use outside React components.
+export const getNotebookUIState = (): NotebookUIState => notebookUIState
+
+// Defaults to 'preview' when no mode has been recorded for the cell.
+export const getCellEditorMode = (
+ notebookId: string,
+ cellId: string,
+): CellEditorMode =>
+ notebookUIState.cellEditorModes[notebookId]?.[cellId] ?? 'preview'
diff --git a/web/components/common/MarkdownRenderer.tsx b/web/components/common/MarkdownRenderer.tsx
new file mode 100644
index 00000000..1ca5a42f
--- /dev/null
+++ b/web/components/common/MarkdownRenderer.tsx
@@ -0,0 +1,138 @@
+'use client'
+
+import { memo, useMemo, useState, useEffect } from 'react'
+import { unified } from 'unified'
+import remarkParse from 'remark-parse'
+import remarkRehype from 'remark-rehype'
+// import rehypeShiki from '@shikijs/rehype'
+import rehypeStringify from 'rehype-stringify'
+import { visit } from 'unist-util-visit'
+import type { Element } from 'hast'
+import { cn } from '@/lib/utils'
+
+interface MarkdownRendererProps {
+ content: string
+ className?: string
+ /** Cell ID used for generating heading IDs for outline navigation */
+ cellId?: string
+ // Optional Shiki theme configuration (unused while the rehypeShiki plugin
+ // below is commented out)
+ themes?: {
+ light: string
+ dark: string
+ }
+ // Whether to enable inline code highlighting (unused for the same reason)
+ enableInlineHighlight?: boolean
+}
+
+/**
+ * Rehype plugin to add data-heading-id attributes to heading elements.
+ * This enables the floating outline to track and scroll to headings.
+ */
+// Factory returning a rehype plugin that stamps each h1-h6 element with a
+// `data-heading-id` of the form `<cellId>-h-<index>`.
+function rehypeHeadingIds(cellId: string) {
+ return () => {
+ // headingIndex is reset each time the plugin runs (per process call)
+ return (tree: any) => {
+ let headingIndex = 0
+ visit(tree, 'element', (node: Element) => {
+ if (['h1', 'h2', 'h3', 'h4', 'h5', 'h6'].includes(node.tagName)) {
+ node.properties = node.properties || {}
+ node.properties['data-heading-id'] = `${cellId}-h-${headingIndex++}`
+ }
+ })
+ }
+ }
+}
+
+const MarkdownRenderer = memo(function MarkdownRenderer({
+ content,
+ className,
+ cellId,
+}: MarkdownRendererProps) {
+ const [processedContent, setProcessedContent] = useState('')
+ const [isLoading, setIsLoading] = useState(true)
+ const [lastContent, setLastContent] = useState('')
+ const [lastCellId, setLastCellId] = useState(undefined)
+
+ // Build the unified processor with useMemo to avoid recreating it per render.
+ // Recreate processor when cellId changes to ensure correct heading IDs
+ const processor = useMemo(() => {
+ const p = unified()
+ .use(remarkParse) // parse Markdown
+ .use(remarkRehype) // convert to an HTML AST (hast)
+
+ // Add heading ID plugin only if cellId is provided
+ if (cellId) {
+ p.use(rehypeHeadingIds(cellId))
+ }
+
+ // .use(rehypeShiki, {
+ // themes,
+ // inline: enableInlineHighlight ? 'tailing-curly-colon' : false,
+ // defaultColor: 'light-dark()',
+ // })
+ return p.use(rehypeStringify) // serialize to an HTML string
+ }, [cellId])
+
+ // Process the Markdown content asynchronously.
+ useEffect(() => {
+ // Skip reprocessing when neither content nor cellId has changed
+ if (content === lastContent && cellId === lastCellId && processedContent) {
+ return
+ }
+
+ let isCancelled = false
+
+ const processContent = async () => {
+ try {
+ setIsLoading(true)
+ const result = await processor.process(content)
+ const html = result.toString()
+
+ if (!isCancelled) {
+ setProcessedContent(html)
+ setLastContent(content)
+ setLastCellId(cellId)
+ }
+ } catch (error) {
+ console.error('Markdown processing error:', error)
+ if (!isCancelled) {
+ setProcessedContent(content) // fall back to the raw content
+ setLastContent(content)
+ setLastCellId(cellId)
+ }
+ } finally {
+ if (!isCancelled) {
+ setIsLoading(false)
+ }
+ }
+ }
+
+ processContent()
+
+ // Cancellation guard: avoid state updates after unmount/re-run.
+ return () => {
+ isCancelled = true
+ }
+ }, [processor, content, lastContent, processedContent, cellId, lastCellId])
+
+ // NOTE(review): the JSX return expressions below appear truncated in this
+ // patch (markup stripped during extraction) — confirm against the original file.
+ if (isLoading) {
+ return (
+
+ )
+ }
+
+ return (
+
+ )
+})
+
+export { MarkdownRenderer }
\ No newline at end of file
diff --git a/web/components/common/app-sidebar.tsx b/web/components/common/app-sidebar.tsx
index 478e3189..d903c207 100644
--- a/web/components/common/app-sidebar.tsx
+++ b/web/components/common/app-sidebar.tsx
@@ -13,6 +13,7 @@ import {
Code,
Moon,
Sun,
+ NotebookPen,
} from "lucide-react"
import Link from "next/link"
import Image from "next/image"
@@ -48,6 +49,11 @@ const mainMenuItems = [
href: "/sqlconsole",
icon: Code,
},
+ {
+ title: "Notebook",
+ href: "/notebook",
+ icon: NotebookPen,
+ },
{
title: "Metrics Store",
href: "/metricsstore",
diff --git a/web/components/ui/avatar.tsx b/web/components/ui/avatar.tsx
new file mode 100644
index 00000000..51e507ba
--- /dev/null
+++ b/web/components/ui/avatar.tsx
@@ -0,0 +1,50 @@
+"use client"
+
+import * as React from "react"
+import * as AvatarPrimitive from "@radix-ui/react-avatar"
+
+import { cn } from "@/lib/utils"
+
+const Avatar = React.forwardRef<
+ React.ElementRef,
+ React.ComponentPropsWithoutRef
+>(({ className, ...props }, ref) => (
+
+))
+Avatar.displayName = AvatarPrimitive.Root.displayName
+
+const AvatarImage = React.forwardRef<
+ React.ElementRef,
+ React.ComponentPropsWithoutRef
+>(({ className, ...props }, ref) => (
+
+))
+AvatarImage.displayName = AvatarPrimitive.Image.displayName
+
+const AvatarFallback = React.forwardRef<
+ React.ElementRef,
+ React.ComponentPropsWithoutRef
+>(({ className, ...props }, ref) => (
+
+))
+AvatarFallback.displayName = AvatarPrimitive.Fallback.displayName
+
+export { Avatar, AvatarImage, AvatarFallback }
diff --git a/web/components/ui/monaco-editor.tsx b/web/components/ui/monaco-editor.tsx
new file mode 100644
index 00000000..04f667c8
--- /dev/null
+++ b/web/components/ui/monaco-editor.tsx
@@ -0,0 +1,178 @@
+"use client";
+
+import { memo, useRef, useCallback, useImperativeHandle, useEffect, useState } from "react";
+import Editor, { OnMount, OnChange } from "@monaco-editor/react";
+import { cn } from "@/lib/utils";
+import type { editor } from "monaco-editor";
+import { useIsDark } from "@/hooks/useIsDark";
+
+interface MonacoEditorProps {
+ // value?: string;
+ defaultValue?: string;
+ language?: string;
+ onChange?: (value: string) => void;
+ onMount?: (editor: editor.IStandaloneCodeEditor) => void;
+ onReady?: (editor: editor.IStandaloneCodeEditor) => void;
+ placeholder?: string;
+ height?: string | number;
+ className?: string;
+ options?: editor.IStandaloneEditorConstructionOptions;
+ readOnly?: boolean;
+ autoResize?: boolean;
+ minHeight?: number;
+ maxHeight?: number;
+ ref?: React.Ref;
+}
+
+interface MonacoEditorHandle {
+ focus: () => void;
+ getValue: () => string;
+ setValue: (value: string) => void;
+ getEditor: () => editor.IStandaloneCodeEditor | null;
+}
+
+const MonacoEditor = memo(function MonacoEditor({
+ // value,
+ defaultValue,
+ language = "sql",
+ onChange,
+ onMount,
+ onReady,
+ height = 200,
+ className,
+ options,
+ readOnly = false,
+ autoResize = false,
+ minHeight = 100,
+ maxHeight = 600,
+ ref,
+}: MonacoEditorProps) {
+ const isDark = useIsDark();
+ const editorRef = useRef(null);
+ const [editorHeight, setEditorHeight] = useState(
+ typeof height === 'number' ? height : 200
+ );
+
+ useEffect(() => {
+ return () => {
+ // Cleanup editor instance when component unmounts
+ if (editorRef.current) {
+ editorRef.current.dispose();
+ editorRef.current = null;
+ }
+ };
+ }, []);
+
+ const updateHeight = useCallback(() => {
+ if (!autoResize || !editorRef.current) return;
+
+ const contentHeight = editorRef.current.getContentHeight();
+ const newHeight = Math.max(minHeight, Math.min(contentHeight, maxHeight));
+
+ if (newHeight !== editorHeight) {
+ setEditorHeight(newHeight);
+ editorRef.current.layout();
+ }
+ }, [autoResize, editorHeight, minHeight, maxHeight]);
+
+ useEffect(() => {
+ if (autoResize && editorRef.current) {
+ // Update height when value changes
+ updateHeight();
+
+ // Listen to content size changes
+ const disposable = editorRef.current.onDidContentSizeChange(() => {
+ updateHeight();
+ });
+
+ return () => {
+ disposable.dispose();
+ };
+ }
+ }, [autoResize, updateHeight]);
+
+ const handleEditorDidMount: OnMount = useCallback(
+ (editor) => {
+ editorRef.current = editor;
+
+ if (autoResize) {
+ // Initial height update
+ updateHeight();
+
+ // Listen to content size changes
+ editor.onDidContentSizeChange(() => {
+ updateHeight();
+ });
+ }
+
+ onMount?.(editor);
+ onReady?.(editor);
+ },
+ [onMount, onReady, autoResize, updateHeight]
+ );
+
+ const handleEditorChange: OnChange = useCallback(
+ (newValue) => {
+ onChange?.(newValue || "");
+ },
+ [onChange]
+ );
+
+ useImperativeHandle(
+ ref,
+ () => ({
+ focus: () => editorRef.current?.focus(),
+ getValue: () => editorRef.current?.getValue() || "",
+ setValue: (newValue: string) => editorRef.current?.setValue(newValue),
+ getEditor: () => editorRef.current,
+ }),
+ []
+ );
+
+ const defaultOptions: editor.IStandaloneEditorConstructionOptions = {
+ minimap: { enabled: false },
+ lineNumbers: "on",
+ fontSize: 14,
+ tabSize: 2,
+ insertSpaces: true,
+ wordWrap: "on",
+ folding: false,
+ renderLineHighlight: "all",
+ smoothScrolling: true,
+ cursorBlinking: "smooth",
+ suggestOnTriggerCharacters: true,
+ quickSuggestions: true,
+ parameterHints: { enabled: true },
+ autoClosingBrackets: "always",
+ autoClosingQuotes: "always",
+ bracketPairColorization: { enabled: true },
+ readOnly,
+ scrollbar: {
+ handleMouseWheel: !autoResize,
+ },
+ scrollBeyondLastLine: !autoResize,
+ ...options,
+ };
+
+ return (
+
+
+ }
+ />
+
+ );
+});
+
+export { MonacoEditor };
+export type { MonacoEditorHandle, MonacoEditorProps };
diff --git a/web/components/ui/shiki/ShikiCodeBlock.tsx b/web/components/ui/shiki/ShikiCodeBlock.tsx
new file mode 100644
index 00000000..0415e1ac
--- /dev/null
+++ b/web/components/ui/shiki/ShikiCodeBlock.tsx
@@ -0,0 +1,137 @@
+"use client";
+
+import { cn } from "@/lib/utils";
+import { Suspense, useRef } from "react";
+import type { BundledLanguage } from "shiki";
+import { useShiki } from "./use-shiki";
+import { useIsDark } from "@/hooks/useIsDark";
+
+interface ShikiCodeBlockProps {
+ value: string;
+ language?: BundledLanguage;
+ className?: string;
+ title?: string;
+ copyable?: boolean;
+ onDoubleClick?: () => void;
+ emptyText?: string;
+}
+
+function HighlightedCode({
+ value,
+ language,
+ isDark,
+ emptyText,
+}: {
+ value: string;
+ language: BundledLanguage;
+ isDark: boolean;
+ emptyText: string;
+}) {
+ const trimmedValue = value.trim();
+
+ const theme = isDark ? "dark-plus" : "light-plus";
+
+ const highlighted = useShiki(
+ trimmedValue ? value : " ",
+ {
+ lang: language,
+ theme,
+ engine: "oniguruma",
+ },
+ [value, language, theme] // Cache key dependencies
+ );
+
+ if (!trimmedValue) {
+ return (
+
{emptyText}
+ );
+ }
+
+  return <>{highlighted}</>;
+}
+
+export function ShikiCodeBlock({
+ value,
+ language = "sql",
+ className,
+ title,
+ onDoubleClick,
+ emptyText = "Empty content",
+}: ShikiCodeBlockProps) {
+  const containerRef = useRef<HTMLDivElement>(null);
+ const isDark = useIsDark();
+
+ const fallback = (
+
+ {value || emptyText}
+
+ );
+
+ // If no title provided, render minimal version
+ if (!title) {
+ return (
+ pre]:!bg-transparent [&>pre]:h-full [&>pre]:overflow-auto [&>pre]:text-sm [&>pre]:leading-relaxed [&>pre]:whitespace-pre-wrap [&>pre]:break-words [&_.shiki-pre]:bg-transparent [&_.shiki-pre]:p-3 [&_.shiki-pre]:pt-2 [&_.shiki-pre]:font-mono [&_.shiki-code]:font-mono",
+ className
+ )}
+ onDoubleClick={onDoubleClick}
+ ref={containerRef}
+ >
+
+
+
+
+ );
+ }
+
+ // Full version with title and optional copy functionality
+ return (
+
+
+
{title}
+
+
+
+
+
+ {value || emptyText}
+
+
+ }
+ >
+
+
+
+
+
+
+ );
+}
diff --git a/web/components/ui/shiki/index.ts b/web/components/ui/shiki/index.ts
new file mode 100644
index 00000000..1e0321c3
--- /dev/null
+++ b/web/components/ui/shiki/index.ts
@@ -0,0 +1,2 @@
+export { ShikiCodeBlock } from "./ShikiCodeBlock";
+export { useShiki } from "./use-shiki";
diff --git a/web/components/ui/shiki/use-shiki.ts b/web/components/ui/shiki/use-shiki.ts
new file mode 100644
index 00000000..edb4166b
--- /dev/null
+++ b/web/components/ui/shiki/use-shiki.ts
@@ -0,0 +1,22 @@
+'use client';
+import { type DependencyList, type ReactNode, use, useMemo } from 'react';
+import { highlight, type HighlightOptions } from '@/lib/shiki-highlighter';
+
+const promises: Record<string, Promise<ReactNode>> = {};
+
+/**
+ * Get highlighted results, should be used with React Suspense API.
+ *
+ * Note: results are cached with (lang, code) as keys, if this is not the desired behaviour, pass a `deps` instead.
+ */
+export function useShiki(
+ code: string,
+ options: HighlightOptions,
+ deps?: DependencyList,
+): ReactNode {
+ const key = useMemo(() => {
+ return deps ? JSON.stringify(deps) : `${options.lang}:${code}`;
+ }, [code, deps, options.lang]);
+
+ return use((promises[key] ??= highlight(code, options)));
+}
diff --git a/web/eslint.config.mjs b/web/eslint.config.mjs
index c85fb67c..a2eaef2f 100644
--- a/web/eslint.config.mjs
+++ b/web/eslint.config.mjs
@@ -11,6 +11,12 @@ const compat = new FlatCompat({
const eslintConfig = [
...compat.extends("next/core-web-vitals", "next/typescript"),
+ ...compat.config({
+ rules: {
+ "@typescript-eslint/no-explicit-any": "off",
+ },
+ ignorePatterns: ["node_modules/**", ".next/**", "out/**", "build/**", "next-env.d.ts", "tests/**", "api-anclax/**", "api-gen/**"],
+ })
];
export default eslintConfig;
diff --git a/web/hooks/useAuth.ts b/web/hooks/useAuth.ts
index d0fdfd5c..1147a604 100644
--- a/web/hooks/useAuth.ts
+++ b/web/hooks/useAuth.ts
@@ -98,4 +98,4 @@ export const useAuthActions = () => {
login,
logout,
}
-}
+}
\ No newline at end of file
diff --git a/web/hooks/useIsDark.ts b/web/hooks/useIsDark.ts
new file mode 100644
index 00000000..aabc23d6
--- /dev/null
+++ b/web/hooks/useIsDark.ts
@@ -0,0 +1,13 @@
+import { useTheme } from 'next-themes';
+import { useEffect, useState } from 'react';
+
+export function useIsDark() {
+ const { resolvedTheme } = useTheme();
+ const [isDark, setIsDark] = useState(resolvedTheme === 'dark');
+
+ useEffect(() => {
+ setIsDark(resolvedTheme === 'dark');
+ }, [resolvedTheme]);
+
+ return isDark;
+}
\ No newline at end of file
diff --git a/web/hooks/useResizable.ts b/web/hooks/useResizable.ts
new file mode 100644
index 00000000..3ac68736
--- /dev/null
+++ b/web/hooks/useResizable.ts
@@ -0,0 +1,170 @@
+import { useCallback, useState, useEffect, useRef } from 'react'
+
+type ResizeDirection = 'horizontal' | 'vertical'
+type ResizeHandle = 'left' | 'right' | 'top' | 'bottom'
+
+interface UseResizableConfig {
+ defaultWidth: number
+ minWidth?: number
+ maxWidth?: number
+ storageKey?: string
+ direction?: ResizeDirection
+ handle?: ResizeHandle
+}
+
+interface UseResizableReturn {
+ width: number
+ setWidth: (width: number) => void
+ resizeHandleProps: {
+ onMouseDown: (e: React.MouseEvent) => void
+ onDoubleClick: (e: React.MouseEvent) => void
+ className: string
+ }
+ isDragging: boolean
+}
+
+export function useResizable({
+ defaultWidth,
+ minWidth = 100,
+ maxWidth = Infinity,
+ storageKey,
+ direction = 'horizontal',
+ handle = 'right'
+}: UseResizableConfig): UseResizableReturn {
+ // Initialize width from localStorage if available
+ const [width, setWidthState] = useState(() => {
+ if (typeof window === 'undefined') return defaultWidth
+
+ if (storageKey) {
+ const stored = localStorage.getItem(storageKey)
+ if (stored) {
+ const parsedWidth = parseInt(stored, 10)
+ if (!isNaN(parsedWidth)) {
+ return Math.min(Math.max(parsedWidth, minWidth), maxWidth)
+ }
+ }
+ }
+ return defaultWidth
+ })
+
+ const [isDragging, setIsDragging] = useState(false)
+ const [startPosition, setStartPosition] = useState(0)
+ const [startWidth, setStartWidth] = useState(0)
+ const dragStateRef = useRef({ isDragging: false })
+
+ const setWidth = useCallback((newWidth: number) => {
+ const constrainedWidth = Math.min(Math.max(newWidth, minWidth), maxWidth)
+ setWidthState(constrainedWidth)
+
+ // Persist to localStorage if storageKey is provided
+ if (storageKey) {
+ localStorage.setItem(storageKey, constrainedWidth.toString())
+ }
+ }, [minWidth, maxWidth, storageKey])
+
+ const handleMouseDown = useCallback((e: React.MouseEvent) => {
+ e.preventDefault()
+ e.stopPropagation()
+
+ setIsDragging(true)
+ dragStateRef.current.isDragging = true
+
+ if (direction === 'horizontal') {
+ setStartPosition(e.clientX)
+ } else {
+ setStartPosition(e.clientY)
+ }
+
+ setStartWidth(width)
+ }, [direction, width])
+
+ const handleDoubleClick = useCallback((e: React.MouseEvent) => {
+ e.preventDefault()
+ e.stopPropagation()
+
+ // Reset to default width
+ setWidth(defaultWidth)
+ }, [defaultWidth, setWidth])
+
+ const handleMouseMove = useCallback((e: MouseEvent) => {
+ if (!dragStateRef.current.isDragging) return
+
+ const currentPosition = direction === 'horizontal' ? e.clientX : e.clientY
+ const diff = currentPosition - startPosition
+
+ // Calculate new width based on handle position
+ let newWidth: number
+ if (handle === 'right' || handle === 'bottom') {
+ newWidth = startWidth + diff
+ } else {
+ newWidth = startWidth - diff
+ }
+
+ setWidth(newWidth)
+ }, [direction, startPosition, startWidth, handle, setWidth])
+
+ const handleMouseUp = useCallback(() => {
+ setIsDragging(false)
+ dragStateRef.current.isDragging = false
+ }, [])
+
+ useEffect(() => {
+ if (isDragging) {
+ document.addEventListener('mousemove', handleMouseMove)
+ document.addEventListener('mouseup', handleMouseUp)
+
+ // Set cursor and disable text selection during drag
+ const cursor = direction === 'horizontal' ? 'col-resize' : 'row-resize'
+ document.body.style.cursor = cursor
+ document.body.style.userSelect = 'none'
+
+ return () => {
+ document.removeEventListener('mousemove', handleMouseMove)
+ document.removeEventListener('mouseup', handleMouseUp)
+ document.body.style.cursor = ''
+ document.body.style.userSelect = ''
+ }
+ }
+ }, [isDragging, handleMouseMove, handleMouseUp, direction])
+
+ // Generate className for resize handle based on direction and handle position
+ const getHandleClassName = useCallback(() => {
+ const baseClasses = 'absolute cursor-col-resize group transition-colors'
+ const hoverClasses = 'hover:bg-border'
+ const activeClasses = isDragging ? 'bg-border' : ''
+
+ let positionClasses = ''
+ let sizeClasses = ''
+
+ if (direction === 'horizontal') {
+ sizeClasses = 'w-1 h-full'
+ if (handle === 'right') {
+ positionClasses = 'top-0 right-0'
+ } else {
+ positionClasses = 'top-0 left-0'
+ }
+ } else {
+ sizeClasses = 'h-1 w-full'
+ if (handle === 'bottom') {
+ positionClasses = 'bottom-0 left-0'
+ } else {
+ positionClasses = 'top-0 left-0'
+ }
+ }
+
+ return `${baseClasses} ${hoverClasses} ${activeClasses} ${positionClasses} ${sizeClasses}`.trim()
+ }, [direction, handle, isDragging])
+
+ const resizeHandleProps = {
+ onMouseDown: handleMouseDown,
+ onDoubleClick: handleDoubleClick,
+ className: getHandleClassName()
+ }
+
+ return {
+ width,
+ setWidth,
+ resizeHandleProps,
+ isDragging
+ }
+}
\ No newline at end of file
diff --git a/web/lib/awareness-user.ts b/web/lib/awareness-user.ts
new file mode 100644
index 00000000..986edbac
--- /dev/null
+++ b/web/lib/awareness-user.ts
@@ -0,0 +1,55 @@
+/**
+ * Predefined color palette for user presence indicators.
+ * Colors are chosen to be visually distinct and accessible.
+ */
+const PRESENCE_COLORS = [
+ '#FF6B6B', // Red
+ '#4ECDC4', // Cyan
+ '#45B7D1', // Blue
+ '#FFA07A', // Light Salmon
+ '#98D8C8', // Mint
+ '#F7DC6F', // Yellow
+ '#BB8FCE', // Purple
+ '#85C1E2', // Sky Blue
+ '#F8B88B', // Peach
+ '#A8E6CF', // Light Green
+ '#FFD3B6', // Apricot
+ '#FFAAA5', // Pink
+] as const;
+
+const hashString = (input: string): number => {
+ let hash = 0;
+ for (let i = 0; i < input.length; i += 1) {
+ hash = (hash << 5) - hash + input.charCodeAt(i);
+ hash |= 0; // Keep 32-bit
+ }
+ return Math.abs(hash);
+};
+
+export function generateUserColor(seed: string | number): string {
+ const idx = typeof seed === "number" ? seed : hashString(seed);
+ return PRESENCE_COLORS[idx % PRESENCE_COLORS.length];
+}
+
+/**
+ * Generates anonymous user information for awareness.
+ * Creates a consistent identity based on the client ID.
+ *
+ * @param params - user identity inputs
+ * @returns User object with id, name, color, and avatarSeed
+ */
+export function generateAwarenessUser(params: {
+ clientId: number;
+ username?: string | null;
+ userId?: string | number | null;
+}) {
+ const { clientId, username, userId } = params;
+ const name = username?.trim() || `User ${clientId}`;
+ const seed = username?.trim() || String(userId ?? clientId);
+ return {
+ id: userId ?? clientId,
+ name,
+ color: generateUserColor(seed),
+ avatarSeed: seed,
+ };
+}
diff --git a/web/lib/debug-awareness.ts b/web/lib/debug-awareness.ts
new file mode 100644
index 00000000..8c0122b7
--- /dev/null
+++ b/web/lib/debug-awareness.ts
@@ -0,0 +1,75 @@
+/**
+ * Debugging utilities for awareness system.
+ * Use these to diagnose presence and awareness issues.
+ */
+
+/**
+ * Logs all awareness states in a readable format.
+ * Call this in the browser console: window.debugAwareness()
+ */
+export function debugAwarenessStates(provider: any) {
+ console.group('🔍 Awareness Debug');
+
+ const awareness = provider?.awareness;
+ if (!awareness) {
+ console.error('❌ No awareness found on provider');
+ console.groupEnd();
+ return;
+ }
+
+ console.log('📊 Local Client ID:', awareness.clientID);
+ console.log('📊 Total Clients:', awareness.getStates().size);
+
+ console.group('👥 All Clients:');
+ for (const [clientId, state] of awareness.getStates().entries()) {
+ const isSelf = clientId === awareness.clientID;
+ console.group(`${isSelf ? '👤 [SELF]' : '👥'} Client ${clientId}`);
+ console.log('State:', state);
+ console.log('User:', (state as any)?.user);
+ console.log('Editing:', (state as any)?.editing);
+ console.log('Cursor:', (state as any)?.cursor);
+ console.groupEnd();
+ }
+ console.groupEnd();
+
+ console.groupEnd();
+}
+
+/**
+ * Enable continuous awareness monitoring.
+ * Logs every awareness change event.
+ */
+export function enableAwarenessMonitoring(provider: any) {
+ const awareness = provider?.awareness;
+ if (!awareness) {
+ console.error('❌ No awareness found');
+ return () => {};
+ }
+
+ const handler = () => {
+ console.log('🔔 Awareness changed at', new Date().toLocaleTimeString());
+ debugAwarenessStates(provider);
+ };
+
+ awareness.on('change', handler);
+ console.log('✅ Awareness monitoring enabled');
+
+ // Return cleanup function
+ return () => {
+ awareness.off('change', handler);
+ console.log('🛑 Awareness monitoring disabled');
+ };
+}
+
+/**
+ * Exposes debug utilities to window object for easy console access.
+ */
+export function installAwarenessDebugTools(provider: any) {
+ if (typeof window !== 'undefined') {
+ (window as any).debugAwareness = () => debugAwarenessStates(provider);
+ (window as any).monitorAwareness = () => enableAwarenessMonitoring(provider);
+ console.log('🛠️ Awareness debug tools installed:');
+ console.log(' - window.debugAwareness() - Show current awareness states');
+ console.log(' - window.monitorAwareness() - Enable continuous monitoring');
+ }
+}
diff --git a/web/lib/errors.ts b/web/lib/errors.ts
new file mode 100644
index 00000000..033faca1
--- /dev/null
+++ b/web/lib/errors.ts
@@ -0,0 +1,19 @@
+import { ApiError } from '@/api-gen'
+
+/**
+ * Normalize various error shapes to a user-friendly message.
+ * - ApiError with string body/message
+ * - native Error
+ * - string fallback
+ */
+export function resolveErrorMessage(error: unknown, fallback: string): string {
+ if (error instanceof ApiError) {
+ const body = error.body
+ if (typeof body === 'string' && body.trim().length > 0) return body
+ if (error.message) return error.message
+ }
+ if (error instanceof Error) return error.message
+ if (typeof error === 'string') return error
+ return fallback
+}
+
diff --git a/web/lib/is-dev.ts b/web/lib/is-dev.ts
new file mode 100644
index 00000000..4953e8ac
--- /dev/null
+++ b/web/lib/is-dev.ts
@@ -0,0 +1 @@
+export const isDev = process.env.NODE_ENV !== "production";
\ No newline at end of file
diff --git a/web/lib/jotai.ts b/web/lib/jotai.ts
new file mode 100644
index 00000000..23a4cd8d
--- /dev/null
+++ b/web/lib/jotai.ts
@@ -0,0 +1,40 @@
+import type { Atom, PrimitiveAtom } from 'jotai'
+import { createStore, useAtom, useAtomValue, useSetAtom } from 'jotai'
+import { selectAtom } from 'jotai/utils'
+import { useCallback } from 'react'
+
+export const jotaiStore = createStore()
+
+export const createAtomAccessor = <T,>(atom: PrimitiveAtom<T>) =>
+ [
+ () => jotaiStore.get(atom),
+ (value: T) => jotaiStore.set(atom, value),
+ ] as const
+
+const options = { store: jotaiStore }
+/**
+ * @param atom - jotai
+ * @returns - [atom, useAtom, useAtomValue, useSetAtom, jotaiStore.get, jotaiStore.set]
+ */
+export const createAtomHooks = <T,>(atom: PrimitiveAtom<T>) =>
+ [
+ atom,
+ () => useAtom(atom, options),
+ () => useAtomValue(atom, options),
+ () => useSetAtom(atom, options),
+ ...createAtomAccessor(atom),
+ ] as const
+
+export const createAtomSelector = <T,>(atom: Atom<T>) => {
+
+  const useHook = <R,>(selector: (a: T) => R, deps: any[] = []) =>
+ useAtomValue(
+ selectAtom(
+ atom,
+ useCallback((a) => selector(a as T), deps),
+ ),
+ )
+
+ useHook.__atom = atom
+ return useHook
+}
\ No newline at end of file
diff --git a/web/lib/jwt.ts b/web/lib/jwt.ts
new file mode 100644
index 00000000..c76a7c93
--- /dev/null
+++ b/web/lib/jwt.ts
@@ -0,0 +1,88 @@
+const base64UrlDecode = (input: string) => {
+ const normalized = input.replace(/-/g, '+').replace(/_/g, '/')
+ const padding = (4 - (normalized.length % 4)) % 4
+ const padded = normalized.padEnd(normalized.length + padding, '=')
+
+ try {
+ if (typeof window !== 'undefined' && typeof window.atob === 'function') {
+ return window.atob(padded)
+ }
+
+ if (typeof Buffer !== 'undefined') {
+ return Buffer.from(padded, 'base64').toString('binary')
+ }
+ } catch {
+ return ''
+ }
+
+ return ''
+}
+
+const decodeBinaryString = (binary: string) => {
+ try {
+ if (typeof TextDecoder !== 'undefined') {
+ const bytes = Uint8Array.from(binary, char => char.charCodeAt(0))
+ return new TextDecoder().decode(bytes)
+ }
+
+ if (typeof Buffer !== 'undefined') {
+ return Buffer.from(binary, 'binary').toString('utf-8')
+ }
+ } catch {
+ return ''
+ }
+
+ return ''
+}
+
+export const decodeJwtPayload = (token: string | null | undefined): Record<string, unknown> | null => {
+ if (!token) {
+ return null
+ }
+
+ try {
+ const segments = token.split('.')
+ if (segments.length < 2) {
+ return null
+ }
+
+ const binary = base64UrlDecode(segments[1])
+ if (!binary) {
+ return null
+ }
+
+ const json = decodeBinaryString(binary)
+ if (!json) {
+ return null
+ }
+
+ return JSON.parse(json)
+ } catch {
+ return null
+ }
+}
+
+export const readPayloadFieldAsString = (
+  payload: Record<string, unknown> | null,
+ keys: string[],
+): string | null => {
+ if (!payload) {
+ return null
+ }
+
+ for (const key of keys) {
+ const value = payload[key]
+ if (typeof value === 'string') {
+ const trimmed = value.trim()
+ if (trimmed.length > 0) {
+ return trimmed
+ }
+ }
+
+ if (typeof value === 'number' || typeof value === 'bigint') {
+ return value.toString()
+ }
+ }
+
+ return null
+}
diff --git a/web/lib/network-config.ts b/web/lib/network-config.ts
index 9d8fe29c..0f12a1f8 100644
--- a/web/lib/network-config.ts
+++ b/web/lib/network-config.ts
@@ -8,7 +8,7 @@ const DEFAULT_INTERNAL_BASE = 'http://localhost:30080/api/v1'
const DEFAULT_EXTERNAL_BASE = 'http://localhost:8020/api/v1'
-const resolveAccessToken = async (_options?: any): Promise => {
+const resolveAccessToken = async (): Promise<string> => {
const current = getAuthState()
return current?.user?.accessToken ?? ''
}
diff --git a/web/lib/shiki-highlighter.ts b/web/lib/shiki-highlighter.ts
new file mode 100644
index 00000000..a66cce24
--- /dev/null
+++ b/web/lib/shiki-highlighter.ts
@@ -0,0 +1,156 @@
+import {
+ type BundledHighlighterOptions,
+ type BundledLanguage,
+ type CodeOptionsMeta,
+ type CodeOptionsThemes,
+ type CodeToHastOptionsCommon,
+ type Highlighter,
+} from 'shiki';
+import type { BundledTheme } from 'shiki/themes';
+import {
+ type Components,
+ type Options as ToJsxOptions,
+ toJsxRuntime,
+} from 'hast-util-to-jsx-runtime';
+import { Fragment, type ReactNode } from 'react';
+import { jsx, jsxs } from 'react/jsx-runtime';
+import type { Root } from 'hast';
+
+export const defaultThemes = {
+ light: 'github-light',
+ dark: 'github-dark',
+};
+
+export type HighlightOptionsCommon = CodeToHastOptionsCommon &
+ CodeOptionsMeta & {
+ /**
+ * The Regex Engine for Shiki
+ *
+ * @defaultValue 'js'
+ */
+ engine?: 'js' | 'oniguruma';
+    components?: Partial<Components>;
+
+ fallbackLanguage?: BundledLanguage;
+ };
+
+export type HighlightOptionsThemes = CodeOptionsThemes<BundledTheme>;
+
+export type HighlightOptions = HighlightOptionsCommon &
+  (HighlightOptionsThemes | Record<never, never>);
+
+const highlighters = new Map<string, Promise<Highlighter>>();
+
+export async function highlightHast(
+ code: string,
+ options: HighlightOptions,
+): Promise<Root> {
+ const {
+ lang: initialLang,
+ fallbackLanguage,
+ engine = 'oniguruma',
+ ...rest
+ } = options;
+ let lang = initialLang;
+  let themes: CodeOptionsThemes<BundledTheme>;
+ let themesToLoad;
+
+ if ('theme' in options && options.theme) {
+ themes = { theme: options.theme };
+ themesToLoad = [themes.theme];
+ } else {
+ themes = {
+ themes:
+ 'themes' in options && options.themes ? options.themes : defaultThemes,
+ };
+ themesToLoad = Object.values(themes.themes).filter((v) => v !== undefined);
+ }
+
+ const highlighter = await getHighlighter(engine, {
+ langs: [],
+ themes: themesToLoad,
+ });
+
+ try {
+ await highlighter.loadLanguage(lang as BundledLanguage);
+ } catch {
+ lang = fallbackLanguage ?? 'text';
+ await highlighter.loadLanguage(lang as BundledLanguage);
+ }
+
+ return highlighter.codeToHast(code, {
+ lang,
+ ...rest,
+ ...themes,
+ defaultColor: 'themes' in themes ? false : undefined,
+ });
+}
+
+export function hastToJsx(hast: Root, options?: Partial<ToJsxOptions>) {
+ return toJsxRuntime(hast, {
+ jsx,
+ jsxs,
+ development: false,
+ Fragment,
+ ...options,
+ });
+}
+
+/**
+ * Get Shiki highlighter instance (singleton pattern).
+ *
+ * @param engineType - Shiki Regex engine to use.
+ * @param options - Shiki options.
+ */
+export async function getHighlighter(
+ engineType: 'js' | 'oniguruma',
+ options: Omit<
+    BundledHighlighterOptions<BundledLanguage, BundledTheme>,
+ 'engine'
+ >,
+) {
+ const { createHighlighter } = await import('shiki');
+ let highlighter = highlighters.get(engineType);
+
+ if (!highlighter) {
+ let engine;
+
+ if (engineType === 'js') {
+ engine = import('shiki/engine/javascript').then((res) =>
+ res.createJavaScriptRegexEngine(),
+ );
+ } else {
+ engine = import('shiki/engine/oniguruma').then((res) =>
+ res.createOnigurumaEngine(import('shiki/wasm')),
+ );
+ }
+
+ highlighter = createHighlighter({
+ ...options,
+ engine,
+ });
+
+ highlighters.set(engineType, highlighter);
+ return highlighter;
+ }
+
+ return highlighter.then(async (instance) => {
+ await Promise.all([
+ // @ts-expect-error unknown
+ instance.loadLanguage(...options.langs),
+ // @ts-expect-error unknown
+ instance.loadTheme(...options.themes),
+ ]);
+
+ return instance;
+ });
+}
+
+export async function highlight(
+ code: string,
+ options: HighlightOptions,
+): Promise<ReactNode> {
+ return hastToJsx(await highlightHast(code, options), {
+ components: options.components,
+ });
+}
diff --git a/web/lib/state/proxy-with-local-storage.ts b/web/lib/state/proxy-with-local-storage.ts
new file mode 100644
index 00000000..4f0c81d6
--- /dev/null
+++ b/web/lib/state/proxy-with-local-storage.ts
@@ -0,0 +1,61 @@
+import { proxy, subscribe, snapshot } from 'valtio'
+
+import { readJsonStorage, writeJsonStorage } from '@/lib/storage'
+
+const isBrowser = typeof window !== 'undefined'
+
+const cloneState = <T,>(value: T): T =>
+  JSON.parse(JSON.stringify(value)) as T
+
+const isPlainObject = (value: unknown): value is Record<string, unknown> =>
+ typeof value === 'object' && value !== null && !Array.isArray(value)
+
+export interface ProxyWithLocalStorageOptions<State extends object> {
+  key: string
+  defaultState: State
+  persistKeys?: (keyof State)[]
+}
+
+/**
+ * Create a Valtio proxy that hydrates from and persists to localStorage.
+ * Only runs browser-side; on the server it falls back to the provided defaults.
+ */
+export const proxyWithLocalStorage = <State extends object>({
+  key,
+  defaultState,
+  persistKeys,
+}: ProxyWithLocalStorageOptions<State>) => {
+ const state = proxy(cloneState(defaultState))
+
+ if (isBrowser) {
+    const stored = readJsonStorage<Partial<State>>(key)
+ if (stored) {
+ const allowedKeys = persistKeys ? new Set(persistKeys) : null
+ for (const [rawKey, rawValue] of Object.entries(stored)) {
+ const keyTyped = rawKey as keyof State
+ if (allowedKeys && !allowedKeys.has(keyTyped)) continue
+ const currentValue = state[keyTyped]
+ const nextValue = rawValue as State[keyof State]
+
+ if (isPlainObject(currentValue) && isPlainObject(nextValue)) {
+ Object.assign(currentValue, nextValue)
+ } else {
+ state[keyTyped] = nextValue
+ }
+ }
+ }
+
+ subscribe(state, () => {
+ const snap = snapshot(state) as State
+ const data = persistKeys
+ ? persistKeys.reduce((acc, keyName) => {
+ acc[keyName] = snap[keyName]
+ return acc
+ }, {} as Partial)
+ : snap
+ writeJsonStorage(key, data)
+ })
+ }
+
+ return state
+}
diff --git a/web/lib/storage.ts b/web/lib/storage.ts
index af161590..7305816c 100644
--- a/web/lib/storage.ts
+++ b/web/lib/storage.ts
@@ -1,30 +1,36 @@
-import { Credentials } from '@/api-anclax'
import type { AuthUser } from '@/atoms/auth'
const isBrowser = typeof window !== 'undefined'
-const AUTH_STORAGE_KEY = 'user'
-
-export const getStoredAuthUser = (): AuthUser | null => {
+export const readJsonStorage = <T,>(key: string): T | null => {
if (!isBrowser) return null
try {
- const raw = window.localStorage.getItem(AUTH_STORAGE_KEY)
+ const raw = window.localStorage.getItem(key)
if (!raw) return null
- return JSON.parse(raw) as AuthUser
+ return JSON.parse(raw) as T
} catch {
return null
}
}
-export const setStoredAuthUser = (user: AuthUser | null): void => {
+export const writeJsonStorage = <T,>(key: string, value: T | null): void => {
if (!isBrowser) return
try {
- if (!user) {
- window.localStorage.removeItem(AUTH_STORAGE_KEY)
- } else {
- window.localStorage.setItem(AUTH_STORAGE_KEY, JSON.stringify(user))
+ if (value === null) {
+ window.localStorage.removeItem(key)
+ return
}
+ window.localStorage.setItem(key, JSON.stringify(value))
} catch {
// ignore storage errors
}
}
+
+const AUTH_STORAGE_KEY = 'user'
+
+export const getStoredAuthUser = (): AuthUser | null =>
+  readJsonStorage<AuthUser>(AUTH_STORAGE_KEY)
+
+export const setStoredAuthUser = (user: AuthUser | null): void => {
+ writeJsonStorage(AUTH_STORAGE_KEY, user)
+}
diff --git a/web/lib/time.ts b/web/lib/time.ts
new file mode 100644
index 00000000..528e3d90
--- /dev/null
+++ b/web/lib/time.ts
@@ -0,0 +1,14 @@
+export function formatRelativeTime(timestamp: number | null): string {
+ if (!timestamp) return ''
+
+ const now = Date.now()
+ const diff = now - timestamp
+ const seconds = Math.floor(diff / 1000)
+ const minutes = Math.floor(seconds / 60)
+ const hours = Math.floor(minutes / 60)
+
+ if (seconds < 5) return 'just now'
+ if (seconds < 60) return `${seconds}s ago`
+ if (minutes < 60) return `${minutes}m ago`
+ return `${hours}h ago`
+}
\ No newline at end of file
diff --git a/web/lib/utils.ts b/web/lib/utils.ts
index 266a2b23..21efbf18 100644
--- a/web/lib/utils.ts
+++ b/web/lib/utils.ts
@@ -5,6 +5,10 @@ export function cn(...inputs: ClassValue[]) {
return twMerge(clsx(inputs))
}
+export function isSSR() {
+ return typeof window === "undefined"
+}
+
export function concatenate(buf1: Uint8Array, buf2: Uint8Array) {
const ret = new Uint8Array(buf1.length + buf2.length);
ret.set(buf1);
diff --git a/web/modules/database/queries/databases.ts b/web/modules/database/queries/databases.ts
new file mode 100644
index 00000000..62b4ea75
--- /dev/null
+++ b/web/modules/database/queries/databases.ts
@@ -0,0 +1,45 @@
+import { useQuery } from "@tanstack/react-query";
+import type { Database } from "@/api-gen/models/Database";
+import type { Schema } from "@/api-gen/models/Schema";
+import { DefaultService } from "@/api-gen/services/DefaultService";
+
+export const databaseKeys = {
+ all: ["databases"] as const,
+ list: () => ["databases", "list"] as const,
+ detail: (id: number) => ["databases", "detail", id] as const,
+};
+
+const DISABLED_DATABASE_DETAIL_KEY = ["databases", "detail", "disabled"] as const;
+
+const createDatabaseDetailQueryOptions = (databaseId: number | null) => ({
+ queryKey: databaseId != null ? databaseKeys.detail(databaseId) : DISABLED_DATABASE_DETAIL_KEY,
+ enabled: databaseId != null,
+  queryFn: async (): Promise<Database> => {
+ if (databaseId == null) {
+ throw new Error("Database ID is required to fetch details");
+ }
+ return DefaultService.getDatabase(databaseId);
+ },
+ staleTime: 1000 * 30,
+});
+
+export const useDatabasesQuery = () =>
+ useQuery({
+ queryKey: databaseKeys.list(),
+ queryFn: async () => {
+ const data = await DefaultService.listDatabases();
+ return [...data].sort((a, b) => a.name.localeCompare(b.name));
+ },
+ staleTime: 1000 * 60,
+ });
+
+export const useDatabaseInfoQuery = (databaseId: number | null) =>
+ useQuery({
+ ...createDatabaseDetailQueryOptions(databaseId),
+ });
+
+export const useDatabaseSchemaQuery = (databaseId: number | null) =>
+ useQuery({
+ ...createDatabaseDetailQueryOptions(databaseId),
+ select: (database) => database.schemas ?? [],
+ });
diff --git a/web/modules/notebook/awareness/hooks.ts b/web/modules/notebook/awareness/hooks.ts
new file mode 100644
index 00000000..1bb774f1
--- /dev/null
+++ b/web/modules/notebook/awareness/hooks.ts
@@ -0,0 +1,90 @@
+"use client";
+
+import { useCallback, useMemo } from "react";
+import { useSnapshot } from "valtio";
+import { useNotebookRuntime } from "../providers/notebook-runtime-context";
+import {
+ awarenessState,
+ type AwarenessCursorState,
+ type AwarenessEditingState,
+ type AwarenessPresence,
+} from "./state";
+
+export function useNotebookPresence(): AwarenessPresence[] {
+ const snapshot = useSnapshot(awarenessState);
+ return useMemo(() => Array.from(snapshot.presences.values()), [snapshot.presences]);
+}
+
+export function useSelfPresence(): AwarenessPresence | null {
+ const snapshot = useSnapshot(awarenessState);
+ return useMemo(() => {
+ if (snapshot.selfId == null) {
+ return null;
+ }
+ return snapshot.presences.get(snapshot.selfId) ?? null;
+ }, [snapshot.presences, snapshot.selfId]);
+}
+
+export function useCellPresence(cellId: string): AwarenessPresence[] {
+ const presences = useNotebookPresence();
+ return useMemo(
+ () =>
+ presences.filter(
+ (presence) =>
+ presence.editing?.cellId === cellId || presence.cursor?.cellId === cellId,
+ ),
+ [cellId, presences],
+ );
+}
+
+export function useSelfEditing(): [
+ AwarenessEditingState | null,
+ (state: AwarenessEditingState | null) => void,
+] {
+ const selfPresence = useSelfPresence();
+ const { setEditingState } = useAwarenessActions();
+
+ const editingState = useMemo(() => selfPresence?.editing ?? null, [selfPresence]);
+
+ return [editingState, setEditingState];
+}
+
+export function useSelfCursor(): [
+ AwarenessCursorState | null,
+ (state: AwarenessCursorState | null) => void,
+] {
+ const selfPresence = useSelfPresence();
+ const { setCursorState } = useAwarenessActions();
+
+ const cursorState = useMemo(() => selfPresence?.cursor ?? null, [selfPresence]);
+
+ return [cursorState, setCursorState];
+}
+
+export function useAwarenessActions() {
+ const { resource } = useNotebookRuntime();
+
+ const setEditingState = useCallback(
+ (state: AwarenessEditingState | null) => {
+ resource.provider.awareness.setLocalStateField("editing", state);
+ },
+ [resource],
+ );
+
+ const setCursorState = useCallback(
+ (state: AwarenessCursorState | null) => {
+ resource.provider.awareness.setLocalStateField("cursor", state);
+ },
+ [resource],
+ );
+
+ return { setEditingState, setCursorState };
+}
+
+export function getAwarenessSelectionClass(color: string): string {
+ return `y-selection-${color.replace(/[^a-zA-Z0-9-]/g, "")}`;
+}
+
+export function getAwarenessCaretClass(color: string): string {
+ return `y-caret-${color.replace(/[^a-zA-Z0-9-]/g, "")}`;
+}
diff --git a/web/modules/notebook/awareness/index.ts b/web/modules/notebook/awareness/index.ts
new file mode 100644
index 00000000..f805b219
--- /dev/null
+++ b/web/modules/notebook/awareness/index.ts
@@ -0,0 +1,2 @@
+export * from "./state";
+export * from "./hooks";
diff --git a/web/modules/notebook/awareness/state.ts b/web/modules/notebook/awareness/state.ts
new file mode 100644
index 00000000..e4c273e2
--- /dev/null
+++ b/web/modules/notebook/awareness/state.ts
@@ -0,0 +1,66 @@
+import { proxy } from "valtio";
+import { proxyMap } from "valtio/utils";
+
+export interface AwarenessUser {
+ id: number | null;
+ name: string;
+ color: string;
+ avatarSeed: string;
+}
+
+export interface AwarenessEditingState {
+ cellId: string;
+ origin: string;
+}
+
+export interface AwarenessCursorState {
+ cellId: string;
+ selections: Array<{
+ t: "y-rel-b64";
+ anchor: string;
+ head: string;
+ }>;
+}
+
+export interface AwarenessPresence {
+ clientId: number;
+ isSelf: boolean;
+ user: AwarenessUser;
+ editing: AwarenessEditingState | null;
+ cursor: AwarenessCursorState | null;
+}
+
+const presences = proxyMap<number, AwarenessPresence>();
+
+export const awarenessState = proxy({
+ presences,
+ selfId: null as number | null,
+});
+
+export type AwarenessState = typeof awarenessState;
+
+export function applyAwarenessSnapshot(options: {
+ entries: Iterable<[number, AwarenessPresence]>;
+ selfId: number | null;
+}) {
+ const { entries, selfId } = options;
+  const seen = new Set<number>();
+
+ for (const [clientId, presence] of entries) {
+ seen.add(clientId);
+ awarenessState.presences.set(clientId, presence);
+ }
+
+ for (const key of Array.from(awarenessState.presences.keys())) {
+ if (!seen.has(key)) {
+ awarenessState.presences.delete(key);
+ }
+ }
+
+ awarenessState.selfId = selfId;
+}
+
+export function resetAwarenessState() {
+ awarenessState.presences.clear();
+ awarenessState.selfId = null;
+}
diff --git a/web/modules/notebook/collab/yjs/developer-guide.md b/web/modules/notebook/collab/yjs/developer-guide.md
new file mode 100644
index 00000000..37a4aa43
--- /dev/null
+++ b/web/modules/notebook/collab/yjs/developer-guide.md
@@ -0,0 +1,816 @@
+# Y.js Notebook CRDT Schema: Developer's Guide
+
+## 1. Introduction
+
+This document outlines the architecture and best practices for using the Y.js-based collaborative notebook schema. This schema is designed for real-time, multi-user editing of notebooks, providing a robust "Map-Reduce"-like data model that includes features such as cell management, code execution state, soft deletion, and a scoped undo/redo system.
+
+Understanding this architecture is key to building a stable and predictable user experience.
+
+## 2. Schema Architecture
+
+Our Y.js document is more than just a collection of keys; it's a structured system where different parts are intentionally separated to manage complexity, control the undo history, and optimize performance.
+
+The root of the document is a `Y.Map` accessible via the key `rw-notebook-root`. All notebook data resides within this root map.
+
+### 2.1. Top-Level Structure
+
+The root map contains several key top-level entries:
+
+| Key (`/src/yjs/schema/core/keys.ts`) | Type | Description |
+| :--- | :--- | :--- |
+| `NB_ID`, `NB_TITLE`, `NB_DATABASE_ID` | `string` | Scalar properties of the notebook. |
+| `NB_TAGS` | `Y.Array` | A list of notebook tags. |
+| `NB_METADATA` | `Y.Map` | Application-specific metadata (e.g., `appVersion`). |
+| `NB_CELL_MAP` | `Y.Map` | **The core storage for all cells.** Maps a unique `cellId` to its `YCell` object. |
+| `NB_CELL_ORDER` | `Y.Array` | **Defines the notebook's layout.** An array of `cellId`s that dictates the visible order of cells. |
+| `NB_OUTPUTS` | `Y.Map`| **Decoupled execution results.** Maps a `cellId` to its output state (running, stale, result, etc.). |
+| `NB_TOMBSTONES` | `Y.Map` | **Soft-delete flags.** Maps a `cellId` to `true` if it has been soft-deleted. |
+| `NB_TOMBSTONE_META` | `Y.Map` | **Metadata for soft-deleted cells** (e.g., deletion timestamp, reason). |
+| `SCHEMA_META_KEY` | `Y.Map` | Internal schema metadata, primarily for tracking the schema `version`. |
+
+### 2.2. Cell Management: The "Map + Order" Pattern
+
+A core design choice is the separation of cell content from cell order.
+
+- **`NB_CELL_MAP` (The "What"):** This `Y.Map` acts as a key-value store where every cell, identified by a unique `cellId`, lives. This allows for constant-time `O(1)` lookups of any cell's data, which is highly efficient. A `YCell` itself is a `Y.Map` containing its `source` (`Y.Text`), `kind`, `metadata`, etc.
+
+- **`NB_CELL_ORDER` (The "Where"):** This `Y.Array` stores only the `cellId` strings. Its sequence represents the rendered order of cells in the UI.
+
+**Why this pattern?**
+1. **Efficiency:** Moving a cell doesn't require moving its entire content blob (which could be large). You only need to move a small string ID within the `NB_CELL_ORDER` array.
+2. **Atomicity:** An operation like moving a cell is a single, clean modification to one array, making it a discrete, undoable action.
+3. **Data Integrity:** It allows for "orphan" cells (in the map but not the order), which can be useful for recovery or specific application logic. The provided `reconcileNotebook` tool can clean these up.
+
+### 2.3. Decoupled Outputs: The `NB_OUTPUTS` Map
+
+Critically, a cell's execution output **is not** stored inside its `YCell` in the `NB_CELL_MAP`. Instead, it lives in the separate, top-level `NB_OUTPUTS` map.
+
+**Why are outputs separate?**
+1. **Undo/Redo Control:** The `UndoManager` is configured to track changes to `NB_CELL_MAP` and `NB_CELL_ORDER` but **not** `NB_OUTPUTS`. This prevents a user's "undo" action (e.g., undoing a text change) from accidentally reverting a cell's execution result.
+2. **Separation of Concerns:** Cell content is the user's domain. Execution output is the system's response. This separation keeps the data model clean.
+3. **Performance:** Outputs can be large. Keeping them separate avoids bloating the `YCell` objects and allows for more granular loading strategies if needed.
+
+### 2.4. Soft Deletion & Vacuuming: The Tombstone Lifecycle
+
+Deleting a cell is a multi-stage, non-destructive process.
+
+1. **Soft Deletion (`softDeleteCell`):** When a user "deletes" a cell, it is not removed from the document. Instead:
+ * Its `cellId` is removed from `NB_CELL_ORDER` (disappears from the UI).
+ * A flag is set in `NB_TOMBSTONES` (`tombstones.set(cellId, true)`).
+ * Metadata (like `deletedAt` timestamp) is added to `NB_TOMBSTONE_META`.
+ * The actual cell data remains in `NB_CELL_MAP`. This makes the "delete" action easily **undoable** and allows for a "trash" or "restore" feature.
+
+2. **Vacuuming (`vacuumNotebook`):** This is a destructive, non-reversible maintenance operation. It scans for tombstoned cells that meet specific criteria (e.g., have been deleted for more than 30 days, have a trusted timestamp).
+ * If conditions are met, the cell is permanently deleted from `NB_CELL_MAP`, `NB_TOMBSTONES`, `NB_TOMBSTONE_META` and `NB_OUTPUTS`.
+ * This is a cleanup task that should be run periodically by a trusted peer or backend service, **not** by a typical client.
+
+## 3. Best Practices & Usage Guide
+
+To interact with the schema, you should almost always use the provided operation functions instead of directly manipulating the Y-types. These functions encapsulate essential logic, such as using the correct `origin` for undo/redo tracking.
+
+### 3.1. Initialization and Migration
+
+1. **Creating a Doc:** Start with a new `Y.Doc()`.
+2. **Bootstrapping:** Call `bootstrapDoc(doc, initialModel?)` to set up the entire schema structure (root key, maps, arrays). This function is idempotent and safe to call on an existing doc. It also enables the `auto-stale` mechanism by default.
+3. **Migration:** **Always** run `migrateNotebookSchema(doc)` after bootstrapping. This ensures the document's schema version is up-to-date with your application code. It will apply any necessary migrations and can also run `reconcileNotebook` to fix integrity issues.
+
+```typescript
+import * as Y from "yjs";
+import { bootstrapDoc, migrateNotebookSchema } from "@/yjs/schema";
+
+const doc = new Y.Doc();
+// Set up the basic structure
+bootstrapDoc(doc, { title: "My New Notebook" });
+// Ensure the schema is up-to-date and consistent
+migrateNotebookSchema(doc, { autoReconcile: true });
+```
+
+### 3.2. Cell Operations (CRUD)
+
+- **Create:** First, create a cell with `createCell(model)`, then insert it into the notebook with `insertCell(nb, cell, index)`.
+
+ ```typescript
+ import { createCell, insertCell } from "@/yjs/schema";
+ const newCell = createCell({ kind: "sql", source: "SELECT * FROM t;" });
+ insertCell(notebookRoot, newCell, 0); // Insert at the top
+ ```
+
+- **Read:** Use the conversion utilities to get plain JavaScript objects suitable for UI rendering. This is crucial for frameworks like React.
+
+ ```typescript
+ import { yNotebookToModel, listCells, yCellToModel } from "@/yjs/schema";
+
+ // Get all cells as an ordered array of models
+ const cellModels = listCells(notebookRoot).map(yCellToModel);
+
+ // Or get the entire notebook model
+ const notebookModel = yNotebookToModel(notebookRoot);
+ ```
+
+- **Update:** To modify a cell's source, get the underlying `Y.Text` object and perform operations on it. The `auto-stale` mechanism will automatically mark the cell's output as stale.
+
+ ```typescript
+ import { getCell } from "@/yjs/schema";
+ import * as Y from "yjs";
+
+ const cell = getCell(notebookRoot, cellId);
+ const source = cell?.get("source") as Y.Text | undefined;
+ source?.insert(0, "-- My comment\n");
+ ```
+
+- **Delete (User Action):** Use `softDeleteCell(nb, cellId)` for all user-initiated deletions. This is the **standard, undoable** way to delete.
+
+- **Move:** Use `moveCell(nb, cellId, toIndex)`.
+
+### 3.3. Code Execution Flow
+
+The `runId` is a key concept for preventing race conditions where a late result from a previous execution overwrites a newer one.
+
+1. **Start Execution:** Call `startExecuteCell(nb, cellId)`. This sets `running: true`, `stale: false`, and generates a unique `runId` for this execution attempt.
+
+2. **Process on Backend:** Send the code to your backend. The backend should ideally receive the `runId`.
+
+3. **Apply Result:** When the backend responds, use `applyExecuteResult` with the `expectedRunId` to safely apply the result.
+
+ ```typescript
+ import { startExecuteCell, applyExecuteResult } from "@/yjs/schema";
+ import { getOutputsMap } from "@/yjs/schema";
+
+ // 1. User clicks "Run"
+ startExecuteCell(notebookRoot, cellId);
+ const runId = getOutputsMap(notebookRoot).get(cellId)?.get("runId");
+
+ // 2. Send to backend...
+ const result = await myApi.execute(source, runId);
+
+ // 3. Backend returns; apply the result
+ applyExecuteResult(notebookRoot, cellId, result, {
+ expectedRunId: runId, // This acts as a concurrency lock
+ });
+
+ // A simpler alternative is available if you don't pass runId around:
+ // applyExecuteResultForCurrentRun(notebookRoot, cellId, result);
+ ```
+
+4. **Deadlock breaker:** `startExecuteCell` records `executedBy` (awareness `clientId`). An execution watchdog runs on the client side: if the executor disappears from awareness for a grace period, it calls `forceStopExecuteCell` (runId-guarded, marks `running=false`, `stale=true`). This keeps spinners from hanging indefinitely while remaining idempotent.
+
+### 3.4. Undo and Redo
+
+Setting up the undo manager is straightforward. It is pre-configured to only track the relevant data structures and origins.
+
+```typescript
+import { createNotebookUndoManager } from "@/yjs/schema";
+
+const undoManager = createNotebookUndoManager(notebookRoot);
+
+// Later, in your UI...
+button.onclick = () => undoManager.undo();
+button2.onclick = () => undoManager.redo();
+```
+
+## 4. API Guide: Public vs. Internal
+
+Treat the schema functions like a library. Some are for general use, others are for internal tooling.
+
+### Recommended Public API (For Application Developers)
+
+These are your primary tools for building notebook features.
+
+- **Setup:** `bootstrapDoc`, `migrateNotebookSchema`
+- **Cell Ops:** `createCell`, `insertCell`, `softDeleteCell`, `restoreCell`, `moveCell`
+- **Execution:** `startExecuteCell`, `applyExecuteResult`, `applyExecuteResultForCurrentRun`
+- **Data Access/Conversion:** `yNotebookToModel`, `yCellToModel`, `yOutputsToModel`, `listCells`, `getCell`, `getOutputsMap`, `getOutputEntry`
+- **Quality/Undo:** `createNotebookUndoManager`
+
+### Internal & Maintenance APIs (Use with Caution)
+
+These functions are powerful but have side effects. They are typically not undoable and can perform destructive actions. They are best suited for maintenance scripts, migrations, or backend processes.
+
+- **`removeCell`**: The "hard delete." It's permanent and not tracked by the undo manager. Use `softDeleteCell` for UI actions.
+- **`reconcileNotebook` / `reconcileOutputs`**: Fixes data inconsistencies. While useful, it makes direct changes under the `MAINT_ORIGIN`. It's a good idea to run this on document load.
+- **`vacuumNotebook`**: Permanently deletes soft-deleted data. **Do not run this on the client-side** unless you have a very specific reason and understand the consequences.
+- **`setTombstoneTimestamp`**: Part of the internal vacuuming lifecycle.
+- **Direct Y-Type Access (`.get`, `.set`, `.push`, `.delete`)**: Bypassing the operation functions means you are responsible for maintaining data integrity and using the correct `origin`. Avoid this for standard features.
+
+## 5. Key Concepts & Potential Pitfalls
+
+### Distinction: `softDeleteCell` vs. `removeCell`
+
+- **`softDeleteCell`**: User-facing "delete". Reversible. Undoable. Hides the cell.
+- **`removeCell`**: Admin/system "hard delete". Irreversible. Not undoable. Permanently removes data from the document.
+
+### Understanding Origins
+
+The `origin` parameter in `doc.transact(fn, origin)` is crucial for controlling the undo manager.
+
+- `USER_ACTION_ORIGIN`: Use this for all user-driven, undoable actions. Most of the public API functions use this by default.
+- `EXECUTION_ORIGIN`: Used by execution functions to prevent changes to outputs (`running`, `stale`, `result`) from being undone.
+- `MAINT_ORIGIN`, `VACUUM_ORIGIN`: Used for maintenance tasks that should **not** be undoable. `reconcile` and `vacuum` use these.
+
+Using the wrong origin can break the undo/redo logic. Stick to the provided operation functions to stay safe.
+
+### Working with Live Data (Y-Types vs. Models)
+
+- **Y-Types (`Y.Map`, `Y.Text`)** are the live, mutable, collaborative data source.
+- **Models (Plain JS Objects)** are immutable snapshots generated by the `y...ToModel` functions.
+
+Your UI should follow this one-way data flow:
+1. Subscribe to changes on the Y.js document.
+2. On change, generate a new `Model` from the `Y-Types`.
+3. Pass this immutable `Model` to your UI components for rendering.
+4. User interaction in the UI calls an **operation function** (`insertCell`, `moveCell`, etc.).
+5. The operation function modifies the `Y-Types`, which triggers a new change event, restarting the cycle.
+
+**Never** store a `Y-Type` in React state. Always convert it to a plain model.
+
+### Data Integrity: `validate` vs. `reconcile`
+
+- `validateNotebook(nb)`: A **read-only** check that returns an array of issues (e.g., duplicate IDs in `order`, orphan cells). It's safe to run anytime for diagnostics.
+- `reconcileNotebook(nb)`: A **write** operation that attempts to automatically **fix** some of the issues found by `validateNotebook`. It is not undoable.
+
+## 6. In-depth Analysis: Design Philosophy and Trade-offs
+
+Our schema is not a collection of arbitrary choices but a deliberately designed system: each decision has its rationale and its trade-offs.
+
+### 6.1. Why Must `NB_OUTPUTS` Be Decoupled? — Precise Control of Undo Scope
+
+This is one of the most critical design decisions in the entire architecture. If states like `result` and `running` are stored directly inside `YCell` (part of `NB_CELL_MAP`), it would lead to disastrous user experience issues.
+
+**Imagine the consequences:**
+`createNotebookUndoManager` is configured to track all changes in `NB_CELL_MAP`.
+1. The user modifies a line of SQL code. This change is captured by the `UndoManager`.
+2. The user clicks "run," and the code executes. `startExecuteCell` and `applyExecuteResult` update the `result` field in `YCell`. These changes would also be captured by the `UndoManager`.
+3. The user realizes a mistake in the code and wants to undo the changes using `Ctrl+Z`.
+4. **The problem arises:** `UndoManager` would roll back both the code and the execution results! The execution results would disappear unexpectedly, which is completely counterintuitive.
+
+**By moving `NB_OUTPUTS` out of `NB_CELL_MAP`, we achieve separation of concerns:**
+- **User Intent (`NB_CELL_MAP`, `NB_CELL_ORDER`):** Users want to control and undo modifications to code, Markdown, and cell order.
+- **System State (`NB_OUTPUTS`):** Code execution is a system response to user intent. It has its own lifecycle and should not be polluted by the user's edit history.
+
+Additionally, we use `EXECUTION_ORIGIN` to tag all writes to `NB_OUTPUTS`. Even if `UndoManager` is mistakenly configured to track `NB_OUTPUTS`, these changes would be ignored, providing a double layer of protection.
+
+### 6.2. Why Use the "Map + Order" Pattern? — UI-oriented "MapReduce"
+
+This pattern can be seen as a "MapReduce" approach to front-end data processing.
+
+- **`NB_CELL_MAP` (Map Phase):** This is the complete data set (Data Source). It contains all cell information, whether visible, soft-deleted, etc., enabling `O(1)` time complexity for cell data access.
+
+- **`NB_CELL_ORDER` (Reduce Phase):** This is a "view" or "index" (View/Index). It "extracts" and "sorts" a subset of data from the complete data set for UI rendering.
+
+**This pattern offers significant advantages in complex operations:**
+- **Move Cell (`moveCell`):** This is a `delete` + `insert` operation on the small array `NB_CELL_ORDER`, which is low-cost and strongly atomic. If cell data and order were coupled, moving a large cell would involve extensive data copying and moving, which is inefficient and error-prone.
+- **Concurrent Editing:** Suppose user A is editing the content of cell `C1` (modifying `Y.Text` in `NB_CELL_MAP` for `C1`), while user B moves `C1` (modifying `NB_CELL_ORDER`). Since both operations target different parts of the Y.js document, Y.js can merge these changes without conflict.
+
+### 6.3. The Trade-offs of the Tombstone Mechanism
+
+Tombstone provides recoverable soft deletion, a powerful feature, but it comes with costs.
+
+- **Benefits:** Undoing deletions becomes simple. Implementing a "recycle bin" feature is possible. In collaborative scenarios, a user's delete action doesn't immediately destroy data another user might be referencing.
+- **Cost/Trade-off:** Document size will only grow. Soft-deleted cell data remains in `NB_CELL_MAP` until `vacuumNotebook` is executed. This necessitates a reliable backend maintenance task or a trusted client to periodically perform "garbage collection" to prevent document bloat.
+
+## 7. In-depth Analysis: Undo/Redo Subsystem (`NotebookUndoHistory`)
+
+We didn't just use Y.js's `UndoManager` but built a semantic layer `NotebookUndoHistory` (`/src/yjs/undo/notebookUndoHistory.ts`) on top of it to provide richer, business-need-oriented historical snapshots.
+
+**Limitations of Y.js `UndoManager`:**
+- Its undo stack items are very "primitive." A user typing "hello" might leave 5 separate, scattered `insert` operations in the stack.
+- Its `origin` filtering is "all-or-nothing."
+- It doesn't provide an easy-to-present, human-readable change summary for UI.
+
+**How Does `NotebookUndoHistory` Enhance It?**
+
+1. **Listening and Capturing:** It listens to `UndoManager`'s `stack-item-added` and `stack-item-updated` events. When one or more consecutive user actions (within a `captureTimeout`) are merged into an undo-stack-item, this class intervenes.
+
+2. **Event Description (`describeEvent`):** It traverses all Y.js low-level events (`Y.YMapEvent`, `Y.YTextEvent`, etc.) in the `stackItem` and calls helper functions like `summarizeMapChange` and `summarizeTextChange` to **translate** these low-level events into human-readable descriptions.
+ - `Y.YTextEvent` → `" +18 -5 “some insightful words” "`
+ - `Y.YArrayEvent` → `" +1 (Map), -2 "`
+ - `Y.YMapEvent` → `" ~Metadata, +Source "`
+
+3. **Transaction Aggregation (`captureTransaction`):** It aggregates all change descriptions in a single transaction into an `UndoScopeTransaction` object, attaching timestamps and `origin` descriptions.
+
+4. **Scope and Snapshot (`UndoHistorySnapshot`):** Each `stackItem` in `UndoManager` is treated as a "Scope." `NotebookUndoHistory` attaches metadata to this Scope and aggregates multiple consecutive transactions (if they belong to the same Scope). Ultimately, it generates a complete `UndoHistorySnapshot` with `undo` and `redo` arrays, each containing an `UndoScopeSummary` with detailed, UI-consumable transaction lists and change summaries.
+
+This class is a crucial bridge between low-level CRDT operations and upper-level UI (e.g., "History" panel), transforming raw, chaotic change streams into structured, meaningful historical records.
+
+## 8. In-depth Analysis: Responsive "Auto-Stale" Mechanism
+
+`enableAutoStaleOnSource` (`/src/yjs/schema/quality/auto_stale.ts`) is a sophisticated automation tool for maintaining data consistency: when code changes, the corresponding output should be marked as "stale."
+
+**How Does It Work? — A Multi-layered Listening Architecture**
+
+1. **Preventing Duplicate Bindings (`BOUND_DOCS`):** It uses a `WeakSet` to record already bound `Y.Doc`. The `WeakSet` ensures if `Y.Doc` objects are garbage collected, references in `WeakSet` are automatically removed, elegantly preventing memory leaks and duplicate bindings.
+
+2. **Listening to Cell Birth and Death (`onMapChange`):**
+ - It first registers a listener on `NB_CELL_MAP`.
+ - When a new cell is **added** (`add`) or **updated** (`update`) to `cellMap`, it calls `bindCell` on the new `YCell` instance, incorporating the new cell into monitoring.
+ - When a cell is **deleted** (`delete`), it finds and **cleans up** all listeners associated with that `cellId` to prevent memory leaks.
+
+3. **Listening to Changes in Cell Pointers (`onCellKeyChange`):**
+ - The `bindCell` function registers a listener on each `YCell` instance.
+ - This listener specifically monitors the `CELL_SOURCE` key. If `CELL_SOURCE` is replaced from one `Y.Text` instance to another, it will:
+ - Unbind the listener from the old `Y.Text` instance.
+ - Call `bindText` to register a new listener on the new `Y.Text` instance.
+
+4. **Listening to Actual Text Content Changes (`onTextChange`):**
+ - The `bindText` function registers the final listener on `Y.Text` instances.
+ - **Any** modification to `Y.Text` content (`insert`, `delete`) triggers `onTextChange`.
+ - This function immediately calls `markCellOutputStale(nb, cellId)`, setting the `stale` flag in `NB_OUTPUTS` for the corresponding cell to `true`.
+
+Through this precise chain of listeners, the system can automatically and accurately maintain the `stale` status whether the cell is newly created, the source is entirely replaced, or the source content is modified in fragments. The `disable()` return function provides a "one-click clean-up" ability for all listeners, crucial when components are unmounted or documents are closed.
+
+## 9. In-depth Analysis: A Concrete Data Integrity Repair Case
+
+`reconcileNotebook` is the last line of defense for data health. Let's look at a specific scenario.
+
+**Scenario:**
+- User A and User B are collaboratively editing the same Notebook.
+- The network temporarily disconnects.
+- User A creates and inserts a new cell `C3`. Locally, `NB_CELL_MAP` gains `C3`, and `NB_CELL_ORDER` also gets `C3`'s ID.
+- User B, while offline, swaps the order of `C1` and `C2`.
+- The network restores. Y.js begins synchronizing data.
+
+**Possible Issue:**
+Due to Y.js's merge algorithm, an intermediate state might occur: User B's device successfully receives User A's creation of `C3` in `NB_CELL_MAP`, but a conflict in merging changes to the `NB_CELL_ORDER` array prevents `C3`'s ID from being successfully inserted into User B's `NB_CELL_ORDER`.
+
+**Result:**
+- On User B's device, data becomes inconsistent: `C3` **exists** in `cellMap` but is **missing** from the `order` array.
+- **UI Behavior:** Since the UI renders based on the `order` array, User B can't see cell `C3`. `C3` becomes an "orphan" cell.
+
+**How to Resolve?**
+
+1. **Validation (`validateNotebook`):**
+ When the application loads the document, run `validateNotebook(nb)`. It will return an `issues` array containing a warning like:
+ ```json
+ {
+ "path": "cellMap.C3",
+ "level": "warning",
+ "message": "Cell id \"C3\" exists in cellMap but not referenced by order"
+ }
+ ```
+
+2. **Repair (`reconcileNotebook`):**
+ Then call `reconcileNotebook(nb, { appendOrphans: true })`. It performs the following:
+ - Iterates over all keys (`C1`, `C2`, `C3`) in `cellMap`.
+ - Iterates over all IDs (`C2`, `C1`) in `order`.
+ - Identifies `C3` as an "orphan" (in map but not in order).
+ - Appends `C3` to the end of the `order` array.
+ - The entire operation is completed in a `doc.transact`, using `MAINT_ORIGIN`, so it doesn't pollute the user's undo history.
+
+**Final Outcome:**
+`NB_CELL_ORDER` now becomes `['C2', 'C1', 'C3']`. The UI detects the change in the `order` array and re-renders, making cell `C3` visible to User B, restoring data consistency. This is the value `reconcileNotebook` provides.
+
+## 10. Advanced Topic: Concurrency & Conflict Resolution in Practice
+
+The primary reason for using a CRDT like Y.js is its ability to merge concurrent edits without requiring a central server to resolve conflicts. Our schema is designed to leverage this power gracefully.
+
+Let's walk through a classic conflict scenario: **User A edits a line of code that User B simultaneously deletes.**
+
+**Initial State:**
+- The source code in a cell is: `SELECT * FROM users;`
+- This is represented by a `Y.Text` object in the cell's `source`.
+
+**The Scenario:**
+1. User A and User B are both online and have the same document state.
+2. User B goes offline (e.g., closes their laptop).
+3. **User A (Online):** Edits the line to fix a typo, changing it to `SELECT * FROM customers;`.
+ - **Under the hood:** This is not a string replacement. Y.js generates a `delete` operation for `"users"` and an `insert` operation for `"customers"` at a specific position in the `Y.Text` structure.
+4. **User B (Offline):** Decides the entire line is unnecessary and deletes it.
+ - **Under the hood:** User B's client generates a `delete` operation for the entire string `SELECT * FROM users;`.
+5. User B comes back online. Their Y.js client reconnects and starts syncing changes with User A's client.
+
+**The Merge Resolution (What Y.js Does Automatically):**
+
+Y.js's `Y.Text` CRDT doesn't see "lines" or "words"; it sees a sequence of items with unique IDs. When an item is deleted, it's not truly erased but marked as a "tombstone" (a different concept from our schema's `NB_TOMBSTONES`). Inserts are always respected.
+
+1. **User A's change arrives at User B's client:** User B's document sees an instruction to insert `"customers"` and delete `"users"`. Since the characters of `"users"` have already been "deleted" by User B, the delete instruction is simply acknowledged as having been fulfilled. The `insert` for `"customers"` is new information and is applied.
+2. **User B's change arrives at User A's client:** User A's document sees an instruction to delete the characters corresponding to the original `SELECT * FROM users;`. The characters for `"SELECT * FROM "` and `";"` still exist and are deleted. The characters for `"users"` are already gone (replaced by `"customers"`), so that part of the operation has no effect.
+
+**Final, Merged State:**
+The `Y.Text` object will contain:
+`customers`
+
+This might seem surprising, but it's the logically consistent outcome of the CRDT merge algorithm: **inserts always win, and deletes are idempotent.** The final state reflects *both* users' intentions applied to the document: the line was (mostly) deleted, AND the entity name was corrected. There is no data loss and no "conflict resolution" modal for the user to deal with. The schema's job is to let this happen seamlessly.
+
+## 11. Advanced Topic: Performance, Memory, and Scalability
+
+A robust system must also be performant. Here are key considerations for this schema.
+
+### The Necessity of `vacuumNotebook`
+As established, soft-deleting with tombstones causes the document to grow indefinitely. The `vacuumNotebook` function is the only way to reclaim this space.
+
+**Strategy for execution:**
+- **Server-Side is Ideal:** The safest place to run the vacuum is on a server. You could implement a hook that triggers when the last collaborator leaves a document session. The server would load the document, run `vacuumNotebook`, and save the compacted version.
+- **Trusted Client:** In a less-centralized setup, a designated "admin" client or a scheduled maintenance process could be responsible for this.
+- **Frequency:** The `ttlMs` (Time-To-Live) parameter is your control knob. A value of 30 days (`30 * 24 * 3600 * 1000`) is a reasonable default, balancing the ability to restore cells against document growth.
+
+### Why `Y.Text` is Non-Negotiable for Source Code
+It might seem simpler to store `cell.source` as a plain string. This would be a critical mistake.
+- **With a String:** If User A and User B both edit the source concurrently, the last writer would win. The document would receive two `.set("source", "...")` operations. Whichever one is processed last would completely overwrite the other's work. This is a destructive race condition.
+- **With `Y.Text`:** As shown in the concurrency example, `Y.Text` allows for character-level merging. It understands the *intent* of the edits (insertions and deletions at specific points) and merges them non-destructively. For any collaborative text editing, `Y.Text` is essential.
+
+### The Overhead of Observers (`auto-stale`)
+The `enableAutoStaleOnSource` mechanism is powerful but registers many event listeners.
+- **Performance:** Y.js observers are highly optimized. The overhead for a few hundred or even a few thousand cells is negligible on modern devices. The performance cost is not a primary concern for typical notebook sizes.
+- **Memory Management:** The **critical** part is cleanup. Un-disposed observers lead to memory leaks. This is why `enableAutoStaleOnSource` returns a `disable` function.
+ - In a Single-Page Application (SPA) framework like React, you must call this `disable` function in the `useEffect` cleanup return:
+
+ ```javascript
+ useEffect(() => {
+ if (!notebookRoot) return;
+ const disable = enableAutoStaleOnSource(notebookRoot);
+ return () => {
+ disable(); // <-- This is CRUCIAL for preventing memory leaks
+ };
+ }, [notebookRoot]);
+ ```
+
+## 12. Advanced Topic: Extending the Schema
+
+Your application will evolve. A well-designed schema should be extensible without requiring a full rewrite. The key is to follow the existing patterns.
+
+**Scenario: Adding a "Comments" feature to each cell.**
+
+## 13. Awareness Integration Overview
+
+The awareness system is split into two independent layers:
+
+1. **Data Layer (Jotai Atoms):**
+ - `AwarenessProvider` syncs Y.js awareness ↔ Jotai atoms
+ - Core atoms in `atoms/notebook/core/awareness.ts` (primitive storage)
+ - Business atoms in `atoms/notebook/awareness.ts` (derived, cell-scoped subscriptions)
+ - Use `useCellPresence(cellId)` for reactive presence data per cell
+ - Use `useSelfEditing()` and `useSelfCursor()` to update local state
+
+2. **UI Layer (Monaco Integration):**
+ - **Text Sync:** `JotaiMonacoBinding` handles Y.Text ↔ Monaco Model synchronization
+ - Uses `USER_ACTION_ORIGIN` for undo/redo compatibility
+ - Located in `modules/notebook/lib/collaborative-monaco/y-jotai-monaco.ts`
+ - **Awareness UI:** `useMonacoAwareness` hook renders collaborative features
+ - Subscribes to Jotai awareness atoms
+ - Renders remote selection decorations and cursor widgets with labels
+ - Updates local cursor/selection to awareness atoms
+ - Located in `modules/notebook/hooks/useMonacoAwareness.ts`
+
+**Architecture Benefits:**
+- Complete separation of text sync and awareness concerns
+- Custom awareness UI without forking y-monaco
+- Fine-grained Jotai subscriptions (no unnecessary re-renders)
+- Easy testing and customization
+
+Components consume read-only hooks (`useCellPresence`) and write through atoms (`updateAwarenessCursorStateAtom`). Never mutate the underlying Y.js awareness directly—this keeps cleanup centralized and avoids stale state on disconnect.
+
+Let's say you want a simple, threaded comment system for each cell.
+
+### Step 1: Define the Data Structure
+A list of text comments seems appropriate. So, for a given `cellId`, we need a `Y.Array`.
+
+### Step 2: Choose the Location (The Architectural Decision)
+
+Where should this data live?
+
+- **Option 1 (Incorrect):** Directly modifying `YCell` to add `cell.set("comments", new Y.Array())`. Why is this likely wrong? Because `YCell` lives in `NB_CELL_MAP`, which is tracked by the main `UndoManager`. This means adding a comment would become an undoable action *in the same history stack as code edits*. A user trying to undo a code change might accidentally undo adding a comment, which is a confusing experience.
+
+- **Option 2 (Correct, following the pattern):** Decouple the data. Create a new, top-level `Y.Map`.
+
+ In `/src/yjs/schema/core/keys.ts`, add a new key:
+ ```typescript
+ export const NB_CELL_COMMENTS = "cellComments"; // Y.Map<string, Y.Array<Y.Text>>
+ ```
+
+### Step 3: Create Accessor and Operation Functions
+
+Following the pattern of `outputs`, create helper functions.
+
+```typescript
+// In a new file, e.g., /src/yjs/schema/access/comments.ts
+
+import { NB_CELL_COMMENTS } from "../core/keys";
+import * as Y from "yjs";
+
+export const getCellComments = (nb: Y.Map<unknown>, cellId: string): Y.Array<Y.Text> | undefined => {
+  const commentsMap = nb.get(NB_CELL_COMMENTS) as Y.Map<Y.Array<Y.Text>> | undefined;
+  return commentsMap?.get(cellId);
+};
+
+export const ensureCellComments = (nb: Y.Map<unknown>, cellId: string): Y.Array<Y.Text> => {
+  let commentsMap = nb.get(NB_CELL_COMMENTS) as Y.Map<Y.Array<Y.Text>> | undefined;
+  if (!commentsMap) {
+    commentsMap = new Y.Map<Y.Array<Y.Text>>();
+    nb.set(NB_CELL_COMMENTS, commentsMap);
+  }
+  let comments = commentsMap.get(cellId);
+  if (!comments) {
+    comments = new Y.Array<Y.Text>();
+    commentsMap.set(cellId, comments);
+  }
+  return comments;
+};
+
+// In a new file, e.g., /src/yjs/schema/ops/comments.ts
+export const addComment = (nb: Y.Map<unknown>, cellId: string, text: string) => {
+ // Should this be undoable? Let's create a new origin for it.
+ const COMMENT_ORIGIN = Symbol("COMMENT_ACTION");
+
+ const apply = () => {
+ const comments = ensureCellComments(nb, cellId);
+ comments.push([new Y.Text(text)]);
+ };
+
+ withTransactOptional(nb, apply, COMMENT_ORIGIN);
+};
+```
+
+### Step 4: Integrate with Undo (Optional)
+If you want comments to have their *own* undo history, you could even create a *separate* `UndoManager` that *only* tracks `COMMENT_ORIGIN` and the `NB_CELL_COMMENTS` map. This gives you granular control over the user experience.
+
+By following the established patterns of **decoupling data by concern**, creating **accessor/operation functions**, and using the **origin system**, you can extend the schema to support new features while maintaining its robustness and clarity.
+
+
+# Y.js Notebook Backend: Developer Guide
+
+> This section focuses on how the backend (Go / Fiber / Postgres) collaborates with the frontend’s Y.Doc, including snapshot persistence, replaying pending updates, and the rationale behind custom message types 100 / 101.
+
+## 1. Backend Responsibilities and Boundaries
+
+In collaborative notebook scenarios, the backend’s responsibilities are intentionally limited to:
+
+- Acting as a **WebSocket relay**: Broadcasting standard `y-websocket` Sync/Awareness/Auth messages to rooms, without understanding internal CRDT semantics.
+- Serving as a **snapshot store**: Periodically persisting the complete Y.Doc state (`encodeStateAsUpdate`), and providing a stable starting point for new clients.
+- Buffering and replaying **pending updates**: Storing incremental updates not yet covered by a snapshot, so new clients can quickly catch up via “snapshot + delta.”
+- Managing **snapshot upload scheduling**, but never proactively modifying document content (all CRDT logic remains in the Y.js client).
+
+In other words, the backend is not the “brain” that understands CRDTs, but rather:
+
+- For WebSocket: simply broadcasts by topic.
+- For HTTP: only persists snapshots and a small amount of protocol-aligned metadata (state vector).
+- For memory: manages each doc’s pending updates, session list, and snapshot scheduling state.
+
+## 2. WebSocket Message Layer: Standard + Custom Signals
+
+### 2.1 Standard Yjs / y-websocket Messages
+
+The backend follows the `y-websocket` protocol, where the outermost **messageType (varUint)** uses:
+
+- `0` → `messageSync`: SyncStep1 / SyncStep2 / Update (three subtypes).
+- `1` → `messageAwareness`: Awareness updates.
+- `2` → `messageAuth`: Auth protocol.
+- `3` → `messageQueryAwareness`: Request for awareness from peers.
+
+Handling is simple:
+
+- Sync: Broadcast to other WebSocket sessions in the same doc room.
+- Awareness / QueryAwareness / Auth: Also broadcast by topic, without modifying payload.
+
+### 2.2 Custom Message Types: `100` & `101`
+
+Beyond the standard messages, we introduce two **strictly notebook-collaboration-specific** custom messages:
+
+- `100` → `YwsMessageSnapshotRequest`: Server → client, requests the client to upload an HTTP snapshot.
+- `101` → `YwsMessageUpdateMeta`: Client → server, attaches `clientID` / `clock` metadata to the most recent Yjs update.
+
+These are needed to solve two core problems without breaking `y-websocket` protocol or implementing a full Yjs update decoder on the backend:
+
+1. **When should a snapshot be uploaded, and by whom?**
+ - The server needs to persist documents periodically, but can’t grab a full snapshot on every small edit (too costly).
+ - The server can’t “push HTTP”; it can only notify a client via WebSocket to initiate an HTTP PUT.
+2. **How to safely trim pending updates without losing data?**
+ - A snapshot is a “point-in-time” full doc state; the server also keeps pending updates that arrived after the snapshot.
+ - If you only use timestamps (`savedAt`) to delete “older” pending updates, you risk a classic race: **some updates may not be included in the snapshot, but have a server time earlier than `savedAt`, and get wrongly deleted**.
+ - Y.js provides a more rigorous **State Vector (clientID → clock mapping)**. If each pending update also has `(clientID, clock)`, the server can use `stateVector[clientID] >= clock` to precisely determine if an update is covered by the snapshot.
+
+Custom messages 100 / 101 are designed to address these issues.
+
+## 3. Custom Message `100`: Snapshot Request Scheduling
+
+### 3.1 Why `YwsMessageSnapshotRequest (100)`?
+
+The “naive” approach might be:
+
+- Clients “occasionally” decide to upload snapshots, or users manually click a “save” button.
+- The server passively accepts HTTP snapshots, never actively scheduling them.
+
+This has clear drawbacks:
+
+- The server can’t control snapshot frequency or timing, making resource planning difficult.
+- No guarantee of periodic persistence for “long-idle” documents (e.g., regular backups).
+- In multi-client collaboration, **it’s unclear who uploads the snapshot**, leading to:
+ - Either every client uploads (wasting bandwidth).
+ - Or only the “original creator” uploads (if they go offline, snapshots stop entirely).
+
+So we switched to a **server-driven snapshot, client-executed** model:
+
+- The server maintains a simple state machine per doc (`collabDocState`):
+ - `sessions`: current online WebSocket sessions.
+ - `lastUpdate`: time of last Yjs update.
+ - `hasUnsavedChanges`: whether there are unsaved changes.
+ - `pendingUpdates`: raw Yjs updates not yet covered by a snapshot (with meta).
+ - `awaitingSnapshot` / `lastRequest`: whether a snapshot request has been sent, and when.
+- A ticker runs `enqueueSnapshotRequests()` every `5s`:
+ - If `hasUnsavedChanges=false`, skip.
+ - If no online `sessions`, skip and wait for next client.
+ - If `now - lastUpdate < 30s`, still editing frequently, skip (debounce).
+ - Otherwise, pick a session from `sessions`, send `YwsMessageSnapshotRequest(100, docID)`, and update:
+ - `awaitingSnapshot=true`
+ - `lastRequest=now`
+ - No second 100 for the same doc within the `requestBackoff` window (default 15s).
+
+### 3.2 What Does This Achieve?
+
+- **Doc-level throttling**: No matter how many clients collaborate, a doc triggers at most one snapshot upload after “30s idle.”
+- **Centralized responsibility**: Snapshot frequency and timing are fully server-driven; clients just execute as needed.
+- **Controlled network cost**:
+ - 100 is a tiny WebSocket message (a few varUint + docID).
+ - Snapshot uploads only happen for docs with unsaved changes and after cooldown.
+
+In multi-user scenarios, this design balances:
+
+- No extra editing latency.
+- Avoids redundant snapshot uploads by every client.
+- Ops can tune backend parameters (`snapshotDebounceInterval`, `snapshotCheckInterval`, `snapshotRequestRetryWindow`) to control persistence cost.
+
+## 4. Custom Message `101`: Update Metadata & State Vector Trimming
+
+### 4.1 Problem: Timestamps Alone Can’t Safely Trim Pending Updates
+
+Originally, the server only recorded:
+
+- Each pending update’s arrival time `update.at`.
+- The most recent snapshot’s save time `savedAt`.
+
+On snapshot save:
+
+```go
+for _, update := range state.pendingUpdates {
+ if update.at.After(savedAt) {
+ keep = append(keep, update)
+ }
+}
+```
+
+This is unsafe in CRDT terms:
+
+- The snapshot’s client-side generation time `T_gen` is always before the server’s database write time `savedAt`.
+- If other clients’ updates arrive at the server between `(T_gen, savedAt)`, their `update.at < savedAt`, but **aren’t included in the snapshot**.
+- Comparing only times, these updates are wrongly considered “covered by the snapshot” and deleted from `pendingUpdates`—classic data loss race.
+
+Y.js provides a better tool: **State Vector**.
+
+- State Vector is a map: `clientID → clock`.
+- It means “this document contains N operations from each client.”
+- If:
+ - The snapshot has a state vector `SV_snapshot`.
+ - Each pending update has `(clientID, clock_end)`.
+ - Then `SV_snapshot[clientID] >= clock_end` means “this update is covered by the snapshot.”
+
+The key: **The server originally didn’t know each pending update’s `clientID` and clock range**, unless it implemented a full Yjs update decoder (StructStore / DeleteSet)—complex and costly.
+
+### 4.2 Solution: Frontend Adds Meta, Backend Stays Blind
+
+We deliberately avoid re-implementing Yjs update parsing in Go, and instead:
+
+- Keep the backend “blind” to Yjs internals: just store/forward raw update payloads.
+- Have the frontend, when sending each Yjs update, send a tiny custom message with `(clientID, clock)` as a **side-channel** to the backend.
+- The backend matches this meta to the just-enqueued pending update, storing it in the `docUpdate` struct.
+- On snapshot save, use the HTTP state vector and these meta to trim precisely.
+
+This is the role of `YwsMessageUpdateMeta (101)`.
+
+### 4.3 101: Semantics and Lifecycle
+
+- **Message format (WS payload):**
+
+ ```text
+ [ messageType = 101 : varUint,
+ clientID : varUint,
+ clock : varUint ]
+ ```
+
+- **Send timing (frontend):**
+ - Whenever the local Y.Doc produces an update (`doc.on("update")`), the frontend uses `Y.encodeStateVector(doc)` / `Y.decodeStateVector` to compute the current client’s clock.
+ - `{ clientID: doc.clientID, clock }` is queued locally.
+ - Before sending the actual Yjs update (via `messageSync` + `SyncStep2` + inner update) over WebSocket, intercept `ws.send`:
+ - If sending a Yjs update and the `pending` meta queue is non-empty, first `send(encodeMeta(meta))`, then the Yjs update.
+ - From the server’s perspective, every Yjs update is preceded by a matching meta.
+
+- **Receive and match (backend):**
+ - In `CollaborativeService.Handle`, case `YwsMessageUpdateMeta`:
+
+ ```go
+ clientID, _ := lib0.ReadVarUintFrom(r)
+ clock, _ := lib0.ReadVarUintFrom(r)
+ s.recordUpdateMeta(docID, clientID, clock)
+ ```
+
+ - `recordUpdateMeta` logic:
+ - If there’s a pending update without meta, attach this meta to the first such update.
+ - Otherwise, queue the meta for the next update.
+
+ - In `recordDocUpdate` when enqueuing a Yjs update:
+
+ ```go
+ state.pendingUpdates = append(state.pendingUpdates, docUpdate{at: now, payload: cloneBytes(payload)})
+ if len(state.pendingMeta) > 0 {
+ meta := state.pendingMeta[0]
+ state.pendingMeta = state.pendingMeta[1:]
+ last := &state.pendingUpdates[len(state.pendingUpdates)-1]
+ last.clientID = meta.clientID
+ last.clock = meta.clock
+ last.hasMeta = true
+ }
+ ```
+
+ - Result: As long as meta and update are sent in order, every pending update gets its `(clientID, clock)`.
+
+### 4.4 Snapshot Save: Trimming Logic
+
+HTTP snapshot upload (simplified):
+
+- Client uploads:
+
+ ```ts
+ const stateVector = Y.encodeStateVector(doc) as Uint8Array
+ const snapshotVector = toBase64(stateVector)
+ const update = Y.encodeStateAsUpdate(doc) as Uint8Array
+ await DefaultService.uploadCollabNotebookSnapshot(notebookId, blob, {
+ headers: { "X-Yjs-State-Vector": snapshotVector },
+ })
+ ```
+
+- Backend controller parses header:
+
+ ```go
+ var stateVector map[uint64]uint64
+ if header := c.Get("X-Yjs-State-Vector"); header != "" {
+ raw, _ := base64.StdEncoding.DecodeString(header)
+ stateVector, _ = service.DecodeYjsStateVector(raw)
+ }
+ svc.UpsertCollabDocSnapshot(ctx, notebookID, payload, stateVector)
+ ```
+
+- Final trimming in `CollaborativeService.RecordSnapshotSaved`:
+
+ ```go
+ filtered := state.pendingUpdates[:0]
+ for _, update := range state.pendingUpdates {
+ snapshotClock, ok := stateVector[update.clientID]
+ if ok && update.hasMeta && snapshotClock >= update.clock {
+ // Fully covered by snapshot, safe to discard
+ continue
+ }
+ filtered = append(filtered, update)
+ }
+ state.pendingUpdates = nil
+ if len(filtered) > 0 {
+ state.pendingUpdates = filtered
+ }
+ state.hasUnsavedChanges = len(state.pendingUpdates) > 0
+ ```
+
+So:
+
+- **If meta is correctly matched**, any update covered by the snapshot’s state vector is discarded (avoiding replay).
+- Any update arriving after the snapshot (from any client) stays in pending until the next snapshot.
+- Trimming logic **no longer relies on timestamps**, but on Y.js state vector semantics.
+
+## 5. Design Philosophy and Trade-offs
+
+### 5.1 Keep Backend “Translucent,” Don’t Re-implement Yjs Internals
+
+We deliberately avoid:
+
+- Parsing Yjs update’s StructStore / DeleteSet internals.
+- Replaying CRDT operations on the server.
+- Modifying document content server-side.
+
+Why?
+
+- Y.js internals are highly optimized and evolving; duplicating them is error-prone and hinders upgrades.
+- The console’s main goals are maintainability, observability, and extensibility—not building a server-side CRDT engine.
+- With thin custom protocols (100 / 101), we achieve:
+ - Controllable snapshot scheduling.
+ - Race-free data trimming.
+ - Decoupling from frontend CRDT implementation.
+
+### 5.2 Accept Moderate Protocol Extensions for Clear Responsibility
+
+Adding YwsMessageSnapshotRequest(100) / YwsMessageUpdateMeta(101) means:
+
+- Two non-standard types in the WebSocket message table.
+- Debug tools and protocol analyzers must recognize these types (`debug-bus.ts` supports this).
+
+But the benefits are:
+
+- Backend fully controls snapshot strategy without interfering with normal editing.
+- State Vector enables precise trimming of pending updates, no need for complex Yjs decoding.
+- Multi-user, concurrent editing and snapshot scenarios guarantee “broadcast, never lose,” with bandwidth under control and strong consistency.
+
+### 5.3 Advice for Future Maintainers
+
+If you need to extend this protocol (e.g., for stats, profiling, migration tools), follow these principles:
+
+- **Prefer HTTP/REST for batch/offline operations**; WebSocket is for real-time, lightweight signals.
+- **Be cautious adding new custom message types**; each should have clear boundaries (who sends, who receives, impact on document invariants).
+- **Don’t try to “understand” Yjs update payloads server-side** unless you’re willing to fully track Y.js internals.
+- Whenever you need to distinguish “covered by snapshot” vs. “not yet covered,” use State Vector, not timestamps or request order.
+
+By following this philosophy, the backend can evolve snapshot strategies, storage, and monitoring without compromising CRDT correctness.
diff --git a/web/modules/notebook/collab/yjs/schema/access/accessors.ts b/web/modules/notebook/collab/yjs/schema/access/accessors.ts
new file mode 100644
index 00000000..8b852783
--- /dev/null
+++ b/web/modules/notebook/collab/yjs/schema/access/accessors.ts
@@ -0,0 +1,30 @@
+import * as Y from "yjs";
+import { NB_CELL_MAP, NB_CELL_ORDER } from "../core/keys";
+import type { YNotebook, YCell } from "../core/types";
+
+export const getCellMap = (nb: YNotebook): Y.Map<YCell> => {
+  let m = nb.get(NB_CELL_MAP) as Y.Map<YCell> | undefined;
+  if (!m) {
+    m = new Y.Map<YCell>();
+    nb.set(NB_CELL_MAP, m);
+  }
+  return m;
+};
+
+export const getOrder = (nb: YNotebook): Y.Array<string> => {
+  let a = nb.get(NB_CELL_ORDER) as Y.Array<string> | undefined;
+  if (!a) {
+    a = new Y.Array<string>();
+    nb.set(NB_CELL_ORDER, a);
+  }
+  return a;
+};
+
+export const getCell = (nb: YNotebook, id: string): YCell | undefined => getCellMap(nb).get(id);
+
+export const listCells = (nb: YNotebook): YCell[] => {
+ const order = getOrder(nb).toArray();
+ const map = getCellMap(nb);
+ return order.map((id) => map.get(id)).filter((x): x is YCell => !!x);
+};
+
diff --git a/web/modules/notebook/collab/yjs/schema/access/cells.ts b/web/modules/notebook/collab/yjs/schema/access/cells.ts
new file mode 100644
index 00000000..2cb74c8b
--- /dev/null
+++ b/web/modules/notebook/collab/yjs/schema/access/cells.ts
@@ -0,0 +1,67 @@
+import * as Y from "yjs";
+import { ulid } from "ulid";
+import {
+ CELL_EXEC_BY,
+ CELL_FINGERPRINT,
+ CELL_ID,
+ CELL_KIND,
+ CELL_META,
+ CELL_SOURCE,
+} from "../core/keys";
+import { type CellKind, type CellModel, DEFAULT_CELL_METADATA, type YCell } from "../core/types";
+import { CELL_ID_GUARD_ORIGIN } from "../core/origins";
+import { withTransactOptional } from "../core/transaction";
+
+const CELL_ID_REGISTRY: WeakMap<YCell, string> = new WeakMap();
+
+/** 保护 Cell id 不被后续变更
+ * (CRDT 合并后仍保持稳定主键)
+ * 但是在理论上,Cell ID 不应该被修改,因为它是用户不可见的主键,在 UI 上没有编辑入口
+ */
+export const lockCellId = (cell: YCell) => {
+ if (CELL_ID_REGISTRY.has(cell)) return;
+ const id = cell.get(CELL_ID);
+ if (typeof id !== "string" || id.length === 0)
+ throw new Error("Cell id must be a non-empty string");
+ CELL_ID_REGISTRY.set(cell, id);
+ cell.observe((event) => {
+ if (event.transaction?.origin === CELL_ID_GUARD_ORIGIN) return;
+ if (!event.keysChanged.has(CELL_ID)) return;
+ const locked = CELL_ID_REGISTRY.get(cell);
+ if (!locked) return;
+ const current = cell.get(CELL_ID);
+ if (current === locked) return;
+ const reset = () => cell.set(CELL_ID, locked);
+ withTransactOptional(cell, reset, CELL_ID_GUARD_ORIGIN);
+ });
+};
+
+export const createCell = (init: Partial<CellModel> & { kind: CellKind }): YCell => {
+ if (!init?.kind) throw new Error("Cell kind required");
+ const c = new Y.Map();
+ c.set(CELL_ID, init.id ?? ulid());
+ c.set(CELL_KIND, init.kind);
+
+ const text = new Y.Text();
+ text.insert(0, init?.source ?? "");
+ c.set(CELL_SOURCE, text);
+
+ const m = new Y.Map();
+ const md = init?.metadata;
+ if (
+ md &&
+ md.backgroundDDL !== undefined &&
+ md.backgroundDDL !== DEFAULT_CELL_METADATA.backgroundDDL
+ ) {
+ m.set("backgroundDDL", md.backgroundDDL);
+ }
+ c.set(CELL_META, m);
+
+ if (init.fingerprint) c.set(CELL_FINGERPRINT, init.fingerprint);
+ if (init.executedBy) c.set(CELL_EXEC_BY, init.executedBy);
+
+ // The caller is responsible for invoking lockCellId once the cell
+ // is attached to a Y.Doc (e.g. insertCell handles this). Locking while the
+ // map is detached triggers Yjs "Add type to doc" errors, so we defer it.
+ return c;
+};
diff --git a/web/modules/notebook/collab/yjs/schema/access/conversion.ts b/web/modules/notebook/collab/yjs/schema/access/conversion.ts
new file mode 100644
index 00000000..f84152d3
--- /dev/null
+++ b/web/modules/notebook/collab/yjs/schema/access/conversion.ts
@@ -0,0 +1,143 @@
+import * as Y from "yjs";
+import {
+ CELL_EXEC_BY,
+ CELL_FINGERPRINT,
+ CELL_ID,
+ CELL_KIND,
+ CELL_META,
+ CELL_SOURCE,
+ NB_DATABASE_ID,
+ NB_ID,
+ NB_METADATA,
+ NB_TAGS,
+ NB_TITLE,
+ NB_CELL_ORDER,
+ NB_TOMBSTONES,
+ NB_OUTPUTS,
+} from "../core/keys";
+import {
+ type CellKind,
+ type CellMetadataModel,
+ type CellModel,
+ DEFAULT_CELL_METADATA,
+ type NotebookMetadataModel,
+ type NotebookModel,
+ type YCell,
+ type YNotebook,
+ type YOutputsMap,
+} from "../core/types";
+import { getCellMap } from "./accessors";
+import { CellOutput } from "../types";
+import { QueryResponse } from "@/api-gen";
+
+export const yCellToModel = (c: YCell): CellModel => {
+ const src = (c.get(CELL_SOURCE) as Y.Text | undefined)?.toString() ?? "";
+  const mdY = c.get(CELL_META) as Y.Map<unknown> | undefined;
+ const metadata: CellMetadataModel = {
+ backgroundDDL: mdY?.get("backgroundDDL") ?? DEFAULT_CELL_METADATA.backgroundDDL,
+ };
+ const rawId = c.get(CELL_ID);
+ const id = typeof rawId === "string" ? rawId : String(rawId ?? "");
+ if (typeof rawId !== "string") {
+ console.warn(`Cell id is not a string, got ${String(rawId)}`);
+ }
+ const rawKind = c.get(CELL_KIND);
+ const kind = (typeof rawKind === "string" ? rawKind : "raw") as CellKind;
+ if (typeof rawKind !== "string") {
+ console.warn(`Cell kind is not a string for id ${id}`);
+ }
+
+ return {
+ id,
+ kind,
+ source: src,
+ metadata,
+ fingerprint: c.get(CELL_FINGERPRINT) ?? undefined,
+ executedBy: c.get(CELL_EXEC_BY) ?? undefined,
+ };
+};
+
+export const yNotebookToModel = (nb: YNotebook): NotebookModel => {
+  const tags = (nb.get(NB_TAGS) as Y.Array<string> | undefined)?.toArray() ?? [];
+  const metaY = nb.get(NB_METADATA) as Y.Map<unknown> | undefined;
+ const metadata: NotebookMetadataModel = {
+ appVersion: metaY?.get("appVersion") ?? undefined,
+ };
+ const rawId = nb.get(NB_ID);
+ const id = typeof rawId === "string" ? rawId : String(rawId ?? "");
+ const rawTitle = nb.get(NB_TITLE);
+ const title = typeof rawTitle === "string" ? rawTitle : "Untitled Notebook";
+ const rawDbId = nb.get(NB_DATABASE_ID);
+ const databaseId = typeof rawDbId === "string" ? rawDbId : null;
+  const order = (nb.get(NB_CELL_ORDER) as Y.Array<string> | undefined)?.toArray() ?? [];
+  const cells = getCellMap(nb);
+  const tomb = nb.get(NB_TOMBSTONES) as Y.Map<boolean> | undefined;
+  const tombstones: Record<string, boolean> = {};
+ tomb?.forEach((v, k) => {
+ if (v) tombstones[k] = true;
+ });
+
+ return {
+ id,
+ title,
+ databaseId,
+ tags,
+ metadata,
+ order,
+ cells,
+ tombstones,
+ };
+};
+
+type OutputModel = Omit<CellOutput, "runId">; // NOTE(review): generic args were garbled in transit — confirm which keys are omitted
+
+/** 将 YNotebook 中的 outputs 区转换为可序列化 JSON 对象 */
+export const yOutputsToModel = (nb: YNotebook): Record<string, OutputModel> => {
+ const outputs = nb.get(NB_OUTPUTS) as YOutputsMap;
+ if (!outputs) return {};
+
+  const result: Record<string, OutputModel> = {};
+
+ outputs.forEach((entry, id) => {
+ if (!(entry instanceof Y.Map)) return; // skip invalid
+ if (typeof id !== "string" || id.length === 0) return;
+
+ const out: OutputModel = {
+ running: false,
+ stale: false,
+ };
+ const running = entry.get("running");
+ const stale = entry.get("stale");
+ const startedAt = entry.get("startedAt");
+ const completedAt = entry.get("completedAt");
+ const qres = entry.get("result") as QueryResponse | undefined;
+
+ if (typeof running === "boolean") out.running = running;
+ if (typeof stale === "boolean") out.stale = stale;
+ if (typeof startedAt === "string") out.startedAt = startedAt;
+ if (typeof completedAt === "string") out.completedAt = completedAt;
+ if (typeof entry.get("executedBy") === "number") {
+ out.executedBy = entry.get("executedBy") as number;
+ }
+
+ // result 对象结构容忍性转换
+ if (
+ qres &&
+ typeof qres === "object" &&
+ Array.isArray(qres.columns) &&
+ Array.isArray(qres.rows) &&
+ typeof qres.rowsAffected === "number"
+ ) {
+ out.result = {
+ columns: qres.columns,
+ rows: qres.rows,
+ rowsAffected: qres.rowsAffected,
+ ...(typeof qres.error === "string" ? { error: qres.error } : {}),
+ };
+ }
+
+ result[id] = out;
+ });
+
+ return result;
+};
diff --git a/web/modules/notebook/collab/yjs/schema/access/outputs.ts b/web/modules/notebook/collab/yjs/schema/access/outputs.ts
new file mode 100644
index 00000000..3d946a7b
--- /dev/null
+++ b/web/modules/notebook/collab/yjs/schema/access/outputs.ts
@@ -0,0 +1,37 @@
+import * as Y from "yjs";
+import { NB_OUTPUTS } from "../core/keys";
+import type { YOutputsMap, YOutputEntry } from "../core/types";
+
+/** 顶层 Outputs 容器:Y.Map */
+export const getOutputsMap = (nb: Y.Map<unknown>): YOutputsMap => {
+ let m = nb.get(NB_OUTPUTS) as YOutputsMap | undefined;
+ if (!m) {
+ m = new Y.Map();
+ nb.set(NB_OUTPUTS, m);
+ }
+ return m;
+};
+
+/** 获取某个 cell 的输出记录(不存在则返回 undefined,不创建) */
+export const getOutputEntry = (nb: Y.Map<unknown>, cellId: string): YOutputEntry | undefined => {
+ if (!cellId || typeof cellId !== "string") {
+ throw new Error(`Invalid cellId: ${cellId}`);
+ }
+ const m = nb.get(NB_OUTPUTS) as YOutputsMap | undefined;
+ if (!m) return undefined;
+ return m.get(cellId);
+};
+
+/** 确保某个 cell 的输出记录存在(必要时创建空骨架) */
+export const ensureOutputEntry = (nb: Y.Map<unknown>, cellId: string): YOutputEntry => {
+ const m = getOutputsMap(nb);
+ let e = m.get(cellId);
+ if (!(e instanceof Y.Map)) {
+ e = new Y.Map();
+ // 初始骨架:running/stale 显式化,其他字段按需补充
+ e.set("running", false);
+ e.set("stale", false);
+ m.set(cellId, e);
+ }
+ return e;
+};
diff --git a/web/modules/notebook/collab/yjs/schema/access/root.ts b/web/modules/notebook/collab/yjs/schema/access/root.ts
new file mode 100644
index 00000000..5ac02024
--- /dev/null
+++ b/web/modules/notebook/collab/yjs/schema/access/root.ts
@@ -0,0 +1,20 @@
+import * as Y from "yjs";
+import { ROOT_NOTEBOOK_KEY, SCHEMA_META_KEY } from "../core/keys";
+import type { NotebookRoot, YNotebook } from "../core/types";
+
+export const getNotebookRoot = (doc: Y.Doc): YNotebook => doc.getMap(ROOT_NOTEBOOK_KEY);
+
+export const getSchemaMeta = (nb: YNotebook): Y.Map<unknown> => {
+  let schemaMeta = nb.get(SCHEMA_META_KEY) as Y.Map<unknown> | undefined;
+ if (!schemaMeta) {
+ schemaMeta = new Y.Map();
+ nb.set(SCHEMA_META_KEY, schemaMeta);
+ }
+ return schemaMeta;
+};
+
+export const getNotebookRootAndMeta = (doc: Y.Doc): NotebookRoot => {
+ const root = getNotebookRoot(doc);
+ const schemaMeta = getSchemaMeta(root);
+ return { root, schemaMeta };
+};
diff --git a/web/modules/notebook/collab/yjs/schema/access/tombstone.ts b/web/modules/notebook/collab/yjs/schema/access/tombstone.ts
new file mode 100644
index 00000000..c120500c
--- /dev/null
+++ b/web/modules/notebook/collab/yjs/schema/access/tombstone.ts
@@ -0,0 +1,53 @@
+import * as Y from "yjs";
+import { NB_TOMBSTONES, NB_TOMBSTONE_META } from "../core/keys";
+
+export type TombstoneClock = "trusted" | "local";
+export interface TombstoneMeta {
+ deletedAt?: number;
+ reason?: string;
+ clock?: TombstoneClock;
+}
+export type TombstoneMetaEntry = Y.Map<unknown>;
+export type TombstoneMetaMap = Y.Map<TombstoneMetaEntry>;
+
+export const isValidTombstoneClock = (v: unknown): v is TombstoneClock => v === "trusted" || v === "local";
+
+export const tombstonesMap = (nb: Y.Map<unknown>): Y.Map<boolean> => {
+  let t = nb.get(NB_TOMBSTONES) as Y.Map<boolean> | undefined;
+  if (!t) {
+    t = new Y.Map<boolean>();
+    nb.set(NB_TOMBSTONES, t);
+  }
+  return t;
+};
+
+export const tombstoneMetaMap = (nb: Y.Map<unknown>): TombstoneMetaMap => {
+ let m = nb.get(NB_TOMBSTONE_META) as TombstoneMetaMap | undefined;
+ if (!m) {
+ m = new Y.Map();
+ nb.set(NB_TOMBSTONE_META, m);
+ }
+ return m;
+};
+
+export const ensureTombstoneMetaEntry = (tm: TombstoneMetaMap, id: string): TombstoneMetaEntry => {
+ let e = tm.get(id);
+ if (!(e instanceof Y.Map)) {
+ e = new Y.Map();
+ tm.set(id, e);
+ }
+ return e;
+};
+
+export const readTombstoneMetaEntry = (entry: TombstoneMetaEntry | undefined): TombstoneMeta => {
+ if (!(entry instanceof Y.Map)) return {};
+ const snapshot: TombstoneMeta = {};
+ const deletedAt = entry.get("deletedAt");
+ if (deletedAt !== undefined) snapshot.deletedAt = deletedAt as number;
+ const reason = entry.get("reason");
+ if (reason !== undefined) snapshot.reason = reason as string;
+ const clock = entry.get("clock");
+ if (clock !== undefined) snapshot.clock = clock as TombstoneClock;
+ return snapshot;
+};
+
diff --git a/web/modules/notebook/collab/yjs/schema/bootstrap.ts b/web/modules/notebook/collab/yjs/schema/bootstrap.ts
new file mode 100644
index 00000000..ec9f198f
--- /dev/null
+++ b/web/modules/notebook/collab/yjs/schema/bootstrap.ts
@@ -0,0 +1,90 @@
+import * as Y from "yjs";
+import { ulid } from "ulid";
+import {
+ NB_DATABASE_ID,
+ NB_ID,
+ NB_METADATA,
+ NB_TAGS,
+ NB_TITLE,
+ NB_CELL_MAP,
+ NB_CELL_ORDER,
+ NB_TOMBSTONE_META,
+ NB_TOMBSTONES,
+} from "./core/keys";
+import type { NotebookModel, YNotebook } from "./core/types";
+import { getCellMap, getOrder } from "./access/accessors";
+import { MAINT_ORIGIN } from "./core/origins";
+import { getNotebookRoot } from "./access/root";
+
+// Creation & Initialization
+export const initNotebookInDoc = (doc: Y.Doc, init?: Partial<NotebookModel>): YNotebook => {
+ const root = getNotebookRoot(doc);
+
+ doc.transact(() => {
+ // Identifier fields
+ if (!root.has(NB_ID)) {
+ root.set(NB_ID, init?.id ?? ulid());
+ }
+ if (!root.has(NB_TITLE) || (root.get(NB_TITLE) as string).length === 0) root.set(NB_TITLE, init?.title ?? "Untitled Notebook");
+ if (!root.has(NB_DATABASE_ID)) root.set(NB_DATABASE_ID, init?.databaseId ?? "");
+
+ // tags
+ if (!root.has(NB_TAGS)) root.set(NB_TAGS, new Y.Array());
+    const tags = root.get(NB_TAGS) as Y.Array<string>;
+ if (init?.tags?.length) {
+ const exist = new Set(tags.toArray());
+ const add: string[] = [];
+ for (const tag of init.tags) {
+ if (exist.has(tag)) continue;
+ exist.add(tag);
+ add.push(tag);
+ }
+ if (add.length) tags.push(add);
+ }
+
+ // metadata
+ if (!root.has(NB_METADATA)) root.set(NB_METADATA, new Y.Map());
+    const meta = root.get(NB_METADATA) as Y.Map<unknown>;
+ if (init?.metadata) {
+ for (const [k, v] of Object.entries(init.metadata)) {
+ if (v === undefined || meta.has(k)) continue;
+ meta.set(k, v as any);
+ }
+ }
+
+ // cell structures (Map + Order)
+ if (!root.has(NB_CELL_MAP)) root.set(NB_CELL_MAP, new Y.Map());
+ if (!root.has(NB_CELL_ORDER)) root.set(NB_CELL_ORDER, new Y.Array());
+
+ // tombstones
+ if (!root.has(NB_TOMBSTONES)) root.set(NB_TOMBSTONES, new Y.Map());
+ if (!root.has(NB_TOMBSTONE_META)) root.set(NB_TOMBSTONE_META, new Y.Map());
+
+ // outputs
+ // Responsibility boundary adjustment: do not force creation of NB_OUTPUTS during bootstrap.
+ // - For "new documents": it is recommended to run migration (migrateNotebookSchema) immediately after bootstrapping, migration ensures complete structure;
+ // - For "runtime access": use lazy creation via getOutputsMap/ensureOutputEntry in access/outputs to avoid unnecessary writes.
+
+ // optional seed for order
+ if (init?.order?.length) {
+ getCellMap(root); // Ensure NB_CELL_MAP exists
+ const order = getOrder(root);
+ const existing = new Set(order.toArray());
+ const append: string[] = [];
+ for (const id of init.order) {
+ if (existing.has(id)) continue;
+ existing.add(id);
+ append.push(id);
+ }
+ if (append.length) order.push(append);
+ }
+ }, MAINT_ORIGIN);
+
+ return root as any;
+};
+
+/** 最小化引导:不做版本迁移;仅建立结构并返回 root */
+export const bootstrapDoc = (doc: Y.Doc, init?: Partial<NotebookModel>) => {
+ const root = initNotebookInDoc(doc, init);
+ return root;
+};
diff --git a/web/modules/notebook/collab/yjs/schema/core/keys.ts b/web/modules/notebook/collab/yjs/schema/core/keys.ts
new file mode 100644
index 00000000..0a146623
--- /dev/null
+++ b/web/modules/notebook/collab/yjs/schema/core/keys.ts
@@ -0,0 +1,46 @@
+// Root keys
+export const ROOT_NOTEBOOK_KEY = "rw-notebook-root"; // Y.Map
+export const SCHEMA_META_KEY = "schema-meta"; // Y.Map<{version:number, app?:string}>
+
+// Notebook scalar/meta keys
+export const NB_ID = "id";
+export const NB_TITLE = "title";
+export const NB_DATABASE_ID = "databaseId";
+export const NB_TAGS = "tags"; // Y.Array<string>
+export const NB_METADATA = "metadata"; // Y.Map
+
+// 与正文(NB_CELL_MAP)解耦,UndoManager 默认就不会追踪;
+// removeCell() 时容易一并清理(或由 vacuum 做延迟清理);
+// 打开 Notebook 时“一次性加载 outputs”只需要读一个 Map。
+export const NB_OUTPUTS = "outputs"; // Y.Map<cellId, Y.Map<CellOutput>>
+// outputs.get(cellId) -> Y.Map (值类型固定,但以 Y.Map 存,方便部分字段小改)
+// {
+// running: boolean, // 是否正在执行(协同广播)
+// stale: boolean, // 与 source 不匹配时置 true,执行成功后置 false
+// startedAt?: number, // 本次执行开始时间(本地或受信时钟)
+// completedAt?: number, // 结果完成写入时间
+// runId?: string, // 本次运行的标识(ULID),用于并发守门
+// executedBy?: number, // 启动本次执行的 awareness clientId
+// // 固定结构的查询结果,整块覆盖:
+// result?: QueryResponse, // { columns, rows, rowsAffected, error? }
+// }
+// Q: 如果把 output 放进 YCell 里呢?
+// A: UndoManager 的 scope 正在追踪 NB_CELL_MAP,虽然我们可以靠 EXECUTION_ORIGIN 规避
+// 但后续很容易被误加跟踪导致“撤销把输出也回滚”。
+// 同时,cell 变得臃肿,序列化/迁移/重构时更容易相互牵连。
+
+// Notebook cell storage (Map + Order)
+export const NB_CELL_MAP = "cellMap"; // Y.Map<cellId, YCell>
+export const NB_CELL_ORDER = "order"; // Y.Array<cellId>
+
+// Tombstone(软删除标记 + 元信息)
+export const NB_TOMBSTONES = "tombstones"; // Y.Map<cellId, boolean> (cellId -> true)
+export const NB_TOMBSTONE_META = "tombstoneMeta"; // Y.Map<cellId, Y.Map<TombstoneMeta>>
+
+// Cell keys
+export const CELL_ID = "id";
+export const CELL_KIND = "kind"; // 'sql' | 'markdown'
+export const CELL_SOURCE = "source"; // Y.Text
+export const CELL_META = "metadata"; // Y.Map (shallow only)
+export const CELL_FINGERPRINT = "fingerprint"; // string (hash) current code version hash (unused)
+export const CELL_EXEC_BY = "executedBy"; // userId (last runner) (unused)
diff --git a/web/modules/notebook/collab/yjs/schema/core/origins.ts b/web/modules/notebook/collab/yjs/schema/core/origins.ts
new file mode 100644
index 00000000..219dbedb
--- /dev/null
+++ b/web/modules/notebook/collab/yjs/schema/core/origins.ts
@@ -0,0 +1,5 @@
+export const USER_ACTION_ORIGIN = Symbol("USER_ACTION"); // User-triggered operation (reversible)
+export const VACUUM_ORIGIN = Symbol("VACUUM"); // Cleanup operation (irreversible)
+export const MAINT_ORIGIN = Symbol("MAINTENANCE"); // Maintenance/repair (irreversible)
+export const CELL_ID_GUARD_ORIGIN = Symbol("CELL_ID_GUARD"); // Internal protection (irreversible)
+export const EXECUTION_ORIGIN = Symbol("EXECUTION"); // Prevent rollback of Output or "running" state.
\ No newline at end of file
diff --git a/web/modules/notebook/collab/yjs/schema/core/time.ts b/web/modules/notebook/collab/yjs/schema/core/time.ts
new file mode 100644
index 00000000..08017346
--- /dev/null
+++ b/web/modules/notebook/collab/yjs/schema/core/time.ts
@@ -0,0 +1,15 @@
+export interface ClockSource {
+ now(): number;
+ trusted: boolean;
+}
+
+export const systemClock: ClockSource = {
+ now: () => Date.now(),
+ trusted: false,
+};
+
+/** Conservative lower bound: avoid confusion between monotonic clock and wall clock after restart */
+export const WALL_CLOCK_EPOCH_FLOOR_MS = Date.UTC(2001, 0, 1);
+
+export const DEFAULT_FUTURE_SKEW_MS = 5 * 60 * 1000;
+
diff --git a/web/modules/notebook/collab/yjs/schema/core/transaction.ts b/web/modules/notebook/collab/yjs/schema/core/transaction.ts
new file mode 100644
index 00000000..e0adcd14
--- /dev/null
+++ b/web/modules/notebook/collab/yjs/schema/core/transaction.ts
@@ -0,0 +1,14 @@
+import * as Y from "yjs";
+
+export const withTransactOptional = (
+ node: Y.AbstractType<any>,
+ fn: () => void,
+ origin?: any
+) => {
+ const doc = node.doc as Y.Doc | undefined;
+ if (doc) {
+ doc.transact(fn, origin);
+ } else {
+ fn();
+ }
+};
\ No newline at end of file
diff --git a/web/modules/notebook/collab/yjs/schema/core/types.ts b/web/modules/notebook/collab/yjs/schema/core/types.ts
new file mode 100644
index 00000000..7d7bcded
--- /dev/null
+++ b/web/modules/notebook/collab/yjs/schema/core/types.ts
@@ -0,0 +1,57 @@
+import * as Y from "yjs";
+import { CellOutput } from "../types";
+
+export type CellKind = "sql" | "markdown";
+
+export interface CellMetadataModel {
+ backgroundDDL?: boolean;
+}
+export const DEFAULT_CELL_METADATA: Readonly<CellMetadataModel> = Object.freeze({
+ backgroundDDL: false,
+});
+
+export interface CellModel {
+ id: string;
+ kind: CellKind;
+ source: string;
+ metadata: CellMetadataModel;
+ fingerprint?: string;
+ executedBy?: string;
+}
+
+export interface NotebookMetadataModel {
+ appVersion?: string;
+}
+
+export interface NotebookModel {
+ id: string;
+ title: string;
+ databaseId: string | null;
+ tags: string[];
+ metadata: NotebookMetadataModel;
+ order: string[]; // Ordered cellId list
+ cells: Y.Map<YCell>;
+ tombstones: Record<string, boolean>;
+}
+
+// Y Handles(Outputs)
+type CellOutputValue = CellOutput[keyof CellOutput];
+export type YOutputEntry = Y.Map<CellOutputValue>;
+export type YOutputsMap = Y.Map<YOutputEntry>;
+
+type YNotebookValue =
+| string // id, title, databaseId
+| unknown
+| Y.Array<string> // tags, order
+| Y.Map<YCell> // cellMap
+| Y.Map<boolean> // tombstones
+| Y.Map<unknown>; // metadata, schemaMeta
+
+// Y Handles (keep permissive any typing to align with Yjs flexibility)
+export type YNotebook = Y.Map<YNotebookValue>;
+export type YCell = Y.Map<any>;
+
+export interface NotebookRoot {
+ root: YNotebook;
+ schemaMeta: Y.Map<unknown>;
+}
diff --git a/web/modules/notebook/collab/yjs/schema/core/version.ts b/web/modules/notebook/collab/yjs/schema/core/version.ts
new file mode 100644
index 00000000..d5d1c98a
--- /dev/null
+++ b/web/modules/notebook/collab/yjs/schema/core/version.ts
@@ -0,0 +1,3 @@
+export const FIRST_SCHEMA_VERSION = 1_000_000 as const;
+export const SCHEMA_VERSION = 1_000_001 as const; // v1.000.001
+
diff --git a/web/modules/notebook/collab/yjs/schema/index.ts b/web/modules/notebook/collab/yjs/schema/index.ts
new file mode 100644
index 00000000..88e5dbbc
--- /dev/null
+++ b/web/modules/notebook/collab/yjs/schema/index.ts
@@ -0,0 +1,36 @@
+// Core
+export * from "./core/origins";
+export * from "./core/keys";
+export * from "./core/version";
+export * from "./core/time";
+export * from "./core/types";
+
+// Access
+export * from "./access/root";
+export * from "./access/accessors";
+export * from "./access/cells";
+export * from "./access/tombstone";
+export * from "./access/conversion";
+export * from "./access/outputs";
+
+// Operations
+export * from "./ops/mutations";
+export * from "./ops/soft_delete";
+export * from "./ops/tombstone_maint";
+export * from "./ops/execute";
+
+// Quality
+export * from "./quality/undo";
+export * from "./quality/validation";
+export * from "./quality/reconcile";
+export * from "./quality/auto_stale";
+
+// Bootstrap
+export * from "./bootstrap";
+
+// Migration framework
+export * from "./migrate/registry";
+export * from "./migrate/migrate";
+// import all built-in migrations to register them
+import "./migrate/v1_000_000_v1_000_001";
+
diff --git a/web/modules/notebook/collab/yjs/schema/migrate/migrate.ts b/web/modules/notebook/collab/yjs/schema/migrate/migrate.ts
new file mode 100644
index 00000000..cd5257e5
--- /dev/null
+++ b/web/modules/notebook/collab/yjs/schema/migrate/migrate.ts
@@ -0,0 +1,157 @@
+import * as Y from "yjs";
+import { MAINT_ORIGIN } from "../core/origins";
+import { FIRST_SCHEMA_VERSION, SCHEMA_VERSION } from "../core/version";
+import { getNotebookRoot, getSchemaMeta } from "../access/root";
+import { MIGRATION_REGISTRY } from "./registry";
+import { reconcileNotebook, type ReconcileOptions } from "../quality/reconcile";
+import { validateNotebook } from "../quality/validation";
+import { reconcileOutputs, type ReconcileOutputsOptions } from "../quality/reconcile_outputs";
+import { getCellMap } from "../access/accessors";
+import { CELL_ID } from "../core/keys";
+import { lockCellId } from "../access/cells";
+
+export const migrateNotebookSchema = (
+ doc: Y.Doc,
+ opts?: {
+ log?: (msg: string) => void;
+ /** Feature flag: automatically run reconcile once after migration completes (or when already up to date) */
+ autoReconcile?: boolean;
+ /** Fine-grained options forwarded to reconcileNotebook / reconcileOutputs */
+ reconcile?: {
+ notebook: ReconcileOptions,
+ outputs: ReconcileOutputsOptions
+ };
+ }
+): number => {
+ const log = opts?.log ?? (() => {});
+
+ const root = getNotebookRoot(doc);
+
+ doc.transact(() => {
+ const cellMap = getCellMap(root);
+ cellMap.forEach((cell, id) => {
+ if (cell instanceof Y.Map) {
+ const internalId = cell.get(CELL_ID);
+ if (typeof internalId !== "string" || internalId.length === 0) {
+ log(`[migrate-fixup] Repairing missing internal ID for cell with key "${id}".`);
+ cell.set(CELL_ID, id);
+ }
+ }
+ });
+ }, MAINT_ORIGIN);
+
+ const meta = getSchemaMeta(root);
+ const currentVersion =
+ typeof meta.get("version") === "number" ? (meta.get("version") as number) : FIRST_SCHEMA_VERSION;
+
+ if (currentVersion === SCHEMA_VERSION) {
+ log(`[migrate] Schema already up-to-date (v${SCHEMA_VERSION}).`);
+ if (opts?.autoReconcile) {
+ const report = reconcileNotebook(root, opts.reconcile?.notebook);
+ const outputsReport = reconcileOutputs(root, opts.reconcile?.outputs);
+ if (outputsReport.changed) {
+ console.info(
+ `[reconcileOutputs] cleaned ${outputsReport.patchStats.deletedCount} invalid entries (${outputsReport.removedOrphans.length} orphans, ${outputsReport.removedInvalid.length} invalid)`
+ );
+ }
+ if (report.changed) {
+ log(
+ `[migrate] Auto-reconcile applied: order ${report.previousOrderLength} → ${report.finalOrderLength}, appended ${report.appendedOrphans.length}, removed dup=${report.removedDuplicates.length}, missing=${report.removedMissingFromMap.length}, tomb=${report.removedTombstoned.length}, invalid=${report.removedInvalid.length}`
+ );
+ } else {
+ log(`[migrate] Auto-reconcile found no changes.`);
+ }
+ const issues = validateNotebook(root);
+ if (issues.length > 0) {
+ log(`[migrate] Validation after reconcile: ${issues.length} issues.`);
+ issues.forEach((i) => log(` [${i.level}] ${i.path}: ${i.message}`));
+ }
+ }
+ return currentVersion;
+ }
+
+ if (currentVersion > SCHEMA_VERSION) {
+ log(
+ `[migrate] Warning: document schema (v${currentVersion}) is newer than current runtime (v${SCHEMA_VERSION}).`
+ );
+ return currentVersion;
+ }
+
+ let workingVersion = currentVersion;
+ while (workingVersion < SCHEMA_VERSION) {
+ const migrator = MIGRATION_REGISTRY.get(workingVersion);
+ if (!migrator) {
+ log(`[migrate] No migration path from v${workingVersion} → v${SCHEMA_VERSION}.`);
+ break;
+ }
+
+ const targetVersion = workingVersion + 1;
+ log(`[migrate] Applying migration v${workingVersion} → v${targetVersion} ...`);
+
+ doc.transact(() => {
+ // Re-check the version to avoid re-running the migration body under concurrency
+ const liveVersion = typeof meta.get("version") === "number" ? (meta.get("version") as number) : FIRST_SCHEMA_VERSION;
+ if (liveVersion !== workingVersion) {
+ log(`[migrate] Skip step v${workingVersion} → v${targetVersion} due to concurrent advance to v${liveVersion}.`);
+ return;
+ }
+
+ migrator({
+ doc,
+ root,
+ fromVersion: workingVersion,
+ toVersion: targetVersion,
+ origin: MAINT_ORIGIN,
+ log,
+ });
+ meta.set("version", targetVersion);
+ }, MAINT_ORIGIN);
+
+ workingVersion = targetVersion;
+ }
+
+ if (workingVersion === SCHEMA_VERSION) {
+ log(`[migrate] Migration complete (v${SCHEMA_VERSION}).`);
+ } else {
+ log(`[migrate] Incomplete migration (stopped at v${workingVersion}).`);
+ }
+
+ if (opts?.autoReconcile) {
+ const report = reconcileNotebook(root, opts.reconcile?.notebook);
+ const outputsReport = reconcileOutputs(root, opts.reconcile?.outputs);
+ if (outputsReport.changed) {
+ console.info(
+ `[reconcileOutputs] cleaned ${outputsReport.patchStats.deletedCount} invalid entries (${outputsReport.removedOrphans.length} orphans, ${outputsReport.removedInvalid.length} invalid)`
+ );
+ }
+ if (report.changed) {
+ log(
+ `[migrate] Auto-reconcile applied: order ${report.previousOrderLength} → ${report.finalOrderLength}, appended ${report.appendedOrphans.length}, removed dup=${report.removedDuplicates.length}, missing=${report.removedMissingFromMap.length}, tomb=${report.removedTombstoned.length}, invalid=${report.removedInvalid.length}`
+ );
+ } else {
+ log(`[migrate] Auto-reconcile found no changes.`);
+ }
+ }
+
+ const issues = validateNotebook(root);
+ if (issues.length > 0) {
+ log(`[migrate] Validation after migration${opts?.autoReconcile ? "+reconcile" : ""}: ${issues.length} issues.`);
+ issues.forEach((i) => log(` [${i.level}] ${i.path}: ${i.message}`));
+ }
+
+ log(`[migrate] Activating notebook...`);
+ const cellMap = getCellMap(root);
+ cellMap.forEach((cell, id) => {
+ if (cell instanceof Y.Map) {
+ try {
+ lockCellId(cell);
+ } catch (e) {
+ log(`[migrate] Error locking ID for cell "${id}": ${(e as Error).message}`);
+ }
+ }
+ });
+ log(`[migrate] Notebook activated (ID locks only).`);
+
+ return workingVersion;
+};
+
diff --git a/web/modules/notebook/collab/yjs/schema/migrate/registry.ts b/web/modules/notebook/collab/yjs/schema/migrate/registry.ts
new file mode 100644
index 00000000..3b77043e
--- /dev/null
+++ b/web/modules/notebook/collab/yjs/schema/migrate/registry.ts
@@ -0,0 +1,26 @@
+import * as Y from "yjs";
+import { YNotebook } from "../core/types";
+
+export interface NotebookMigrationContext {
+ doc: Y.Doc;
+ root: YNotebook;
+ fromVersion: number;
+ toVersion: number;
+ origin: symbol;
+ log: (msg: string) => void;
+}
+
+/** Signature of a single migrator */
+export type NotebookMigration = (ctx: NotebookMigrationContext) => void;
+
+/** Global migration registry */
+export const MIGRATION_REGISTRY = new Map<number, NotebookMigration>();
+
+/** Register a migrator (vX -> vY) */
+export const registerNotebookMigration = (fromVersion: number, fn: NotebookMigration) => {
+ if (MIGRATION_REGISTRY.has(fromVersion)) {
+ throw new Error(`Migration from version ${fromVersion} already registered`);
+ }
+ MIGRATION_REGISTRY.set(fromVersion, fn);
+};
+
diff --git a/web/modules/notebook/collab/yjs/schema/migrate/v1_000_000_v1_000_001.ts b/web/modules/notebook/collab/yjs/schema/migrate/v1_000_000_v1_000_001.ts
new file mode 100644
index 00000000..273575cc
--- /dev/null
+++ b/web/modules/notebook/collab/yjs/schema/migrate/v1_000_000_v1_000_001.ts
@@ -0,0 +1,24 @@
+import * as Y from "yjs";
+import { registerNotebookMigration } from "./registry";
+import { NB_OUTPUTS } from "../core/keys";
+import { MAINT_ORIGIN } from "../core/origins";
+
+/**
+ * v1_000_000 → v1_000_001
+ * - Ensure NB_OUTPUTS (Y.Map) exists at root.
+ */
+registerNotebookMigration(1_000_000, (ctx) => {
+ const { doc, root, log } = ctx;
+ const apply = () => {
+ const r = root as Y.Map<unknown>;
+ if (!r.has(NB_OUTPUTS)) {
+ r.set(NB_OUTPUTS, new Y.Map());
+ log?.(`[migration v1_000_001] Created root outputs map (${NB_OUTPUTS}).`);
+ } else {
+ log?.(`[migration v1_000_001] Outputs map already present.`);
+ }
+ };
+
+ // Wrap in a transaction so the change does not enter the undo stack
+ doc.transact(apply, MAINT_ORIGIN);
+});
diff --git a/web/modules/notebook/collab/yjs/schema/ops/execute.ts b/web/modules/notebook/collab/yjs/schema/ops/execute.ts
new file mode 100644
index 00000000..05faef92
--- /dev/null
+++ b/web/modules/notebook/collab/yjs/schema/ops/execute.ts
@@ -0,0 +1,150 @@
+import { ulid } from "ulid";
+import { EXECUTION_ORIGIN } from "../core/origins";
+import type { YNotebook } from "../core/types";
+import { getOutputsMap, ensureOutputEntry } from "../access/outputs";
+import { withTransactOptional } from "../core/transaction";
+import { QueryResponse } from "@/api-gen";
+
+/** Start execution: reset and mark running=true */
+export const startExecuteCell = (
+ nb: YNotebook,
+ cellId: string,
+ opts?: { now?: number; executedBy?: number }
+) => {
+ const apply = () => {
+ const entry = ensureOutputEntry(nb, cellId);
+ const runId = ulid();
+ const now = opts?.now ?? Date.now();
+
+ // Overwrite basic execution state
+ entry.set("running", true);
+ entry.set("stale", false);
+ entry.set("startedAt", now);
+ entry.set("runId", runId);
+ if (typeof opts?.executedBy === "number") {
+ entry.set("executedBy", opts.executedBy);
+ } else {
+ entry.delete("executedBy");
+ }
+ entry.delete("completedAt");
+ };
+ withTransactOptional(nb, apply, EXECUTION_ORIGIN);
+};
+
+/** Apply execution result (completely overwrite old result) */
+export const applyExecuteResult = (
+ nb: YNotebook,
+ cellId: string,
+ result: QueryResponse,
+ opts?: {
+ completedAt?: number;
+ /** Expected runId to match; used as a concurrency guard */
+ expectedRunId?: string;
+ /** Skip runId validation and force the write (use with caution) */
+ ignoreRunId?: boolean;
+ /** Whether to clear runId after applying (default true, prevents duplicate result writes) */
+ clearRunId?: boolean;
+ }
+) => {
+ const apply = () => {
+ const outputs = getOutputsMap(nb);
+ const entry = outputs.get(cellId);
+ // If there is no active/history entry, do not create a new entry, just ignore (avoid orphan results)
+ if (!entry) return;
+
+ // Concurrency control: if runId exists, require match with expectedRunId to write by default
+ if (!opts?.ignoreRunId) {
+ const currentRunId = entry.get("runId") as string | undefined;
+ const expected = opts?.expectedRunId;
+
+ // Case A: entry has runId, but no expected provided by caller — do not write (cannot confirm ownership)
+ if (currentRunId && !expected) return;
+ // Case B: entry has no runId, but expected is provided — do not write (run has ended/cleaned up)
+ if (!currentRunId && expected) return;
+ // Case C: both exist but not equal — do not write (old result)
+ if (currentRunId && expected && currentRunId !== expected) return;
+ }
+
+ const now = opts?.completedAt ?? Date.now();
+
+ entry.set("running", false);
+ entry.set("stale", false);
+ entry.set("completedAt", now);
+ entry.set("result", result);
+
+ // By default, clear runId to avoid repeated results being written later
+ const shouldClear = opts?.clearRunId ?? true;
+ if (shouldClear) entry.delete("runId");
+ };
+ withTransactOptional(nb, apply, EXECUTION_ORIGIN);
+};
+
+/**
+ * Internal convenience method: use the current entry's runId as expectedRunId for result submission.
+ * Does not expose runId to caller; if there is no runId (not started or already cleaned up), ignore submission.
+ */
+export const applyExecuteResultForCurrentRun = (
+ nb: YNotebook,
+ cellId: string,
+ result: QueryResponse,
+ opts?: { completedAt?: number; ignoreRunId?: boolean; clearRunId?: boolean }
+) => {
+ const outputs = getOutputsMap(nb);
+ const entry = outputs.get(cellId);
+ const expectedRunId = entry?.get("runId") as string | undefined;
+ if (!expectedRunId && !opts?.ignoreRunId) return;
+ applyExecuteResult(nb, cellId, result, { ...opts, expectedRunId });
+};
+
+/** Mark stale=true when source is modified */
+export const markCellOutputStale = (
+ nb: YNotebook,
+ cellId: string,
+ opts?: { origin?: symbol }
+) => {
+ const apply = () => {
+ const outputs = getOutputsMap(nb);
+ const entry = outputs.get(cellId);
+ if (!entry) return;
+ const current = entry.get("stale");
+ if (current === true) return;
+ entry.set("stale", true);
+ };
+ withTransactOptional(nb, apply, opts?.origin ?? EXECUTION_ORIGIN);
+};
+
+/** Force-stop a running execution when owner is gone; keeps existing result, marks stale. */
+export const forceStopExecuteCell = (
+ nb: YNotebook,
+ cellId: string,
+ opts?: {
+ expectedRunId?: string;
+ ignoreRunId?: boolean;
+ completedAt?: number;
+ markStale?: boolean;
+ }
+) => {
+ const apply = () => {
+ const outputs = getOutputsMap(nb);
+ const entry = outputs.get(cellId);
+ if (!entry) return;
+
+ if (!opts?.ignoreRunId) {
+ const currentRunId = entry.get("runId") as string | undefined;
+ const expected = opts?.expectedRunId;
+ if (currentRunId && !expected) return;
+ if (!currentRunId && expected) return;
+ if (currentRunId && expected && currentRunId !== expected) return;
+ }
+
+ const now = opts?.completedAt ?? Date.now();
+ const markStale = opts?.markStale ?? true;
+
+ entry.set("running", false);
+ if (markStale) entry.set("stale", true);
+ entry.set("completedAt", now);
+ entry.delete("runId");
+ };
+
+ withTransactOptional(nb, apply, EXECUTION_ORIGIN);
+};
diff --git a/web/modules/notebook/collab/yjs/schema/ops/mutations.ts b/web/modules/notebook/collab/yjs/schema/ops/mutations.ts
new file mode 100644
index 00000000..cd5b438a
--- /dev/null
+++ b/web/modules/notebook/collab/yjs/schema/ops/mutations.ts
@@ -0,0 +1,98 @@
+import * as Y from "yjs";
+import { USER_ACTION_ORIGIN } from "../core/origins";
+import { CELL_ID, NB_TOMBSTONES, NB_TOMBSTONE_META, NB_OUTPUTS } from "../core/keys";
+import type { YCell, YNotebook } from "../core/types";
+import { getCellMap, getOrder } from "../access/accessors";
+import { lockCellId } from "../access/cells";
+import { withTransactOptional } from "../core/transaction";
+import { ulid } from "ulid";
+
+/** Insert a cell at the specified position (append if index is omitted) */
+export const insertCell = (
+ nb: YNotebook,
+ cell: YCell,
+ index?: number,
+ origin: symbol = USER_ACTION_ORIGIN
+) => {
+ if (cell.doc) {
+ const preset = cell.get(CELL_ID) as string | undefined;
+ if (preset) {
+ console.warn(`Inserting cell with preset id "${preset}". This may cause id conflicts. For safety, a new id will be generated instead.`);
+ }
+ }
+ const id = ulid();
+ cell.set(CELL_ID, id);
+
+ const apply = () => {
+ const map = getCellMap(nb);
+ const order = getOrder(nb);
+ const snapshot = order.toArray();
+ for (let i = snapshot.length - 1; i >= 0; i -= 1) {
+ if (snapshot[i] === id) order.delete(i, 1);
+ }
+
+ map.set(id, cell);
+ lockCellId(cell);
+
+ const len = order.length;
+ let target = index ?? len;
+ if (target < 0) target = 0;
+ if (target > len) target = len;
+ order.insert(target, [id]);
+ };
+
+ withTransactOptional(nb, apply, origin);
+};
+
+/** Delete by cellId (hard delete: remove from order and map; for soft delete use softDeleteCell) */
+export const removeCell = (
+ nb: YNotebook,
+ id: string,
+ origin: symbol = USER_ACTION_ORIGIN
+) => {
+ const apply = () => {
+ const order = getOrder(nb);
+ const map = getCellMap(nb);
+ const snapshot = order.toArray();
+ for (let i = snapshot.length - 1; i >= 0; i -= 1) {
+ if (snapshot[i] === id) order.delete(i, 1);
+ }
+ map.delete(id);
+
+ const tomb = nb.get(NB_TOMBSTONES) as Y.Map<boolean> | undefined;
+ tomb?.delete(id);
+ const tm = nb.get(NB_TOMBSTONE_META) as Y.Map<unknown> | undefined;
+ tm?.delete(id);
+
+ // Also clean up outputs (if present)
+ const outputs = nb.get(NB_OUTPUTS) as Y.Map<unknown> | undefined;
+ outputs?.delete(id);
+ };
+ withTransactOptional(nb, apply, origin);
+};
+
+/** Move cell to a new position (stable by id) */
+export const moveCell = (
+ nb: YNotebook,
+ id: string,
+ toIndex: number,
+ origin: symbol = USER_ACTION_ORIGIN
+) => {
+ const apply = () => {
+ const order = getOrder(nb);
+ const arr = order.toArray();
+ const from = arr.indexOf(id);
+ if (from < 0) return;
+ const len = arr.length;
+ let target = Number.isFinite(toIndex) ? toIndex : len - 1;
+ if (target < 0) target = 0;
+ if (target > len) target = len;
+ if (target === from || (from === len - 1 && target >= len)) return;
+
+ order.delete(from, 1);
+ const newLen = order.length;
+ if (target > newLen) target = newLen;
+ order.insert(target, [id]);
+ };
+ withTransactOptional(nb, apply, origin);
+};
diff --git a/web/modules/notebook/collab/yjs/schema/ops/soft_delete.ts b/web/modules/notebook/collab/yjs/schema/ops/soft_delete.ts
new file mode 100644
index 00000000..ac79588f
--- /dev/null
+++ b/web/modules/notebook/collab/yjs/schema/ops/soft_delete.ts
@@ -0,0 +1,87 @@
+import * as Y from "yjs";
+import { USER_ACTION_ORIGIN } from "../core/origins";
+import { WALL_CLOCK_EPOCH_FLOOR_MS, systemClock, type ClockSource } from "../core/time";
+import { getOrder, getCellMap } from "../access/accessors";
+import { tombstonesMap, tombstoneMetaMap, ensureTombstoneMetaEntry, type TombstoneMetaMap } from "../access/tombstone";
+import type { YNotebook } from "../core/types";
+import { NB_TOMBSTONES, NB_TOMBSTONE_META } from "../core/keys";
+import { withTransactOptional } from "../core/transaction";
+
+export interface SoftDeleteOptions {
+ timestamp?: number;
+ trusted?: boolean;
+ clock?: ClockSource;
+}
+
+/** Soft delete: remove from Order and set tombstone, keep entity until vacuum cleanup */
+export const softDeleteCell = (
+ nb: YNotebook,
+ cellId: string,
+ reason?: string,
+ opts?: SoftDeleteOptions
+) => {
+ const resolve = (): { ts?: number; clock?: "trusted" | "local" } => {
+ const cs = opts?.clock ?? systemClock;
+ const hasTs = opts?.timestamp != null;
+ const ts = hasTs ? (opts!.timestamp as number) : cs.now();
+ if (typeof ts !== "number" || Number.isNaN(ts)) return {};
+ if (ts < WALL_CLOCK_EPOCH_FLOOR_MS) return {};
+ const trusted = opts?.trusted ?? (hasTs ? true : cs.trusted ?? false);
+ return { ts, clock: trusted ? "trusted" : "local" };
+ };
+
+ const apply = () => {
+ // Remove from order; entity remains in map (for audit/recovery), and mark tombstone
+ const order = getOrder(nb);
+ const snapshot = order.toArray();
+ for (let i = snapshot.length - 1; i >= 0; i -= 1) {
+ if (snapshot[i] === cellId) order.delete(i, 1);
+ }
+
+ const t = tombstonesMap(nb);
+ t.set(cellId, true);
+
+ const tm = tombstoneMetaMap(nb);
+ const { ts, clock } = resolve();
+ const entry = ensureTombstoneMetaEntry(tm, cellId);
+ if (reason !== undefined) entry.set("reason", reason);
+ if (ts !== undefined) entry.set("deletedAt", ts);
+ if (clock) entry.set("clock", clock);
+ };
+
+ withTransactOptional(nb, apply, USER_ACTION_ORIGIN);
+};
+
+/** Restore soft delete: clear tombstone and re-inject into order at specified position */
+export const restoreCell = (
+ nb: YNotebook,
+ cellId: string,
+ index?: number,
+ origin: symbol = USER_ACTION_ORIGIN
+) => {
+ const apply = () => {
+ const map = getCellMap(nb);
+ const cell = map.get(cellId);
+ if (!cell) return;
+
+ // Do not call lockCellId here, as it is already ensured during insert/create; restore only operates on order + tombs
+ const order = getOrder(nb);
+ const snapshot = order.toArray();
+ for (let i = snapshot.length - 1; i >= 0; i -= 1) {
+ if (snapshot[i] === cellId) order.delete(i, 1);
+ }
+
+ const len = order.length;
+ let target = index ?? len;
+ if (target < 0) target = 0;
+ if (target > len) target = len;
+ order.insert(target, [cellId]);
+
+ const tomb = nb.get(NB_TOMBSTONES) as Y.Map<boolean> | undefined;
+ tomb?.delete(cellId);
+ const tm = nb.get(NB_TOMBSTONE_META) as TombstoneMetaMap | undefined;
+ tm?.delete(cellId);
+ };
+
+ withTransactOptional(nb, apply, origin);
+};
diff --git a/web/modules/notebook/collab/yjs/schema/ops/tombstone_maint.ts b/web/modules/notebook/collab/yjs/schema/ops/tombstone_maint.ts
new file mode 100644
index 00000000..d76884ac
--- /dev/null
+++ b/web/modules/notebook/collab/yjs/schema/ops/tombstone_maint.ts
@@ -0,0 +1,93 @@
+import type { Map } from "yjs";
+import { MAINT_ORIGIN, VACUUM_ORIGIN } from "../core/origins";
+import { DEFAULT_FUTURE_SKEW_MS, WALL_CLOCK_EPOCH_FLOOR_MS } from "../core/time";
+import { getOrder, getCellMap } from "../access/accessors";
+import { ensureTombstoneMetaEntry, readTombstoneMetaEntry, tombstoneMetaMap, tombstonesMap } from "../access/tombstone";
+import type { YNotebook } from "../core/types";
+import { NB_OUTPUTS } from "../core/keys";
+import { withTransactOptional } from "../core/transaction";
+
+export interface TombstoneTimestampOptions {
+ reason?: string;
+ clock?: { now(): number; trusted: boolean };
+ trusted?: boolean;
+ origin?: symbol;
+}
+
+/** Maintenance/repair: set tombstone's deletedAt (does not enter undo stack) */
+export const setTombstoneTimestamp = (
+ nb: YNotebook,
+ cellId: string,
+ timestamp: number,
+ opts?: TombstoneTimestampOptions
+) => {
+ if (typeof timestamp !== "number" || Number.isNaN(timestamp)) return;
+ if (timestamp < WALL_CLOCK_EPOCH_FLOOR_MS) return;
+
+ const resolvedClock = (opts?.trusted ?? opts?.clock?.trusted ?? true) ? "trusted" : "local";
+
+ const apply = () => {
+ // tombstone flag must be at least true
+ const tomb = tombstonesMap(nb);
+ if (!tomb.get(cellId)) tomb.set(cellId, true);
+
+ const tm = tombstoneMetaMap(nb);
+ const entry = ensureTombstoneMetaEntry(tm, cellId);
+ entry.set("deletedAt", timestamp);
+ entry.set("clock", resolvedClock);
+ if (opts?.reason !== undefined) entry.set("reason", opts.reason);
+ };
+
+ withTransactOptional(nb, apply, opts?.origin ?? MAINT_ORIGIN);
+};
+
+/** Actual cleanup: only remove from map & meta when "trusted clock + expired TTL + not in order" conditions are met */
+export const vacuumNotebook = (
+ nb: YNotebook,
+ ttlMs = 30 * 24 * 3600 * 1000,
+ opts?: {
+ clock?: { now(): number; trusted: boolean };
+ now?: number;
+ nowTrusted?: boolean;
+ maxFutureSkewMs?: number;
+ }
+) => {
+ const t = tombstonesMap(nb);
+ const tm = tombstoneMetaMap(nb);
+ const map = getCellMap(nb);
+
+ const clock = opts?.clock;
+ const nowValue = opts?.now ?? (clock ? clock.now() : Date.now());
+ const nowTrusted = opts?.nowTrusted ?? (opts?.now != null ? true : clock?.trusted ?? false);
+ const maxFutureSkew = opts?.maxFutureSkewMs ?? DEFAULT_FUTURE_SKEW_MS;
+
+ const sweep = () => {
+ const orderIds = new Set(getOrder(nb).toArray());
+ t.forEach((flag, id) => {
+ if (!flag) return;
+ const metaSnapshot = readTombstoneMetaEntry(tm.get(id));
+ const { deletedAt, clock: clk } = metaSnapshot;
+ if (typeof deletedAt !== "number" || Number.isNaN(deletedAt) || deletedAt <= 0) return;
+
+ const tsTrusted = clk === "trusted";
+ if (tsTrusted && !nowTrusted) return;
+ if (!tsTrusted) return;
+ if (deletedAt - nowValue > maxFutureSkew) return;
+ if (nowValue - deletedAt < ttlMs) return;
+
+ // Only clean up entities not in order; if still in order, consider as "not soft deleted or already restored"
+ if (orderIds.has(id)) return;
+
+ // Actually delete entity and meta
+ map.delete(id);
+ tm.delete(id);
+ t.delete(id);
+
+ // Also clean up corresponding outputs (if present)
+ const outputs = nb.get(NB_OUTPUTS) as Map<unknown> | undefined;
+ outputs?.delete(id);
+ });
+ };
+
+ withTransactOptional(nb, sweep, VACUUM_ORIGIN);
+};
diff --git a/web/modules/notebook/collab/yjs/schema/quality/auto_stale.ts b/web/modules/notebook/collab/yjs/schema/quality/auto_stale.ts
new file mode 100644
index 00000000..3596d086
--- /dev/null
+++ b/web/modules/notebook/collab/yjs/schema/quality/auto_stale.ts
@@ -0,0 +1,129 @@
+import * as Y from "yjs";
+import { CELL_ID, CELL_SOURCE } from "../core/keys";
+import type { YNotebook, YCell } from "../core/types";
+import { markCellOutputStale } from "../ops/execute";
+import { getCellMap } from "../access/accessors";
+
+/**
+ * Listens for CELL_SOURCE (Y.Text) content changes in each cell, sets outputs[cellId].stale=true
+ * Listens for changes to the CELL_SOURCE reference itself (Y.Text replaced), re-binds to new text
+ * Listens for add/update/delete in cellMap: auto-binds for new/updated cells; cleans up references on delete
+ *
+ * @returns A disable function that cleans up all listeners. Safe to call multiple times.
+ */
+export const enableAutoStaleOnSource = (nb: YNotebook): (() => void) => {
+ let disposed = false;
+
+ // Track all active listeners for easy cleanup
+ const cellUnsub = new Map<YCell, () => void>(); // Unsubscribe cell internal key listener
+ const cellTextUnsub = new Map<YCell, () => void>(); // Unsubscribe current source text listener for this cell
+ const idUnsub = new Map<string, () => void>(); // Aggregate unsubscribe by id (handle cellMap replace/delete)
+
+ const bindText = (cell: YCell, text: Y.Text) => {
+ // If there is an old text listener, unsubscribe first
+ const prevDispose = cellTextUnsub.get(cell);
+ if (prevDispose) {
+ try { prevDispose(); } catch {}
+ cellTextUnsub.delete(cell);
+ }
+
+ const onTextChange = () => {
+ const cellId = cell.get(CELL_ID);
+ if (typeof cellId !== "string" || cellId.length === 0) return;
+ // Any source change only sets stale=true, does not clear result
+ markCellOutputStale(nb, cellId);
+ };
+
+ text.observe(onTextChange);
+
+ // Save the unsubscribe function for the current text of this cell
+ const dispose = () => {
+ try { text.unobserve(onTextChange); } catch {}
+ };
+ cellTextUnsub.set(cell, dispose);
+ };
+
+ const bindCell = (cell: YCell, id: string) => {
+ // Listen for "CELL_SOURCE" being replaced (Y.Text object changed)
+ const onCellKeyChange = (ev: Y.YMapEvent<any>) => {
+ if (!ev.keysChanged.has(CELL_SOURCE)) return;
+ const next = cell.get(CELL_SOURCE);
+ if (next instanceof Y.Text) bindText(cell, next);
+ else {
+ // If the new value is not Y.Text, unsubscribe old text listener
+ const prevDispose = cellTextUnsub.get(cell);
+ if (prevDispose) { try { prevDispose(); } catch {} cellTextUnsub.delete(cell); }
+ }
+ };
+
+ // Initially bind current text
+ const t = cell.get(CELL_SOURCE);
+ if (t instanceof Y.Text) bindText(cell, t);
+
+ // Listen for cell internal key changes
+ cell.observe(onCellKeyChange);
+
+ // Save the unsubscribe function for this cell
+ const prev = cellUnsub.get(cell);
+ cellUnsub.set(cell, () => {
+ try { cell.unobserve(onCellKeyChange); } catch {}
+ // Also unbind the current text listener
+ const td = cellTextUnsub.get(cell);
+ if (td) { try { td(); } catch {} cellTextUnsub.delete(cell); }
+ if (prev) prev();
+ });
+
+ // Create/replace the aggregate unsubscribe function for this id (clean up old one on update/rebind)
+ const old = idUnsub.get(id);
+ if (old) { try { old(); } catch {} }
+ idUnsub.set(id, () => {
+ const d = cellUnsub.get(cell);
+ if (d) { try { d(); } catch {} cellUnsub.delete(cell); }
+ const td = cellTextUnsub.get(cell);
+ if (td) { try { td(); } catch {} cellTextUnsub.delete(cell); }
+ });
+ };
+
+ const cellMap = getCellMap(nb);
+
+ // Bind for existing cells
+ cellMap.forEach((cell, key) => {
+ if (cell instanceof Y.Map) bindCell(cell as YCell, String(key));
+ });
+
+ // Listen for add/update/delete in cellMap
+ const onMapChange = (ev: Y.YMapEvent<any>) => {
+ // Handle add/update: Yjs does not distinguish add/update eventType, but we judge from value
+ ev.changes.keys.forEach((change, key) => {
+ // key is the cellId
+ if (change.action === "add" || change.action === "update") {
+ const cell = cellMap.get(key);
+ if (cell instanceof Y.Map) bindCell(cell as YCell, String(key));
+ }
+ if (change.action === "delete") {
+ // Aggregate unsubscribe by id (no need to get deleted cell)
+ const d = idUnsub.get(String(key));
+ if (d) { try { d(); } catch {} idUnsub.delete(String(key)); }
+ }
+ });
+ };
+
+ cellMap.observe(onMapChange);
+
+ // Return disable function (idempotent - safe to call multiple times)
+ const disable = () => {
+ if (disposed) return;
+ disposed = true;
+
+ try { cellMap.unobserve(onMapChange); } catch {}
+ // First unsubscribe by id in aggregate, then clean up leftovers
+ idUnsub.forEach((dispose) => { try { dispose(); } catch {} });
+ idUnsub.clear();
+ cellUnsub.forEach((dispose) => { try { dispose(); } catch {} });
+ cellUnsub.clear();
+ cellTextUnsub.forEach((dispose) => { try { dispose(); } catch {} });
+ cellTextUnsub.clear();
+ };
+
+ return disable;
+};
diff --git a/web/modules/notebook/collab/yjs/schema/quality/reconcile.ts b/web/modules/notebook/collab/yjs/schema/quality/reconcile.ts
new file mode 100644
index 00000000..a2e0fb90
--- /dev/null
+++ b/web/modules/notebook/collab/yjs/schema/quality/reconcile.ts
@@ -0,0 +1,293 @@
+import * as Y from "yjs";
+import { MAINT_ORIGIN } from "../core/origins";
+import type { YNotebook } from "../core/types";
+import { getCellMap, getOrder } from "../access/accessors";
+import { tombstonesMap } from "../access/tombstone";
+import { validateNotebook, type ValidationIssue } from "./validation";
+import { withTransactOptional } from "../core/transaction";
+
+/** Delete range in the order array */
+export interface DeleteRange {
+ start: number;
+ len: number;
+}
+
+export interface ReconcileOptions {
+ /** Append orphan cells (present in map, missing in order) to the end */
+ appendOrphans?: boolean;
+ /** Sort appended orphans by id (stable across peers) */
+ sortOrphansById?: boolean;
+ /** Drop tombstoned ids from order */
+ dropTombstonedFromOrder?: boolean;
+ /** Drop invalid/non-string ids and ids missing in map from order */
+ dropInvalidOrderEntries?: boolean;
+ /** Validate notebook after reconcile and include issues in the report */
+ validateAfter?: boolean;
+}
+
+export interface ReconcileReport {
+ changed: boolean;
+ previousOrderLength: number;
+ finalOrderLength: number;
+ removedMissingFromMap: string[];
+ removedTombstoned: string[];
+ removedDuplicates: string[];
+ removedInvalid: string[];
+ appendedOrphans: string[];
+ /** Summary of the applied minimal-diff patch */
+ patchStats: {
+ deleteOps: number; // number of delete ranges applied
+ deletedCount: number; // total items deleted
+ appendedCount: number; // items appended at end
+ };
+ /** Optional validation issues when validateAfter=true */
+ validationIssues?: ValidationIssue[];
+}
+
+/**
+ * Normalize reconcile options, filling every field with its default.
+ * NOTE(review): the generic argument to `Required` appears stripped in this
+ * diff; restored as `Required<ReconcileOptions>` — confirm upstream.
+ */
+export const resolveReconcileOptions = (
+  opts?: ReconcileOptions
+): Required<ReconcileOptions> => ({
+  appendOrphans: opts?.appendOrphans ?? true,
+  sortOrphansById: opts?.sortOrphansById ?? true,
+  dropTombstonedFromOrder: opts?.dropTombstonedFromOrder ?? true,
+  dropInvalidOrderEntries: opts?.dropInvalidOrderEntries ?? true,
+  validateAfter: opts?.validateAfter ?? false,
+});
+
+/**
+ * Build the set of tombstoned cell ids from the tombstone map.
+ * Only ids whose flag is truthy are included.
+ * NOTE(review): generic parameters restored (`Y.Map<boolean>` / `Set<string>`);
+ * they appear stripped in this diff — confirm against the original source.
+ */
+export const buildTombstoneSet = (tomb?: Y.Map<boolean>): Set<string> => {
+  const tombSet = new Set<string>();
+  tomb?.forEach((flag, id) => {
+    if (flag) tombSet.add(id);
+  });
+  return tombSet;
+};
+
+export interface ClassificationResult {
+ kept: string[];
+ indexesToDelete: number[];
+ removedMissingFromMap: string[];
+ removedTombstoned: string[];
+ removedDuplicates: string[];
+ removedInvalid: string[];
+}
+
+/**
+ * Classify the current order entries, producing kept ids and the delete index
+ * list, while recording removal reasons for reporting.
+ *
+ * Rules, applied per entry in order:
+ * - non-string entries are always dropped;
+ * - empty strings and ids missing from the cell map are dropped only when
+ *   options.dropInvalidOrderEntries is set;
+ * - duplicates are always dropped;
+ * - tombstoned ids are dropped when options.dropTombstonedFromOrder is set.
+ * NOTE(review): generic arguments (`Required<ReconcileOptions>`,
+ * `Set<string>`) restored; they appear stripped in this diff.
+ */
+export const classifyOrderEntries = (
+  orderSnapshot: any[],
+  options: Required<ReconcileOptions>,
+  mapHas: (id: string) => boolean,
+  tombSet: Set<string>
+): ClassificationResult => {
+  const seen = new Set<string>();
+  const removedMissingFromMap: string[] = [];
+  const removedTombstoned: string[] = [];
+  const removedDuplicates: string[] = [];
+  const removedInvalid: string[] = [];
+  const kept: string[] = [];
+  const indexesToDelete: number[] = [];
+
+  for (let i = 0; i < orderSnapshot.length; i += 1) {
+    const raw = orderSnapshot[i] as any;
+
+    // Always drop non-string entries
+    if (typeof raw !== "string") {
+      removedInvalid.push(String(raw));
+      indexesToDelete.push(i);
+      continue;
+    }
+    // Empty string: optionally keep (note: kept empty strings bypass dedupe)
+    if (raw.length === 0) {
+      if (options.dropInvalidOrderEntries) {
+        removedInvalid.push(raw);
+        indexesToDelete.push(i);
+      } else {
+        kept.push(raw);
+      }
+      continue;
+    }
+
+    if (seen.has(raw)) {
+      removedDuplicates.push(raw);
+      indexesToDelete.push(i);
+      continue;
+    }
+
+    if (!mapHas(raw)) {
+      if (options.dropInvalidOrderEntries) {
+        removedMissingFromMap.push(raw);
+        indexesToDelete.push(i);
+      } else {
+        kept.push(raw);
+        seen.add(raw);
+      }
+      continue;
+    }
+
+    if (options.dropTombstonedFromOrder && tombSet.has(raw)) {
+      removedTombstoned.push(raw);
+      indexesToDelete.push(i);
+      continue;
+    }
+
+    seen.add(raw);
+    kept.push(raw);
+  }
+
+  return {
+    kept,
+    indexesToDelete,
+    removedMissingFromMap,
+    removedTombstoned,
+    removedDuplicates,
+    removedInvalid,
+  };
+};
+
+/**
+ * Collect orphan ids: present in the cell map but neither kept in order nor
+ * tombstoned. Returns [] when options.appendOrphans is disabled; sorted by id
+ * when options.sortOrphansById is set (stable across peers).
+ * NOTE(review): generic arguments restored (`Y.Map<unknown>`, `Set<string>`,
+ * `Required<ReconcileOptions>`); they appear stripped in this diff — the map
+ * value type is unused here, confirm the exact original parameter upstream.
+ */
+export const findOrphansToAppend = (
+  map: Y.Map<unknown>,
+  keptSet: Set<string>,
+  tombSet: Set<string>,
+  options: Required<ReconcileOptions>
+): string[] => {
+  const orphans: string[] = [];
+  if (!options.appendOrphans) return orphans;
+  map.forEach((_cell, id) => {
+    if (!keptSet.has(id) && !tombSet.has(id)) orphans.push(id);
+  });
+  if (options.sortOrphansById) orphans.sort();
+  return orphans;
+};
+
+/**
+ * Compress a sorted list of indexes to delete into contiguous ranges.
+ * Input need not be sorted; it is copied and sorted ascending first.
+ */
+export const mergeDeleteIndexesToRanges = (indexesToDelete: number[]): DeleteRange[] => {
+  const sorted = [...indexesToDelete].sort((a, b) => a - b);
+  const ranges: DeleteRange[] = [];
+  for (const idx of sorted) {
+    const tail = ranges[ranges.length - 1];
+    // Extend the open range only when idx is exactly the next expected index;
+    // a gap (or a duplicate index) starts a fresh range.
+    if (tail !== undefined && idx === tail.start + tail.len) {
+      tail.len += 1;
+    } else {
+      ranges.push({ start: idx, len: 1 });
+    }
+  }
+  return ranges;
+};
+
+/**
+ * Apply a minimal patch to the Y.Array order: delete the given ranges and
+ * append orphan ids at the end, inside a single maintenance-origin transaction.
+ * No-op when there is nothing to delete or append.
+ * NOTE(review): the `Y.Array<string>` generic was restored; it appears
+ * stripped in this diff.
+ */
+export const applyOrderPatch = (
+  nb: YNotebook,
+  order: Y.Array<string>,
+  deleteRanges: DeleteRange[],
+  orphans: string[]
+) => {
+  const willDelete = deleteRanges.length > 0;
+  const willAppend = orphans.length > 0;
+  if (!willDelete && !willAppend) return;
+
+  const apply = () => {
+    // Apply deletions from the end toward the start so earlier range
+    // start indices remain valid while later items are removed.
+    for (let i = deleteRanges.length - 1; i >= 0; i -= 1) {
+      const { start, len } = deleteRanges[i]!;
+      if (len > 0) order.delete(start, len);
+    }
+    if (willAppend) order.push(orphans);
+  };
+  withTransactOptional(nb, apply, MAINT_ORIGIN);
+};
+
+/**
+ * Reconcile the notebook's order array against the cell map and tombstones:
+ * classify existing order entries, remove invalid/duplicate/tombstoned ones
+ * via a minimal range patch, optionally append orphan cells at the end, and
+ * return a detailed report. Mutations run through applyOrderPatch under the
+ * maintenance origin, so they do not enter the user undo stack.
+ */
+export const reconcileNotebook = (
+ nb: YNotebook,
+ opts?: ReconcileOptions
+): ReconcileReport => {
+ const options = resolveReconcileOptions(opts);
+
+ const order = getOrder(nb);
+ const map = getCellMap(nb);
+ const tomb = tombstonesMap(nb);
+ const tombSet = buildTombstoneSet(tomb);
+
+ // snapshot for computation only (we will apply minimal patch)
+ const before = order.toArray();
+
+ // Critical safety guard: when order is populated but cell map is completely empty,
+ // treat this as a likely partial-load state and abort reconciliation to avoid
+ // deleting valid order entries prematurely.
+ if (before.length > 0 && map.size === 0) {
+ const patchStats = {
+ deleteOps: 0,
+ deletedCount: 0,
+ appendedCount: 0,
+ };
+ const validationIssues = options.validateAfter ? validateNotebook(nb) : undefined;
+ return {
+ changed: false,
+ previousOrderLength: before.length,
+ finalOrderLength: before.length,
+ removedMissingFromMap: [],
+ removedTombstoned: [],
+ removedDuplicates: [],
+ removedInvalid: [],
+ appendedOrphans: [],
+ patchStats,
+ validationIssues,
+ };
+ }
+ const classification = classifyOrderEntries(before, options, (id) => map.has(id), tombSet);
+
+ const keptSet = new Set(classification.kept);
+ const orphans = findOrphansToAppend(map, keptSet, tombSet, options);
+
+ const deleteRanges = mergeDeleteIndexesToRanges(classification.indexesToDelete);
+ const changed = deleteRanges.length > 0 || orphans.length > 0;
+
+ if (changed) {
+ applyOrderPatch(nb, order, deleteRanges, orphans);
+ }
+
+ const finalLen = changed ? order.length : before.length; // minimal overhead read
+
+ const patchStats = {
+ deleteOps: deleteRanges.length,
+ deletedCount: deleteRanges.reduce((acc, r) => acc + r.len, 0),
+ appendedCount: orphans.length,
+ };
+
+ const validationIssues = options.validateAfter ? validateNotebook(nb) : undefined;
+
+ return {
+ changed,
+ previousOrderLength: before.length,
+ finalOrderLength: finalLen,
+ removedMissingFromMap: classification.removedMissingFromMap,
+ removedTombstoned: classification.removedTombstoned,
+ removedDuplicates: classification.removedDuplicates,
+ removedInvalid: classification.removedInvalid,
+ appendedOrphans: orphans,
+ patchStats,
+ validationIssues,
+ };
+};
diff --git a/web/modules/notebook/collab/yjs/schema/quality/reconcile_outputs.ts b/web/modules/notebook/collab/yjs/schema/quality/reconcile_outputs.ts
new file mode 100644
index 00000000..d59213ba
--- /dev/null
+++ b/web/modules/notebook/collab/yjs/schema/quality/reconcile_outputs.ts
@@ -0,0 +1,100 @@
+import * as Y from "yjs";
+import { MAINT_ORIGIN } from "../core/origins";
+import { NB_OUTPUTS } from "../core/keys";
+import type { YNotebook } from "../core/types";
+import { getCellMap } from "../access/accessors";
+import { validateNotebook, type ValidationIssue } from "./validation";
+import { withTransactOptional } from "../core/transaction";
+
+export interface ReconcileOutputsOptions {
+ /** Remove orphan outputs not present in cellMap */
+ removeOrphans?: boolean;
+ /** Remove output entries that are not Y.Map structure */
+ removeInvalid?: boolean;
+ /** Return result after validation */
+ validateAfter?: boolean;
+}
+
+export interface ReconcileOutputsReport {
+ changed: boolean;
+ previousCount: number;
+ finalCount: number;
+ removedOrphans: string[];
+ removedInvalid: string[];
+ patchStats: {
+ deletedCount: number;
+ };
+ validationIssues?: ValidationIssue[];
+}
+
+/**
+ * Clean up orphan or invalid records in the outputs area.
+ * - Removes keys that are empty/non-string, entries that are not Y.Map
+ *   structures (when removeInvalid), and entries whose cell no longer exists
+ *   in cellMap (when removeOrphans).
+ * - Does not touch result content or timestamps.
+ * NOTE(review): generic arguments (`Required<...>`, `Y.Map<Y.Map<unknown>>`)
+ * restored; they appear stripped in this diff — confirm upstream.
+ */
+export const reconcileOutputs = (
+  nb: YNotebook,
+  opts?: ReconcileOutputsOptions
+): ReconcileOutputsReport => {
+  const options: Required<ReconcileOutputsOptions> = {
+    removeOrphans: opts?.removeOrphans ?? false,
+    removeInvalid: opts?.removeInvalid ?? true,
+    validateAfter: opts?.validateAfter ?? false,
+  };
+
+  const outputs = nb.get(NB_OUTPUTS) as Y.Map<Y.Map<unknown>> | undefined;
+  const map = getCellMap(nb);
+  if (!outputs) {
+    // Nothing to reconcile; still honor validateAfter so the report shape
+    // matches the normal path (fix: validationIssues was previously omitted).
+    return {
+      changed: false,
+      previousCount: 0,
+      finalCount: 0,
+      removedOrphans: [],
+      removedInvalid: [],
+      patchStats: { deletedCount: 0 },
+      validationIssues: options.validateAfter ? validateNotebook(nb) : undefined,
+    };
+  }
+
+  const beforeCount = outputs.size;
+  const removedOrphans: string[] = [];
+  const removedInvalid: string[] = [];
+
+  outputs.forEach((entry, key) => {
+    // Non-string or empty key
+    if (typeof key !== "string" || key.length === 0) {
+      if (options.removeInvalid) removedInvalid.push(String(key));
+      return;
+    }
+    // Not a Y.Map entry (structurally invalid; skips the orphan check below)
+    if (!(entry instanceof Y.Map)) {
+      if (options.removeInvalid) removedInvalid.push(key);
+      return;
+    }
+    // Orphan: no corresponding cell in cellMap
+    if (options.removeOrphans && !map.has(key)) {
+      removedOrphans.push(key);
+    }
+  });
+
+  const willDelete = removedOrphans.length + removedInvalid.length > 0;
+  if (willDelete) {
+    const apply = () => {
+      for (const id of removedInvalid) outputs.delete(id);
+      for (const id of removedOrphans) outputs.delete(id);
+    };
+    withTransactOptional(nb, apply, MAINT_ORIGIN);
+  }
+
+  const afterCount = outputs.size;
+  const validationIssues = options.validateAfter ? validateNotebook(nb) : undefined;
+
+  return {
+    changed: willDelete,
+    previousCount: beforeCount,
+    finalCount: afterCount,
+    removedOrphans,
+    removedInvalid,
+    patchStats: { deletedCount: removedInvalid.length + removedOrphans.length },
+    validationIssues,
+  };
+};
diff --git a/web/modules/notebook/collab/yjs/schema/quality/undo.ts b/web/modules/notebook/collab/yjs/schema/quality/undo.ts
new file mode 100644
index 00000000..44b34591
--- /dev/null
+++ b/web/modules/notebook/collab/yjs/schema/quality/undo.ts
@@ -0,0 +1,33 @@
+import * as Y from "yjs";
+import { UndoManager } from "yjs";
+import { USER_ACTION_ORIGIN } from "../core/origins";
+import { NB_CELL_MAP, NB_CELL_ORDER, NB_TOMBSTONE_META, NB_TOMBSTONES } from "../core/keys";
+import type { YNotebook, YCell } from "../core/types";
+
+/**
+ * Create the notebook UndoManager.
+ *
+ * - Mainly tracks: order (sequence changes) and cellMap (content changes).
+ * - Also tracks: tombstones / tombstoneMeta, so composite actions like
+ *   "delete/restore" can be undone consistently.
+ * - Maintenance operations (VACUUM/MAINT) use a separate origin that is not in
+ *   trackedOrigins, so they never enter the undo stack.
+ * NOTE(review): generic arguments were restored; they appear stripped in this
+ * diff — confirm the exact value types against the original source.
+ */
+export const createNotebookUndoManager = (
+  nb: YNotebook,
+  options?: { captureTimeout?: number; trackedOrigins?: Set<unknown> }
+): UndoManager => {
+  const { captureTimeout = 500, trackedOrigins = new Set([null, USER_ACTION_ORIGIN]) } = options ?? {};
+
+  // Structures to include in the UndoManager (tracked only if present).
+  const order = nb.get(NB_CELL_ORDER) as Y.Array<string> | undefined;
+  const cellMap = nb.get(NB_CELL_MAP) as Y.Map<YCell> | undefined;
+  const tombstones = nb.get(NB_TOMBSTONES) as Y.Map<boolean> | undefined;
+  const tombstoneMeta = nb.get(NB_TOMBSTONE_META) as Y.Map<unknown> | undefined;
+
+  const scopes = [order, cellMap, tombstones, tombstoneMeta].filter(Boolean) as Array<Y.AbstractType<any>>;
+
+  return new UndoManager(scopes, {
+    captureTimeout,
+    // Track both default (null, usually from editor binding) and explicit USER_ACTION origins
+    trackedOrigins,
+  });
+};
diff --git a/web/modules/notebook/collab/yjs/schema/quality/validation.ts b/web/modules/notebook/collab/yjs/schema/quality/validation.ts
new file mode 100644
index 00000000..511ee6b5
--- /dev/null
+++ b/web/modules/notebook/collab/yjs/schema/quality/validation.ts
@@ -0,0 +1,219 @@
+import * as Y from "yjs";
+import { NB_OUTPUTS, NB_TOMBSTONES } from "../core/keys";
+import { CELL_ID, CELL_KIND } from "../core/keys";
+import type { YNotebook, YCell, YOutputsMap } from "../core/types";
+import { getCellMap, getOrder } from "../access/accessors";
+import { type TombstoneMetaMap, tombstoneMetaMap, isValidTombstoneClock } from "../access/tombstone";
+import { QueryResponse } from "@/api-gen";
+
+
+export interface ValidationIssue {
+ path: string;
+ level: "error" | "warning";
+ message: string;
+}
+
+/**
+ * Basic consistency check: id uniqueness, order reference integrity,
+ * tombstone validity, and outputs-area structure. Read-only — it collects
+ * and returns a list of issues and never mutates the notebook.
+ * NOTE(review): several generic arguments below (e.g. `Y.Map`, `Set()`,
+ * `Map()`) appear stripped in this diff — confirm the exact type parameters
+ * against the original source.
+ */
+export const validateNotebook = (nb: YNotebook): ValidationIssue[] => {
+ const issues: ValidationIssue[] = [];
+
+ const order = getOrder(nb).toArray();
+ const map = getCellMap(nb);
+ const tomb = nb.get(NB_TOMBSTONES) as Y.Map | undefined;
+ const tombSet = new Set();
+ tomb?.forEach((flag, id) => {
+ if (flag) tombSet.add(id);
+ });
+
+ // 1) id in order must exist in map and not be duplicated
+ const seenOrder = new Map();
+ order.forEach((id, idx) => {
+ if (typeof id !== "string" || id.length === 0) {
+ issues.push({
+ path: `order[${idx}]`,
+ level: "error",
+ message: `Invalid cell id at order[${idx}]`,
+ });
+ return;
+ }
+ const dup = seenOrder.get(id);
+ if (dup !== undefined) {
+ issues.push({
+ path: `order[${idx}]`,
+ level: "error",
+ message: `Duplicate cell id "${id}" also present at order[${dup}]`,
+ });
+ } else {
+ seenOrder.set(id, idx);
+ }
+ if (!map.has(id)) {
+ issues.push({
+ path: `order[${idx}]`,
+ level: "error",
+ message: `Cell id "${id}" referenced by order but missing in cellMap`,
+ });
+ }
+ if (tombSet.has(id)) {
+ issues.push({
+ path: `order[${idx}]`,
+ level: "warning",
+ message: `Cell id "${id}" appears in order but is marked tombstoned`,
+ });
+ }
+ });
+
+ const orderSet = new Set(order.filter((id): id is string => typeof id === "string"));
+
+ // 2) id in map not present in order means it is an orphan entity (possibly tombstone residue or pending recovery)
+ map.forEach((cell: YCell, id) => {
+ if (!orderSet.has(id)) {
+ issues.push({
+ path: `cellMap.${id}`,
+ level: "warning",
+ message: `Cell id "${id}" exists in cellMap but not referenced by order`,
+ });
+ }
+ const kind = cell?.get(CELL_KIND);
+ if (!kind) {
+ issues.push({
+ path: `cellMap.${id}`,
+ level: "error",
+ message: `Missing cell kind for "${id}"`,
+ });
+ }
+ const embeddedId = cell?.get(CELL_ID);
+ if (embeddedId !== undefined && embeddedId !== id) {
+ issues.push({
+ path: `cellMap.${id}`,
+ level: "warning",
+ message: `cellMap key "${id}" mismatches embedded id "${embeddedId}"`,
+ });
+ }
+ });
+
+ // 3) Tombstone validity
+ // NOTE(review): reads "tombstoneMeta" by literal key, falling back to the
+ // tombstoneMetaMap accessor — confirm the literal matches NB_TOMBSTONE_META.
+ const tm = nb.get("tombstoneMeta") as TombstoneMetaMap | undefined;
+ const tmm = tm ?? tombstoneMetaMap(nb);
+ tmm?.forEach((meta, id) => {
+ const deletedAt = meta?.get("deletedAt");
+ if (deletedAt != null && (typeof deletedAt !== "number" || Number.isNaN(deletedAt))) {
+ issues.push({
+ path: `tombstoneMeta.${id}`,
+ level: "warning",
+ message: `Invalid deletedAt for "${id}"`,
+ });
+ }
+ const clock = meta?.get("clock");
+ if (clock != null && !isValidTombstoneClock(clock)) {
+ issues.push({
+ path: `tombstoneMeta.${id}`,
+ level: "warning",
+ message: `Invalid clock tag for "${id}"`,
+ });
+ }
+ });
+
+ tomb?.forEach((flag, id) => {
+ if (!flag) return;
+ if (!map.has(id)) {
+ issues.push({
+ path: `tombstones.${id}`,
+ level: "warning",
+ message: `Tombstone exists for "${id}" but cellMap no longer has the entity`,
+ });
+ }
+ });
+
+ // 4) Outputs area check
+ const outputs = nb.get(NB_OUTPUTS) as YOutputsMap | undefined;
+ if (outputs) {
+ outputs.forEach((entry, id) => {
+ if (typeof id !== "string" || id.length === 0) {
+ issues.push({
+ path: `outputs[${id}]`,
+ level: "error",
+ message: `Invalid output key "${id}"`,
+ });
+ return;
+ }
+
+ // 4.1 Association integrity: id in outputs must exist in cellMap
+ if (!map.has(id)) {
+ issues.push({
+ path: `outputs.${id}`,
+ level: "warning",
+ message: `Output exists for "${id}" but cellMap no longer contains this cell`,
+ });
+ }
+
+ // 4.2 Field type and structure validity
+ if (!(entry instanceof Y.Map)) {
+ issues.push({
+ path: `outputs.${id}`,
+ level: "error",
+ message: `Output record for "${id}" is not a Y.Map`,
+ });
+ return;
+ }
+
+ const running = entry.get("running");
+ const stale = entry.get("stale");
+ if (running != null && typeof running !== "boolean") {
+ issues.push({
+ path: `outputs.${id}.running`,
+ level: "warning",
+ message: `"running" should be boolean, got ${typeof running}`,
+ });
+ }
+ if (stale != null && typeof stale !== "boolean") {
+ issues.push({
+ path: `outputs.${id}.stale`,
+ level: "warning",
+ message: `"stale" should be boolean, got ${typeof stale}`,
+ });
+ }
+
+ const startedAt = entry.get("startedAt");
+ const completedAt = entry.get("completedAt");
+ if (startedAt != null && typeof startedAt !== "number") {
+ issues.push({
+ path: `outputs.${id}.startedAt`,
+ level: "warning",
+ message: `"startedAt" should be number (timestamp), got ${typeof startedAt}`,
+ });
+ }
+ if (completedAt != null && typeof completedAt !== "number") {
+ issues.push({
+ path: `outputs.${id}.completedAt`,
+ level: "warning",
+ message: `"completedAt" should be number (timestamp), got ${typeof completedAt}`,
+ });
+ }
+
+ // 4.3 result structure
+ const result = entry.get("result") as QueryResponse;
+ if (result != null) {
+ const cols = result.columns;
+ const rows = result.rows;
+ const rowsAffected = result.rowsAffected;
+ const hasErr = "error" in result;
+
+ if (!Array.isArray(cols) || !Array.isArray(rows) || typeof rowsAffected !== "number") {
+ issues.push({
+ path: `outputs.${id}.result`,
+ level: "error",
+ message: `Invalid QueryResponse structure for "${id}"`,
+ });
+ } else if (hasErr && typeof result.error !== "string") {
+ issues.push({
+ path: `outputs.${id}.result.error`,
+ level: "warning",
+ message: `"error" field should be string when present`,
+ });
+ }
+ }
+ });
+ }
+
+ return issues;
+};
diff --git a/web/modules/notebook/collab/yjs/schema/types.ts b/web/modules/notebook/collab/yjs/schema/types.ts
new file mode 100644
index 00000000..e33fc22b
--- /dev/null
+++ b/web/modules/notebook/collab/yjs/schema/types.ts
@@ -0,0 +1,37 @@
+import { z } from "zod";
+import { NB_CELL_MAP, NB_CELL_ORDER, NB_DATABASE_ID, NB_ID, NB_METADATA, NB_OUTPUTS, NB_TAGS, NB_TITLE, NB_TOMBSTONE_META, NB_TOMBSTONES } from "./core/keys";
+import { QueryResponse } from "@/api-gen";
+
+export const cellKindSchema = z.enum(["sql", "markdown"]);
+
+// Single cell: kind + source text + free-form metadata.
+// Reuses cellKindSchema instead of re-declaring the same enum inline.
+const cellSchema = z.object({
+  kind: cellKindSchema.default("sql"),
+  source: z.string(),
+  meta: z.record(z.string(), z.unknown()).default({}),
+});
+
+// Execution output for one cell. startedAt/completedAt are numeric timestamps
+// — fixed from z.string() for consistency with quality/validation.ts, which
+// reports `"startedAt" should be number (timestamp)` for non-number values.
+export const outputSchema = z.object({
+  running: z.boolean().optional(),
+  stale: z.boolean().optional(),
+  startedAt: z.number().optional(),
+  completedAt: z.number().optional(),
+  runId: z.string().optional(),
+  executedBy: z.number().optional(),
+  result: z.custom<QueryResponse>().optional(),
+});
+
+// Plain-object mirror of the Yjs notebook document, keyed by the shared
+// NB_* key constants.
+export const sqlNotebookSchema = z.object({
+  [NB_ID]: z.string().nullable().default(null),
+  [NB_TITLE]: z.string().default("Untitled Notebook"),
+  [NB_DATABASE_ID]: z.string().nullable().default(null),
+  [NB_TAGS]: z.array(z.string()).default([]),
+  [NB_METADATA]: z.record(z.string(), z.unknown()).default({}),
+  [NB_CELL_MAP]: z.record(z.string(), cellSchema).default({}),
+  [NB_CELL_ORDER]: z.array(z.string()).default([]),
+  [NB_OUTPUTS]: z.record(z.string(), outputSchema).default({}),
+  [NB_TOMBSTONES]: z.record(z.string(), z.boolean()).default({}),
+  [NB_TOMBSTONE_META]: z.record(z.string(), z.unknown()).default({}),
+});
+
+export type SQLNotebook = z.infer<typeof sqlNotebookSchema>;
+export type CellOutput = z.infer<typeof outputSchema>;
\ No newline at end of file
diff --git a/web/modules/notebook/collab/yjs/snapshot/snapshot-uploader.ts b/web/modules/notebook/collab/yjs/snapshot/snapshot-uploader.ts
new file mode 100644
index 00000000..c907f2e4
--- /dev/null
+++ b/web/modules/notebook/collab/yjs/snapshot/snapshot-uploader.ts
@@ -0,0 +1,48 @@
+import * as Y from "yjs";
+import { WebsocketProvider } from "y-websocket";
+import * as decoding from "lib0/decoding";
+import { uploadNotebookSnapshot } from "./upload";
+
+// Keep in sync with backend definition in pkg/service/collaborative_service.go
+const YWS_MESSAGE_SNAPSHOT_REQUEST = 100;
+
+export type SnapshotUploaderOptions = {
+ provider: WebsocketProvider;
+ doc: Y.Doc;
+ notebookId: string;
+};
+
+/**
+ * Registers provider.messageHandlers[100] to react to custom snapshot
+ * request messages from the server and upload a Yjs snapshot.
+ * Returns a cleanup function that restores the previous handler.
+ *
+ * NOTE(review): if this is called twice for the same provider, the cleanups
+ * must run in reverse order of setup or a stale handler gets restored —
+ * confirm callers install at most one uploader per provider at a time.
+ */
+export function setupSnapshotUploader({
+ provider,
+ doc,
+ notebookId,
+}: SnapshotUploaderOptions): () => void {
+ const prevHandler = provider.messageHandlers[YWS_MESSAGE_SNAPSHOT_REQUEST];
+
+ const handler = async (
+ _encoder: any,
+ decoder: decoding.Decoder,
+ // _provider: WebsocketProvider,
+ // _emitSynced: boolean,
+ // _messageType: number
+ ) => {
+ try {
+ // Ignore requests addressed to a different document id.
+ const requestedDocId = decoding.readVarString(decoder);
+ if (requestedDocId !== notebookId) return;
+ await uploadNotebookSnapshot(doc, notebookId);
+ } catch (err) {
+ console.error("[snapshot-uploader] handler error", err);
+ }
+ };
+
+ provider.messageHandlers[YWS_MESSAGE_SNAPSHOT_REQUEST] = handler;
+
+ return () => {
+ provider.messageHandlers[YWS_MESSAGE_SNAPSHOT_REQUEST] = prevHandler;
+ };
+}
diff --git a/web/modules/notebook/collab/yjs/snapshot/upload.ts b/web/modules/notebook/collab/yjs/snapshot/upload.ts
new file mode 100644
index 00000000..593b6c75
--- /dev/null
+++ b/web/modules/notebook/collab/yjs/snapshot/upload.ts
@@ -0,0 +1,52 @@
+import * as Y from 'yjs'
+import { DefaultService } from '@/api-gen/services/DefaultService'
+
+const inFlight = new Set()
+
+export type UploadOptions = {
+ debug?: boolean
+ dedupeKey?: string
+}
+
+const stateVectorHeader = 'X-Yjs-State-Vector'
+
+/**
+ * Encode the full doc state and upload it as the notebook snapshot.
+ * Deduplicates concurrent uploads per dedupeKey: while one upload for the key
+ * is in flight, further calls return 'skipped'. The doc's state vector is
+ * sent base64-encoded in the X-Yjs-State-Vector header alongside the binary
+ * update body.
+ */
+export async function uploadNotebookSnapshot(
+ doc: Y.Doc,
+ notebookId: string,
+ options: UploadOptions = {}
+): Promise<'uploaded' | 'skipped'> {
+ const { debug = false, dedupeKey = notebookId } = options
+ const log = (...args: any[]) => { if (debug) console.debug('[snapshot-upload]', ...args) }
+ if (!notebookId) return 'skipped'
+ if (inFlight.has(dedupeKey)) {
+ log('skipped: in-flight', { dedupeKey })
+ return 'skipped'
+ }
+ inFlight.add(dedupeKey)
+ try {
+ const stateVector = Y.encodeStateVector(doc) as Uint8Array
+ const snapshotVector = toBase64(stateVector)
+ const update = Y.encodeStateAsUpdate(doc) as Uint8Array
+ log('uploading', { bytes: update.byteLength, notebookId })
+ const blob = new Blob([update], { type: 'application/octet-stream' })
+ await DefaultService.uploadCollabNotebookSnapshot(notebookId, blob, {
+ headers: { [stateVectorHeader]: snapshotVector },
+ })
+ log('uploaded')
+ return 'uploaded'
+ } finally {
+ inFlight.delete(dedupeKey)
+ }
+}
+
+// Base64-encode raw bytes: one char per byte, then btoa on the binary string.
+function toBase64(data: Uint8Array): string {
+  const chars: string[] = []
+  for (const byte of data) {
+    chars.push(String.fromCharCode(byte))
+  }
+  return btoa(chars.join(''))
+}
+
+// True while an upload for the given dedupe key has started but not settled.
+export function isSnapshotUploadInFlight(key: string): boolean {
+ return inFlight.has(key)
+}
diff --git a/web/modules/notebook/collab/yjs/undo/change-describer.ts b/web/modules/notebook/collab/yjs/undo/change-describer.ts
new file mode 100644
index 00000000..95f31abd
--- /dev/null
+++ b/web/modules/notebook/collab/yjs/undo/change-describer.ts
@@ -0,0 +1,275 @@
+import * as Y from "yjs";
+import {
+ NB_CELL_MAP,
+ NB_CELL_ORDER,
+ NB_DATABASE_ID,
+ NB_TITLE,
+ CELL_KIND,
+ CELL_SOURCE,
+} from "@/modules/notebook/collab/yjs/schema/core/keys";
+import type { HistoryChange } from "@/modules/notebook/components/tabs/utils/history-analyzer";
+import type { YNotebook, YCell } from "@/modules/notebook/collab/yjs/schema/core/types";
+
+export interface DerivedHistoryChange {
+ type: HistoryChange["type"];
+ description: string;
+ details?: HistoryChange["details"];
+}
+
+interface CellSnapshot {
+ id: string;
+ kind: string;
+ source: string;
+ index: number;
+}
+
+interface NotebookHistoryAnalysisContext {
+ root: YNotebook;
+}
+
+// Cap a string at `limit` characters, appending "..." when it was cut.
+const truncate = (value: string, limit = 100): string =>
+  value.length <= limit ? value : `${value.slice(0, limit)}...`;
+
+/**
+ * Snapshot a live cell: kind, current source text, and its index in the order
+ * array (-1 when order is absent or the id is not listed).
+ * NOTE(review): the `Y.Array<string>` generic was restored; it appears
+ * stripped in this diff.
+ */
+const toCellSnapshot = (cell: YCell | undefined, root: YNotebook, cellId: string): CellSnapshot | null => {
+  if (!cell) return null;
+  const kind = String(cell.get(CELL_KIND) ?? "sql");
+  const source = (cell.get(CELL_SOURCE) as Y.Text | undefined)?.toString() ?? "";
+  const order = root.get(NB_CELL_ORDER) as Y.Array<string> | undefined;
+  const index = order ? order.toArray().indexOf(cellId) : -1;
+  return {
+    id: cellId,
+    kind,
+    source,
+    index,
+  };
+};
+
+// Snapshot a detached (already-deleted) cell; its order index is unknown, so -1.
+const toSnapshotFromDetachedCell = (cell: YCell | undefined, cellId: string): CellSnapshot | null => {
+  if (!cell) return null;
+  const sourceText = cell.get(CELL_SOURCE) as Y.Text | undefined;
+  return {
+    id: cellId,
+    kind: String(cell.get(CELL_KIND) ?? "sql"),
+    source: sourceText?.toString() ?? "",
+    index: -1,
+  };
+};
+
+/**
+ * Extract the string ids removed by an order-array event by reading the
+ * content of each deleted item.
+ * NOTE(review): the `Y.YArrayEvent<string>` generic was restored; it appears
+ * stripped in this diff.
+ */
+const readDeletedIds = (event: Y.YArrayEvent<string>): string[] => {
+  const deleted: string[] = [];
+  event.changes.deleted.forEach((item: Y.Item) => {
+    const content = (item.content as unknown as { getContent?: () => unknown }).getContent?.();
+    if (Array.isArray(content)) {
+      for (const value of content) {
+        if (typeof value === "string") deleted.push(value);
+      }
+    }
+  });
+  return deleted;
+};
+
+/**
+ * Derive human-readable history changes (add/delete/move/modify/rename/db
+ * change) from the changedParentTypes map of an UndoManager stack item, then
+ * merge order-array insert/delete positions into the per-cell entries.
+ * NOTE(review): the Map/Array generic arguments in this signature appear
+ * stripped in the diff (likely Map<Y.AbstractType<any>, Array<Y.YEvent<any>>>)
+ * — confirm against the original source.
+ */
+export const analyzeStackItemChanges = (
+ changedParentTypes: Map>, Array>>,
+ context: NotebookHistoryAnalysisContext,
+): DerivedHistoryChange[] => {
+ const additions = new Map();
+ const deletions = new Map();
+ const modifications = new Map();
+ const moves = new Map();
+ const otherChanges: DerivedHistoryChange[] = [];
+
+ const root = context.root;
+ const cellMap = root.get(NB_CELL_MAP) as Y.Map | undefined;
+ const order = root.get(NB_CELL_ORDER) as Y.Array | undefined;
+
+ const insertionPositions = new Map();
+ const deletionPositions = new Map();
+
+ changedParentTypes.forEach((events) => {
+ for (const event of events) {
+ if (event instanceof Y.YMapEvent) {
+ if (event.target === root) {
+ event.keys.forEach((change, key) => {
+ if (key === NB_TITLE) {
+ const nextTitle = String(root.get(NB_TITLE) ?? "Untitled Notebook");
+ const prevTitle = typeof change.oldValue === "string" ? change.oldValue : "Untitled Notebook";
+ if (nextTitle !== prevTitle) {
+ otherChanges.push({
+ type: "notebook_modified",
+ description: `Renamed notebook from "${prevTitle}" to "${nextTitle}"`,
+ });
+ }
+ }
+ if (key === NB_DATABASE_ID) {
+ const nextDb = String(root.get(NB_DATABASE_ID) ?? "none");
+ const prevDb =
+ typeof change.oldValue === "string" || typeof change.oldValue === "number"
+ ? String(change.oldValue)
+ : "none";
+ if (nextDb !== prevDb) {
+ otherChanges.push({
+ type: "metadata_changed",
+ description: `Changed database connection from "${prevDb}" to "${nextDb}"`,
+ });
+ }
+ }
+ });
+ } else if (event.target === cellMap) {
+ event.keys.forEach((change, key) => {
+ const cellId = String(key);
+ const currentCell = cellMap?.get(cellId);
+ if (change.action === "add" && currentCell) {
+ const snapshot = toCellSnapshot(currentCell, root, cellId);
+ if (snapshot) {
+ additions.set(cellId, {
+ type: "cell_added",
+ description: `Added ${snapshot.kind} cell`,
+ details: {
+ cellId,
+ cellType: snapshot.kind,
+ cellIndex: snapshot.index,
+ content: truncate(snapshot.source),
+ },
+ });
+ }
+ }
+ if (change.action === "delete") {
+ const previousCell = change.oldValue as YCell | undefined;
+ const snapshot = toSnapshotFromDetachedCell(previousCell, cellId);
+ const description = snapshot
+ ? `Deleted ${snapshot.kind} cell`
+ : "Deleted cell";
+ deletions.set(cellId, {
+ type: "cell_deleted",
+ description,
+ details: snapshot
+ ? {
+ cellId,
+ cellType: snapshot.kind,
+ cellIndex: snapshot.index,
+ content: truncate(snapshot.source),
+ }
+ : { cellId },
+ });
+ }
+ });
+ }
+ }
+
+ if (event instanceof Y.YTextEvent) {
+ // The text's parent cell id is the second-to-last path segment.
+ const path = event.path;
+ const cellId = typeof path[path.length - 2] === "string" ? (path[path.length - 2] as string) : null;
+ if (!cellId || !cellMap) continue;
+ const cell = cellMap.get(cellId);
+ const snapshot = toCellSnapshot(cell, root, cellId);
+ if (!snapshot) continue;
+ modifications.set(cellId, {
+ type: "cell_modified",
+ description: `Modified ${snapshot.kind} cell content`,
+ details: {
+ cellId,
+ cellType: snapshot.kind,
+ cellIndex: snapshot.index,
+ content: truncate(snapshot.source),
+ },
+ });
+ }
+
+ if (event instanceof Y.YArrayEvent && event.target === order) {
+ // Walk the delta to recover absolute insert/delete positions.
+ const deletedIds = readDeletedIds(event);
+ const delta = event.delta;
+ let cursor = 0;
+ let deletedOffset = 0;
+
+ for (const part of delta) {
+ if (part.retain) {
+ cursor += part.retain;
+ }
+ if (part.delete) {
+ for (let i = 0; i < part.delete; i += 1) {
+ const cellId = deletedIds[deletedOffset++];
+ if (!cellId) continue;
+ deletionPositions.set(cellId, cursor + i);
+ }
+ }
+ if (part.insert) {
+ const values = Array.isArray(part.insert) ? part.insert : [part.insert];
+ values.forEach((value, indexOffset) => {
+ if (typeof value === "string") {
+ insertionPositions.set(value, cursor + indexOffset);
+ }
+ });
+ cursor += values.length;
+ }
+ }
+ }
+ }
+ });
+
+ // An id both deleted and inserted in the same stack item is a move.
+ insertionPositions.forEach((newIndex, cellId) => {
+ if (deletionPositions.has(cellId)) {
+ const oldIndex = deletionPositions.get(cellId)!;
+ if (oldIndex !== newIndex) {
+ const snapshot = cellMap ? toCellSnapshot(cellMap.get(cellId), root, cellId) : null;
+ const kind = snapshot?.kind ?? "cell";
+ moves.set(cellId, {
+ type: "cell_moved",
+ description: `Moved ${kind} cell from position ${oldIndex + 1} to ${newIndex + 1}`,
+ details: {
+ cellId,
+ cellType: kind,
+ oldIndex,
+ newIndex,
+ },
+ });
+ }
+ } else if (additions.has(cellId)) {
+ const change = additions.get(cellId)!;
+ change.details = {
+ ...change.details,
+ cellIndex: newIndex,
+ };
+ }
+ });
+
+ deletionPositions.forEach((position, cellId) => {
+ const existing = deletions.get(cellId);
+ if (existing) {
+ const details = existing.details ?? { cellId };
+ existing.details = {
+ ...details,
+ cellIndex: position,
+ };
+ return;
+ }
+
+ if (insertionPositions.has(cellId)) {
+ return;
+ }
+
+ const currentCell = cellMap?.get(cellId);
+ const snapshot = currentCell ? toSnapshotFromDetachedCell(currentCell, cellId) : null;
+ deletions.set(cellId, {
+ type: "cell_deleted",
+ description: snapshot ? `Deleted ${snapshot.kind} cell` : "Deleted cell",
+ details: {
+ cellId,
+ cellType: snapshot?.kind,
+ cellIndex: position,
+ content: snapshot ? truncate(snapshot.source) : undefined,
+ },
+ });
+ });
+
+ const result: DerivedHistoryChange[] = [];
+ result.push(...otherChanges);
+ additions.forEach((change) => result.push(change));
+ deletions.forEach((change) => result.push(change));
+ moves.forEach((change) => result.push(change));
+ modifications.forEach((change) => result.push(change));
+
+ return result;
+};
diff --git a/web/modules/notebook/collab/yjs/undo/history-registry.ts b/web/modules/notebook/collab/yjs/undo/history-registry.ts
new file mode 100644
index 00000000..5ac60f4e
--- /dev/null
+++ b/web/modules/notebook/collab/yjs/undo/history-registry.ts
@@ -0,0 +1,53 @@
+import type { NotebookUndoHistory } from "./notebook-undo-history";
+
+const registry = new Map<string, NotebookUndoHistory>(); // notebookId -> attached undo history
+const subscribers = new Map<string, Set<(history: NotebookUndoHistory | null) => void>>();
+
+const emit = (notebookId: string, history: NotebookUndoHistory | null): void => {
+  // Notify every listener registered for this notebook; no-op when none exist.
+  const notebookListeners = subscribers.get(notebookId);
+  notebookListeners?.forEach((listener) => listener(history));
+};
+
+export const attachNotebookUndoHistory = (notebookId: string, history: NotebookUndoHistory): void => {
+  // Ignore no-op attaches: a missing history, or re-attaching the same instance.
+  if (!history || registry.get(notebookId) === history) return;
+  registry.set(notebookId, history);
+  // Announce the newly attached history to any subscribers.
+  emit(notebookId, history);
+};
+
+export const detachNotebookUndoHistory = (notebookId: string, history: NotebookUndoHistory | null): void => {
+  const registered = registry.get(notebookId);
+  // A different instance is currently attached; a stale detach must not clobber it.
+  if (registered && registered !== history) return;
+  if (registered) {
+    registry.delete(notebookId);
+  }
+  emit(notebookId, null); // broadcast teardown (fires even when nothing was registered)
+};
+
+// Look up the currently attached undo history, or null when none is registered.
+export const getNotebookUndoHistory = (notebookId: string): NotebookUndoHistory | null =>
+  registry.get(notebookId) ?? null;
+
+export const subscribeNotebookUndoHistory = (
+  notebookId: string,
+  listener: (history: NotebookUndoHistory | null) => void,
+): (() => void) => {
+  // Lazily create the per-notebook listener set on first subscription.
+  const existing = subscribers.get(notebookId);
+  const listeners = existing ?? new Set<(history: NotebookUndoHistory | null) => void>();
+  if (!existing) subscribers.set(notebookId, listeners);
+  listeners.add(listener);
+
+  // Late subscribers still receive the current value immediately.
+  listener(getNotebookUndoHistory(notebookId));
+
+  // Unsubscribe: remove this listener and prune the set once it empties.
+  return () => {
+    listeners.delete(listener);
+    if (listeners.size === 0) {
+      subscribers.delete(notebookId);
+    }
+  };
+};
diff --git a/web/modules/notebook/collab/yjs/undo/history-types.ts b/web/modules/notebook/collab/yjs/undo/history-types.ts
new file mode 100644
index 00000000..2784e856
--- /dev/null
+++ b/web/modules/notebook/collab/yjs/undo/history-types.ts
@@ -0,0 +1,25 @@
+import type { HistoryChange } from "@/modules/notebook/components/tabs/utils/history-analyzer";
+import { NOTEBOOK_HISTORY_ENTRY_ORIGIN, type NotebookHistoryEntryOrigin } from "@/atoms/notebook/constants";
+
+export type HistoryEntryOrigin = NotebookHistoryEntryOrigin;
+
+export interface NotebookHistoryEntry {
+ id: string;
+ timestamp: string;
+ origin: HistoryEntryOrigin;
+ changes: HistoryChange[];
+}
+
+export interface NotebookHistoryState {
+ entries: NotebookHistoryEntry[];
+ cursor: number;
+}
+
+export const createEmptyHistoryState = (): NotebookHistoryState => ({ entries: [], cursor: -1 });
+
+// Coerce an arbitrary transaction origin to a known history origin; anything
+// other than the INITIAL marker is attributed to the USER.
+export const mapHistoryOrigin = (origin: unknown): NotebookHistoryEntryOrigin =>
+  origin === NOTEBOOK_HISTORY_ENTRY_ORIGIN.INITIAL
+    ? NOTEBOOK_HISTORY_ENTRY_ORIGIN.INITIAL
+    : NOTEBOOK_HISTORY_ENTRY_ORIGIN.USER;
diff --git a/web/modules/notebook/collab/yjs/undo/notebook-undo-history.ts b/web/modules/notebook/collab/yjs/undo/notebook-undo-history.ts
new file mode 100644
index 00000000..af399929
--- /dev/null
+++ b/web/modules/notebook/collab/yjs/undo/notebook-undo-history.ts
@@ -0,0 +1,261 @@
+import { ulid } from "ulid";
+import * as Y from "yjs";
+import { NOTEBOOK_HISTORY_ENTRY_ORIGIN } from "@/atoms/notebook/constants";
+import type { HistoryChange } from "@/modules/notebook/components/tabs/utils/history-analyzer";
+import type { YNotebook } from "@/modules/notebook/collab/yjs/schema/core/types";
+import { analyzeStackItemChanges, type DerivedHistoryChange } from "./change-describer";
+import {
+ createEmptyHistoryState,
+ mapHistoryOrigin,
+ type HistoryEntryOrigin,
+ type NotebookHistoryEntry,
+ type NotebookHistoryState,
+} from "./history-types";
+
+interface StackItemMeta {
+ id: string;
+ timestamp: string;
+ origin: HistoryEntryOrigin;
+ changes: HistoryChange[];
+}
+
+const STACK_ITEM_HISTORY_META_KEY = "rwc:notebook-history";
+
+const readStackItemMeta = (stackItem: any): StackItemMeta | null => {
+  if (!stackItem) return null;
+  const container = (stackItem as any).meta; // preferred storage: a Map-shaped meta container
+  if (container instanceof Map) {
+    const value = container.get(STACK_ITEM_HISTORY_META_KEY);
+    if (value && typeof value === "object") {
+      return value as StackItemMeta;
+    }
+  }
+  const fallback = (stackItem as Record<string, unknown>)[STACK_ITEM_HISTORY_META_KEY]; // fallback: plain property
+  if (fallback && typeof fallback === "object") {
+    return fallback as StackItemMeta;
+  }
+  return null;
+};
+
+const writeStackItemMeta = (stackItem: any, meta: StackItemMeta): void => {
+  if (!stackItem) return;
+  const container = (stackItem as any).meta;
+  if (container instanceof Map) {
+    container.set(STACK_ITEM_HISTORY_META_KEY, meta);
+    return;
+  }
+  (stackItem as Record<string, unknown>)[STACK_ITEM_HISTORY_META_KEY] = meta; // fallback: plain property
+};
+
+const changeKey = (change: { type: HistoryChange["type"]; description: string; details?: HistoryChange["details"] }): string => {
+  // Prefer the most specific stable identifier available for deduping.
+  const id = change.details?.cellId;
+  if (id) return `${change.type}::${id}`;
+  const list = change.details?.changes;
+  if (Array.isArray(list)) return `${change.type}::${list.join("|")}`;
+  return `${change.type}::${change.description}`;
+};
+
+// Merge derived changes into the list: same-key entries are replaced in place, new ones appended.
+const mergeChanges = (
+  existing: HistoryChange[],
+  derive: DerivedHistoryChange[],
+  timestamp: string,
+): HistoryChange[] => {
+  const merged = existing.slice();
+  const positionByKey = new Map(merged.map((item, idx) => [changeKey(item), idx] as const));
+  for (const change of derive) {
+    const key = changeKey(change);
+    const replacement: HistoryChange = {
+      type: change.type,
+      timestamp,
+      description: change.description,
+      details: change.details,
+    };
+    const existingIdx = positionByKey.get(key);
+    if (existingIdx !== undefined) {
+      // Same logical change already recorded: overwrite it in place.
+      merged[existingIdx] = replacement;
+    } else {
+      positionByKey.set(key, merged.length);
+      merged.push(replacement);
+    }
+  }
+  return merged;
+};
+
+export class NotebookUndoHistory {
+ private readonly undoManager: Y.UndoManager;
+
+ private readonly root: YNotebook;
+
+ private readonly listeners = new Set<(state: NotebookHistoryState) => void>();
+
+ private state: NotebookHistoryState = createEmptyHistoryState();
+
+ private readonly entryIndex = new Map();
+
+ constructor(options: { undoManager: Y.UndoManager; root: YNotebook }) {
+ this.undoManager = options.undoManager;
+ this.root = options.root;
+
+ this.handleStackItemAdded = this.handleStackItemAdded.bind(this);
+ this.handleStackItemUpdated = this.handleStackItemUpdated.bind(this);
+ this.handleStackItemPopped = this.handleStackItemPopped.bind(this);
+ this.handleStackCleared = this.handleStackCleared.bind(this);
+
+ this.undoManager.on("stack-item-added", this.handleStackItemAdded);
+ this.undoManager.on("stack-item-updated", this.handleStackItemUpdated);
+ this.undoManager.on("stack-item-popped", this.handleStackItemPopped);
+ this.undoManager.on("stack-cleared", this.handleStackCleared);
+
+ this.recompute();
+ }
+
+ destroy(): void {
+ this.undoManager.off("stack-item-added", this.handleStackItemAdded);
+ this.undoManager.off("stack-item-updated", this.handleStackItemUpdated);
+ this.undoManager.off("stack-item-popped", this.handleStackItemPopped);
+ this.undoManager.off("stack-cleared", this.handleStackCleared);
+ this.listeners.clear();
+ this.entryIndex.clear();
+ }
+
+ onSnapshot(listener: (state: NotebookHistoryState) => void): () => void {
+ this.listeners.add(listener);
+ listener(this.state);
+ return () => {
+ this.listeners.delete(listener);
+ };
+ }
+
+ getSnapshot(): NotebookHistoryState {
+ return this.state;
+ }
+
+ restore(entryId: string): void {
+ if (!entryId) return;
+ const targetIndex = this.entryIndex.get(entryId);
+ if (targetIndex === undefined) return;
+
+ const undoLength = this.undoManager.undoStack.length;
+ const cursor = undoLength - 1;
+
+ if (targetIndex === cursor) {
+ return;
+ }
+
+ if (targetIndex < cursor) {
+ for (let i = 0; i < cursor - targetIndex; i += 1) {
+ if (!this.undoManager.canUndo()) break;
+ this.undoManager.undo();
+ }
+ return;
+ }
+
+ const steps = targetIndex - cursor;
+ for (let i = 0; i < steps; i += 1) {
+ if (!this.undoManager.canRedo()) break;
+ this.undoManager.redo();
+ }
+ }
+
+ private getMeta(stackItem: any, origin: any): StackItemMeta {
+ let meta = readStackItemMeta(stackItem);
+ const mappedOrigin = mapHistoryOrigin(origin);
+ if (!meta) {
+ meta = {
+ id: ulid(),
+ timestamp: new Date().toISOString(),
+ origin: mappedOrigin,
+ changes: [],
+ };
+ writeStackItemMeta(stackItem, meta);
+ return meta;
+ }
+
+ if (!meta.id) {
+ meta.id = ulid();
+ }
+ if (!meta.timestamp) {
+ meta.timestamp = new Date().toISOString();
+ }
+ if (!Array.isArray(meta.changes)) {
+ meta.changes = [];
+ }
+ if (!meta.origin) {
+ meta.origin = mappedOrigin;
+ }
+ writeStackItemMeta(stackItem, meta);
+ return meta;
+ }
+
+ private handleStackItemAdded(event: any): void {
+ const meta = this.getMeta(event.stackItem, event.origin);
+ meta.origin = mapHistoryOrigin(event.origin);
+ meta.timestamp = new Date().toISOString();
+ const derived = analyzeStackItemChanges(event.changedParentTypes, { root: this.root });
+ if (derived.length > 0) {
+ meta.changes = mergeChanges(meta.changes, derived, meta.timestamp);
+ }
+ writeStackItemMeta(event.stackItem, meta);
+ this.recompute();
+ }
+
+ private handleStackItemUpdated(event: any): void {
+ const meta = this.getMeta(event.stackItem, event.origin);
+ meta.origin = mapHistoryOrigin(event.origin);
+ meta.timestamp = new Date().toISOString();
+ const derived = analyzeStackItemChanges(event.changedParentTypes, { root: this.root });
+ if (derived.length > 0) {
+ meta.changes = mergeChanges(meta.changes, derived, meta.timestamp);
+ }
+ writeStackItemMeta(event.stackItem, meta);
+ this.recompute();
+ }
+
+ private handleStackItemPopped(): void {
+ this.recompute();
+ }
+
+ private handleStackCleared(): void {
+ this.recompute();
+ }
+
+ private recompute(): void {
+ const entries: NotebookHistoryEntry[] = [];
+ this.entryIndex.clear();
+
+ const appendEntry = (stackItem: any): void => {
+ const meta = this.getMeta(stackItem, NOTEBOOK_HISTORY_ENTRY_ORIGIN.USER);
+ const entry: NotebookHistoryEntry = {
+ id: meta.id,
+ timestamp: meta.timestamp,
+ origin: meta.origin,
+ changes: meta.changes,
+ };
+ this.entryIndex.set(meta.id, entries.length);
+ entries.push(entry);
+ };
+
+ const undoStack = this.undoManager.undoStack;
+ undoStack.forEach((item) => appendEntry(item));
+
+ const redoStack = this.undoManager.redoStack;
+ for (let i = redoStack.length - 1; i >= 0; i -= 1) {
+ appendEntry(redoStack[i]);
+ }
+
+ const cursor = undoStack.length - 1;
+ this.state = {
+ entries,
+ cursor,
+ };
+
+ this.emit();
+ }
+
+ private emit(): void {
+ this.listeners.forEach((listener) => listener(this.state));
+ }
+}
diff --git a/web/modules/notebook/collab/yjs/utils/get-websocket-endpoint.ts b/web/modules/notebook/collab/yjs/utils/get-websocket-endpoint.ts
new file mode 100644
index 00000000..7168d5e0
--- /dev/null
+++ b/web/modules/notebook/collab/yjs/utils/get-websocket-endpoint.ts
@@ -0,0 +1,24 @@
+import { isDev } from "@/lib/is-dev";
+
+/**
+ * Get WebSocket endpoint for Y.js provider.
+ * Handles development, production, and custom endpoints.
+ */
+export function getWebsocketEndpoint(): string {
+  // SSR has no window; fall back to the server-side default port.
+  if (typeof window === 'undefined') return 'ws://localhost:30080';
+  if (isDev) return 'ws://localhost:8020';
+  try {
+    const alias: string | undefined = (window as any).APP_ENDPOINT;
+    if (alias) {
+      const parsed = new URL(alias, window.location.origin);
+      // Map the resolved http(s) scheme onto the matching WebSocket scheme.
+      parsed.protocol = parsed.protocol === 'https:' ? 'wss:' : 'ws:';
+      return `${parsed.protocol}//${parsed.host}`;
+    }
+  } catch (err) {
+    console.warn('Failed to get custom WebSocket endpoint:', err);
+  }
+  // NOTE(review): production fallback targets localhost — confirm this is intended.
+  return 'ws://localhost:8020';
+}
\ No newline at end of file
diff --git a/web/modules/notebook/collab/yjs/utils/notebook-resource.ts b/web/modules/notebook/collab/yjs/utils/notebook-resource.ts
new file mode 100644
index 00000000..2d072902
--- /dev/null
+++ b/web/modules/notebook/collab/yjs/utils/notebook-resource.ts
@@ -0,0 +1,76 @@
+import * as Y from "yjs";
+import { WebsocketProvider } from "y-websocket";
+import {
+ bootstrapDoc,
+ migrateNotebookSchema,
+ getNotebookRootAndMeta,
+ MAINT_ORIGIN,
+} from "@/modules/notebook/collab/yjs/schema";
+import { setupSnapshotUploader } from "@/modules/notebook/collab/yjs/snapshot/snapshot-uploader";
+import { uploadNotebookSnapshot } from "@/modules/notebook/collab/yjs/snapshot/upload";
+import { getWebsocketEndpoint } from "./get-websocket-endpoint";
+import { setupUpdateMeta } from "./update-meta";
+
+export interface NotebookResource {
+ readonly doc: Y.Doc;
+ readonly provider: WebsocketProvider;
+ readonly dispose: () => void;
+}
+
+export function createNotebookResource(
+ notebookId: string,
+ options?: { endpoint?: string }
+): NotebookResource {
+ const endpoint = options?.endpoint ?? getWebsocketEndpoint();
+ const doc = new Y.Doc({ guid: notebookId });
+ const provider = new WebsocketProvider(endpoint, "ws", doc, {
+ connect: true,
+ params: { doc: notebookId },
+ });
+ const disposeUpdateMeta = setupUpdateMeta({ doc, provider });
+
+ const teardownSnapshot = setupSnapshotUploader({
+ provider,
+ doc,
+ notebookId,
+ });
+
+ const handleSync = (isSynced: boolean) => {
+ if (!isSynced) return;
+
+ try {
+ bootstrapDoc(doc, { id: notebookId });
+ migrateNotebookSchema(doc, { autoReconcile: true });
+
+ let shouldUploadInitialSnapshot = false;
+
+ doc.transact(() => {
+ const { schemaMeta } = getNotebookRootAndMeta(doc);
+ const flag = schemaMeta.get("initialSnapshotUploaded");
+ if (flag !== true) {
+ schemaMeta.set("initialSnapshotUploaded", true);
+ shouldUploadInitialSnapshot = true;
+ }
+ }, MAINT_ORIGIN);
+
+ if (shouldUploadInitialSnapshot) {
+ uploadNotebookSnapshot(doc, notebookId).catch((err) => {
+ console.error("[notebook-resource] Failed to upload initial snapshot:", err);
+ });
+ }
+ } catch (err) {
+ console.error("Failed to bootstrap/migrate notebook:", err);
+ }
+ };
+ provider.on("sync", handleSync);
+
+ const dispose = () => {
+ provider.off("sync", handleSync);
+ disposeUpdateMeta();
+ teardownSnapshot();
+ provider.destroy();
+ doc.destroy();
+ };
+
+ return { doc, provider, dispose };
+}
diff --git a/web/modules/notebook/collab/yjs/utils/update-meta.ts b/web/modules/notebook/collab/yjs/utils/update-meta.ts
new file mode 100644
index 00000000..8f11adcf
--- /dev/null
+++ b/web/modules/notebook/collab/yjs/utils/update-meta.ts
@@ -0,0 +1,87 @@
+import * as Y from "yjs";
+import * as encoding from "lib0/encoding";
+import * as decoding from "lib0/decoding";
+import { WebsocketProvider } from "y-websocket";
+
+const YWS_MESSAGE_SYNC = 0;
+const YWS_MESSAGE_UPDATE_META = 101;
+const YJS_UPDATE_STEP = 2;
+
+type UpdateMeta = {
+ clientID: number;
+ clock: number;
+};
+
+export type UpdateMetaOptions = {
+ doc: Y.Doc;
+ provider: WebsocketProvider;
+};
+
+export function setupUpdateMeta({ doc, provider }: UpdateMetaOptions): () => void {
+ const pending: UpdateMeta[] = [];
+ const patchedSockets = new WeakSet();
+
+ const encodeMeta = (meta: UpdateMeta): Uint8Array => {
+ const encoder = encoding.createEncoder();
+ encoding.writeVarUint(encoder, YWS_MESSAGE_UPDATE_META);
+ encoding.writeVarUint(encoder, meta.clientID);
+ encoding.writeVarUint(encoder, meta.clock);
+ return encoding.toUint8Array(encoder);
+ };
+
+ const enqueueMeta = () => {
+ const stateVector = Y.decodeStateVector(Y.encodeStateVector(doc));
+ const clock = stateVector.get(doc.clientID) ?? 0;
+ pending.push({ clientID: doc.clientID, clock });
+ };
+
+ const isUpdateMessage = (data: ArrayBuffer | Uint8Array): boolean => {
+ try {
+ const view = data instanceof Uint8Array ? data : new Uint8Array(data);
+ const decoder = decoding.createDecoder(view);
+ const messageType = decoding.readVarUint(decoder);
+ if (messageType !== YWS_MESSAGE_SYNC) return false;
+ const step = decoding.readVarUint(decoder);
+ return step === YJS_UPDATE_STEP;
+ } catch {
+ return false;
+ }
+ };
+
+ const patchSend = (socket: WebSocket) => {
+ if (patchedSockets.has(socket)) return;
+ patchedSockets.add(socket);
+ const originalSend = socket.send.bind(socket);
+ socket.send = (data: any) => {
+ if (isUpdateMessage(data) && pending.length > 0) {
+ const meta = pending.shift();
+ if (meta) {
+ originalSend(encodeMeta(meta));
+ }
+ }
+ originalSend(data);
+ };
+ };
+
+ const handleUpdate = (_update: Uint8Array, origin: unknown) => {
+ if (origin === provider) return;
+ enqueueMeta();
+ };
+
+ const handleStatus = ({ status }: { status: string }) => {
+ if (status === "connected" && provider?.ws) {
+ patchSend(provider.ws);
+ }
+ };
+
+ doc.on("update", handleUpdate);
+ provider.on("status", handleStatus);
+ if (provider.ws && provider.ws.readyState === WebSocket.OPEN) {
+ patchSend(provider.ws);
+ }
+
+ return () => {
+ doc.off("update", handleUpdate);
+ provider.off("status", handleStatus);
+ };
+}
diff --git a/web/modules/notebook/components/Cell/CellContent.tsx b/web/modules/notebook/components/Cell/CellContent.tsx
new file mode 100644
index 00000000..a516d409
--- /dev/null
+++ b/web/modules/notebook/components/Cell/CellContent.tsx
@@ -0,0 +1,172 @@
+'use client'
+
+import { memo, useCallback, useEffect, useRef } from 'react'
+import toast from 'react-hot-toast'
+import type { editor as MonacoEditorNS } from 'monaco-editor'
+import * as monaco from 'monaco-editor'
+import {
+ CollaborativeMonacoEditor,
+ type CollaborativeMonacoEditorHandle,
+} from '@/modules/notebook/components/CollaborativeMonacoEditor'
+import { CodePreview } from './common/CodePreview'
+import { MarkdownRenderer } from '@/components/common/MarkdownRenderer'
+import { useNotebookState, useNotebookOperations } from '@/atoms/notebook/hooks'
+import { useCellSource } from '@/modules/notebook/hooks/use-cell-snapshot'
+import { useUndoRedoActions } from '@/modules/notebook/hooks/use-notebook-undo'
+// Content is sourced directly from Y.Text via the binding; no atom needed
+import type { CellEditorMode } from '@/atoms/notebook/cell-editor-modes'
+
+interface CellContentProps {
+ cellId: string
+ notebookId: string
+ cellType: 'sql' | 'markdown'
+ editorMode: CellEditorMode
+ onToggleEditorMode: () => void
+}
+
+export const CellContent = memo(function CellContent({
+ cellId,
+ notebookId,
+ cellType,
+ editorMode,
+ onToggleEditorMode,
+}: CellContentProps) {
+ // State wiring
+ const { databaseId } = useNotebookState();
+ const source = useCellSource(cellId);
+ const { executeCell } = useNotebookOperations();
+ const { undo, redo } = useUndoRedoActions();
+
+ // No additional text atom is required; CollaborativeMonacoEditor binds Y.Text directly
+
+ // Editor refs
+ const editorRef = useRef(null)
+ const monacoEditorRef = useRef(null)
+ const actionDisposersRef = useRef void }>>([])
+
+ // Cleanup on unmount
+ useEffect(() => {
+ return () => {
+ // Dispose actions we registered
+ actionDisposersRef.current.forEach((d) => d.dispose())
+ actionDisposersRef.current = []
+ // NEVER dispose the child editor instance here; the child component owns it.
+ monacoEditorRef.current = null
+ }
+ }, [])
+
+ // Handlers
+ const handlePreviewDoubleClick = useCallback(() => {
+ if (editorMode === 'preview') onToggleEditorMode()
+ }, [editorMode, onToggleEditorMode])
+
+ const runSqlOrToggle = useCallback(() => {
+ if (cellType === 'sql') {
+ if (!databaseId) {
+ toast.error('Select a database before executing this cell.')
+ return
+ }
+ executeCell(cellId)
+ } else {
+ onToggleEditorMode()
+ }
+ }, [cellType, databaseId, executeCell, cellId, onToggleEditorMode])
+
+ const handleEditorReady = useCallback(
+ (monacoEditor: MonacoEditorNS.IStandaloneCodeEditor) => {
+ // Clear old actions
+ actionDisposersRef.current.forEach((d) => d.dispose())
+ actionDisposersRef.current = []
+
+ monacoEditorRef.current = monacoEditor
+
+ // Register actions instead of low-level onKeyDown to avoid conflicts
+ const runAction = monacoEditor.addAction({
+ id: 'notebook.run-cell-or-toggle-preview',
+ label: cellType === 'sql' ? 'Run SQL Cell' : 'Toggle Preview',
+ keybindings: [monaco.KeyMod.CtrlCmd | monaco.KeyCode.Enter],
+ run: () => runSqlOrToggle(),
+ })
+
+ const undoAction = monacoEditor.addAction({
+ id: 'notebook.undo-app',
+ label: 'Undo (Notebook)',
+ keybindings: [monaco.KeyMod.CtrlCmd | monaco.KeyCode.KeyZ],
+ run: () => {
+ // App-level history
+ undo()
+ },
+ })
+
+ const redoAction = monacoEditor.addAction({
+ id: 'notebook.redo-app',
+ label: 'Redo (Notebook)',
+ keybindings: [
+ monaco.KeyMod.CtrlCmd | monaco.KeyCode.KeyY,
+ monaco.KeyMod.CtrlCmd | monaco.KeyMod.Shift | monaco.KeyCode.KeyZ,
+ ],
+ run: () => {
+ redo()
+ },
+ })
+
+ actionDisposersRef.current.push(runAction, undoAction, redoAction)
+
+ // Ensure focus when entering editor mode
+ if (editorMode === 'editor') {
+ monacoEditor.focus()
+ requestAnimationFrame(() => monacoEditor.focus())
+ }
+ },
+ [editorMode, cellType, runSqlOrToggle, undo, redo]
+ )
+
+ // Keep focus in editor when switching into editor mode
+ useEffect(() => {
+ if (editorMode !== 'editor') return
+ monacoEditorRef.current?.focus()
+ }, [editorMode])
+
+ // Render
+ if (editorMode === 'editor') {
+ return (
+
+ )
+ }
+
+ return (
+
+ {cellType === 'sql' ? (
+
+
+
+ ) : (
+
+
+
+ )}
+
+ )
+})
diff --git a/web/modules/notebook/components/Cell/CellOutput.tsx b/web/modules/notebook/components/Cell/CellOutput.tsx
new file mode 100644
index 00000000..ef06da80
--- /dev/null
+++ b/web/modules/notebook/components/Cell/CellOutput.tsx
@@ -0,0 +1,78 @@
+"use client";
+
+import { memo } from "react";
+import { Loader2, RefreshCw, AlertTriangle } from "lucide-react";
+import { useCellOutput } from "@/modules/notebook/hooks/use-cell-snapshot";
+import { OutputRenderer } from "../outputs";
+import type { QueryResponse } from "@/api-gen";
+
+interface CellOutputProps {
+ cellId: string;
+ notebookId: string;
+ cellType: "sql" | "markdown";
+}
+
+export const CellOutput = memo(function CellOutput({
+ cellId,
+ cellType,
+}: CellOutputProps) {
+ const output = useCellOutput(cellId);
+
+ if (cellType !== "sql") {
+ return null;
+ }
+
+ const renderBody = () => {
+ // If there's no output object, it means the cell has never been run.
+ if (!output) {
+ return null;
+ }
+
+ if (output.running) {
+ return (
+
+ );
+ }
+
+ if (output.result) {
+ return (
+
+
+ Output
+
+ {output.stale && (
+
+
+ Source changed after this run. Execute again to refresh.
+
+ )}
+
+
+ );
+ }
+
+ if (output.stale) {
+ return (
+
+
+
+
Output stale. Run the cell to refresh results.
+
+
+ );
+ }
+
+ return null;
+ };
+
+ return (
+
+ {renderBody()}
+
+ );
+});
diff --git a/web/modules/notebook/components/Cell/CellToolbar.tsx b/web/modules/notebook/components/Cell/CellToolbar.tsx
new file mode 100644
index 00000000..e97dafa6
--- /dev/null
+++ b/web/modules/notebook/components/Cell/CellToolbar.tsx
@@ -0,0 +1,371 @@
+"use client";
+
+import { useState } from "react";
+import toast from "react-hot-toast";
+import { m } from "motion/react";
+import {
+ Eye,
+ Edit3,
+ Play,
+ MoreHorizontal,
+ Trash2,
+ ChevronDown,
+ ChevronUp,
+ ArrowUp,
+ ArrowDown,
+ HelpCircle,
+ Loader2,
+} from "lucide-react";
+import { Button } from "@/components/ui/button";
+import { Checkbox } from "@/components/ui/checkbox";
+import {
+ Tooltip,
+ TooltipContent,
+ TooltipProvider,
+ TooltipTrigger,
+} from "@/components/ui/tooltip";
+import {
+ Popover,
+ PopoverContent,
+ PopoverTrigger,
+} from "@/components/ui/popover";
+import { Spring } from "@/lib/spring";
+import { cn } from "@/lib/utils";
+import { useNotebookState, useNotebookOperations } from "@/atoms/notebook/hooks";
+import { useCellSnapshot } from "@/modules/notebook/hooks/use-cell-snapshot";
+import {
+ useCellEditorModeForCell,
+ toggleCellEditorMode,
+} from "@/atoms/notebook/cell-editor-modes";
+import { useCellCollapsedForCell, toggleCellCollapsedAction } from "@/atoms/notebook/cell-runtime";
+import PresenceAvatars from "@/modules/notebook/components/common/PresenceAvatars";
+
+interface CellToolbarProps {
+ notebookId: string;
+ cellId: string;
+}
+
+function CellToolbar({ notebookId, cellId }: CellToolbarProps) {
+ const [isMoreMenuOpen, setIsMoreMenuOpen] = useState(false);
+ const [isHovered, setIsHovered] = useState(false);
+
+ // Get state from new hooks
+ const { databaseId, cellOrder, deleteCell, moveCell, updateCellMetadata } = useNotebookState();
+ const { kind: cellKind, metadata, output } = useCellSnapshot(cellId);
+ const { executeCell, clearOutputs, executeCells } = useNotebookOperations();
+ const editorMode = useCellEditorModeForCell(notebookId, cellId);
+ const isCollapsed = useCellCollapsedForCell(cellId);
+
+ // Extract backgroundDDL from metadata
+ const backgroundDDL = metadata.backgroundDDL ?? false;
+
+ // Compute derived values
+ const position = cellOrder.indexOf(cellId);
+ const total = cellOrder.length;
+ const isSql = cellKind === "sql";
+ const isRunning = output?.running ?? false;
+ // Presence avatars now handled by PresenceAvatars component
+
+ const canMoveUp = position > 0;
+ const canMoveDown = position < total - 1;
+
+ // Handlers
+ const toggleEditorMode = () => {
+ toggleCellEditorMode(notebookId, cellId);
+ };
+
+ const toggleCollapsed = () => {
+ toggleCellCollapsedAction(cellId);
+ };
+
+ const handleBackgroundDDLChange = (checked: boolean) => {
+ updateCellMetadata(cellId, { backgroundDDL: checked });
+ };
+
+ const handleMoveUp = () => {
+ if (!canMoveUp) return;
+ moveCell(cellId, 'up');
+ };
+
+ const handleMoveDown = () => {
+ if (!canMoveDown) return;
+ moveCell(cellId, 'down');
+ };
+
+ const handleDelete = () => {
+ deleteCell(cellId);
+ };
+
+ const handleClearOutput = () => {
+ clearOutputs();
+ };
+
+ const handleRunRange = (cellIds: string[]) => {
+ if (cellIds.length === 0) return;
+ void executeCells(cellIds, { stopOnError: true });
+ };
+
+ const handleRunCell = () => {
+ if (!isSql) return;
+ if (!databaseId) {
+ toast.error("Select a database before executing this cell.");
+ return;
+ }
+ executeCell(cellId);
+ };
+
+ if (!isSql) {
+ return (
+
+
+
+
+
+
+
+
+
+
+
+
+ );
+ }
+
+ return (
+
+
setIsHovered(true)}
+ onHoverEnd={() => setIsHovered(false)}
+ className="flex items-center gap-2"
+ >
+
+
+
+
+ {cellKind.toUpperCase()}
+
+
+ {isRunning && (
+
+ )}
+
+
+
+ {/* Background DDL Switch */}
+
+
+
+
+ handleBackgroundDDLChange(checked === true)}
+ />
+
+
+
+
+
+ Execute DDL statements in the background
+
+
+
+
+
+
+
+
+
+
+
+ {position > 0 && (
+
+ )}
+
+ {position < total - 1 && (
+
+ )}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ );
+}
+
+export { CellToolbar };
diff --git a/web/modules/notebook/components/Cell/common/CodePreview.tsx b/web/modules/notebook/components/Cell/common/CodePreview.tsx
new file mode 100644
index 00000000..5cdc9abf
--- /dev/null
+++ b/web/modules/notebook/components/Cell/common/CodePreview.tsx
@@ -0,0 +1,43 @@
+'use client'
+
+import { memo } from 'react'
+import { ShikiCodeBlock } from '@/components/ui/shiki/ShikiCodeBlock'
+
+interface CodePreviewProps {
+ code: string
+ language?: string
+ className?: string
+ onDoubleClick?: () => void
+ loading?: string
+ emptyText?: string
+}
+
+const CodePreview = memo(function CodePreview({
+ code,
+ language = 'sql',
+ className,
+ onDoubleClick,
+ emptyText = 'Empty cell'
+}: CodePreviewProps) {
+ return (
+
+ )
+}, (prevProps, nextProps) => {
+ return (
+ prevProps.code === nextProps.code &&
+ prevProps.language === nextProps.language &&
+ prevProps.className === nextProps.className &&
+ prevProps.loading === nextProps.loading &&
+ prevProps.emptyText === nextProps.emptyText &&
+ prevProps.onDoubleClick === nextProps.onDoubleClick
+ )
+})
+
+export { CodePreview }
\ No newline at end of file
diff --git a/web/modules/notebook/components/Cell/index.tsx b/web/modules/notebook/components/Cell/index.tsx
new file mode 100644
index 00000000..ed55d8a6
--- /dev/null
+++ b/web/modules/notebook/components/Cell/index.tsx
@@ -0,0 +1,63 @@
+"use client";
+
+import { cn } from "@/lib/utils";
+import { useCellKind } from "@/modules/notebook/hooks/use-cell-snapshot";
+import {
+ useCellEditorModeForCell,
+ toggleCellEditorMode,
+} from "@/atoms/notebook/cell-editor-modes";
+import {
+ useCellCollapsedForCell,
+} from "@/atoms/notebook/cell-runtime";
+import { CellContent } from "./CellContent";
+import { CellOutput } from "./CellOutput";
+import { CellToolbar } from "./CellToolbar";
+
+interface CellProps {
+ notebookId: string;
+ cellId: string;
+}
+
+export const Cell = function Cell({
+ notebookId,
+ cellId,
+}: CellProps) {
+ const cellKind = useCellKind(cellId);
+ const editorMode = useCellEditorModeForCell(notebookId, cellId);
+ const isCollapsed = useCellCollapsedForCell(cellId);
+
+ const toggleEditorMode = () => {
+ toggleCellEditorMode(notebookId, cellId);
+ };
+
+ const isSql = cellKind === "sql";
+
+ return (
+
+
+ {!isCollapsed && (
+
+ )}
+
+
+ );
+};
diff --git a/web/modules/notebook/components/CollaborativeMonacoEditor.tsx b/web/modules/notebook/components/CollaborativeMonacoEditor.tsx
new file mode 100644
index 00000000..dcb3e529
--- /dev/null
+++ b/web/modules/notebook/components/CollaborativeMonacoEditor.tsx
@@ -0,0 +1,129 @@
+"use client";
+
+import {
+ forwardRef,
+ memo,
+ useCallback,
+ useEffect,
+ useImperativeHandle,
+ useMemo,
+ useRef,
+ useState,
+} from "react";
+import * as Y from "yjs";
+import type { editor as MonacoEditorNS } from "monaco-editor";
+import { MonacoEditor, type MonacoEditorHandle, type MonacoEditorProps } from "@/components/ui/monaco-editor";
+import { useMonacoAwareness } from "@/modules/notebook/hooks/useMonacoAwareness";
+import { useSchemaCompletion } from "@/modules/notebook/hooks/useSchemaCompletion";
+import { useDatabaseSchemaQuery } from "@/modules/database/queries/databases";
+import { useNotebookSnapshot } from "@/modules/notebook/hooks/use-notebook-snapshot";
+import { NB_CELL_MAP, CELL_SOURCE } from "@/modules/notebook/collab/yjs/schema/core/keys";
+import type { YCell } from "@/modules/notebook/collab/yjs/schema/core/types";
+import { useNotebookRuntime } from "@/modules/notebook/providers/notebook-runtime-context";
+import { getNotebookRoot } from "@/modules/notebook/collab/yjs/schema/access/root";
+import { YjsMonacoBinding } from "../lib/collaborative-monaco/y-monaco-binding";
+
+interface CollaborativeMonacoEditorProps extends Omit {
+ notebookId: string;
+ cellId: string;
+ onReady?: (editor: MonacoEditorNS.IStandaloneCodeEditor) => void;
+}
+
+export type CollaborativeMonacoEditorHandle = MonacoEditorHandle;
+
+/**
+ * Collaborative Monaco Editor wrapper.
+ *
+ * Architecture:
+ * - Uses YjsMonacoBinding for Yjs Y.Text ↔ Monaco Model text synchronization
+ * - Uses useMonacoAwareness for collaborative cursors, selections, and presence UI
+ * - Composes base MonacoEditor component
+ *
+ * Separation of concerns:
+ * - Text sync: YjsMonacoBinding (with USER_ACTION_ORIGIN for undo compatibility)
+ * - Awareness UI: useMonacoAwareness hook (decorations, widgets, Jotai integration)
+ */
+const CollaborativeMonacoEditorInner = (
+ { cellId, onReady, ...rest }: CollaborativeMonacoEditorProps,
+ ref: React.Ref
+) => {
+ const baseRef = useRef(null);
+ const [editor, setEditor] = useState(null);
+ const editorsRef = useRef>(new Set());
+ const bindingRef = useRef(null);
+ const { resource } = useNotebookRuntime();
+
+ // Schema completion
+ const { databaseId } = useNotebookSnapshot();
+ const databaseIdNum = databaseId ? parseInt(String(databaseId), 10) : null;
+ const { data: schemas } = useDatabaseSchemaQuery(databaseIdNum);
+ useSchemaCompletion({ schemas });
+
+ const yText = useMemo(() => {
+ const root = getNotebookRoot(resource.doc);
+ const cm = root?.get(NB_CELL_MAP) as Y.Map | undefined;
+ const cell = cm?.get(cellId);
+ const candidate = cell?.get(CELL_SOURCE);
+ return candidate instanceof Y.Text ? candidate : null;
+ }, [resource.doc, cellId]);
+
+ // Awareness wiring
+ useMonacoAwareness({ editor, cellId });
+
+ const handleReady = useCallback(
+ (ed: MonacoEditorNS.IStandaloneCodeEditor) => {
+ setEditor(ed);
+ onReady?.(ed);
+ },
+ [onReady]
+ );
+
+ useImperativeHandle(
+ ref,
+ () => ({
+ focus: () => baseRef.current?.focus(),
+ getValue: () => baseRef.current?.getValue() ?? "",
+ setValue: (v: string) => baseRef.current?.setValue(v),
+ getEditor: () => editor,
+ }),
+ [editor]
+ );
+
+ // Text synchronization: Yjs Y.Text ↔ Monaco Model
+ // Note: awareness is NOT passed here - it's handled separately by useMonacoAwareness
+ useEffect(() => {
+ const monacoEditor = editor;
+ if (!monacoEditor || !yText) return;
+
+ const model = monacoEditor.getModel();
+ if (!model) return;
+
+ const editors = editorsRef.current;
+
+ editors.add(monacoEditor);
+ const binding = new YjsMonacoBinding({
+ yText,
+ monacoModel: model,
+ editors,
+ });
+ bindingRef.current = binding;
+
+ return () => {
+ binding.destroy();
+ if (bindingRef.current === binding) {
+ bindingRef.current = null;
+ }
+ editors.delete(monacoEditor);
+ };
+ }, [editor, yText]);
+
+ return (
+
+ );
+};
+
+export const CollaborativeMonacoEditor = memo(forwardRef(CollaborativeMonacoEditorInner));
diff --git a/web/modules/notebook/components/DatabaseDrawer.tsx b/web/modules/notebook/components/DatabaseDrawer.tsx
new file mode 100644
index 00000000..ff284d31
--- /dev/null
+++ b/web/modules/notebook/components/DatabaseDrawer.tsx
@@ -0,0 +1,39 @@
+'use client'
+
+import { memo, useState } from 'react'
+import { DatabaseManagement } from '@/modules/database/database-management'
+
+interface DatabaseDrawerProps {
+ children: React.ReactNode
+ onDatabaseChange?: () => void
+}
+
+const DatabaseDrawer = memo(function DatabaseDrawer({
+ children,
+ onDatabaseChange,
+}: DatabaseDrawerProps) {
+ const [isOpen, setIsOpen] = useState(false)
+
+ const handleClose = () => {
+ setIsOpen(false)
+ }
+
+ const handleDatabaseChange = () => {
+ onDatabaseChange?.()
+ }
+
+ return (
+ <>
+ setIsOpen(true)}>
+ {children}
+
+
+ >
+ )
+})
+
+export { DatabaseDrawer }
\ No newline at end of file
diff --git a/web/modules/notebook/components/EditableTitle.tsx b/web/modules/notebook/components/EditableTitle.tsx
new file mode 100644
index 00000000..205018a5
--- /dev/null
+++ b/web/modules/notebook/components/EditableTitle.tsx
@@ -0,0 +1,168 @@
+import { AnimatePresence, m } from "motion/react";
+import { Input } from "@/components/ui/input";
+import { cn } from "@/lib/utils";
+import { memo, useState, useRef, useEffect, useCallback, useMemo } from "react";
+import { Spring } from "@/lib/spring";
+import { useUpdateCollabNotebookMutation } from "@/modules/notebook/queries/notebook-mutations";
+import { useSelfEditing, useCellPresence } from "@/modules/notebook/awareness";
+import { NotebookPresenceAvatars } from "@/modules/notebook/components/common/PresenceAvatars";
+import { useNotebookSnapshot, useNotebookStoreState } from "../hooks/use-notebook-snapshot";
+import { useNotebookDirectoryEntry } from "@/modules/notebook/hooks/useNotebookSelection";
+
+interface EditableTitleProps {
+ notebookId: string;
+ className?: string;
+}
+
+export const EditableTitle = memo(function EditableTitle({
+ notebookId,
+ className,
+}: EditableTitleProps) {
+ const snap = useNotebookSnapshot();
+ const title = snap.title;
+ const notebookState = useNotebookStoreState();
+ const notebookEntry = useNotebookDirectoryEntry(notebookId);
+ const updateNotebook = useUpdateCollabNotebookMutation();
+
+ const [, setMyEditingState] = useSelfEditing();
+
+ const titlePresence = useCellPresence("title");
+
+ const [isEditing, setIsEditing] = useState(false);
+ const [editValue, setEditValue] = useState(title);
+ const inputRef = useRef(null);
+
+ const otherEditors = useMemo(
+ () => titlePresence.filter((p) => !p.isSelf && p.editing?.cellId === "title"),
+ [titlePresence]
+ );
+
+ const isLockedByOther = otherEditors.length > 0;
+
+ const presenceLabel =
+ otherEditors.length > 0
+ ? `${otherEditors
+ .map((p) => p.user.name ?? p.user.id ?? `Client ${p.clientId}`)
+ .join(", ")} is editing...`
+ : null;
+
+ useEffect(() => {
+ setEditValue(title);
+ }, [title]);
+
+ useEffect(() => {
+ if (isEditing && inputRef.current) {
+ inputRef.current.focus();
+ inputRef.current.select();
+ }
+ }, [isEditing]);
+
+ useEffect(() => {
+ const desiredState = isEditing ? { cellId: "title", origin: "title" } : null;
+ setMyEditingState(desiredState);
+
+ return () => {
+ setMyEditingState(null);
+ };
+ }, [isEditing, setMyEditingState]);
+
+  // Commit the edited title: optimistically update the shared notebook store,
+  // then persist via the collab-notebook mutation; roll back on failure.
+  const handleSubmit = useCallback(async () => {
+    // Immediately exit editing state to clear awareness
+    setIsEditing(false);
+
+    const trimmedValue = editValue.trim();
+    // Empty titles are rejected — restore the current title in the input.
+    if (!trimmedValue) {
+      setEditValue(title);
+      return;
+    }
+
+    // No-op when the title is unchanged.
+    if (trimmedValue === title) {
+      return;
+    }
+
+    const previousTitle = title;
+    // Optimistic write so collaborators see the rename immediately.
+    notebookState.title = trimmedValue;
+
+    if (notebookEntry) {
+      try {
+        await updateNotebook.mutateAsync({
+          entry: notebookEntry,
+          overrides: { title: trimmedValue },
+          successMessage: "Notebook renamed",
+        });
+      } catch {
+        // Persist failed: roll back both the store and the local input value.
+        notebookState.title = previousTitle;
+        setEditValue(previousTitle);
+      }
+    }
+    // FIX: `notebookState` is read and written above but was missing from the
+    // dependency array, risking a stale store reference after remounts.
+  }, [editValue, notebookEntry, notebookState, title, updateNotebook]);
+
+ const handleKeyDown = useCallback(
+ (e: React.KeyboardEvent) => {
+ if (e.key === "Enter" && !e.nativeEvent.isComposing) {
+ void handleSubmit();
+ } else if (e.key === "Escape" && !e.nativeEvent.isComposing) {
+ setEditValue(title);
+ setIsEditing(false);
+ }
+ },
+ [handleSubmit, title]
+ );
+
+ if (isEditing) {
+ return (
+
+ setEditValue(e.target.value)}
+ onBlur={() => void handleSubmit()}
+ onKeyDown={handleKeyDown}
+ disabled={updateNotebook.isPending}
+ className={cn(
+ "h-8 text-2xl font-semibold bg-transparent border-transparent hover:border-input focus:border-input",
+ className
+ )}
+ placeholder="Notebook title..."
+ />
+
+ );
+ }
+
+ return (
+
+
+
{
+ if (isLockedByOther) return;
+ setIsEditing(true);
+ }}
+ className={cn(
+ "w-full text-2xl font-semibold cursor-pointer hover:bg-muted/50 rounded px-2 py-1 transition-all",
+ isLockedByOther && "cursor-not-allowed opacity-70",
+ className
+ )}
+ title={isLockedByOther ? "Another user is editing the title" : "Click to edit title"}
+ aria-disabled={isLockedByOther || undefined}
+ >
+ {title}
+
+
+
+
+
+ {presenceLabel && (
+
+ {presenceLabel}
+
+ )}
+
+
+
+ );
+});
diff --git a/web/modules/notebook/components/KeyboardShortcuts.tsx b/web/modules/notebook/components/KeyboardShortcuts.tsx
new file mode 100644
index 00000000..e5e7fec4
--- /dev/null
+++ b/web/modules/notebook/components/KeyboardShortcuts.tsx
@@ -0,0 +1,56 @@
+'use client'
+
+import { useCallback } from 'react'
+import { useHotkeys } from 'react-hotkeys-hook'
+import toast from 'react-hot-toast'
+import { useUndoRedoActions } from '@/modules/notebook/hooks/use-notebook-undo'
+import { useNotebookRuntime } from '@/modules/notebook/providers/notebook-runtime-context'
+import { uploadNotebookSnapshot } from '@/modules/notebook/collab/yjs/snapshot/upload'
+import { resolveErrorMessage } from '@/lib/errors'
+import { notebookSaveActions } from '@/atoms/notebook/save-status'
+
+// Registers global notebook keyboard shortcuts (undo / redo / save).
+// Renders nothing; mount once inside the notebook runtime provider.
+export function KeyboardShortcuts() {
+  const { undo, redo } = useUndoRedoActions()
+  const { resource, notebookId } = useNotebookRuntime()
+
+  // Snapshot the current Yjs doc and upload it, tracking save status and
+  // surfacing the outcome via toasts.
+  const handleSave = useCallback(() => {
+    const currentDoc = resource.doc
+    notebookSaveActions.markSaving(notebookId)
+    const persist = async () => {
+      try {
+        const outcome = await uploadNotebookSnapshot(currentDoc, notebookId)
+        if (outcome === 'uploaded') {
+          notebookSaveActions.markSaved(notebookId)
+          toast.success('Notebook saved')
+        }
+      } catch (err) {
+        notebookSaveActions.markUnsaved(notebookId)
+        toast.error(resolveErrorMessage(err, 'Failed to save notebook'))
+      }
+    }
+    void persist()
+  }, [resource.doc, notebookId])
+
+  // Undo: Cmd/Ctrl+Z. Disabled inside form fields so native input undo works.
+  useHotkeys('mod+z', () => undo(), {
+    preventDefault: true,
+    enableOnFormTags: false,
+  }, [undo])
+
+  // Redo: Cmd/Ctrl+Y or Cmd/Ctrl+Shift+Z.
+  useHotkeys('mod+y, mod+shift+z', () => redo(), {
+    preventDefault: true,
+    enableOnFormTags: false,
+  }, [redo])
+
+  // Save: Cmd/Ctrl+S. Enabled on form tags so saving works while typing.
+  useHotkeys('mod+s', () => handleSave(), {
+    preventDefault: true,
+    enableOnFormTags: true,
+  }, [handleSave])
+
+  return null
+}
diff --git a/web/modules/notebook/components/NotebookBottomPanel.tsx b/web/modules/notebook/components/NotebookBottomPanel.tsx
new file mode 100644
index 00000000..2ae6ac7a
--- /dev/null
+++ b/web/modules/notebook/components/NotebookBottomPanel.tsx
@@ -0,0 +1,176 @@
+import React, { useRef, useCallback, useEffect } from "react";
+import { AnimatePresence, m } from "motion/react";
+import { ChevronUp, ChevronDown } from "lucide-react";
+import * as Tabs from "@radix-ui/react-tabs";
+import { cn } from "@/lib/utils";
+import { Spring } from "@/lib/spring";
+import { useResizable } from "@/hooks/useResizable";
+import {
+ notebookUIActions,
+ useNotebookBottomPanelState,
+ type NotebookBottomPanelTab,
+} from "@/atoms/notebook";
+
+// Tab content components
+import { LatestStreamingGraphTab } from "./tabs/LatestStreamingGraphTab";
+import { HistoryTab } from "./tabs/HistoryTab";
+import { ExportTab } from "./tabs/ExportTab";
+import { ProgressTab } from "./tabs/ProgressTab";
+
+interface NotebookBottomPanelProps {
+ className?: string;
+}
+
+export function NotebookBottomPanel({ className }: NotebookBottomPanelProps) {
+ const bottomPanel = useNotebookBottomPanelState();
+
+ const { width: height, resizeHandleProps } = useResizable({
+ defaultWidth: 300,
+ minWidth: 200,
+ maxWidth: 600,
+ storageKey: "notebook-bottom-panel-height",
+ direction: "vertical",
+ handle: "top",
+ });
+
+ const panelRef = useRef(null);
+
+ const togglePanel = useCallback(() => {
+ notebookUIActions.toggleBottomPanel();
+ }, []);
+
+ const openPanel = useCallback((tab?: NotebookBottomPanelTab) => {
+ notebookUIActions.openBottomPanel(tab);
+ }, []);
+
+ const closePanel = useCallback(() => {
+ notebookUIActions.closeBottomPanel();
+ }, []);
+
+
+ // Handle keyboard shortcuts
+ useEffect(() => {
+ const handleKeyDown = (e: KeyboardEvent) => {
+ if ((e.ctrlKey || e.metaKey) && e.key === "j") {
+ e.preventDefault();
+ togglePanel();
+ }
+ // Escape to close panel
+ if (e.key === "Escape" && bottomPanel.isOpen) {
+ closePanel();
+ }
+ };
+
+ document.addEventListener("keydown", handleKeyDown);
+ return () => {
+ document.removeEventListener("keydown", handleKeyDown);
+ };
+ }, [togglePanel, closePanel, bottomPanel.isOpen]);
+
+ const headerHeight = 42;
+
+ return (
+
+
+ {/* Resize handle - only show when expanded */}
+ {bottomPanel.isOpen && (
+
+ )}
+
+ {/* Header with tabs and controls */}
+
+
+
+ openPanel("history")}
+ value="history"
+ className="px-3 py-1 text-sm rounded-md transition-colors data-[state=active]:bg-muted data-[state=active]:text-foreground text-muted-foreground hover:text-foreground"
+ >
+ History
+
+ openPanel("progress")}
+ value="progress"
+ className="px-3 py-1 text-sm rounded-md transition-colors data-[state=active]:bg-muted data-[state=active]:text-foreground text-muted-foreground hover:text-foreground"
+ >
+ Progress
+
+ openPanel("streaming-graph")}
+ value="streaming-graph"
+ className="px-3 py-1 text-sm rounded-md transition-colors data-[state=active]:bg-muted data-[state=active]:text-foreground text-muted-foreground hover:text-foreground"
+ >
+ Latest Streaming Graph
+
+ openPanel("export")}
+ value="export"
+ className="px-3 py-1 text-sm rounded-md transition-colors data-[state=active]:bg-muted data-[state=active]:text-foreground text-muted-foreground hover:text-foreground"
+ >
+ Export & Import
+
+
+
+
+
+
+
+
+
+ {/* Tab content - only show when expanded */}
+
+ {bottomPanel.isOpen && (
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ )}
+
+
+
+ );
+}
diff --git a/web/modules/notebook/components/NotebookEditor.tsx b/web/modules/notebook/components/NotebookEditor.tsx
new file mode 100644
index 00000000..177963c8
--- /dev/null
+++ b/web/modules/notebook/components/NotebookEditor.tsx
@@ -0,0 +1,138 @@
+"use client";
+
+import { Fragment, memo, useCallback, useEffect } from "react";
+import { m } from "motion/react";
+import { cn } from "@/lib/utils";
+import { useNotebookReady, useNotebookState } from "@/atoms/notebook/hooks";
+import {
+ setCellEditorMode,
+ resetCellEditorModes,
+} from "@/atoms/notebook/cell-editor-modes";
+import { useActiveNotebookId } from "@/modules/notebook/hooks/useNotebookSelection";
+import { EditableTitle } from "./EditableTitle";
+import { Cell } from "./Cell";
+
+const EmptyState = () => (
+
+
No notebook selected
+
Create or select a notebook to get started.
+
+);
+
+interface AddCellButtonProps {
+ notebookId: string;
+ index?: number;
+}
+
+const AddCellButton = memo(function AddCellButton({ notebookId, index }: AddCellButtonProps) {
+ const { addCell } = useNotebookState();
+
+ const switchToEditor = useCallback((cellId: string | null) => {
+ if (!cellId) return;
+ setCellEditorMode(notebookId, cellId, "editor");
+ }, [notebookId]);
+
+ const handleAddSql = useCallback(() => {
+ const newCellId = addCell("sql", "", index);
+ switchToEditor(newCellId);
+ }, [addCell, index, switchToEditor]);
+
+ const handleAddMarkdown = useCallback(() => {
+ const newCellId = addCell("markdown", "", index);
+ switchToEditor(newCellId);
+ }, [addCell, index, switchToEditor]);
+
+ return (
+
+
+
+
+
+
+
+
+
+
+ );
+});
+
+interface NotebookEditorInnerProps {
+ notebookId: string;
+}
+
+const NotebookEditorInner = memo(function NotebookEditorInner({ notebookId }: NotebookEditorInnerProps) {
+ const { cellOrder } = useNotebookState();
+ useEffect(() => {
+ resetCellEditorModes(notebookId);
+ return () => {
+ resetCellEditorModes(notebookId);
+ };
+ }, [notebookId]);
+
+ return (
+
+
+ {cellOrder.map((cellId, index) => (
+
+
+ |
+
+
+
+ ))}
+ {cellOrder.length === 0 && (
+
+
No cells yet
+
Use the buttons above to create your first cell.
+
+ )}
+
+ );
+});
+
+interface NotebookEditorProps {
+ className?: string;
+}
+
+export const NotebookEditor = memo(function NotebookEditor({ className }: NotebookEditorProps) {
+ const isReady = useNotebookReady();
+ const notebookId = useActiveNotebookId();
+
+ const renderContent = () => {
+ if (!notebookId) {
+ return ;
+ }
+
+ return (
+ <>
+
+
+
+
+ >
+ );
+ };
+
+ return (
+
+ {renderContent()}
+
+ );
+});
diff --git a/web/modules/notebook/components/NotebookScopeBadge.tsx b/web/modules/notebook/components/NotebookScopeBadge.tsx
new file mode 100644
index 00000000..20c39a42
--- /dev/null
+++ b/web/modules/notebook/components/NotebookScopeBadge.tsx
@@ -0,0 +1,48 @@
+import { Badge } from "@/components/ui/badge";
+import { cn } from "@/lib/utils";
+import { User, Users } from "lucide-react";
+import type { ComponentType, ReactNode } from "react";
+
+export type NotebookScopeVariant = "personal" | "organization";
+
+const SCOPE_META: Record = {
+ personal: {
+ label: "Personal",
+ className: "",
+ },
+ organization: {
+ label: "Team",
+ className: "bg-sky-50 text-sky-700 border-sky-200 dark:bg-sky-500/10 dark:text-sky-200 dark:border-sky-400/30",
+ },
+};
+
+const SCOPE_ICON: Record> = {
+ personal: User,
+ organization: Users,
+};
+
+interface NotebookScopeBadgeProps {
+ scope?: NotebookScopeVariant;
+ className?: string;
+ children?: ReactNode;
+}
+
+export function NotebookScopeBadge({ scope, className, children }: NotebookScopeBadgeProps) {
+ const Icon = SCOPE_ICON[scope ?? "personal"];
+ const meta = SCOPE_META[scope ?? "personal"];
+
+ return (
+
+
+ {meta.label}
+ {children}
+
+ );
+}
diff --git a/web/modules/notebook/components/NotebookScopeMenu.tsx b/web/modules/notebook/components/NotebookScopeMenu.tsx
new file mode 100644
index 00000000..998d8a97
--- /dev/null
+++ b/web/modules/notebook/components/NotebookScopeMenu.tsx
@@ -0,0 +1,477 @@
+"use client";
+
+import { useCallback, useMemo, useState } from "react";
+import toast from "react-hot-toast";
+import {
+ Building2,
+ User,
+ Trash2,
+ AlertCircle,
+ ArrowRight,
+ Share2,
+ ChevronDown,
+} from "lucide-react";
+import { Button } from "@/components/ui/button";
+import { Popover, PopoverContent, PopoverTrigger } from "@/components/ui/popover";
+import {
+ Dialog,
+ DialogContent,
+ DialogDescription,
+ DialogFooter,
+ DialogHeader,
+ DialogTitle,
+} from "@/components/ui/dialog";
+import { Input } from "@/components/ui/input";
+import { Label } from "@/components/ui/label";
+import { Separator } from "@/components/ui/separator";
+import {
+ Tooltip,
+ TooltipContent,
+ TooltipProvider,
+ TooltipTrigger,
+} from "@/components/ui/tooltip";
+import { NotebookScopeBadge, type NotebookScopeVariant } from "./NotebookScopeBadge";
+import { CollabNotebookTransferRequest } from "@/api-gen";
+import { useTransferCollabNotebookMutation, useDeleteCollabNotebookMutation } from "@/modules/notebook/hooks/useCollabNotebooks";
+import { cn } from "@/lib/utils";
+import { NotebookDirectoryEntry } from "../queries/notebook-directory";
+
+interface NotebookScopeMenuProps {
+ notebookId: string;
+ entry: NotebookDirectoryEntry;
+ className?: string;
+}
+
+const scopeDescription: Record = {
+ personal: { text: "Only you can access this notebook", icon: User },
+ organization: { text: "Shared with your organization", icon: Building2 },
+};
+
+const reasonPersonalToOrg = "Only the notebook owner can share it with the organization.";
+const reasonOrgToPersonal =
+ "Only the notebook's creator can move it back to personal space.";
+
+export function NotebookScopeMenu({ notebookId, entry, className }: NotebookScopeMenuProps) {
+ const [popoverOpen, setPopoverOpen] = useState(false);
+ const [dialogOpen, setDialogOpen] = useState(false);
+ const [ownerCandidate, setOwnerCandidate] = useState("");
+ const [deleteDialogOpen, setDeleteDialogOpen] = useState(false);
+ const [deleteStage, setDeleteStage] = useState<0 | 1>(0);
+ const [deleteConfirmation, setDeleteConfirmation] = useState("");
+ // const userContext = useAuthState();
+ // const currentUserId = useMemo(() => {
+ // if (!userContext?.userId) return null;
+ // const maybe = Number(userContext.userId);
+ // return Number.isFinite(maybe) ? maybe : null;
+ // }, [userContext?.userId]);
+ // TODO: replace with real user ID from auth context
+ const currentUserId = 1;
+
+ const transferMutation = useTransferCollabNotebookMutation();
+ const deleteMutation = useDeleteCollabNotebookMutation();
+
+ const scope = entry.scope as NotebookScopeVariant;
+
+ const canPromoteToOrganization = useMemo(() => {
+ if (scope !== "personal") return false;
+ if (currentUserId === null) return false;
+ return entry.ownerUserID === currentUserId || entry.createdByUserID === currentUserId;
+ }, [currentUserId, entry.createdByUserID, entry.ownerUserID, scope]);
+
+ const canMoveToPersonal = useMemo(() => {
+ if (scope !== "organization") return false;
+ if (currentUserId === null) return false;
+ return entry.createdByUserID === currentUserId;
+ }, [currentUserId, entry.createdByUserID, scope]);
+
+ const handlePromote = useCallback(async () => {
+ if (!canPromoteToOrganization) return;
+ try {
+ await transferMutation.mutateAsync({
+ notebookId,
+ payload: {
+ targetScope: CollabNotebookTransferRequest.targetScope.ORGANIZATION,
+ },
+ successMessage: "Notebook is now shared with your organization",
+ });
+ setPopoverOpen(false);
+ } catch {
+ /* handled by mutation */
+ }
+ }, [canPromoteToOrganization, notebookId, transferMutation]);
+
+ const handleMakePersonal = useCallback(async () => {
+ if (!canMoveToPersonal) return;
+ const trimmed = ownerCandidate.trim();
+ let ownerId: number | undefined;
+ if (trimmed.length > 0) {
+ const parsed = Number(trimmed);
+ if (!Number.isFinite(parsed)) {
+ toast.error("Owner ID must be a number");
+ return;
+ }
+ ownerId = parsed;
+ } else if (currentUserId !== null) {
+ ownerId = currentUserId;
+ }
+
+ try {
+ await transferMutation.mutateAsync({
+ notebookId,
+ payload: {
+ targetScope: CollabNotebookTransferRequest.targetScope.PERSONAL,
+ ownerUserID: ownerId,
+ },
+ successMessage: ownerId && ownerId !== currentUserId
+ ? "Notebook transferred to the selected teammate"
+ : "Notebook moved to your personal workspace",
+ });
+ setDialogOpen(false);
+ setPopoverOpen(false);
+ setOwnerCandidate("");
+ } catch {
+ /* handled by mutation */
+ }
+ }, [canMoveToPersonal, currentUserId, notebookId, ownerCandidate, transferMutation]);
+
+ const resetDeleteDialog = useCallback(() => {
+ setDeleteStage(0);
+ setDeleteConfirmation("");
+ }, []);
+
+ const openDeleteDialog = useCallback(() => {
+ resetDeleteDialog();
+ setDeleteDialogOpen(true);
+ setPopoverOpen(false);
+ }, [resetDeleteDialog]);
+
+ const closeDeleteDialog = useCallback(() => {
+ setDeleteDialogOpen(false);
+ resetDeleteDialog();
+ }, [resetDeleteDialog]);
+
+  // Delete the notebook, closing the confirmation dialog on success.
+  // Error toasts are emitted by the mutation itself.
+  const handleDeleteNotebook = useCallback(async () => {
+    try {
+      await deleteMutation.mutateAsync({ notebookId });
+      closeDeleteDialog();
+    } catch {
+      /* handled by mutation */
+    }
+    // FIX: the dependency array listed `resetDeleteDialog`, but the callback
+    // calls `closeDeleteDialog` — a stale-closure bug; depend on what is used.
+  }, [closeDeleteDialog, deleteMutation, notebookId]);
+
+ const moveToPersonalDisabledReason = useMemo(() => {
+ if (scope !== "organization") return "Already personal";
+ if (currentUserId === null) return "Sign in to change scope.";
+ if (!canMoveToPersonal) return reasonOrgToPersonal;
+ return null;
+ }, [canMoveToPersonal, currentUserId, scope]);
+
+ const deleteConfirmationMatches = useMemo(
+ () => deleteConfirmation.trim() === entry.title.trim(),
+ [deleteConfirmation, entry.title],
+ );
+
+ const promoteDisabledReason = useMemo(() => {
+ if (scope !== "personal") return "Already shared";
+ if (currentUserId === null) return "Sign in to change scope.";
+ if (!canPromoteToOrganization) return reasonPersonalToOrg;
+ return null;
+ }, [canPromoteToOrganization, currentUserId, scope]);
+
+ const ScopeIcon = scopeDescription[scope].icon;
+
+ return (
+
+
+
+
+
+
+ {/* Header Section */}
+
+
+
+
+
+
+
+
+ {scope === "personal" ? "Personal" : "Organization"}
+
+
+
+
+ {scopeDescription[scope].text}
+
+
+
+
+
+
+ {/* Actions Section */}
+
+
+ {/* Share to Organization */}
+
+
+
+
+ {promoteDisabledReason && (
+
+ {promoteDisabledReason}
+
+ )}
+
+
+ {/* Move to Personal */}
+
+
+
+
+ {moveToPersonalDisabledReason && (
+
+ {moveToPersonalDisabledReason}
+
+ )}
+
+
+
+ {/* Danger Zone */}
+
+
+
+
+
+
+
+
+
+
+ );
+}
diff --git a/web/modules/notebook/components/NotebookSidebar/SchemaBrowser/SchemaSearch.tsx b/web/modules/notebook/components/NotebookSidebar/SchemaBrowser/SchemaSearch.tsx
new file mode 100644
index 00000000..a4b8ebf2
--- /dev/null
+++ b/web/modules/notebook/components/NotebookSidebar/SchemaBrowser/SchemaSearch.tsx
@@ -0,0 +1,56 @@
+"use client"
+
+import { memo, useCallback } from "react"
+import { Search, X } from "lucide-react"
+import { m, AnimatePresence } from "motion/react"
+import { Input } from "@/components/ui/input"
+import { cn } from "@/lib/utils"
+import { Spring } from "@/lib/spring"
+import { notebookUIActions, useNotebookSidebarState } from "@/atoms/notebook/ui"
+
+interface SchemaSearchProps {
+ className?: string
+}
+
+export const SchemaSearch = memo(function SchemaSearch({
+ className,
+}: SchemaSearchProps) {
+ const { schemaSearchQuery } = useNotebookSidebarState()
+
+ const handleChange = useCallback(
+ (e: React.ChangeEvent) => {
+ notebookUIActions.setSchemaSearchQuery(e.target.value)
+ },
+ []
+ )
+
+ const handleClear = useCallback(() => {
+ notebookUIActions.setSchemaSearchQuery("")
+ }, [])
+
+ return (
+
+
+
+
+ {schemaSearchQuery && (
+
+
+
+ )}
+
+
+ )
+})
diff --git a/web/modules/notebook/components/NotebookSidebar/SchemaBrowser/SchemaTree.tsx b/web/modules/notebook/components/NotebookSidebar/SchemaBrowser/SchemaTree.tsx
new file mode 100644
index 00000000..bbc3d754
--- /dev/null
+++ b/web/modules/notebook/components/NotebookSidebar/SchemaBrowser/SchemaTree.tsx
@@ -0,0 +1,141 @@
+"use client"
+
+import { memo, useMemo } from "react"
+import { m } from "motion/react"
+import type { Schema } from "@/api-gen/models/Schema"
+import { Relation } from "@/api-gen/models/Relation"
+import { Spring } from "@/lib/spring"
+import { SchemaTreeNode } from "./SchemaTreeNode"
+import type { SchemaTreeNodeData, SchemaNodeType, SchemaTreeProps } from "./types"
+
+// Translate an API Relation.type into the schema-tree node kind.
+// TABLE and any unrecognized value render as a plain table node.
+function getRelationType(type: Relation.type): SchemaNodeType {
+  if (type === Relation.type.SOURCE) return "source"
+  if (type === Relation.type.SINK) return "sink"
+  if (type === Relation.type.MATERIALIZED_VIEW) return "materializedView"
+  if (type === Relation.type.SYSTEM_TABLE) return "systemTable"
+  return "table"
+}
+
+// Display rank for schema ordering: 'public' first (0), user schemas
+// next (1), system schemas ('pg_*', information_schema, rw_catalog) last (2).
+const schemaSortRank = (name: string): number => {
+  if (name === "public") return 0
+  const isSystem =
+    name.startsWith("pg_") || name === "information_schema" || name === "rw_catalog"
+  return isSystem ? 2 : 1
+}
+
+// Build the hierarchical data (schema → relation → column) consumed by the
+// schema tree: schemas rank-ordered, relations alphabetical, hidden columns
+// dropped. Node ids encode the qualified path for stable expansion state.
+function buildTreeData(schemas: Schema[]): SchemaTreeNodeData[] {
+  const ordered = [...schemas].sort((a, b) => {
+    const rankDiff = schemaSortRank(a.name) - schemaSortRank(b.name)
+    return rankDiff !== 0 ? rankDiff : a.name.localeCompare(b.name)
+  })
+
+  return ordered.map((schema) => {
+    const relations = [...schema.relations].sort((a, b) =>
+      a.name.localeCompare(b.name)
+    )
+    return {
+      id: `schema:${schema.name}`,
+      name: schema.name,
+      type: "schema" as const,
+      fullPath: schema.name,
+      children: relations.map((relation) => ({
+        id: `relation:${schema.name}.${relation.name}`,
+        name: relation.name,
+        type: getRelationType(relation.type),
+        fullPath: `${schema.name}.${relation.name}`,
+        children: relation.columns
+          .filter((column) => !column.isHidden)
+          .map((column) => ({
+            id: `column:${schema.name}.${relation.name}.${column.name}`,
+            name: column.name,
+            type: "column" as const,
+            dataType: column.type,
+            isPrimaryKey: column.isPrimaryKey,
+            isHidden: column.isHidden,
+            // Columns copy just their bare name, not the qualified path.
+            fullPath: column.name,
+          })),
+      })),
+    }
+  })
+}
+
+// Return the subset of `nodes` whose name contains `query` (case-insensitive)
+// or that have a matching descendant. A node matched only via descendants
+// keeps just its matching children; a directly-matched node keeps its full
+// child list. A blank/whitespace query returns the input unchanged.
+function filterTree(
+  nodes: SchemaTreeNodeData[],
+  query: string
+): SchemaTreeNodeData[] {
+  // Only emptiness is judged on the trimmed form; matching uses the raw
+  // query (lowercased), mirroring how the search box feeds this in.
+  if (!query.trim()) return nodes
+  const needle = query.toLowerCase()
+
+  return nodes.flatMap((node) => {
+    const selfMatch = node.name.toLowerCase().includes(needle)
+    const matchingChildren = node.children
+      ? filterTree(node.children, query)
+      : undefined
+    const hasMatchingChildren =
+      matchingChildren !== undefined && matchingChildren.length > 0
+
+    if (!selfMatch && !hasMatchingChildren) return []
+
+    return [
+      {
+        ...node,
+        children: hasMatchingChildren ? matchingChildren : node.children,
+      },
+    ]
+  })
+}
+
+export const SchemaTree = memo(function SchemaTree({
+ schemas,
+ searchQuery,
+ onCopyName,
+ onInsertIntoEditor,
+}: SchemaTreeProps) {
+ const treeData = useMemo(() => buildTreeData(schemas), [schemas])
+ const filteredData = useMemo(
+ () => filterTree(treeData, searchQuery),
+ [treeData, searchQuery]
+ )
+
+ if (filteredData.length === 0) {
+ return (
+
+
+ {searchQuery ? "No matching tables or columns" : "No schemas found"}
+
+
+ )
+ }
+
+ return (
+
+ {filteredData.map((node) => (
+
+ ))}
+
+ )
+})
diff --git a/web/modules/notebook/components/NotebookSidebar/SchemaBrowser/SchemaTreeNode.tsx b/web/modules/notebook/components/NotebookSidebar/SchemaBrowser/SchemaTreeNode.tsx
new file mode 100644
index 00000000..c636a5fa
--- /dev/null
+++ b/web/modules/notebook/components/NotebookSidebar/SchemaBrowser/SchemaTreeNode.tsx
@@ -0,0 +1,177 @@
+"use client"
+
+import { memo, useCallback } from "react"
+import { m, AnimatePresence } from "motion/react"
+import {
+ ChevronRight,
+ Table2,
+ Database,
+ Columns3,
+ Key,
+ ArrowRightLeft,
+ ArrowDownToLine,
+ Layers,
+ Server,
+ Copy,
+ Eye,
+} from "lucide-react"
+import { cn } from "@/lib/utils"
+import { Spring } from "@/lib/spring"
+import { notebookUIActions, useNotebookSidebarState } from "@/atoms/notebook/ui"
+import type { SchemaTreeNodeData, SchemaNodeType } from "./types"
+import {
+ Tooltip,
+ TooltipContent,
+ TooltipProvider,
+ TooltipTrigger,
+} from "@/components/ui/tooltip"
+
+const NODE_ICONS: Record = {
+ schema: Database,
+ table: Table2,
+ source: ArrowRightLeft,
+ sink: ArrowDownToLine,
+ materializedView: Layers,
+ systemTable: Server,
+ view: Eye,
+ column: Columns3,
+}
+
+const NODE_COLORS: Record = {
+ schema: "text-blue-500",
+ table: "text-emerald-500",
+ source: "text-orange-500",
+ sink: "text-purple-500",
+ materializedView: "text-cyan-500",
+ systemTable: "text-gray-500",
+ view: "text-indigo-500",
+ column: "text-muted-foreground",
+}
+
+interface SchemaTreeNodeProps {
+ node: SchemaTreeNodeData
+ depth: number
+ onCopyName: (name: string) => void
+ onInsertIntoEditor?: (text: string) => void
+}
+
+export const SchemaTreeNode = memo(function SchemaTreeNode({
+ node,
+ depth,
+ onCopyName,
+ onInsertIntoEditor,
+}: SchemaTreeNodeProps) {
+ const { schemaExpandedNodes } = useNotebookSidebarState()
+ const isExpanded = schemaExpandedNodes[node.id] ?? false
+ const hasChildren = node.children && node.children.length > 0
+ const Icon = NODE_ICONS[node.type]
+ const iconColor = NODE_COLORS[node.type]
+
+ const handleToggle = useCallback(() => {
+ if (hasChildren) {
+ notebookUIActions.toggleSchemaNode(node.id)
+ }
+ }, [hasChildren, node.id])
+
+ const handleCopy = useCallback(
+ (e: React.MouseEvent) => {
+ e.stopPropagation()
+ onCopyName(node.fullPath)
+ },
+ [node.fullPath, onCopyName]
+ )
+
+ const handleDoubleClick = useCallback(() => {
+ onInsertIntoEditor?.(node.fullPath)
+ }, [node.fullPath, onInsertIntoEditor])
+
+ return (
+
+
+ {/* Expand/collapse chevron */}
+ {hasChildren ? (
+
+
+
+ ) : (
+
+ )}
+
+ {/* Icon */}
+
+
+ {/* Name */}
+
+ {node.name}
+ {node.isPrimaryKey && (
+
+ )}
+
+
+ {/* Data type for columns */}
+ {node.dataType && (
+
+ {node.dataType}
+
+ )}
+
+ {/* Copy button */}
+
+
+
+
+
+
+
+
+ Copy name
+
+
+
+
+
+ {/* Children */}
+
+ {isExpanded && hasChildren && (
+
+ {node.children!.map((child) => (
+
+ ))}
+
+ )}
+
+
+ )
+})
diff --git a/web/modules/notebook/components/NotebookSidebar/SchemaBrowser/index.tsx b/web/modules/notebook/components/NotebookSidebar/SchemaBrowser/index.tsx
new file mode 100644
index 00000000..f8dbd678
--- /dev/null
+++ b/web/modules/notebook/components/NotebookSidebar/SchemaBrowser/index.tsx
@@ -0,0 +1,200 @@
+"use client"
+
+import { memo, useCallback } from "react"
+import { m, AnimatePresence } from "motion/react"
+import { Database, Loader2, RefreshCcw, AlertCircle, FileText } from "lucide-react"
+import toast from "react-hot-toast"
+import { cn } from "@/lib/utils"
+import { Button } from "@/components/ui/button"
+import { ScrollArea } from "@/components/ui/scroll-area"
+import { useDatabaseSchemaQuery } from "@/modules/database/queries/databases"
+import { useNotebookSnapshot } from "@/modules/notebook/hooks/use-notebook-snapshot"
+import { useNotebookSidebarState } from "@/atoms/notebook/ui"
+import { useActiveNotebookId } from "@/modules/notebook/hooks/useNotebookSelection"
+import { SchemaSearch } from "./SchemaSearch"
+import { SchemaTree } from "./SchemaTree"
+
+interface SchemaBrowserProps {
+ className?: string
+}
+
+export const SchemaBrowser = memo(function SchemaBrowser({
+ className,
+}: SchemaBrowserProps) {
+ const activeNotebookId = useActiveNotebookId()
+
+ // If no notebook is selected, show a placeholder
+ if (!activeNotebookId) {
+ return
+ }
+
+ return
+})
+
+/**
+ * Placeholder when no notebook is selected
+ */
+function SchemaBrowserNoNotebook({ className }: { className?: string }) {
+ return (
+
+
+
+
+
+ No notebook selected
+
+
+ Select a notebook to browse its database schema
+
+
+ )
+}
+
+/**
+ * Schema browser when a notebook is selected (has NotebookProvider context)
+ */
+const SchemaBrowserReady = memo(function SchemaBrowserReady({
+ className,
+}: {
+ className?: string
+}) {
+ const { databaseId } = useNotebookSnapshot()
+ const { schemaSearchQuery } = useNotebookSidebarState()
+
+ const databaseIdNum = databaseId ? parseInt(String(databaseId), 10) : null
+ const {
+ data: schemas,
+ isLoading,
+ isError,
+ refetch,
+ isFetching,
+ } = useDatabaseSchemaQuery(databaseIdNum)
+
+ const handleCopyName = useCallback(async (name: string) => {
+ try {
+ await navigator.clipboard.writeText(name)
+ toast.success(`Copied: ${name}`)
+ } catch {
+ toast.error("Failed to copy")
+ }
+ }, [])
+
+ // No database selected
+ if (!databaseId) {
+ return (
+
+
+
+
+
+ No database selected
+
+
+ Select a database from the toolbar to browse schemas
+
+
+ )
+ }
+
+ // Loading state
+ if (isLoading) {
+ return (
+
+
+ Loading schemas...
+
+ )
+ }
+
+ // Error state
+ if (isError) {
+ return (
+
+
+
+ Failed to load schemas
+
+
+
+ )
+ }
+
+ return (
+
+ {/* Search */}
+
+
+
+
+ {/* Refresh indicator */}
+
+ {isFetching && !isLoading && (
+
+
+
+ Refreshing...
+
+
+ )}
+
+
+ {/* Tree */}
+
+
+
+
+
+
+ )
+})
diff --git a/web/modules/notebook/components/NotebookSidebar/SchemaBrowser/types.ts b/web/modules/notebook/components/NotebookSidebar/SchemaBrowser/types.ts
new file mode 100644
index 00000000..76220be6
--- /dev/null
+++ b/web/modules/notebook/components/NotebookSidebar/SchemaBrowser/types.ts
@@ -0,0 +1,53 @@
+import type { Schema } from "@/api-gen/models/Schema"
+
+/**
+ * Kind of a node in the schema browser tree.
+ *
+ * "schema" is the root level and "column" the leaf level; the remaining
+ * kinds are the relation types that sit between them.
+ */
+export type SchemaNodeType =
+  | "schema"
+  | "table"
+  | "source"
+  | "sink"
+  | "materializedView"
+  | "systemTable"
+  | "view"
+  | "column"
+
+/**
+ * A single node of the schema tree: a schema, a relation, or a column.
+ */
+export interface SchemaTreeNodeData {
+  /** Unique id: "schema:public" | "relation:public.users" | "column:public.users.id" */
+  id: string
+  /** Display name of the schema, relation, or column. */
+  name: string
+  /** Node kind (see SchemaNodeType). */
+  type: SchemaNodeType
+  /** Column data type (set for column nodes only). */
+  dataType?: string
+  /** Marks a primary-key column — TODO confirm which producer sets this. */
+  isPrimaryKey?: boolean
+  /** Presumably hides the node from the default listing — verify against the tree renderer. */
+  isHidden?: boolean
+  /** Child nodes: relations under a schema, columns under a relation. */
+  children?: SchemaTreeNodeData[]
+  /** Text used for copy actions: "public.users" for relations, or the column name. */
+  fullPath: string
+}
+
+/**
+ * Props for the schema tree component.
+ */
+export interface SchemaTreeProps {
+  /** Schemas to render as the tree roots. */
+  schemas: Schema[]
+  /** Current search/filter query applied to the tree. */
+  searchQuery: string
+  /** Invoked with a node's name when the user copies it. */
+  onCopyName: (name: string) => void
+  /** Optional: insert the given text into the active editor. */
+  onInsertIntoEditor?: (text: string) => void
+}
diff --git a/web/modules/notebook/components/NotebookSidebar/SidebarTabs.tsx b/web/modules/notebook/components/NotebookSidebar/SidebarTabs.tsx
new file mode 100644
index 00000000..d130824a
--- /dev/null
+++ b/web/modules/notebook/components/NotebookSidebar/SidebarTabs.tsx
@@ -0,0 +1,65 @@
+"use client"
+
+import { memo } from "react"
+import * as Tabs from "@radix-ui/react-tabs"
+import { m } from "motion/react"
+import { FileText, Database } from "lucide-react"
+import { cn } from "@/lib/utils"
+import { Spring } from "@/lib/spring"
+import {
+ notebookUIActions,
+ useNotebookSidebarState,
+ type NotebookSidebarTab,
+} from "@/atoms/notebook/ui"
+
+interface SidebarTabsProps {
+ className?: string
+}
+
+const tabs: { value: NotebookSidebarTab; label: string; icon: typeof FileText }[] = [
+ { value: "notebooks", label: "Notebooks", icon: FileText },
+ { value: "schema", label: "Schema", icon: Database },
+]
+
+export const SidebarTabs = memo(function SidebarTabs({
+ className,
+}: SidebarTabsProps) {
+ const { activeTab } = useNotebookSidebarState()
+
+ return (
+
+
+ {tabs.map((tab) => {
+ const Icon = tab.icon
+ const isActive = activeTab === tab.value
+
+ return (
+ notebookUIActions.setSidebarTab(tab.value)}
+ className={cn(
+ "relative flex-1 flex items-center justify-center gap-1.5 px-3 py-1.5 text-xs font-medium rounded-md transition-colors",
+ isActive
+ ? "text-foreground"
+ : "text-muted-foreground hover:text-foreground"
+ )}
+ >
+ {isActive && (
+
+ )}
+
+
+ {tab.label}
+
+
+ )
+ })}
+
+
+ )
+})
diff --git a/web/modules/notebook/components/NotebookSidebar/index.tsx b/web/modules/notebook/components/NotebookSidebar/index.tsx
new file mode 100644
index 00000000..8731b226
--- /dev/null
+++ b/web/modules/notebook/components/NotebookSidebar/index.tsx
@@ -0,0 +1,411 @@
+"use client";
+
+import { memo, useCallback, useMemo, useRef, useLayoutEffect } from "react";
+import { FileText, Loader2, Plus, RefreshCcw, Sparkles } from "lucide-react";
+import {
+ AnimatePresence,
+ m,
+ useMotionValue,
+ useSpring,
+ useTransform,
+} from "motion/react";
+import { cn } from "@/lib/utils";
+import { Button } from "@/components/ui/button";
+import { ScrollArea } from "@/components/ui/scroll-area";
+import { useResizable } from "@/hooks/useResizable";
+import { Spring } from "@/lib/spring";
+import { useCreateCollabNotebookMutation } from "@/modules/notebook/queries/notebook-mutations";
+import { useActiveNotebookId, useSelectNotebook } from "@/modules/notebook/hooks/useNotebookSelection";
+import { NotebookListItem } from "./item";
+import { useNotebookDirectory } from "../../queries/notebook-directory";
+import { SidebarTabs } from "./SidebarTabs";
+import { SchemaBrowser } from "./SchemaBrowser";
+import { useNotebookSidebarState } from "@/atoms/notebook/ui";
+
+interface NotebookSidebarProps {
+ className?: string;
+}
+
+const INDICATOR_H = 40;
+
+export const NotebookSidebar = memo(function NotebookSidebar({
+ className,
+}: NotebookSidebarProps) {
+ const { activeTab } = useNotebookSidebarState();
+ const { entries: directory, refetch, isFetching } = useNotebookDirectory();
+ const activeNotebookId = useActiveNotebookId();
+ const createNotebookMutation = useCreateCollabNotebookMutation();
+ const selectNotebook = useSelectNotebook();
+ const handleSelectNotebook = useCallback((notebookId: string) => {
+ selectNotebook(notebookId, { history: "push" });
+ }, [selectNotebook]);
+
+ // refs
+ const itemRefs = useRef