diff --git a/api/v1.yaml b/api/v1.yaml index 46f6722b..b4c1e42b 100644 --- a/api/v1.yaml +++ b/api/v1.yaml @@ -805,6 +805,171 @@ paths: schema: $ref: "#/components/schemas/MetricMatrix" + /collab/notebooks: + get: + summary: List collaborative notebooks + description: Retrieve a list of collaborative notebooks + operationId: listCollabNotebooks + security: + - BearerAuth: [] + responses: + "200": + description: Successfully retrieved collaborative notebook list + content: + application/json: + schema: + type: array + items: + $ref: "#/components/schemas/CollabNotebook" + post: + summary: Create a collaborative notebook + description: Create a new collaborative notebook + operationId: createCollabNotebook + security: + - BearerAuth: [] + requestBody: + required: true + content: + application/json: + schema: + $ref: "#/components/schemas/CollabNotebookCreateRequest" + responses: + "201": + description: Successfully created collaborative notebook + content: + application/json: + schema: + $ref: "#/components/schemas/CollabNotebook" + + /collab/notebooks/{notebookID}: + get: + summary: Get collaborative notebook details + description: Retrieve details of a specific collaborative notebook + operationId: getCollabNotebook + security: + - BearerAuth: [] + parameters: + - name: notebookID + in: path + required: true + schema: + type: string + responses: + "200": + description: Successfully retrieved collaborative notebook + content: + application/json: + schema: + $ref: "#/components/schemas/CollabNotebook" + + put: + summary: Update a collaborative notebook + description: Update details of a specific collaborative notebook + operationId: updateCollabNotebook + security: + - BearerAuth: [] + parameters: + - name: notebookID + in: path + required: true + schema: + type: string + requestBody: + required: true + content: + application/json: + schema: + $ref: "#/components/schemas/CollabNotebook" + responses: + "200": + description: Successfully updated collaborative notebook 
+ content: + application/json: + schema: + $ref: "#/components/schemas/CollabNotebook" + + delete: + summary: Delete a collaborative notebook + description: Permanently delete a collaborative notebook + operationId: deleteCollabNotebook + security: + - BearerAuth: [] + parameters: + - name: notebookID + in: path + required: true + schema: + type: string + responses: + "204": + description: Notebook deleted successfully + "403": + description: Operation not permitted + "404": + description: Notebook not found + /collab/notebooks/{notebookID}/transfer: + post: + summary: Transfer collaborative notebook scope + description: Move a collaborative notebook between personal and organization scopes. + operationId: transferCollabNotebook + security: + - BearerAuth: [] + parameters: + - name: notebookID + in: path + required: true + schema: + type: string + requestBody: + required: true + content: + application/json: + schema: + $ref: "#/components/schemas/CollabNotebookTransferRequest" + responses: + "200": + description: Successfully transferred collaborative notebook + content: + application/json: + schema: + $ref: "#/components/schemas/CollabNotebook" + "400": + description: Invalid transfer request + "403": + description: Operation not permitted + "404": + description: Notebook not found + + /collab/notebooks/{notebookID}/snapshot: + put: + summary: Upload a collaborative notebook snapshot + description: Persist the latest Yjs snapshot for a collaborative notebook. + operationId: uploadCollabNotebookSnapshot + security: + - BearerAuth: [] + parameters: + - name: notebookID + in: path + required: true + schema: + type: string + description: Unique identifier of the collaborative notebook. + requestBody: + required: true + content: + application/octet-stream: + schema: + type: string + format: binary + examples: + snapshot: + summary: Encoded Yjs snapshot payload + value: "" + responses: + "204": + description: Snapshot stored successfully. 
+ + components: securitySchemes: BearerAuth: @@ -1648,6 +1813,65 @@ components: type: integer format: int32 + CollabNotebook: + type: object + required: [notebookID, title, createdAt, updatedAt] + properties: + notebookID: + type: string + description: Unique identifier of the collaborative notebook + title: + type: string + description: Title of the collaborative notebook + scope: + type: string + enum: [personal, organization] + description: Ownership scope of the notebook + organizationID: + type: integer + format: int32 + description: Organization that currently owns the notebook + ownerUserID: + type: integer + format: int32 + description: User that owns the notebook when scope is personal + createdByUserID: + type: integer + format: int32 + description: User that originally created the notebook + createdAt: + type: string + format: date-time + description: Creation timestamp + updatedAt: + type: string + format: date-time + description: Last update timestamp + + CollabNotebookCreateRequest: + type: object + required: [title] + properties: + title: + type: string + description: Title of the collaborative notebook + scope: + type: string + enum: [personal, organization] + description: Ownership scope of the notebook; defaults to personal when omitted + + CollabNotebookTransferRequest: + type: object + required: [targetScope] + properties: + targetScope: + type: string + enum: [personal, organization] + description: Desired ownership scope + ownerUserID: + type: integer + format: int32 + description: Target owner when moving to personal scope; defaults to the caller PromdumpOpt: type: object required: [endpoint, start, end, step, query, gzip, parts, memoryRatio] diff --git a/dev/anclax.yaml b/dev/anclax.yaml index 11a6bdea..c8bcb66d 100644 --- a/dev/anclax.yaml +++ b/dev/anclax.yaml @@ -3,7 +3,8 @@ externals: wire: v0.6.0 sqlc: v1.29.0 mockgen: v0.5.0 - anclax: v0.6.6 + # Align with go.mod: dev-v0.7 resolves to v0.6.16 pseudo-version + anclax: dev-v0.7 
oapi-codegen: path: api/v1.yaml @@ -46,3 +47,5 @@ mockgen: - source: pkg/conn/http/http.go destination: pkg/conn/http/mock/http_mock_gen.go package: mock + +anclaxdef: dev/anclax \ No newline at end of file diff --git a/dev/anclax/sql/migrations/0001_init.down.sql b/dev/anclax/sql/migrations/0001_init.down.sql new file mode 100644 index 00000000..77daf861 --- /dev/null +++ b/dev/anclax/sql/migrations/0001_init.down.sql @@ -0,0 +1,17 @@ +BEGIN; + +DROP TABLE IF EXISTS anclax.events; +DROP TABLE IF EXISTS anclax.tasks; +DROP TABLE IF EXISTS anclax.users_roles; +DROP TABLE IF EXISTS anclax.role_access_rules; +DROP TABLE IF EXISTS anclax.roles; +DROP TABLE IF EXISTS anclax.access_rules; +DROP TABLE IF EXISTS anclax.access_key_pairs; +DROP TABLE IF EXISTS anclax.opaque_keys; +DROP TABLE IF EXISTS anclax.org_owners; +DROP TABLE IF EXISTS anclax.org_users; +DROP TABLE IF EXISTS anclax.user_default_orgs; +DROP TABLE IF EXISTS anclax.users; +DROP TABLE IF EXISTS anclax.orgs; + +COMMIT; diff --git a/dev/anclax/sql/migrations/0001_init.up.sql b/dev/anclax/sql/migrations/0001_init.up.sql new file mode 100644 index 00000000..65d26e2a --- /dev/null +++ b/dev/anclax/sql/migrations/0001_init.up.sql @@ -0,0 +1,120 @@ +BEGIN; + +CREATE SCHEMA IF NOT EXISTS anclax; + +CREATE TABLE IF NOT EXISTS anclax.orgs ( + id SERIAL PRIMARY KEY, + name TEXT NOT NULL, + tz TEXT NOT NULL DEFAULT 'Asia/Shanghai', + created_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP NOT NULL, + updated_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP NOT NULL +); + +CREATE TABLE IF NOT EXISTS anclax.users ( + id SERIAL PRIMARY KEY, + name TEXT NOT NULL, + password_hash TEXT NOT NULL, + password_salt TEXT NOT NULL, + created_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP NOT NULL, + updated_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP NOT NULL, + deleted_at TIMESTAMPTZ +); + +CREATE TABLE IF NOT EXISTS anclax.user_default_orgs ( + user_id INTEGER NOT NULL REFERENCES anclax.users(id) ON UPDATE CASCADE ON DELETE CASCADE, + org_id INTEGER NOT NULL REFERENCES anclax.orgs(id) ON UPDATE CASCADE ON DELETE CASCADE, + created_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP NOT NULL, + + PRIMARY KEY (user_id) +); + +CREATE TABLE IF NOT EXISTS anclax.org_users ( + org_id INTEGER NOT NULL REFERENCES anclax.orgs(id) ON UPDATE CASCADE ON DELETE CASCADE, + user_id INTEGER NOT NULL REFERENCES anclax.users(id) ON UPDATE CASCADE ON DELETE CASCADE, + created_at 
TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP NOT NULL, + updated_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP NOT NULL, + + PRIMARY KEY (org_id, user_id) +); + +CREATE TABLE IF NOT EXISTS anclax.org_owners ( + org_id INTEGER NOT NULL REFERENCES anclax.orgs(id) ON UPDATE CASCADE ON DELETE CASCADE, + user_id INTEGER NOT NULL REFERENCES anclax.users(id) ON UPDATE CASCADE, + created_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP NOT NULL, + + PRIMARY KEY (org_id) +); + +CREATE TABLE IF NOT EXISTS anclax.opaque_keys ( + id BIGSERIAL PRIMARY KEY, + key BYTEA NOT NULL, + user_id INT NOT NULL REFERENCES anclax.users(id) ON DELETE CASCADE ON UPDATE CASCADE, + created_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP NOT NULL, + updated_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP NOT NULL +); + +CREATE TABLE IF NOT EXISTS anclax.access_key_pairs ( + access_key VARCHAR(20) NOT NULL, + secret_key VARCHAR(40) NOT NULL, + created_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP NOT NULL, + updated_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP NOT NULL, + + PRIMARY KEY (access_key) +); + +CREATE TABLE IF NOT EXISTS anclax.access_rules ( + name VARCHAR(255) NOT NULL, + description TEXT NOT NULL, + created_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP NOT NULL, + updated_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP NOT NULL, + + PRIMARY KEY (name) +); + +CREATE TABLE IF NOT EXISTS anclax.roles ( + id SERIAL PRIMARY KEY, + org_id INTEGER NOT NULL REFERENCES anclax.orgs(id) ON UPDATE CASCADE, + name VARCHAR(255) NOT NULL, + description TEXT NOT NULL, + created_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP NOT NULL, + updated_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP NOT NULL +); + +CREATE TABLE IF NOT EXISTS anclax.role_access_rules ( + role_id INTEGER NOT NULL, + access_rule_name VARCHAR(255) NOT NULL REFERENCES anclax.access_rules(name) ON UPDATE CASCADE, + created_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP NOT NULL, + updated_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP NOT NULL, + + PRIMARY KEY (role_id, access_rule_name) +); + +CREATE TABLE 
IF NOT EXISTS anclax.users_roles ( + user_id INTEGER NOT NULL, + role_id INTEGER NOT NULL, + created_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP NOT NULL, + updated_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP NOT NULL, + + PRIMARY KEY (user_id, role_id) +); + +CREATE TABLE IF NOT EXISTS anclax.tasks ( + id SERIAL PRIMARY KEY, + attributes JSONB NOT NULL, + spec JSONB NOT NULL, + status VARCHAR(255) NOT NULL, + unique_tag VARCHAR(255), -- for unique task + started_at TIMESTAMPTZ, + created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP, + + UNIQUE (unique_tag) +); + +CREATE TABLE IF NOT EXISTS anclax.events ( + id SERIAL PRIMARY KEY, + spec JSONB NOT NULL, + created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP +); + +COMMIT; diff --git a/dev/anclax/sql/migrations/0002_retry_count.down.sql b/dev/anclax/sql/migrations/0002_retry_count.down.sql new file mode 100644 index 00000000..44b416b7 --- /dev/null +++ b/dev/anclax/sql/migrations/0002_retry_count.down.sql @@ -0,0 +1,6 @@ +BEGIN; + +ALTER TABLE anclax.tasks DROP COLUMN attempts; + +COMMIT; + diff --git a/dev/anclax/sql/migrations/0002_retry_count.up.sql b/dev/anclax/sql/migrations/0002_retry_count.up.sql new file mode 100644 index 00000000..de08bfb4 --- /dev/null +++ b/dev/anclax/sql/migrations/0002_retry_count.up.sql @@ -0,0 +1,7 @@ +BEGIN; + +ALTER TABLE anclax.tasks ADD COLUMN attempts INTEGER NOT NULL DEFAULT 0; + +UPDATE anclax.tasks SET attributes = jsonb_set(attributes, '{retryPolicy, maxAttempts}', '-1') WHERE (attributes->'retryPolicy'->'always_retry_on_failure')::BOOLEAN; + +COMMIT; diff --git a/dev/anclax/sql/migrations/0003_rename.down.sql b/dev/anclax/sql/migrations/0003_rename.down.sql new file mode 100644 index 00000000..c4c4836d --- /dev/null +++ b/dev/anclax/sql/migrations/0003_rename.down.sql @@ -0,0 +1,3 @@ +BEGIN; + +COMMIT; diff --git a/dev/anclax/sql/migrations/0003_rename.up.sql b/dev/anclax/sql/migrations/0003_rename.up.sql new 
file mode 100644 index 00000000..b2de0e84 --- /dev/null +++ b/dev/anclax/sql/migrations/0003_rename.up.sql @@ -0,0 +1,15 @@ +BEGIN; + +DO $$ +BEGIN + IF NOT EXISTS ( + SELECT 1 + FROM pg_namespace + WHERE nspname = 'anclax' + ) THEN + EXECUTE 'ALTER SCHEMA anchor RENAME TO anclax'; + END IF; +END; +$$; + +COMMIT; diff --git a/go.mod b/go.mod index e198d003..12f6aec2 100644 --- a/go.mod +++ b/go.mod @@ -11,6 +11,7 @@ require ( github.com/google/wire v0.6.0 github.com/jackc/pgx/v5 v5.7.5 github.com/oapi-codegen/runtime v1.1.1 + github.com/oklog/ulid/v2 v2.1.1 github.com/pkg/errors v0.9.1 github.com/prometheus/client_golang v1.22.0 github.com/prometheus/common v0.63.0 diff --git a/go.sum b/go.sum index 376503da..03f0be5b 100644 --- a/go.sum +++ b/go.sum @@ -124,10 +124,13 @@ github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f h1:KUppIJq7/+ github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= github.com/oapi-codegen/runtime v1.1.1 h1:EXLHh0DXIJnWhdRPN2w4MXAzFyE4CskzhNLUmtpMYro= github.com/oapi-codegen/runtime v1.1.1/go.mod h1:SK9X900oXmPWilYR5/WKPzt3Kqxn/uS/+lbpREv+eCg= +github.com/oklog/ulid/v2 v2.1.1 h1:suPZ4ARWLOJLegGFiZZ1dFAkqzhMjL3J1TzI+5wHz8s= +github.com/oklog/ulid/v2 v2.1.1/go.mod h1:rcEKHmBBKfef9DhnvX7y1HZBYxjXb0cP5ExxNsTT1QQ= github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U= github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= github.com/opencontainers/image-spec v1.1.0 h1:8SG7/vwALn54lVB/0yZ/MMwhFrPYtpEHQb2IpWsCzug= github.com/opencontainers/image-spec v1.1.0/go.mod h1:W4s4sFTMaBeK1BQLXbG4AdM2szdn85PY75RI83NrTrM= +github.com/pborman/getopt v0.0.0-20170112200414-7148bc3a4c30/go.mod h1:85jBQOZwpVEaDAr341tbn15RS4fCAsIst0qp7i8ex1o= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= 
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= diff --git a/pkg/controller/controller.go b/pkg/controller/controller.go index 3ddc7409..f1212366 100644 --- a/pkg/controller/controller.go +++ b/pkg/controller/controller.go @@ -2,6 +2,7 @@ package controller import ( "bufio" + "encoding/base64" "errors" "fmt" "strings" @@ -14,6 +15,7 @@ import ( "github.com/risingwavelabs/promdump/pkg/promdump" "github.com/risingwavelabs/risingwave-console/pkg/conn/metricsstore" "github.com/risingwavelabs/risingwave-console/pkg/service" + "github.com/risingwavelabs/risingwave-console/pkg/trd/lib0" "github.com/risingwavelabs/risingwave-console/pkg/utils" "github.com/risingwavelabs/risingwave-console/pkg/zgen/apigen" ) @@ -312,6 +314,206 @@ func (controller *Controller) RestoreClusterSnapshot(c *fiber.Ctx, id int32, sna return c.Status(fiber.StatusOK).SendString("Hello, World!") } +func (controller *Controller) CreateCollabNotebook(c *fiber.Ctx) error { + orgID, err := auth.GetOrgID(c) + if err != nil { + return c.Status(fiber.StatusUnauthorized).SendString("missing orgID in request context") + } + + userID, err := auth.GetUserID(c) + if err != nil { + return c.Status(fiber.StatusUnauthorized).SendString("missing userID in request context") + } + + var payload apigen.CollabNotebook + if err := c.BodyParser(&payload); err != nil { + return c.SendStatus(fiber.StatusBadRequest) + } + + created, err := controller.svc.CreateCollabNotebook(c.Context(), payload, orgID, userID) + if err != nil { + switch { + case errors.Is(err, service.ErrNotebookIDEmpty), errors.Is(err, service.ErrNotebookTitleEmpty), errors.Is(err, service.ErrNotebookInvalidScope), errors.Is(err, service.ErrNotebookInvalidOwner): + return c.Status(fiber.StatusBadRequest).SendString(err.Error()) + case errors.Is(err, service.ErrNotebookAlreadyExists): + return c.Status(fiber.StatusConflict).SendString(err.Error()) + default: + return err + } + } + + return 
c.Status(fiber.StatusCreated).JSON(created) +} + +func (controller *Controller) ListCollabNotebooks(c *fiber.Ctx) error { + orgID, err := auth.GetOrgID(c) + if err != nil { + return c.Status(fiber.StatusUnauthorized).SendString("missing orgID in request context") + } + + userID, err := auth.GetUserID(c) + if err != nil { + return c.Status(fiber.StatusUnauthorized).SendString("missing userID in request context") + } + + notebooks, err := controller.svc.ListCollabNotebooks(c.Context(), orgID, userID) + if err != nil { + return err + } + + return c.Status(fiber.StatusOK).JSON(notebooks) +} + +func (controller *Controller) GetCollabNotebook(c *fiber.Ctx, notebookID string) error { + orgID, err := auth.GetOrgID(c) + if err != nil { + return c.Status(fiber.StatusUnauthorized).SendString("missing orgID in request context") + } + + userID, err := auth.GetUserID(c) + if err != nil { + return c.Status(fiber.StatusUnauthorized).SendString("missing userID in request context") + } + + notebook, err := controller.svc.GetCollabNotebook(c.Context(), notebookID, orgID, userID) + if err != nil { + if errors.Is(err, service.ErrNotebookNotFound) { + return c.SendStatus(fiber.StatusNotFound) + } + return err + } + + return c.Status(fiber.StatusOK).JSON(notebook) +} + +func (controller *Controller) UpdateCollabNotebook(c *fiber.Ctx, notebookID string) error { + orgID, err := auth.GetOrgID(c) + if err != nil { + return c.Status(fiber.StatusUnauthorized).SendString("missing orgID in request context") + } + + userID, err := auth.GetUserID(c) + if err != nil { + return c.Status(fiber.StatusUnauthorized).SendString("missing userID in request context") + } + + var payload apigen.CollabNotebook + if err := c.BodyParser(&payload); err != nil { + return c.SendStatus(fiber.StatusBadRequest) + } + + if payload.NotebookID != "" && payload.NotebookID != notebookID { + return c.Status(fiber.StatusBadRequest).SendString("notebook id mismatch") + } + + updated, err := 
controller.svc.UpdateCollabNotebook(c.Context(), notebookID, payload.Title, orgID, userID) + if err != nil { + switch { + case errors.Is(err, service.ErrNotebookNotFound): + return c.SendStatus(fiber.StatusNotFound) + case errors.Is(err, service.ErrNotebookTitleEmpty), errors.Is(err, service.ErrNotebookIDEmpty): + return c.Status(fiber.StatusBadRequest).SendString(err.Error()) + default: + return err + } + } + + return c.Status(fiber.StatusOK).JSON(updated) +} + +func (controller *Controller) DeleteCollabNotebook(c *fiber.Ctx, notebookID string) error { + orgID, err := auth.GetOrgID(c) + if err != nil { + return c.Status(fiber.StatusUnauthorized).SendString("missing orgID in request context") + } + + userID, err := auth.GetUserID(c) + if err != nil { + return c.Status(fiber.StatusUnauthorized).SendString("missing userID in request context") + } + + if err := controller.svc.DeleteCollabNotebook(c.Context(), notebookID, orgID, userID); err != nil { + switch { + case errors.Is(err, service.ErrNotebookNotFound): + return c.SendStatus(fiber.StatusNotFound) + case errors.Is(err, service.ErrNotebookIDEmpty): + return c.Status(fiber.StatusBadRequest).SendString(err.Error()) + case errors.Is(err, service.ErrNotebookDeleteNotAllowed): + return c.Status(fiber.StatusForbidden).SendString(err.Error()) + default: + return err + } + } + + return c.SendStatus(fiber.StatusNoContent) +} + +func (controller *Controller) TransferCollabNotebook(c *fiber.Ctx, notebookID string) error { + orgID, err := auth.GetOrgID(c) + if err != nil { + return c.Status(fiber.StatusUnauthorized).SendString("missing orgID in request context") + } + + userID, err := auth.GetUserID(c) + if err != nil { + return c.Status(fiber.StatusUnauthorized).SendString("missing userID in request context") + } + + var payload apigen.CollabNotebookTransferRequest + if err := c.BodyParser(&payload); err != nil { + return c.SendStatus(fiber.StatusBadRequest) + } + + updated, err := 
controller.svc.TransferCollabNotebook(c.Context(), notebookID, payload, orgID, userID) + if err != nil { + switch { + case errors.Is(err, service.ErrNotebookNotFound): + return c.SendStatus(fiber.StatusNotFound) + case errors.Is(err, service.ErrNotebookInvalidScope), errors.Is(err, service.ErrNotebookInvalidOwner), errors.Is(err, service.ErrNotebookIDEmpty): + return c.Status(fiber.StatusBadRequest).SendString(err.Error()) + case errors.Is(err, service.ErrNotebookTransferNotAllowed): + return c.Status(fiber.StatusForbidden).SendString(err.Error()) + default: + return err + } + } + + return c.Status(fiber.StatusOK).JSON(updated) +} + +func (controller *Controller) UploadCollabNotebookSnapshot(c *fiber.Ctx, notebookID string) error { + if _, err := auth.GetOrgID(c); err != nil { + return c.Status(fiber.StatusUnauthorized).SendString("missing orgID in request context") + } + + var stateVector map[uint64]uint64 + if header := c.Get("X-Yjs-State-Vector"); header != "" { + raw, err := base64.StdEncoding.DecodeString(header) + if err != nil { + return c.Status(fiber.StatusBadRequest).SendString("invalid X-Yjs-State-Vector header") + } + sv, err := lib0.DecodeYjsStateVector(raw) + if err != nil { + return c.Status(fiber.StatusBadRequest).SendString("failed to decode Yjs state vector") + } + stateVector = sv + } + + payload := c.Body() + if len(payload) == 0 { + return c.Status(fiber.StatusBadRequest).SendString("snapshot payload must not be empty") + } + + if err := controller.svc.UpsertCollabDocSnapshot(c.Context(), notebookID, payload, stateVector); err != nil { + if errors.Is(err, service.ErrSnapshotPayloadEmpty) { + return c.Status(fiber.StatusBadRequest).SendString(err.Error()) + } + return err + } + + return c.SendStatus(fiber.StatusNoContent) +} + func (controller *Controller) ListClusterSnapshots(c *fiber.Ctx, id int32) error { orgID, err := auth.GetOrgID(c) if err != nil { diff --git a/pkg/controller/ws_handler.go b/pkg/controller/ws_handler.go index 
0722c6c0..c8f5eb21 100644 --- a/pkg/controller/ws_handler.go +++ b/pkg/controller/ws_handler.go @@ -2,21 +2,22 @@ package controller import ( "github.com/cloudcarver/anclax/pkg/ws" + "github.com/risingwavelabs/risingwave-console/pkg/service" ) type WsController struct { + collab *service.CollaborativeService } -func NewWsController() *WsController { - c := &WsController{} - - return c +func NewWsController(collab *service.CollaborativeService) *WsController { + return &WsController{collab: collab} } func (w *WsController) OnSessionCreated(s *ws.Session) error { - return nil + // Subscribe session to doc room if query param present + return w.collab.JoinDocRoomByQuery(s) } func (w *WsController) Handle(ctx *ws.Ctx, data []byte) error { - return nil + return w.collab.Handle(ctx, data) } diff --git a/pkg/init.go b/pkg/init.go index 039ad9e3..f2d0768e 100644 --- a/pkg/init.go +++ b/pkg/init.go @@ -15,7 +15,7 @@ import ( ) // This will run before the application starts. -func Init(cfg *config.Config, anclaxApp *anclax_app.Application, initService *service.InitService, console anclax_app.Plugin) (*app.App, error) { +func Init(cfg *config.Config, anclaxApp *anclax_app.Application, initService *service.InitService, console anclax_app.Plugin, wsc *controller.WsController) (*app.App, error) { if err := anclaxApp.Plug(console); err != nil { return nil, err } @@ -27,6 +27,9 @@ func Init(cfg *config.Config, anclaxApp *anclax_app.Application, initService *se return nil, err } + anclaxApp.GetServer().Websocket().SetOnSessionCreated(wsc.OnSessionCreated) + anclaxApp.GetServer().Websocket().SetMessageHandler(wsc.Handle) + return &app.App{ AnclaxApp: anclaxApp, }, nil @@ -34,7 +37,7 @@ func Init(cfg *config.Config, anclaxApp *anclax_app.Application, initService *se // InitAnclaxApplication initializes the Anclax application with the provided configuration. 
// You can modify this function to customize the initialization process, -func InitAnclaxApplication(cfg *config.Config, wsc *controller.WsController) (*anclax_app.Application, error) { +func InitAnclaxApplication(cfg *config.Config) (*anclax_app.Application, error) { anclaxApp, err := anclax_wire.InitializeApplication(&cfg.Server, &anclax_config.LibConfig{ Pg: &anclax_config.PgCfg{ MaxConnections: 10, @@ -48,9 +51,5 @@ func InitAnclaxApplication(cfg *config.Config, wsc *controller.WsController) (*a if err != nil { return nil, err } - - anclaxApp.GetServer().Websocket().SetOnSessionCreated(wsc.OnSessionCreated) - anclaxApp.GetServer().Websocket().SetMessageHandler(wsc.Handle) - return anclaxApp, nil } diff --git a/pkg/service/collaborative_service.go b/pkg/service/collaborative_service.go new file mode 100644 index 00000000..3d99363d --- /dev/null +++ b/pkg/service/collaborative_service.go @@ -0,0 +1,483 @@ +package service + +import ( + "bytes" + "sync" + "time" + + "github.com/cloudcarver/anclax/pkg/ws" + "github.com/jackc/pgx/v5" + "github.com/pkg/errors" + "github.com/risingwavelabs/risingwave-console/pkg/trd/lib0" + "github.com/risingwavelabs/risingwave-console/pkg/zcore/model" +) + +// Query parameter key for document id; agreed with frontend. 
+const DocIDKey = "doc" + +type SyncStep = uint64 + +// Top-level websocket message types defined by y-websocket/y-protocols +type MessageType = uint64 + +const ( + YwsMessageSync MessageType = 0 + YwsMessageAwareness MessageType = 1 + YwsMessageAuth MessageType = 2 + + // Custom message types + YwsMessageSnapshotRequest MessageType = 100 + YwsMessageUpdateMeta MessageType = 101 +) + +const ( + YjsSyncStep1 SyncStep = 0 + YjsSyncStep2 SyncStep = 1 + YjsUpdate SyncStep = 2 +) + +const ( + snapshotDebounceInterval = 30 * time.Second + snapshotCheckInterval = 5 * time.Second + snapshotRequestRetryWindow = 15 * time.Second + pendingUpdateMaxAge = 10 * time.Minute +) + +// CollaborativeSnapshotRecorder receives notifications when a document snapshot +// has been persisted so the scheduler can clear pending state. +type CollaborativeSnapshotRecorder interface { + RecordSnapshotSaved(docID string, savedAt time.Time, stateVector map[uint64]uint64) +} + +type docUpdate struct { + at time.Time + payload []byte + clientID uint64 + clock uint64 + hasMeta bool +} + +type updateMeta struct { + clientID uint64 + clock uint64 +} + +type collabDocState struct { + sessions map[string]*ws.Session + lastUpdate time.Time + lastSave time.Time + hasUnsavedChanges bool + awaitingSnapshot bool + lastRequest time.Time + pendingUpdates []docUpdate + pendingMeta []updateMeta +} + +// CollaborativeService relays Yjs payloads and handles snapshot persistence/write-back only. +type CollaborativeService struct { + hub *ws.Hub + m model.ModelInterface + + mu sync.Mutex + docs map[string]*collabDocState + now func() time.Time + checkInterval time.Duration + debounce time.Duration + requestBackoff time.Duration +} + +// emptyYDocStateUpdate is a valid Yjs state update for an empty Y.Doc, +// encoded via encodeStateAsUpdate(new Y.Doc()) in the JS implementation. 
+// Hex representation was generated once via: +// Buffer.from(Y.encodeStateAsUpdate(new Y.Doc())).toString("hex") -> "0000" +var emptyYDocStateUpdate = []byte{0x00, 0x00} + +func encodeYSyncMessage(step SyncStep, payload []byte) ([]byte, error) { + buf := &bytes.Buffer{} + if err := lib0.WriteVarUint(buf, YwsMessageSync); err != nil { + return nil, err + } + if err := lib0.WriteVarUint(buf, step); err != nil { + return nil, err + } + if err := lib0.WriteVarUint(buf, uint64(len(payload))); err != nil { + return nil, err + } + if _, err := buf.Write(payload); err != nil { + return nil, err + } + return buf.Bytes(), nil +} + +func encodeSnapshotRequestMessage(docID string) ([]byte, error) { + buf := &bytes.Buffer{} + if err := lib0.WriteVarUint(buf, YwsMessageSnapshotRequest); err != nil { + return nil, err + } + if err := lib0.WriteVarString(buf, docID); err != nil { + return nil, err + } + return buf.Bytes(), nil +} + +func NewCollaborativeService(hub *ws.Hub, m model.ModelInterface) *CollaborativeService { + svc := &CollaborativeService{ + hub: hub, + m: m, + docs: make(map[string]*collabDocState), + now: time.Now, + checkInterval: snapshotCheckInterval, + debounce: snapshotDebounceInterval, + requestBackoff: snapshotRequestRetryWindow, + } + go svc.run() + return svc +} + +// Handle processes an incoming websocket binary message. +// This service stays a thin relay that persists and replays snapshots without digging into CRDT semantics. 
+func (s *CollaborativeService) Handle(ctx *ws.Ctx, data []byte) error { + r := bytes.NewReader(data) + // messageType follows lib0 varuint encoding + messageType, err := lib0.ReadVarUintFrom(r) + if err != nil { + return err + } + session := ctx.Session + if session == nil { + return nil + } + switch messageType { + case YwsMessageSync: + // Next varuint in payload is the sync-step (0|1|2) + step, err := lib0.ReadVarUintFrom(r) + if err != nil { + return err + } + + docID := session.Conn().Query(DocIDKey) + if docID == "" { + return nil + } + + // * YjsSyncStep1: Includes the State Set of the sending client. When received, the client should reply with YjsSyncStep2. + // * YjsSyncStep2: Includes all missing structs and the complete delete set. When received, the client is assured that it + // received all information from the remote client. + if step == YjsSyncStep1 { + snapshot, err := s.m.GetCollabDocSnapshot(ctx, docID) + if err != nil { + // For brand-new documents with no snapshot yet, + // empty Yjs state update so that clients + // can still complete the sync handshake (provider.synced=true) + // without hitting decode errors. 
+ if errors.Is(err, pgx.ErrNoRows) { + snapshot = emptyYDocStateUpdate + } else { + return err + } + } + + pending := s.collectPendingUpdates(docID) + + step2, err := encodeYSyncMessage(YjsSyncStep2, snapshot) + if err != nil { + return err + } + if err := session.WriteBinaryMessage(step2); err != nil { + return err + } + + for _, update := range pending { + if err := session.WriteBinaryMessage(update); err != nil { + return err + } + } + + session.BroadcastBinary(docID, data) + } + if step == YjsSyncStep2 { + s.hub.BroadcastBinary(docID, data) + return nil + } + if step == YjsUpdate { + s.hub.BroadcastBinary(docID, data) + s.recordDocUpdate(docID, data) + } + + return nil + case YwsMessageAwareness: + // Forward awareness payload as-is to the doc room + if docID := session.Conn().Query(DocIDKey); docID != "" { + s.hub.BroadcastBinary(docID, data) + } + return nil + case YwsMessageSnapshotRequest: + // Ignore requests initiated by clients + return nil + case YwsMessageUpdateMeta: + docID := session.Conn().Query(DocIDKey) + if docID == "" { + return nil + } + clientID, err := lib0.ReadVarUintFrom(r) + if err != nil { + return err + } + clock, err := lib0.ReadVarUintFrom(r) + if err != nil { + return err + } + s.recordUpdateMeta(docID, clientID, clock) + return nil + default: + return nil + } +} + +func (s *CollaborativeService) JoinDocRoomByQuery(sess *ws.Session) error { + if sess == nil { + return nil + } + docID := sess.Conn().Query(DocIDKey) + if docID == "" { + return nil + } + + if err := s.hub.AddTopic(docID); err != nil && !errors.Is(err, ws.ErrTopicAlreadyExists) { + return err + } + if err := s.hub.Subscribe(docID, sess); err != nil { + return err + } + s.registerSession(docID, sess) + sess.RegisterOnClose(func() error { + s.unregisterSession(docID, sess.ID()) + return s.hub.Unsubscribe(docID, sess) + }) + return nil +} + +func (s *CollaborativeService) run() { + ticker := time.NewTicker(s.checkInterval) + defer ticker.Stop() + for range ticker.C { + 
s.enqueueSnapshotRequests() + } +} + +func (s *CollaborativeService) enqueueSnapshotRequests() { + now := s.now() + type target struct { + docID string + session *ws.Session + } + + targets := make([]target, 0) + + s.mu.Lock() + for docID, state := range s.docs { + if state == nil { + continue + } + if !state.hasUnsavedChanges { + if len(state.sessions) == 0 { + delete(s.docs, docID) + } + continue + } + if len(state.sessions) == 0 { + // No active clients; wait until someone joins again. + state.awaitingSnapshot = false + continue + } + if state.lastUpdate.IsZero() { + continue + } + + // Debounce relative to the latest mutation we have seen. + if now.Sub(state.lastUpdate) < s.debounce { + continue + } + + if state.awaitingSnapshot && now.Sub(state.lastRequest) < s.requestBackoff { + continue + } + state.awaitingSnapshot = false + + var pick *ws.Session + for _, session := range state.sessions { + pick = session + if pick != nil { + break + } + } + if pick == nil { + continue + } + state.awaitingSnapshot = true + state.lastRequest = now + targets = append(targets, target{docID: docID, session: pick}) + } + s.mu.Unlock() + + for _, t := range targets { + payload, err := encodeSnapshotRequestMessage(t.docID) + if err != nil { + continue + } + if err := t.session.WriteBinaryMessage(payload); err != nil { + s.mu.Lock() + if state, ok := s.docs[t.docID]; ok { + state.awaitingSnapshot = false + } + s.mu.Unlock() + } + } +} + +func (s *CollaborativeService) recordDocUpdate(docID string, payload []byte) { + if docID == "" { + return + } + now := s.now() + s.mu.Lock() + state := s.ensureStateLocked(docID) + state.lastUpdate = now + state.hasUnsavedChanges = true + state.awaitingSnapshot = false + state.pendingUpdates = append(state.pendingUpdates, docUpdate{at: now, payload: cloneBytes(payload)}) + if len(state.pendingMeta) > 0 { + meta := state.pendingMeta[0] + state.pendingMeta = state.pendingMeta[1:] + lastIdx := len(state.pendingUpdates) - 1 + 
state.pendingUpdates[lastIdx].clientID = meta.clientID + state.pendingUpdates[lastIdx].clock = meta.clock + state.pendingUpdates[lastIdx].hasMeta = true + } + s.mu.Unlock() +} + +func (s *CollaborativeService) recordUpdateMeta(docID string, clientID, clock uint64) { + if docID == "" { + return + } + s.mu.Lock() + defer s.mu.Unlock() + state, ok := s.docs[docID] + if !ok || state == nil { + return + } + for i := range state.pendingUpdates { + if state.pendingUpdates[i].hasMeta { + continue + } + state.pendingUpdates[i].clientID = clientID + state.pendingUpdates[i].clock = clock + state.pendingUpdates[i].hasMeta = true + return + } + state.pendingMeta = append(state.pendingMeta, updateMeta{clientID: clientID, clock: clock}) +} + +func (s *CollaborativeService) registerSession(docID string, sess *ws.Session) { + if docID == "" || sess == nil { + return + } + s.mu.Lock() + state := s.ensureStateLocked(docID) + if state.sessions == nil { + state.sessions = make(map[string]*ws.Session) + } + state.sessions[sess.ID()] = sess + s.mu.Unlock() +} + +func (s *CollaborativeService) unregisterSession(docID, sessionID string) { + if docID == "" || sessionID == "" { + return + } + s.mu.Lock() + defer s.mu.Unlock() + if state, ok := s.docs[docID]; ok { + delete(state.sessions, sessionID) + if len(state.sessions) == 0 && !state.hasUnsavedChanges { + delete(s.docs, docID) + } + } +} + +func (s *CollaborativeService) ensureStateLocked(docID string) *collabDocState { + if state, ok := s.docs[docID]; ok && state != nil { + return state + } + state := &collabDocState{ + sessions: make(map[string]*ws.Session), + } + s.docs[docID] = state + return state +} + +func (s *CollaborativeService) collectPendingUpdates(docID string) [][]byte { + s.mu.Lock() + defer s.mu.Unlock() + state, ok := s.docs[docID] + if !ok || state == nil || len(state.pendingUpdates) == 0 { + return nil + } + updates := make([][]byte, 0, len(state.pendingUpdates)) + for _, update := range state.pendingUpdates { + 
updates = append(updates, cloneBytes(update.payload)) + } + return updates +} + +func cloneBytes(data []byte) []byte { + if len(data) == 0 { + return nil + } + dup := make([]byte, len(data)) + copy(dup, data) + return dup +} + +// RecordSnapshotSaved updates in-memory doc bookkeeping after persisting a snapshot. +func (s *CollaborativeService) RecordSnapshotSaved(docID string, savedAt time.Time, stateVector map[uint64]uint64) { + if docID == "" { + return + } + if savedAt.IsZero() { + savedAt = s.now() + } + s.mu.Lock() + defer s.mu.Unlock() + state := s.ensureStateLocked(docID) + state.lastSave = savedAt + state.awaitingSnapshot = false + state.lastRequest = time.Time{} + if len(state.pendingUpdates) == 0 { + state.hasUnsavedChanges = false + return + } + cutoff := savedAt.Add(-pendingUpdateMaxAge) + filtered := state.pendingUpdates[:0] + for _, update := range state.pendingUpdates { + snapshotClock, ok := stateVector[update.clientID] + if ok && update.hasMeta && snapshotClock >= update.clock { + // Precisely covered by snapshot via state vector semantics. + continue + } + if (!update.hasMeta || !ok) && !cutoff.IsZero() && (update.at.Before(cutoff) || update.at.Equal(cutoff)) { + // Fallback: legacy or malformed clients that never send metadata can + // otherwise cause unbounded growth of pendingUpdates. For such updates, + // we apply a conservative time-based window and eventually drop very old + // entries to avoid memory leaks. 
+ continue + } + filtered = append(filtered, update) + } + state.pendingUpdates = nil + if len(filtered) > 0 { + state.pendingUpdates = filtered + } + state.hasUnsavedChanges = len(state.pendingUpdates) > 0 +} diff --git a/pkg/service/init_service.go b/pkg/service/init_service.go index 0d1de65e..09524510 100644 --- a/pkg/service/init_service.go +++ b/pkg/service/init_service.go @@ -6,6 +6,10 @@ import ( "net/http" "os" + "github.com/cloudcarver/anclax/core" + anclax_app "github.com/cloudcarver/anclax/pkg/app" + "github.com/cloudcarver/anclax/pkg/logger" + anclax_svc "github.com/cloudcarver/anclax/pkg/service" "github.com/go-playground/validator/v10" "github.com/gofiber/fiber/v2" "github.com/gofiber/fiber/v2/middleware/filesystem" @@ -18,11 +22,6 @@ import ( "github.com/risingwavelabs/risingwave-console/pkg/zgen/querier" "go.uber.org/zap" "gopkg.in/yaml.v3" - - "github.com/cloudcarver/anclax/core" - anclax_app "github.com/cloudcarver/anclax/pkg/app" - "github.com/cloudcarver/anclax/pkg/logger" - anclax_svc "github.com/cloudcarver/anclax/pkg/service" ) var initLog = logger.NewLogAgent("init") diff --git a/pkg/service/notebook_service.go b/pkg/service/notebook_service.go new file mode 100644 index 00000000..a23fb5ec --- /dev/null +++ b/pkg/service/notebook_service.go @@ -0,0 +1,426 @@ +package service + +import ( + "context" + "strings" + "time" + + "github.com/jackc/pgx/v5" + "github.com/jackc/pgx/v5/pgconn" + "github.com/oklog/ulid/v2" + "github.com/pkg/errors" + "github.com/risingwavelabs/risingwave-console/pkg/zgen/apigen" + "github.com/risingwavelabs/risingwave-console/pkg/zgen/querier" +) + +func (s *Service) CreateCollabNotebook(ctx context.Context, params apigen.CollabNotebook, orgID int32, userID int32) (*apigen.CollabNotebook, error) { + title := strings.TrimSpace(params.Title) + if title == "" { + return nil, ErrNotebookTitleEmpty + } + + notebookID := ulid.Make().String() + + var scope querier.NotebookScope + if params.Scope == nil { + scope = 
querier.NotebookScopePersonal + } else { + switch *params.Scope { + case apigen.CollabNotebookScopePersonal: + scope = querier.NotebookScopePersonal + case apigen.CollabNotebookScopeOrganization: + scope = querier.NotebookScopeOrganization + default: + return nil, ErrNotebookInvalidScope + } + } + + var ownerUserID *int32 + if scope == querier.NotebookScopePersonal { + ownerCandidate := userID + if params.OwnerUserID != nil && *params.OwnerUserID != 0 { + ownerCandidate = *params.OwnerUserID + } + + if ownerCandidate == 0 { + return nil, ErrNotebookInvalidOwner + } + + ownerOrg, err := s.m.GetOrgSettings(ctx, ownerCandidate) + if err != nil { + if errors.Is(err, pgx.ErrNoRows) { + return nil, ErrNotebookInvalidOwner + } + return nil, errors.Wrap(err, "failed to lookup target owner organization") + } + if ownerOrg.OrgID != orgID { + return nil, ErrNotebookInvalidOwner + } + + owner := ownerCandidate + ownerUserID = &owner + } + + var createdBy *int32 + if userID != 0 { + creator := userID + createdBy = &creator + } + + created, err := s.m.CreateNotebook(ctx, querier.CreateNotebookParams{ + ID: notebookID, + Scope: scope, + OrgID: orgID, + OwnerUserID: ownerUserID, + Title: title, + CreatedBy: createdBy, + }) + if err != nil { + var pgErr *pgconn.PgError + if errors.As(err, &pgErr) && pgErr.Code == "23505" { + return nil, ErrNotebookAlreadyExists + } + return nil, errors.Wrap(err, "failed to create collaborative notebook") + } + + return mapNotebookToAPI( + created.ID, + created.Title, + created.Scope, + created.OrganizationID, + created.OwnerUserID, + created.CreatedBy, + created.CreatedAt, + created.UpdatedAt, + ), nil +} + +func (s *Service) ListCollabNotebooks(ctx context.Context, orgID int32, userID int32) ([]*apigen.CollabNotebook, error) { + var ownerUserID *int32 + if userID != 0 { + ownerUserID = &userID + } + + notebooks, err := s.m.ListAccessibleNotebooks(ctx, querier.ListAccessibleNotebooksParams{ + OrgID: orgID, + OwnerUserID: ownerUserID, + }) + if err 
!= nil { + return nil, errors.Wrap(err, "failed to list collaborative notebooks") + } + + result := make([]*apigen.CollabNotebook, 0, len(notebooks)) + for _, nb := range notebooks { + result = append(result, mapNotebookToAPI( + nb.ID, + nb.Title, + nb.Scope, + nb.OrganizationID, + nb.OwnerUserID, + nb.CreatedBy, + nb.CreatedAt, + nb.UpdatedAt, + )) + } + return result, nil +} + +func (s *Service) GetCollabNotebook(ctx context.Context, notebookID string, orgID int32, userID int32) (*apigen.CollabNotebook, error) { + nb, err := s.m.GetNotebook(ctx, notebookID) + if err != nil { + if errors.Is(err, pgx.ErrNoRows) { + return nil, ErrNotebookNotFound + } + return nil, errors.Wrap(err, "failed to get collaborative notebook") + } + + if nb.OrganizationID != orgID { + return nil, ErrNotebookNotFound + } + + if nb.Scope == querier.NotebookScopePersonal { + if nb.OwnerUserID == nil || userID == 0 || *nb.OwnerUserID != userID { + return nil, ErrNotebookNotFound + } + } + + return mapNotebookToAPI( + nb.ID, + nb.Title, + nb.Scope, + nb.OrganizationID, + nb.OwnerUserID, + nb.CreatedBy, + nb.CreatedAt, + nb.UpdatedAt, + ), nil +} + +func (s *Service) UpsertCollabDocSnapshot(ctx context.Context, notebookID string, snapshot []byte, stateVector map[uint64]uint64) error { + if strings.TrimSpace(notebookID) == "" { + return ErrNotebookIDEmpty + } + if len(snapshot) == 0 { + return ErrSnapshotPayloadEmpty + } + + if err := s.m.UpsertCollabDocSnapshot(ctx, querier.UpsertCollabDocSnapshotParams{ + NotebookID: notebookID, + Snapshot: snapshot, + }); err != nil { + return errors.Wrap(err, "failed to upsert collab doc snapshot") + } + if s.collab != nil { + s.collab.RecordSnapshotSaved(notebookID, s.now(), stateVector) + } + return nil +} + +func (s *Service) UpdateCollabNotebook(ctx context.Context, notebookID string, title string, orgID int32, userID int32) (*apigen.CollabNotebook, error) { + if strings.TrimSpace(notebookID) == "" { + return nil, ErrNotebookIDEmpty + } + + nb, err := 
s.m.GetNotebook(ctx, notebookID) + if err != nil { + if errors.Is(err, pgx.ErrNoRows) { + return nil, ErrNotebookNotFound + } + return nil, errors.Wrap(err, "failed to get collaborative notebook") + } + + if nb.OrganizationID != orgID { + return nil, ErrNotebookNotFound + } + + if nb.Scope == querier.NotebookScopePersonal { + if nb.OwnerUserID == nil || userID == 0 || *nb.OwnerUserID != userID { + return nil, ErrNotebookNotFound + } + } + + if strings.TrimSpace(title) == "" { + return nil, ErrNotebookTitleEmpty + } + + updated, err := s.m.UpdateNotebookTitle(ctx, querier.UpdateNotebookTitleParams{ + ID: notebookID, + Title: title, + }) + if err != nil { + return nil, errors.Wrap(err, "failed to update collaborative notebook") + } + + return mapNotebookToAPI( + updated.ID, + updated.Title, + updated.Scope, + updated.OrganizationID, + updated.OwnerUserID, + updated.CreatedBy, + updated.CreatedAt, + updated.UpdatedAt, + ), nil +} + +func (s *Service) TransferCollabNotebook(ctx context.Context, notebookID string, payload apigen.CollabNotebookTransferRequest, orgID int32, userID int32) (*apigen.CollabNotebook, error) { + if strings.TrimSpace(notebookID) == "" { + return nil, ErrNotebookIDEmpty + } + if userID == 0 { + return nil, ErrNotebookTransferNotAllowed + } + + nb, err := s.m.GetNotebook(ctx, notebookID) + if err != nil { + if errors.Is(err, pgx.ErrNoRows) { + return nil, ErrNotebookNotFound + } + return nil, errors.Wrap(err, "failed to get collaborative notebook") + } + + if nb.OrganizationID != orgID { + return nil, ErrNotebookNotFound + } + + var targetScope querier.NotebookScope + switch payload.TargetScope { + case apigen.CollabNotebookTransferRequestTargetScope(apigen.CollabNotebookScopePersonal): + targetScope = querier.NotebookScopePersonal + case apigen.CollabNotebookTransferRequestTargetScope(apigen.CollabNotebookScopeOrganization): + targetScope = querier.NotebookScopeOrganization + default: + return nil, ErrNotebookInvalidScope + } + + if targetScope 
== nb.Scope { + return mapNotebookToAPI( + nb.ID, + nb.Title, + nb.Scope, + nb.OrganizationID, + nb.OwnerUserID, + nb.CreatedBy, + nb.CreatedAt, + nb.UpdatedAt, + ), nil + } + + isOrgOwner := false + if userID != 0 { + isOrgOwner, err = s.m.IsOrgOwner(ctx, querier.IsOrgOwnerParams{UserID: userID, OrgID: orgID}) + if err != nil { + return nil, errors.Wrap(err, "failed to check organization owner") + } + } + + switch targetScope { + case querier.NotebookScopeOrganization: + if nb.Scope != querier.NotebookScopePersonal { + return nil, ErrNotebookTransferNotAllowed + } + + ownsNotebook := nb.OwnerUserID != nil && *nb.OwnerUserID == userID + if !ownsNotebook && !isOrgOwner { + return nil, ErrNotebookTransferNotAllowed + } + + updated, err := s.m.UpdateNotebookScope(ctx, querier.UpdateNotebookScopeParams{ + ID: notebookID, + Scope: querier.NotebookScopeOrganization, + OwnerUserID: nil, + }) + if err != nil { + return nil, errors.Wrap(err, "failed to update notebook scope") + } + + return mapNotebookToAPI( + updated.ID, + updated.Title, + updated.Scope, + updated.OrganizationID, + updated.OwnerUserID, + updated.CreatedBy, + updated.CreatedAt, + updated.UpdatedAt, + ), nil + + case querier.NotebookScopePersonal: + if nb.Scope != querier.NotebookScopeOrganization { + return nil, ErrNotebookTransferNotAllowed + } + + var ownerCandidate int32 + if payload.OwnerUserID != nil { + ownerCandidate = *payload.OwnerUserID + } else { + ownerCandidate = userID + } + + if ownerCandidate == 0 { + return nil, ErrNotebookInvalidOwner + } + + // Ensure the target owner belongs to the organization + ownerOrgID, err := s.m.GetUserOrganization(ctx, ownerCandidate) + if err != nil { + if errors.Is(err, pgx.ErrNoRows) { + return nil, ErrNotebookInvalidOwner + } + return nil, errors.Wrap(err, "failed to lookup target owner organization") + } + if ownerOrgID != orgID { + return nil, ErrNotebookInvalidOwner + } + + isCreator := nb.CreatedBy != nil && *nb.CreatedBy == userID + if !isCreator && 
!isOrgOwner { + return nil, ErrNotebookTransferNotAllowed + } + + ownerPtr := ownerCandidate + updated, err := s.m.UpdateNotebookScope(ctx, querier.UpdateNotebookScopeParams{ + ID: notebookID, + Scope: querier.NotebookScopePersonal, + OwnerUserID: &ownerPtr, + }) + if err != nil { + return nil, errors.Wrap(err, "failed to update notebook scope") + } + + return mapNotebookToAPI( + updated.ID, + updated.Title, + updated.Scope, + updated.OrganizationID, + updated.OwnerUserID, + updated.CreatedBy, + updated.CreatedAt, + updated.UpdatedAt, + ), nil + } + + return nil, ErrNotebookInvalidScope +} +func (s *Service) DeleteCollabNotebook(ctx context.Context, notebookID string, orgID int32, userID int32) error { + if strings.TrimSpace(notebookID) == "" { + return ErrNotebookIDEmpty + } + + nb, err := s.m.GetNotebook(ctx, notebookID) + if err != nil { + if errors.Is(err, pgx.ErrNoRows) { + return ErrNotebookNotFound + } + return errors.Wrap(err, "failed to get collaborative notebook") + } + + if nb.OrganizationID != orgID { + return ErrNotebookNotFound + } + + isOrgOwner := false + if userID != 0 { + isOrgOwner, err = s.m.IsOrgOwner(ctx, querier.IsOrgOwnerParams{UserID: userID, OrgID: orgID}) + if err != nil { + return errors.Wrap(err, "failed to check organization owner") + } + } + + switch nb.Scope { + case querier.NotebookScopePersonal: + ownsNotebook := nb.OwnerUserID != nil && userID != 0 && *nb.OwnerUserID == userID + if !ownsNotebook && !isOrgOwner { + return ErrNotebookDeleteNotAllowed + } + case querier.NotebookScopeOrganization: + isCreator := nb.CreatedBy != nil && userID != 0 && *nb.CreatedBy == userID + if !isCreator && !isOrgOwner { + return ErrNotebookDeleteNotAllowed + } + default: + return ErrNotebookInvalidScope + } + + if err := s.m.DeleteNotebook(ctx, notebookID); err != nil { + return errors.Wrap(err, "failed to delete collaborative notebook") + } + + return nil +} + +func mapNotebookToAPI(id string, title string, scope querier.NotebookScope, orgID int32, 
ownerUserID *int32, createdBy *int32, createdAt time.Time, updatedAt time.Time) *apigen.CollabNotebook { + scopeVal := apigen.CollabNotebookScope(scope) + orgIDCopy := orgID + + return &apigen.CollabNotebook{ + NotebookID: id, + Title: title, + Scope: &scopeVal, + OrganizationID: &orgIDCopy, + OwnerUserID: ownerUserID, + CreatedByUserID: createdBy, + CreatedAt: createdAt, + UpdatedAt: updatedAt, + } +} diff --git a/pkg/service/service.go b/pkg/service/service.go index d6708837..6a6a024c 100644 --- a/pkg/service/service.go +++ b/pkg/service/service.go @@ -40,6 +40,15 @@ var ( ErrClusterNotFound = errors.New("cluster not found") ErrClusterHasDatabaseConnections = errors.New("cluster has database connections") ErrDiagnosticNotFound = errors.New("diagnostic not found") + ErrNotebookNotFound = errors.New("notebook not found") + ErrNotebookIDEmpty = errors.New("notebook id must not be empty") + ErrNotebookTitleEmpty = errors.New("notebook title must not be empty") + ErrSnapshotPayloadEmpty = errors.New("snapshot payload is empty") + ErrNotebookInvalidScope = errors.New("invalid notebook scope") + ErrNotebookTransferNotAllowed = errors.New("notebook transfer not allowed") + ErrNotebookInvalidOwner = errors.New("invalid notebook owner") + ErrNotebookAlreadyExists = errors.New("notebook already exists") + ErrNotebookDeleteNotAllowed = errors.New("notebook delete not allowed") ) const ( @@ -152,6 +161,26 @@ type ServiceInterface interface { // ListClustersByMetricsStoreID lists all clusters by metrics store ID ListClustersByMetricsStoreID(ctx context.Context, id int32) ([]*apigen.Cluster, error) + // CreateCollabNotebook creates a new collaborative notebook + CreateCollabNotebook(ctx context.Context, params apigen.CollabNotebook, orgID int32, userID int32) (*apigen.CollabNotebook, error) + + // ListCollabNotebooks lists all collaborative notebooks for a user in an organization + ListCollabNotebooks(ctx context.Context, orgID int32, userID int32) ([]*apigen.CollabNotebook, 
error) + + // GetCollabNotebook gets a collaborative notebook by its ID + GetCollabNotebook(ctx context.Context, notebookID string, orgID int32, userID int32) (*apigen.CollabNotebook, error) + + // UpdateCollabNotebook updates a collaborative notebook's title + UpdateCollabNotebook(ctx context.Context, notebookID string, title string, orgID int32, userID int32) (*apigen.CollabNotebook, error) + + // TransferCollabNotebook transfers ownership of a collaborative notebook to another user + TransferCollabNotebook(ctx context.Context, notebookID string, params apigen.CollabNotebookTransferRequest, orgID int32, userID int32) (*apigen.CollabNotebook, error) + + // DeleteCollabNotebook deletes a collaborative notebook + DeleteCollabNotebook(ctx context.Context, notebookID string, orgID int32, userID int32) error + + // UpsertCollabDocSnapshot persists the latest collaborative notebook snapshot + UpsertCollabDocSnapshot(ctx context.Context, notebookID string, snapshot []byte, stateVector map[uint64]uint64) error // PromDump(ctx context.Context, w *bufio.Writer, cfg *promdump.DumpMultipartCfg) } @@ -166,6 +195,7 @@ type Service struct { taskRunner taskgen.TaskRunner taskstore taskcore.TaskStoreInterface anclaxSvc anclax_svc.ServiceInterface + collab CollaborativeSnapshotRecorder now func() time.Time generateHashAndSalt func(password string) (string, string, error) @@ -182,6 +212,7 @@ func NewService( taskRunner taskgen.TaskRunner, taskstore taskcore.TaskStoreInterface, anclaxSvc anclax_svc.ServiceInterface, + collab CollaborativeSnapshotRecorder, ) (ServiceInterface, error) { s := &Service{ m: m, @@ -195,6 +226,7 @@ func NewService( taskRunner: taskRunner, taskstore: taskstore, anclaxSvc: anclaxSvc, + collab: collab, } return s, nil } diff --git a/pkg/service/service_mock_gen.go b/pkg/service/service_mock_gen.go index c4a4c76b..fdc75806 100644 --- a/pkg/service/service_mock_gen.go +++ b/pkg/service/service_mock_gen.go @@ -88,6 +88,21 @@ func (mr 
*MockServiceInterfaceMockRecorder) CreateClusterSnapshot(ctx, id, name, return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CreateClusterSnapshot", reflect.TypeOf((*MockServiceInterface)(nil).CreateClusterSnapshot), ctx, id, name, orgID) } +// CreateCollabNotebook mocks base method. +func (m *MockServiceInterface) CreateCollabNotebook(ctx context.Context, params apigen.CollabNotebook, orgID, userID int32) (*apigen.CollabNotebook, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "CreateCollabNotebook", ctx, params, orgID, userID) + ret0, _ := ret[0].(*apigen.CollabNotebook) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// CreateCollabNotebook indicates an expected call of CreateCollabNotebook. +func (mr *MockServiceInterfaceMockRecorder) CreateCollabNotebook(ctx, params, orgID, userID any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CreateCollabNotebook", reflect.TypeOf((*MockServiceInterface)(nil).CreateCollabNotebook), ctx, params, orgID, userID) +} + // DeleteCluster mocks base method. func (m *MockServiceInterface) DeleteCluster(ctx context.Context, id int32, cascade bool, orgID int32) error { m.ctrl.T.Helper() @@ -116,6 +131,20 @@ func (mr *MockServiceInterfaceMockRecorder) DeleteClusterSnapshot(ctx, id, snaps return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteClusterSnapshot", reflect.TypeOf((*MockServiceInterface)(nil).DeleteClusterSnapshot), ctx, id, snapshotID, orgID) } +// DeleteCollabNotebook mocks base method. +func (m *MockServiceInterface) DeleteCollabNotebook(ctx context.Context, notebookID string, orgID, userID int32) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "DeleteCollabNotebook", ctx, notebookID, orgID, userID) + ret0, _ := ret[0].(error) + return ret0 +} + +// DeleteCollabNotebook indicates an expected call of DeleteCollabNotebook. 
+func (mr *MockServiceInterfaceMockRecorder) DeleteCollabNotebook(ctx, notebookID, orgID, userID any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteCollabNotebook", reflect.TypeOf((*MockServiceInterface)(nil).DeleteCollabNotebook), ctx, notebookID, orgID, userID) +} + // DeleteDatabase mocks base method. func (m *MockServiceInterface) DeleteDatabase(ctx context.Context, id, orgID int32) error { m.ctrl.T.Helper() @@ -204,6 +233,21 @@ func (mr *MockServiceInterfaceMockRecorder) GetClusterDiagnostic(ctx, id, diagno return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetClusterDiagnostic", reflect.TypeOf((*MockServiceInterface)(nil).GetClusterDiagnostic), ctx, id, diagnosticID, orgID) } +// GetCollabNotebook mocks base method. +func (m *MockServiceInterface) GetCollabNotebook(ctx context.Context, notebookID string, orgID, userID int32) (*apigen.CollabNotebook, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetCollabNotebook", ctx, notebookID, orgID, userID) + ret0, _ := ret[0].(*apigen.CollabNotebook) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetCollabNotebook indicates an expected call of GetCollabNotebook. +func (mr *MockServiceInterfaceMockRecorder) GetCollabNotebook(ctx, notebookID, orgID, userID any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetCollabNotebook", reflect.TypeOf((*MockServiceInterface)(nil).GetCollabNotebook), ctx, notebookID, orgID, userID) +} + // GetDDLProgress mocks base method. 
func (m *MockServiceInterface) GetDDLProgress(ctx context.Context, id, orgID int32) ([]apigen.DDLProgress, error) { m.ctrl.T.Helper() @@ -384,6 +428,21 @@ func (mr *MockServiceInterfaceMockRecorder) ListClustersByMetricsStoreID(ctx, id return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListClustersByMetricsStoreID", reflect.TypeOf((*MockServiceInterface)(nil).ListClustersByMetricsStoreID), ctx, id) } +// ListCollabNotebooks mocks base method. +func (m *MockServiceInterface) ListCollabNotebooks(ctx context.Context, orgID, userID int32) ([]*apigen.CollabNotebook, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "ListCollabNotebooks", ctx, orgID, userID) + ret0, _ := ret[0].([]*apigen.CollabNotebook) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// ListCollabNotebooks indicates an expected call of ListCollabNotebooks. +func (mr *MockServiceInterfaceMockRecorder) ListCollabNotebooks(ctx, orgID, userID any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListCollabNotebooks", reflect.TypeOf((*MockServiceInterface)(nil).ListCollabNotebooks), ctx, orgID, userID) +} + // ListDatabases mocks base method. func (m *MockServiceInterface) ListDatabases(ctx context.Context, orgID int32) ([]apigen.Database, error) { m.ctrl.T.Helper() @@ -486,6 +545,21 @@ func (mr *MockServiceInterfaceMockRecorder) TestDatabaseConnection(ctx, params, return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "TestDatabaseConnection", reflect.TypeOf((*MockServiceInterface)(nil).TestDatabaseConnection), ctx, params, orgID) } +// TransferCollabNotebook mocks base method. 
+func (m *MockServiceInterface) TransferCollabNotebook(ctx context.Context, notebookID string, params apigen.CollabNotebookTransferRequest, orgID, userID int32) (*apigen.CollabNotebook, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "TransferCollabNotebook", ctx, notebookID, params, orgID, userID) + ret0, _ := ret[0].(*apigen.CollabNotebook) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// TransferCollabNotebook indicates an expected call of TransferCollabNotebook. +func (mr *MockServiceInterfaceMockRecorder) TransferCollabNotebook(ctx, notebookID, params, orgID, userID any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "TransferCollabNotebook", reflect.TypeOf((*MockServiceInterface)(nil).TransferCollabNotebook), ctx, notebookID, params, orgID, userID) +} + // UpdateCluster mocks base method. func (m *MockServiceInterface) UpdateCluster(ctx context.Context, id int32, params apigen.ClusterImport, orgID int32) (*apigen.Cluster, error) { m.ctrl.T.Helper() @@ -529,6 +603,21 @@ func (mr *MockServiceInterfaceMockRecorder) UpdateClusterAutoDiagnosticConfig(ct return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateClusterAutoDiagnosticConfig", reflect.TypeOf((*MockServiceInterface)(nil).UpdateClusterAutoDiagnosticConfig), ctx, id, params, orgID) } +// UpdateCollabNotebook mocks base method. +func (m *MockServiceInterface) UpdateCollabNotebook(ctx context.Context, notebookID, title string, orgID, userID int32) (*apigen.CollabNotebook, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "UpdateCollabNotebook", ctx, notebookID, title, orgID, userID) + ret0, _ := ret[0].(*apigen.CollabNotebook) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// UpdateCollabNotebook indicates an expected call of UpdateCollabNotebook. 
+func (mr *MockServiceInterfaceMockRecorder) UpdateCollabNotebook(ctx, notebookID, title, orgID, userID any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateCollabNotebook", reflect.TypeOf((*MockServiceInterface)(nil).UpdateCollabNotebook), ctx, notebookID, title, orgID, userID) +} + // UpdateDatabase mocks base method. func (m *MockServiceInterface) UpdateDatabase(ctx context.Context, id int32, params apigen.DatabaseConnectInfo, orgID int32) (*apigen.Database, error) { m.ctrl.T.Helper() @@ -558,3 +647,17 @@ func (mr *MockServiceInterfaceMockRecorder) UpdateMetricsStore(ctx, id, req, Org mr.mock.ctrl.T.Helper() return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateMetricsStore", reflect.TypeOf((*MockServiceInterface)(nil).UpdateMetricsStore), ctx, id, req, OrgID) } + +// UpsertCollabDocSnapshot mocks base method. +func (m *MockServiceInterface) UpsertCollabDocSnapshot(ctx context.Context, notebookID string, snapshot []byte, stateVector map[uint64]uint64) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "UpsertCollabDocSnapshot", ctx, notebookID, snapshot, stateVector) + ret0, _ := ret[0].(error) + return ret0 +} + +// UpsertCollabDocSnapshot indicates an expected call of UpsertCollabDocSnapshot. 
+func (mr *MockServiceInterfaceMockRecorder) UpsertCollabDocSnapshot(ctx, notebookID, snapshot, stateVector any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpsertCollabDocSnapshot", reflect.TypeOf((*MockServiceInterface)(nil).UpsertCollabDocSnapshot), ctx, notebookID, snapshot, stateVector) +} diff --git a/pkg/trd/lib0/binary.go b/pkg/trd/lib0/binary.go new file mode 100644 index 00000000..688e0424 --- /dev/null +++ b/pkg/trd/lib0/binary.go @@ -0,0 +1,74 @@ +package lib0 + +/** + * Bit constants + * @reference https://github.com/dmonad/lib0/blob/cc6612569e17a1ec37ca76dba8593aa3d9cc1753/binary.js + */ +const ( + Bit1 uint = 1 << 0 + Bit2 uint = 1 << 1 + Bit3 uint = 1 << 2 + Bit4 uint = 1 << 3 + Bit5 uint = 1 << 4 + Bit6 uint = 1 << 5 + Bit7 uint = 1 << 6 + Bit8 uint = 1 << 7 + Bit9 uint = 1 << 8 + Bit10 uint = 1 << 9 + Bit11 uint = 1 << 10 + Bit12 uint = 1 << 11 + Bit13 uint = 1 << 12 + Bit14 uint = 1 << 13 + Bit15 uint = 1 << 14 + Bit16 uint = 1 << 15 + Bit17 uint = 1 << 16 + Bit18 uint = 1 << 17 + Bit19 uint = 1 << 18 + Bit20 uint = 1 << 19 + Bit21 uint = 1 << 20 + Bit22 uint = 1 << 21 + Bit23 uint = 1 << 22 + Bit24 uint = 1 << 23 + Bit25 uint = 1 << 24 + Bit26 uint = 1 << 25 + Bit27 uint = 1 << 26 + Bit28 uint = 1 << 27 + Bit29 uint = 1 << 28 + Bit30 uint = 1 << 29 + Bit31 uint = 1 << 30 + Bit32 int = 1 << 31 + + Bits0 uint = (1 << 0) - 1 + Bits1 uint = (1 << 1) - 1 + Bits2 uint = (1 << 2) - 1 + Bits3 uint = (1 << 3) - 1 + Bits4 uint = (1 << 4) - 1 + Bits5 uint = (1 << 5) - 1 + Bits6 uint = (1 << 6) - 1 + Bits7 uint = (1 << 7) - 1 + Bits8 uint = (1 << 8) - 1 + Bits9 uint = (1 << 9) - 1 + Bits10 uint = (1 << 10) - 1 + Bits11 uint = (1 << 11) - 1 + Bits12 uint = (1 << 12) - 1 + Bits13 uint = (1 << 13) - 1 + Bits14 uint = (1 << 14) - 1 + Bits15 uint = (1 << 15) - 1 + Bits16 uint = (1 << 16) - 1 + Bits17 uint = (1 << 17) - 1 + Bits18 uint = (1 << 18) - 1 + Bits19 uint = (1 << 19) - 1 + Bits20 uint = (1 << 20) - 1 
+ Bits21 uint = (1 << 21) - 1 + Bits22 uint = (1 << 22) - 1 + Bits23 uint = (1 << 23) - 1 + Bits24 uint = (1 << 24) - 1 + Bits25 uint = (1 << 25) - 1 + Bits26 uint = (1 << 26) - 1 + Bits27 uint = (1 << 27) - 1 + Bits28 uint = (1 << 28) - 1 + Bits29 uint = (1 << 29) - 1 + Bits30 uint = (1 << 30) - 1 + Bits31 uint = 0x7FFFFFFF + Bits32 uint = 0xFFFFFFFF +) diff --git a/pkg/trd/lib0/reader.go b/pkg/trd/lib0/reader.go new file mode 100644 index 00000000..9c3e569f --- /dev/null +++ b/pkg/trd/lib0/reader.go @@ -0,0 +1,47 @@ +package lib0 + +import ( + "bufio" + "bytes" + "encoding/binary" + "io" +) + +// @reference https://github.com/dmonad/lib0/blob/cc6612569e17a1ec37ca76dba8593aa3d9cc1753/decoding.js + +// ReadVarUint reads a variable-length unsigned integer using Go's native +// Uvarint format (LEB128). This matches JS lib0's varuint layout. +func ReadVarUint(r io.ByteReader) (uint64, error) { + return binary.ReadUvarint(r) +} + +// ReadVarUintFrom is a convenience wrapper that accepts any io.Reader. +// It upgrades to a buffered ByteReader when needed. +func ReadVarUintFrom(r io.Reader) (uint64, error) { + if br, ok := r.(io.ByteReader); ok { + return ReadVarUint(br) + } + return ReadVarUint(bufio.NewReader(r)) +} + +// DecodeYjsStateVector parses Yjs state vector encoding (varuint count followed by clientID/clock pairs). 
+func DecodeYjsStateVector(payload []byte) (map[uint64]uint64, error) { + r := bytes.NewReader(payload) + count, err := ReadVarUintFrom(r) + if err != nil { + return nil, err + } + result := make(map[uint64]uint64, int(count)) + for i := uint64(0); i < count; i++ { + clientID, err := ReadVarUintFrom(r) + if err != nil { + return nil, err + } + clock, err := ReadVarUintFrom(r) + if err != nil { + return nil, err + } + result[clientID] = clock + } + return result, nil +} diff --git a/pkg/trd/lib0/writer.go b/pkg/trd/lib0/writer.go new file mode 100644 index 00000000..f5d19cf6 --- /dev/null +++ b/pkg/trd/lib0/writer.go @@ -0,0 +1,75 @@ +package lib0 + +import ( + "encoding/binary" + "io" +) + +// @reference https://github.com/dmonad/lib0/blob/cc6612569e17a1ec37ca76dba8593aa3d9cc1753/encoding.js + +// WriteUint16 writes v as little-endian uint16 using a small stack buffer. +func WriteUint16(w io.Writer, v uint16) error { + var buf [2]byte + binary.LittleEndian.PutUint16(buf[:], v) + _, err := w.Write(buf[:]) + return err +} + +// WriteUint8 writes v as a single byte. +// Uses io.ByteWriter fast-path where available. +func WriteUint8(w io.Writer, v uint8) error { + if bw, ok := w.(interface{ WriteByte(byte) error }); ok { + return bw.WriteByte(v) + } + var buf [1]byte + buf[0] = v + _, err := w.Write(buf[:]) + return err +} + +// WriteUint32 writes v as little-endian uint32 using a small stack buffer. +func WriteUint32(w io.Writer, v uint32) error { + var buf [4]byte + binary.LittleEndian.PutUint32(buf[:], v) + _, err := w.Write(buf[:]) + return err +} + +// [sync with JS] +// WriteVarUint writes v as a variable-length unsigned integer (LEB128/Uvarint). +// Encodes 7 bits per byte with MSB as continuation bit. Matches JS lib0 varuint +// and encoding/binary.Uvarint. Prefer this for wire-compat with JS. 
+func WriteVarUint(w io.Writer, v uint64) error { + var buf [10]byte + n := binary.PutUvarint(buf[:], v) + _, err := w.Write(buf[:n]) + return err +} + +// In Go, callers usually compose WriteVarUint + Write(b) directly for slices. + +// [sync with JS] +// WriteVarString writes a variable-length string using JS lib0's layout: +// varuint byte-length prefix followed by UTF-8 bytes. In Go, strings are +// UTF-8 already; io.WriteString avoids extra []byte allocation for writers +// like *bufio.Writer. Prefer this for wire-compat with JS. +func WriteVarString(w io.Writer, s string) error { + if err := WriteVarUint(w, uint64(len(s))); err != nil { + return err + } + if len(s) == 0 { + return nil + } + _, err := io.WriteString(w, s) + return err +} + +// WriteVarInt writes a signed varint using Go's native encoding +// (encoding/binary.PutVarint). Prefer this Go scheme unless specific +// cross-language compatibility is required. +func WriteVarInt(w io.Writer, v int64) error { + var buf [10]byte + n := binary.PutVarint(buf[:], v) + _, err := w.Write(buf[:n]) + return err +} diff --git a/pkg/zcore/injection/injection.go b/pkg/zcore/injection/injection.go index 9fc4b17d..8636f661 100644 --- a/pkg/zcore/injection/injection.go +++ b/pkg/zcore/injection/injection.go @@ -8,6 +8,7 @@ import ( "github.com/cloudcarver/anclax/pkg/hooks" "github.com/cloudcarver/anclax/pkg/service" "github.com/cloudcarver/anclax/pkg/taskcore" + "github.com/cloudcarver/anclax/pkg/ws" ) func InjectAuth(anclaxApp *anclax_app.Application) auth.AuthInterface { @@ -26,6 +27,10 @@ func InjectAnclaxHooks(anclaxApp *anclax_app.Application) hooks.AnclaxHookInterf return anclaxApp.GetHooks() } +func InjectHub(anclaxApp *anclax_app.Application) *ws.Hub { + return anclaxApp.GetServer().Websocket().Hub() +} + func InjectCloserManager(anclaxApp *anclax_app.Application) *closer.CloserManager { return anclaxApp.GetCloserManager() } diff --git a/pkg/zcore/model/mock_gen.go b/pkg/zcore/model/mock_gen.go index 
51425c91..7d530db2 100644 --- a/pkg/zcore/model/mock_gen.go +++ b/pkg/zcore/model/mock_gen.go @@ -144,6 +144,21 @@ func (mr *MockModelInterfaceMockRecorder) CreateMetricsStore(ctx, arg any) *gomo return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CreateMetricsStore", reflect.TypeOf((*MockModelInterface)(nil).CreateMetricsStore), ctx, arg) } +// CreateNotebook mocks base method. +func (m *MockModelInterface) CreateNotebook(ctx context.Context, arg querier.CreateNotebookParams) (*querier.CreateNotebookRow, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "CreateNotebook", ctx, arg) + ret0, _ := ret[0].(*querier.CreateNotebookRow) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// CreateNotebook indicates an expected call of CreateNotebook. +func (mr *MockModelInterfaceMockRecorder) CreateNotebook(ctx, arg any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CreateNotebook", reflect.TypeOf((*MockModelInterface)(nil).CreateNotebook), ctx, arg) +} + // CreateOrgSettings mocks base method. func (m *MockModelInterface) CreateOrgSettings(ctx context.Context, arg querier.CreateOrgSettingsParams) error { m.ctrl.T.Helper() @@ -214,6 +229,20 @@ func (mr *MockModelInterfaceMockRecorder) DeleteMetricsStore(ctx, arg any) *gomo return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteMetricsStore", reflect.TypeOf((*MockModelInterface)(nil).DeleteMetricsStore), ctx, arg) } +// DeleteNotebook mocks base method. +func (m *MockModelInterface) DeleteNotebook(ctx context.Context, id string) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "DeleteNotebook", ctx, id) + ret0, _ := ret[0].(error) + return ret0 +} + +// DeleteNotebook indicates an expected call of DeleteNotebook. 
+func (mr *MockModelInterfaceMockRecorder) DeleteNotebook(ctx, id any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteNotebook", reflect.TypeOf((*MockModelInterface)(nil).DeleteNotebook), ctx, id) +} + // DeleteOrgCluster mocks base method. func (m *MockModelInterface) DeleteOrgCluster(ctx context.Context, arg querier.DeleteOrgClusterParams) error { m.ctrl.T.Helper() @@ -317,6 +346,21 @@ func (mr *MockModelInterfaceMockRecorder) GetClusterDiagnostic(ctx, id any) *gom return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetClusterDiagnostic", reflect.TypeOf((*MockModelInterface)(nil).GetClusterDiagnostic), ctx, id) } +// GetCollabDocSnapshot mocks base method. +func (m *MockModelInterface) GetCollabDocSnapshot(ctx context.Context, notebookID string) ([]byte, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetCollabDocSnapshot", ctx, notebookID) + ret0, _ := ret[0].([]byte) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetCollabDocSnapshot indicates an expected call of GetCollabDocSnapshot. +func (mr *MockModelInterfaceMockRecorder) GetCollabDocSnapshot(ctx, notebookID any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetCollabDocSnapshot", reflect.TypeOf((*MockModelInterface)(nil).GetCollabDocSnapshot), ctx, notebookID) +} + // GetDatabaseConnectionByID mocks base method. func (m *MockModelInterface) GetDatabaseConnectionByID(ctx context.Context, id int32) (*querier.DatabaseConnection, error) { m.ctrl.T.Helper() @@ -362,6 +406,21 @@ func (mr *MockModelInterfaceMockRecorder) GetMetricsStoreByIDAndOrgID(ctx, arg a return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetMetricsStoreByIDAndOrgID", reflect.TypeOf((*MockModelInterface)(nil).GetMetricsStoreByIDAndOrgID), ctx, arg) } +// GetNotebook mocks base method. 
+func (m *MockModelInterface) GetNotebook(ctx context.Context, id string) (*querier.GetNotebookRow, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetNotebook", ctx, id) + ret0, _ := ret[0].(*querier.GetNotebookRow) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetNotebook indicates an expected call of GetNotebook. +func (mr *MockModelInterfaceMockRecorder) GetNotebook(ctx, id any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetNotebook", reflect.TypeOf((*MockModelInterface)(nil).GetNotebook), ctx, id) +} + // GetOrgCluster mocks base method. func (m *MockModelInterface) GetOrgCluster(ctx context.Context, arg querier.GetOrgClusterParams) (*querier.Cluster, error) { m.ctrl.T.Helper() @@ -422,6 +481,21 @@ func (mr *MockModelInterfaceMockRecorder) GetOrgSettings(ctx, orgID any) *gomock return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetOrgSettings", reflect.TypeOf((*MockModelInterface)(nil).GetOrgSettings), ctx, orgID) } +// GetUserOrganization mocks base method. +func (m *MockModelInterface) GetUserOrganization(ctx context.Context, userID int32) (int32, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetUserOrganization", ctx, userID) + ret0, _ := ret[0].(int32) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetUserOrganization indicates an expected call of GetUserOrganization. +func (mr *MockModelInterfaceMockRecorder) GetUserOrganization(ctx, userID any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetUserOrganization", reflect.TypeOf((*MockModelInterface)(nil).GetUserOrganization), ctx, userID) +} + // InTransaction mocks base method. 
func (m *MockModelInterface) InTransaction() bool { m.ctrl.T.Helper() @@ -481,6 +555,36 @@ func (mr *MockModelInterfaceMockRecorder) InitMetricsStore(ctx, arg any) *gomock return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "InitMetricsStore", reflect.TypeOf((*MockModelInterface)(nil).InitMetricsStore), ctx, arg) } +// IsOrgOwner mocks base method. +func (m *MockModelInterface) IsOrgOwner(ctx context.Context, arg querier.IsOrgOwnerParams) (bool, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "IsOrgOwner", ctx, arg) + ret0, _ := ret[0].(bool) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// IsOrgOwner indicates an expected call of IsOrgOwner. +func (mr *MockModelInterfaceMockRecorder) IsOrgOwner(ctx, arg any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "IsOrgOwner", reflect.TypeOf((*MockModelInterface)(nil).IsOrgOwner), ctx, arg) +} + +// ListAccessibleNotebooks mocks base method. +func (m *MockModelInterface) ListAccessibleNotebooks(ctx context.Context, arg querier.ListAccessibleNotebooksParams) ([]*querier.ListAccessibleNotebooksRow, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "ListAccessibleNotebooks", ctx, arg) + ret0, _ := ret[0].([]*querier.ListAccessibleNotebooksRow) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// ListAccessibleNotebooks indicates an expected call of ListAccessibleNotebooks. +func (mr *MockModelInterfaceMockRecorder) ListAccessibleNotebooks(ctx, arg any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListAccessibleNotebooks", reflect.TypeOf((*MockModelInterface)(nil).ListAccessibleNotebooks), ctx, arg) +} + // ListClusterDiagnostics mocks base method. 
func (m *MockModelInterface) ListClusterDiagnostics(ctx context.Context, clusterID int32) ([]*querier.ListClusterDiagnosticsRow, error) { m.ctrl.T.Helper() @@ -670,6 +774,36 @@ func (mr *MockModelInterfaceMockRecorder) UpdateMetricsStore(ctx, arg any) *gomo return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateMetricsStore", reflect.TypeOf((*MockModelInterface)(nil).UpdateMetricsStore), ctx, arg) } +// UpdateNotebookScope mocks base method. +func (m *MockModelInterface) UpdateNotebookScope(ctx context.Context, arg querier.UpdateNotebookScopeParams) (*querier.UpdateNotebookScopeRow, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "UpdateNotebookScope", ctx, arg) + ret0, _ := ret[0].(*querier.UpdateNotebookScopeRow) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// UpdateNotebookScope indicates an expected call of UpdateNotebookScope. +func (mr *MockModelInterfaceMockRecorder) UpdateNotebookScope(ctx, arg any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateNotebookScope", reflect.TypeOf((*MockModelInterface)(nil).UpdateNotebookScope), ctx, arg) +} + +// UpdateNotebookTitle mocks base method. +func (m *MockModelInterface) UpdateNotebookTitle(ctx context.Context, arg querier.UpdateNotebookTitleParams) (*querier.UpdateNotebookTitleRow, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "UpdateNotebookTitle", ctx, arg) + ret0, _ := ret[0].(*querier.UpdateNotebookTitleRow) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// UpdateNotebookTitle indicates an expected call of UpdateNotebookTitle. +func (mr *MockModelInterfaceMockRecorder) UpdateNotebookTitle(ctx, arg any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateNotebookTitle", reflect.TypeOf((*MockModelInterface)(nil).UpdateNotebookTitle), ctx, arg) +} + // UpdateOrgCluster mocks base method. 
func (m *MockModelInterface) UpdateOrgCluster(ctx context.Context, arg querier.UpdateOrgClusterParams) (*querier.Cluster, error) { m.ctrl.T.Helper() @@ -699,3 +833,17 @@ func (mr *MockModelInterfaceMockRecorder) UpdateOrgDatabaseConnection(ctx, arg a mr.mock.ctrl.T.Helper() return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateOrgDatabaseConnection", reflect.TypeOf((*MockModelInterface)(nil).UpdateOrgDatabaseConnection), ctx, arg) } + +// UpsertCollabDocSnapshot mocks base method. +func (m *MockModelInterface) UpsertCollabDocSnapshot(ctx context.Context, arg querier.UpsertCollabDocSnapshotParams) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "UpsertCollabDocSnapshot", ctx, arg) + ret0, _ := ret[0].(error) + return ret0 +} + +// UpsertCollabDocSnapshot indicates an expected call of UpsertCollabDocSnapshot. +func (mr *MockModelInterfaceMockRecorder) UpsertCollabDocSnapshot(ctx, arg any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpsertCollabDocSnapshot", reflect.TypeOf((*MockModelInterface)(nil).UpsertCollabDocSnapshot), ctx, arg) +} diff --git a/pkg/zgen/apigen/scopes_extend_gen.go b/pkg/zgen/apigen/scopes_extend_gen.go index 2983cdc3..08d89b2f 100644 --- a/pkg/zgen/apigen/scopes_extend_gen.go +++ b/pkg/zgen/apigen/scopes_extend_gen.go @@ -302,6 +302,111 @@ func (x *XMiddleware) RestoreClusterSnapshot(c *fiber.Ctx, id int32, snapshotId } return x.ServerInterface.RestoreClusterSnapshot(c, id, snapshotId) } +// List collaborative notebooks +// (GET /collab/notebooks) +func (x *XMiddleware) ListCollabNotebooks(c *fiber.Ctx) error { + if err := x.AuthFunc(c); err != nil { + return c.Status(fiber.StatusUnauthorized).SendString(err.Error()) + } + if err := x.PreValidate(c); err != nil { + return c.Status(fiber.StatusForbidden).SendString(err.Error()) + } + + if err := x.PostValidate(c); err != nil { + return c.Status(fiber.StatusForbidden).SendString(err.Error()) + } + return 
x.ServerInterface.ListCollabNotebooks(c) +} +// Create a collaborative notebook +// (POST /collab/notebooks) +func (x *XMiddleware) CreateCollabNotebook(c *fiber.Ctx) error { + if err := x.AuthFunc(c); err != nil { + return c.Status(fiber.StatusUnauthorized).SendString(err.Error()) + } + if err := x.PreValidate(c); err != nil { + return c.Status(fiber.StatusForbidden).SendString(err.Error()) + } + + if err := x.PostValidate(c); err != nil { + return c.Status(fiber.StatusForbidden).SendString(err.Error()) + } + return x.ServerInterface.CreateCollabNotebook(c) +} +// Delete a collaborative notebook +// (DELETE /collab/notebooks/{notebookID}) +func (x *XMiddleware) DeleteCollabNotebook(c *fiber.Ctx, notebookID string) error { + if err := x.AuthFunc(c); err != nil { + return c.Status(fiber.StatusUnauthorized).SendString(err.Error()) + } + if err := x.PreValidate(c); err != nil { + return c.Status(fiber.StatusForbidden).SendString(err.Error()) + } + + if err := x.PostValidate(c); err != nil { + return c.Status(fiber.StatusForbidden).SendString(err.Error()) + } + return x.ServerInterface.DeleteCollabNotebook(c, notebookID) +} +// Get collaborative notebook details +// (GET /collab/notebooks/{notebookID}) +func (x *XMiddleware) GetCollabNotebook(c *fiber.Ctx, notebookID string) error { + if err := x.AuthFunc(c); err != nil { + return c.Status(fiber.StatusUnauthorized).SendString(err.Error()) + } + if err := x.PreValidate(c); err != nil { + return c.Status(fiber.StatusForbidden).SendString(err.Error()) + } + + if err := x.PostValidate(c); err != nil { + return c.Status(fiber.StatusForbidden).SendString(err.Error()) + } + return x.ServerInterface.GetCollabNotebook(c, notebookID) +} +// Update a collaborative notebook +// (PUT /collab/notebooks/{notebookID}) +func (x *XMiddleware) UpdateCollabNotebook(c *fiber.Ctx, notebookID string) error { + if err := x.AuthFunc(c); err != nil { + return c.Status(fiber.StatusUnauthorized).SendString(err.Error()) + } + if err := 
x.PreValidate(c); err != nil { + return c.Status(fiber.StatusForbidden).SendString(err.Error()) + } + + if err := x.PostValidate(c); err != nil { + return c.Status(fiber.StatusForbidden).SendString(err.Error()) + } + return x.ServerInterface.UpdateCollabNotebook(c, notebookID) +} +// Upload a collaborative notebook snapshot +// (PUT /collab/notebooks/{notebookID}/snapshot) +func (x *XMiddleware) UploadCollabNotebookSnapshot(c *fiber.Ctx, notebookID string) error { + if err := x.AuthFunc(c); err != nil { + return c.Status(fiber.StatusUnauthorized).SendString(err.Error()) + } + if err := x.PreValidate(c); err != nil { + return c.Status(fiber.StatusForbidden).SendString(err.Error()) + } + + if err := x.PostValidate(c); err != nil { + return c.Status(fiber.StatusForbidden).SendString(err.Error()) + } + return x.ServerInterface.UploadCollabNotebookSnapshot(c, notebookID) +} +// Transfer collaborative notebook scope +// (POST /collab/notebooks/{notebookID}/transfer) +func (x *XMiddleware) TransferCollabNotebook(c *fiber.Ctx, notebookID string) error { + if err := x.AuthFunc(c); err != nil { + return c.Status(fiber.StatusUnauthorized).SendString(err.Error()) + } + if err := x.PreValidate(c); err != nil { + return c.Status(fiber.StatusForbidden).SendString(err.Error()) + } + + if err := x.PostValidate(c); err != nil { + return c.Status(fiber.StatusForbidden).SendString(err.Error()) + } + return x.ServerInterface.TransferCollabNotebook(c, notebookID) +} // List all databases // (GET /databases) func (x *XMiddleware) ListDatabases(c *fiber.Ctx) error { diff --git a/pkg/zgen/apigen/spec_gen.go b/pkg/zgen/apigen/spec_gen.go index b15ff693..b23410a3 100644 --- a/pkg/zgen/apigen/spec_gen.go +++ b/pkg/zgen/apigen/spec_gen.go @@ -22,6 +22,24 @@ const ( BearerAuthScopes = "BearerAuth.Scopes" ) +// Defines values for CollabNotebookScope. 
+const ( + CollabNotebookScopeOrganization CollabNotebookScope = "organization" + CollabNotebookScopePersonal CollabNotebookScope = "personal" +) + +// Defines values for CollabNotebookCreateRequestScope. +const ( + CollabNotebookCreateRequestScopeOrganization CollabNotebookCreateRequestScope = "organization" + CollabNotebookCreateRequestScopePersonal CollabNotebookCreateRequestScope = "personal" +) + +// Defines values for CollabNotebookTransferRequestTargetScope. +const ( + Organization CollabNotebookTransferRequestTargetScope = "organization" + Personal CollabNotebookTransferRequestTargetScope = "personal" +) + // Defines values for EventSpecType. const ( TaskCompleted EventSpecType = "TaskCompleted" @@ -139,6 +157,60 @@ type ClusterImport struct { Version string `json:"version"` } +// CollabNotebook defines model for CollabNotebook. +type CollabNotebook struct { + // CreatedAt Creation timestamp + CreatedAt time.Time `json:"createdAt"` + + // CreatedByUserID User that originally created the notebook + CreatedByUserID *int32 `json:"createdByUserID,omitempty"` + + // NotebookID Unique identifier of the collaborative notebook + NotebookID string `json:"notebookID"` + + // OrganizationID Organization that currently owns the notebook + OrganizationID *int32 `json:"organizationID,omitempty"` + + // OwnerUserID User that owns the notebook when scope is personal + OwnerUserID *int32 `json:"ownerUserID,omitempty"` + + // Scope Ownership scope of the notebook + Scope *CollabNotebookScope `json:"scope,omitempty"` + + // Title Title of the collaborative notebook + Title string `json:"title"` + + // UpdatedAt Last update timestamp + UpdatedAt time.Time `json:"updatedAt"` +} + +// CollabNotebookScope Ownership scope of the notebook +type CollabNotebookScope string + +// CollabNotebookCreateRequest defines model for CollabNotebookCreateRequest. 
+type CollabNotebookCreateRequest struct { + // Scope Ownership scope of the notebook; defaults to personal when omitted + Scope *CollabNotebookCreateRequestScope `json:"scope,omitempty"` + + // Title Title of the collaborative notebook + Title string `json:"title"` +} + +// CollabNotebookCreateRequestScope Ownership scope of the notebook; defaults to personal when omitted +type CollabNotebookCreateRequestScope string + +// CollabNotebookTransferRequest defines model for CollabNotebookTransferRequest. +type CollabNotebookTransferRequest struct { + // OwnerUserID Target owner when moving to personal scope; defaults to the caller + OwnerUserID *int32 `json:"ownerUserID,omitempty"` + + // TargetScope Desired ownership scope + TargetScope CollabNotebookTransferRequestTargetScope `json:"targetScope"` +} + +// CollabNotebookTransferRequestTargetScope Desired ownership scope +type CollabNotebookTransferRequestTargetScope string + // Column defines model for Column. type Column struct { // IsHidden Whether the column is hidden @@ -624,6 +696,15 @@ type RunRisectlCommandJSONRequestBody = RisectlCommand // CreateClusterSnapshotJSONRequestBody defines body for CreateClusterSnapshot for application/json ContentType. type CreateClusterSnapshotJSONRequestBody = SnapshotCreate +// CreateCollabNotebookJSONRequestBody defines body for CreateCollabNotebook for application/json ContentType. +type CreateCollabNotebookJSONRequestBody = CollabNotebookCreateRequest + +// UpdateCollabNotebookJSONRequestBody defines body for UpdateCollabNotebook for application/json ContentType. +type UpdateCollabNotebookJSONRequestBody = CollabNotebook + +// TransferCollabNotebookJSONRequestBody defines body for TransferCollabNotebook for application/json ContentType. +type TransferCollabNotebookJSONRequestBody = CollabNotebookTransferRequest + // ImportDatabaseJSONRequestBody defines body for ImportDatabase for application/json ContentType. 
type ImportDatabaseJSONRequestBody = DatabaseConnectInfo @@ -794,6 +875,33 @@ type ClientInterface interface { // RestoreClusterSnapshot request RestoreClusterSnapshot(ctx context.Context, id int32, snapshotId int64, reqEditors ...RequestEditorFn) (*http.Response, error) + // ListCollabNotebooks request + ListCollabNotebooks(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) + + // CreateCollabNotebookWithBody request with any body + CreateCollabNotebookWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + CreateCollabNotebook(ctx context.Context, body CreateCollabNotebookJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // DeleteCollabNotebook request + DeleteCollabNotebook(ctx context.Context, notebookID string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetCollabNotebook request + GetCollabNotebook(ctx context.Context, notebookID string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // UpdateCollabNotebookWithBody request with any body + UpdateCollabNotebookWithBody(ctx context.Context, notebookID string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + UpdateCollabNotebook(ctx context.Context, notebookID string, body UpdateCollabNotebookJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // UploadCollabNotebookSnapshotWithBody request with any body + UploadCollabNotebookSnapshotWithBody(ctx context.Context, notebookID string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + // TransferCollabNotebookWithBody request with any body + TransferCollabNotebookWithBody(ctx context.Context, notebookID string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + TransferCollabNotebook(ctx context.Context, notebookID string, body 
TransferCollabNotebookJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + // ListDatabases request ListDatabases(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) @@ -1192,6 +1300,126 @@ func (c *Client) RestoreClusterSnapshot(ctx context.Context, id int32, snapshotI return c.Client.Do(req) } +func (c *Client) ListCollabNotebooks(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewListCollabNotebooksRequest(c.Server) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CreateCollabNotebookWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCreateCollabNotebookRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CreateCollabNotebook(ctx context.Context, body CreateCollabNotebookJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCreateCollabNotebookRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteCollabNotebook(ctx context.Context, notebookID string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteCollabNotebookRequest(c.Server, notebookID) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetCollabNotebook(ctx context.Context, notebookID string, 
reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetCollabNotebookRequest(c.Server, notebookID) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) UpdateCollabNotebookWithBody(ctx context.Context, notebookID string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewUpdateCollabNotebookRequestWithBody(c.Server, notebookID, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) UpdateCollabNotebook(ctx context.Context, notebookID string, body UpdateCollabNotebookJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewUpdateCollabNotebookRequest(c.Server, notebookID, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) UploadCollabNotebookSnapshotWithBody(ctx context.Context, notebookID string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewUploadCollabNotebookSnapshotRequestWithBody(c.Server, notebookID, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) TransferCollabNotebookWithBody(ctx context.Context, notebookID string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewTransferCollabNotebookRequestWithBody(c.Server, notebookID, contentType, body) + if err != nil { + return nil, err + } + req 
= req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) TransferCollabNotebook(ctx context.Context, notebookID string, body TransferCollabNotebookJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewTransferCollabNotebookRequest(c.Server, notebookID, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + func (c *Client) ListDatabases(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) { req, err := NewListDatabasesRequest(c.Server) if err != nil { @@ -2351,8 +2579,8 @@ func NewRestoreClusterSnapshotRequest(server string, id int32, snapshotId int64) return req, nil } -// NewListDatabasesRequest generates requests for ListDatabases -func NewListDatabasesRequest(server string) (*http.Request, error) { +// NewListCollabNotebooksRequest generates requests for ListCollabNotebooks +func NewListCollabNotebooksRequest(server string) (*http.Request, error) { var err error serverURL, err := url.Parse(server) @@ -2360,7 +2588,7 @@ func NewListDatabasesRequest(server string) (*http.Request, error) { return nil, err } - operationPath := fmt.Sprintf("/databases") + operationPath := fmt.Sprintf("/collab/notebooks") if operationPath[0] == '/' { operationPath = "." 
+ operationPath } @@ -2378,19 +2606,19 @@ func NewListDatabasesRequest(server string) (*http.Request, error) { return req, nil } -// NewImportDatabaseRequest calls the generic ImportDatabase builder with application/json body -func NewImportDatabaseRequest(server string, body ImportDatabaseJSONRequestBody) (*http.Request, error) { +// NewCreateCollabNotebookRequest calls the generic CreateCollabNotebook builder with application/json body +func NewCreateCollabNotebookRequest(server string, body CreateCollabNotebookJSONRequestBody) (*http.Request, error) { var bodyReader io.Reader buf, err := json.Marshal(body) if err != nil { return nil, err } bodyReader = bytes.NewReader(buf) - return NewImportDatabaseRequestWithBody(server, "application/json", bodyReader) + return NewCreateCollabNotebookRequestWithBody(server, "application/json", bodyReader) } -// NewImportDatabaseRequestWithBody generates requests for ImportDatabase with any type of body -func NewImportDatabaseRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { +// NewCreateCollabNotebookRequestWithBody generates requests for CreateCollabNotebook with any type of body +func NewCreateCollabNotebookRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { var err error serverURL, err := url.Parse(server) @@ -2398,7 +2626,7 @@ func NewImportDatabaseRequestWithBody(server string, contentType string, body io return nil, err } - operationPath := fmt.Sprintf("/databases/import") + operationPath := fmt.Sprintf("/collab/notebooks") if operationPath[0] == '/' { operationPath = "." 
+ operationPath } @@ -2418,27 +2646,23 @@ func NewImportDatabaseRequestWithBody(server string, contentType string, body io return req, nil } -// NewTestDatabaseConnectionRequest calls the generic TestDatabaseConnection builder with application/json body -func NewTestDatabaseConnectionRequest(server string, body TestDatabaseConnectionJSONRequestBody) (*http.Request, error) { - var bodyReader io.Reader - buf, err := json.Marshal(body) +// NewDeleteCollabNotebookRequest generates requests for DeleteCollabNotebook +func NewDeleteCollabNotebookRequest(server string, notebookID string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "notebookID", runtime.ParamLocationPath, notebookID) if err != nil { return nil, err } - bodyReader = bytes.NewReader(buf) - return NewTestDatabaseConnectionRequestWithBody(server, "application/json", bodyReader) -} - -// NewTestDatabaseConnectionRequestWithBody generates requests for TestDatabaseConnection with any type of body -func NewTestDatabaseConnectionRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { - var err error serverURL, err := url.Parse(server) if err != nil { return nil, err } - operationPath := fmt.Sprintf("/databases/test-connection") + operationPath := fmt.Sprintf("/collab/notebooks/%s", pathParam0) if operationPath[0] == '/' { operationPath = "." 
+ operationPath } @@ -2448,23 +2672,21 @@ func NewTestDatabaseConnectionRequestWithBody(server string, contentType string, return nil, err } - req, err := http.NewRequest("POST", queryURL.String(), body) + req, err := http.NewRequest("DELETE", queryURL.String(), nil) if err != nil { return nil, err } - req.Header.Add("Content-Type", contentType) - return req, nil } -// NewDeleteDatabaseRequest generates requests for DeleteDatabase -func NewDeleteDatabaseRequest(server string, id int32) (*http.Request, error) { +// NewGetCollabNotebookRequest generates requests for GetCollabNotebook +func NewGetCollabNotebookRequest(server string, notebookID string) (*http.Request, error) { var err error var pathParam0 string - pathParam0, err = runtime.StyleParamWithLocation("simple", false, "ID", runtime.ParamLocationPath, id) + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "notebookID", runtime.ParamLocationPath, notebookID) if err != nil { return nil, err } @@ -2474,7 +2696,7 @@ func NewDeleteDatabaseRequest(server string, id int32) (*http.Request, error) { return nil, err } - operationPath := fmt.Sprintf("/databases/%s", pathParam0) + operationPath := fmt.Sprintf("/collab/notebooks/%s", pathParam0) if operationPath[0] == '/' { operationPath = "." 
+ operationPath } @@ -2484,7 +2706,7 @@ func NewDeleteDatabaseRequest(server string, id int32) (*http.Request, error) { return nil, err } - req, err := http.NewRequest("DELETE", queryURL.String(), nil) + req, err := http.NewRequest("GET", queryURL.String(), nil) if err != nil { return nil, err } @@ -2492,13 +2714,24 @@ func NewDeleteDatabaseRequest(server string, id int32) (*http.Request, error) { return req, nil } -// NewGetDatabaseRequest generates requests for GetDatabase -func NewGetDatabaseRequest(server string, id int32) (*http.Request, error) { +// NewUpdateCollabNotebookRequest calls the generic UpdateCollabNotebook builder with application/json body +func NewUpdateCollabNotebookRequest(server string, notebookID string, body UpdateCollabNotebookJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewUpdateCollabNotebookRequestWithBody(server, notebookID, "application/json", bodyReader) +} + +// NewUpdateCollabNotebookRequestWithBody generates requests for UpdateCollabNotebook with any type of body +func NewUpdateCollabNotebookRequestWithBody(server string, notebookID string, contentType string, body io.Reader) (*http.Request, error) { var err error var pathParam0 string - pathParam0, err = runtime.StyleParamWithLocation("simple", false, "ID", runtime.ParamLocationPath, id) + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "notebookID", runtime.ParamLocationPath, notebookID) if err != nil { return nil, err } @@ -2508,7 +2741,7 @@ func NewGetDatabaseRequest(server string, id int32) (*http.Request, error) { return nil, err } - operationPath := fmt.Sprintf("/databases/%s", pathParam0) + operationPath := fmt.Sprintf("/collab/notebooks/%s", pathParam0) if operationPath[0] == '/' { operationPath = "." 
+ operationPath } @@ -2518,32 +2751,23 @@ func NewGetDatabaseRequest(server string, id int32) (*http.Request, error) { return nil, err } - req, err := http.NewRequest("GET", queryURL.String(), nil) + req, err := http.NewRequest("PUT", queryURL.String(), body) if err != nil { return nil, err } - return req, nil -} + req.Header.Add("Content-Type", contentType) -// NewUpdateDatabaseRequest calls the generic UpdateDatabase builder with application/json body -func NewUpdateDatabaseRequest(server string, id int32, body UpdateDatabaseJSONRequestBody) (*http.Request, error) { - var bodyReader io.Reader - buf, err := json.Marshal(body) - if err != nil { - return nil, err - } - bodyReader = bytes.NewReader(buf) - return NewUpdateDatabaseRequestWithBody(server, id, "application/json", bodyReader) + return req, nil } -// NewUpdateDatabaseRequestWithBody generates requests for UpdateDatabase with any type of body -func NewUpdateDatabaseRequestWithBody(server string, id int32, contentType string, body io.Reader) (*http.Request, error) { +// NewUploadCollabNotebookSnapshotRequestWithBody generates requests for UploadCollabNotebookSnapshot with any type of body +func NewUploadCollabNotebookSnapshotRequestWithBody(server string, notebookID string, contentType string, body io.Reader) (*http.Request, error) { var err error var pathParam0 string - pathParam0, err = runtime.StyleParamWithLocation("simple", false, "ID", runtime.ParamLocationPath, id) + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "notebookID", runtime.ParamLocationPath, notebookID) if err != nil { return nil, err } @@ -2553,7 +2777,7 @@ func NewUpdateDatabaseRequestWithBody(server string, id int32, contentType strin return nil, err } - operationPath := fmt.Sprintf("/databases/%s", pathParam0) + operationPath := fmt.Sprintf("/collab/notebooks/%s/snapshot", pathParam0) if operationPath[0] == '/' { operationPath = "." 
+ operationPath } @@ -2573,13 +2797,24 @@ func NewUpdateDatabaseRequestWithBody(server string, id int32, contentType strin return req, nil } -// NewGetDDLProgressRequest generates requests for GetDDLProgress -func NewGetDDLProgressRequest(server string, id int32) (*http.Request, error) { +// NewTransferCollabNotebookRequest calls the generic TransferCollabNotebook builder with application/json body +func NewTransferCollabNotebookRequest(server string, notebookID string, body TransferCollabNotebookJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewTransferCollabNotebookRequestWithBody(server, notebookID, "application/json", bodyReader) +} + +// NewTransferCollabNotebookRequestWithBody generates requests for TransferCollabNotebook with any type of body +func NewTransferCollabNotebookRequestWithBody(server string, notebookID string, contentType string, body io.Reader) (*http.Request, error) { var err error var pathParam0 string - pathParam0, err = runtime.StyleParamWithLocation("simple", false, "ID", runtime.ParamLocationPath, id) + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "notebookID", runtime.ParamLocationPath, notebookID) if err != nil { return nil, err } @@ -2589,7 +2824,7 @@ func NewGetDDLProgressRequest(server string, id int32) (*http.Request, error) { return nil, err } - operationPath := fmt.Sprintf("/databases/%s/ddl-progress", pathParam0) + operationPath := fmt.Sprintf("/collab/notebooks/%s/transfer", pathParam0) if operationPath[0] == '/' { operationPath = "." 
+ operationPath } @@ -2599,38 +2834,26 @@ func NewGetDDLProgressRequest(server string, id int32) (*http.Request, error) { return nil, err } - req, err := http.NewRequest("GET", queryURL.String(), nil) + req, err := http.NewRequest("POST", queryURL.String(), body) if err != nil { return nil, err } + req.Header.Add("Content-Type", contentType) + return req, nil } -// NewCancelDDLProgressRequest generates requests for CancelDDLProgress -func NewCancelDDLProgressRequest(server string, id int32, ddlID int64) (*http.Request, error) { +// NewListDatabasesRequest generates requests for ListDatabases +func NewListDatabasesRequest(server string) (*http.Request, error) { var err error - var pathParam0 string - - pathParam0, err = runtime.StyleParamWithLocation("simple", false, "ID", runtime.ParamLocationPath, id) - if err != nil { - return nil, err - } - - var pathParam1 string - - pathParam1, err = runtime.StyleParamWithLocation("simple", false, "ddlID", runtime.ParamLocationPath, ddlID) - if err != nil { - return nil, err - } - serverURL, err := url.Parse(server) if err != nil { return nil, err } - operationPath := fmt.Sprintf("/databases/%s/ddl-progress/%s/cancel", pathParam0, pathParam1) + operationPath := fmt.Sprintf("/databases") if operationPath[0] == '/' { operationPath = "." 
+ operationPath } @@ -2640,7 +2863,7 @@ func NewCancelDDLProgressRequest(server string, id int32, ddlID int64) (*http.Re return nil, err } - req, err := http.NewRequest("POST", queryURL.String(), nil) + req, err := http.NewRequest("GET", queryURL.String(), nil) if err != nil { return nil, err } @@ -2648,19 +2871,289 @@ func NewCancelDDLProgressRequest(server string, id int32, ddlID int64) (*http.Re return req, nil } -// NewQueryDatabaseRequest calls the generic QueryDatabase builder with application/json body -func NewQueryDatabaseRequest(server string, id int32, body QueryDatabaseJSONRequestBody) (*http.Request, error) { +// NewImportDatabaseRequest calls the generic ImportDatabase builder with application/json body +func NewImportDatabaseRequest(server string, body ImportDatabaseJSONRequestBody) (*http.Request, error) { var bodyReader io.Reader buf, err := json.Marshal(body) if err != nil { return nil, err } bodyReader = bytes.NewReader(buf) - return NewQueryDatabaseRequestWithBody(server, id, "application/json", bodyReader) + return NewImportDatabaseRequestWithBody(server, "application/json", bodyReader) } -// NewQueryDatabaseRequestWithBody generates requests for QueryDatabase with any type of body -func NewQueryDatabaseRequestWithBody(server string, id int32, contentType string, body io.Reader) (*http.Request, error) { +// NewImportDatabaseRequestWithBody generates requests for ImportDatabase with any type of body +func NewImportDatabaseRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/databases/import") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewTestDatabaseConnectionRequest calls the generic TestDatabaseConnection builder with application/json body +func NewTestDatabaseConnectionRequest(server string, body TestDatabaseConnectionJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewTestDatabaseConnectionRequestWithBody(server, "application/json", bodyReader) +} + +// NewTestDatabaseConnectionRequestWithBody generates requests for TestDatabaseConnection with any type of body +func NewTestDatabaseConnectionRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/databases/test-connection") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewDeleteDatabaseRequest generates requests for DeleteDatabase +func NewDeleteDatabaseRequest(server string, id int32) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "ID", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/databases/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetDatabaseRequest generates requests for GetDatabase +func NewGetDatabaseRequest(server string, id int32) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "ID", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/databases/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewUpdateDatabaseRequest calls the generic UpdateDatabase builder with application/json body +func NewUpdateDatabaseRequest(server string, id int32, body UpdateDatabaseJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewUpdateDatabaseRequestWithBody(server, id, "application/json", bodyReader) +} + +// NewUpdateDatabaseRequestWithBody generates requests for UpdateDatabase with any type of body +func NewUpdateDatabaseRequestWithBody(server string, id int32, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "ID", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/databases/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PUT", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewGetDDLProgressRequest generates requests for GetDDLProgress +func NewGetDDLProgressRequest(server string, id int32) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "ID", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/databases/%s/ddl-progress", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewCancelDDLProgressRequest generates requests for CancelDDLProgress +func NewCancelDDLProgressRequest(server string, id int32, ddlID int64) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "ID", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "ddlID", runtime.ParamLocationPath, ddlID) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/databases/%s/ddl-progress/%s/cancel", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewQueryDatabaseRequest calls the generic QueryDatabase builder with application/json body +func NewQueryDatabaseRequest(server string, id int32, body QueryDatabaseJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewQueryDatabaseRequestWithBody(server, id, "application/json", bodyReader) +} + +// NewQueryDatabaseRequestWithBody generates requests for QueryDatabase with any type of body +func NewQueryDatabaseRequestWithBody(server string, id int32, contentType string, body io.Reader) (*http.Request, error) { var err error var pathParam0 string @@ -3179,6 +3672,33 @@ type ClientWithResponsesInterface interface { // RestoreClusterSnapshotWithResponse request RestoreClusterSnapshotWithResponse(ctx context.Context, id int32, snapshotId int64, reqEditors ...RequestEditorFn) (*RestoreClusterSnapshotResponse, error) + // ListCollabNotebooksWithResponse request + ListCollabNotebooksWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*ListCollabNotebooksResponse, error) + + // CreateCollabNotebookWithBodyWithResponse request with any body + CreateCollabNotebookWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateCollabNotebookResponse, error) + + CreateCollabNotebookWithResponse(ctx context.Context, body CreateCollabNotebookJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateCollabNotebookResponse, error) + + // DeleteCollabNotebookWithResponse request + DeleteCollabNotebookWithResponse(ctx context.Context, notebookID string, reqEditors ...RequestEditorFn) (*DeleteCollabNotebookResponse, error) + + // 
GetCollabNotebookWithResponse request + GetCollabNotebookWithResponse(ctx context.Context, notebookID string, reqEditors ...RequestEditorFn) (*GetCollabNotebookResponse, error) + + // UpdateCollabNotebookWithBodyWithResponse request with any body + UpdateCollabNotebookWithBodyWithResponse(ctx context.Context, notebookID string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpdateCollabNotebookResponse, error) + + UpdateCollabNotebookWithResponse(ctx context.Context, notebookID string, body UpdateCollabNotebookJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdateCollabNotebookResponse, error) + + // UploadCollabNotebookSnapshotWithBodyWithResponse request with any body + UploadCollabNotebookSnapshotWithBodyWithResponse(ctx context.Context, notebookID string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UploadCollabNotebookSnapshotResponse, error) + + // TransferCollabNotebookWithBodyWithResponse request with any body + TransferCollabNotebookWithBodyWithResponse(ctx context.Context, notebookID string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*TransferCollabNotebookResponse, error) + + TransferCollabNotebookWithResponse(ctx context.Context, notebookID string, body TransferCollabNotebookJSONRequestBody, reqEditors ...RequestEditorFn) (*TransferCollabNotebookResponse, error) + // ListDatabasesWithResponse request ListDatabasesWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*ListDatabasesResponse, error) @@ -3665,6 +4185,158 @@ func (r RestoreClusterSnapshotResponse) StatusCode() int { return 0 } +type ListCollabNotebooksResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *[]CollabNotebook +} + +// Status returns HTTPResponse.Status +func (r ListCollabNotebooksResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r 
ListCollabNotebooksResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type CreateCollabNotebookResponse struct { + Body []byte + HTTPResponse *http.Response + JSON201 *CollabNotebook +} + +// Status returns HTTPResponse.Status +func (r CreateCollabNotebookResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r CreateCollabNotebookResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteCollabNotebookResponse struct { + Body []byte + HTTPResponse *http.Response +} + +// Status returns HTTPResponse.Status +func (r DeleteCollabNotebookResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteCollabNotebookResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetCollabNotebookResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *CollabNotebook +} + +// Status returns HTTPResponse.Status +func (r GetCollabNotebookResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetCollabNotebookResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type UpdateCollabNotebookResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *CollabNotebook +} + +// Status returns HTTPResponse.Status +func (r UpdateCollabNotebookResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r UpdateCollabNotebookResponse) 
StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type UploadCollabNotebookSnapshotResponse struct { + Body []byte + HTTPResponse *http.Response +} + +// Status returns HTTPResponse.Status +func (r UploadCollabNotebookSnapshotResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r UploadCollabNotebookSnapshotResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type TransferCollabNotebookResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *CollabNotebook +} + +// Status returns HTTPResponse.Status +func (r TransferCollabNotebookResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r TransferCollabNotebookResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + type ListDatabasesResponse struct { Body []byte HTTPResponse *http.Response @@ -4214,103 +4886,190 @@ func (c *ClientWithResponses) CreateClusterDiagnosticWithResponse(ctx context.Co if err != nil { return nil, err } - return ParseCreateClusterDiagnosticResponse(rsp) + return ParseCreateClusterDiagnosticResponse(rsp) +} + +// GetClusterAutoDiagnosticConfigWithResponse request returning *GetClusterAutoDiagnosticConfigResponse +func (c *ClientWithResponses) GetClusterAutoDiagnosticConfigWithResponse(ctx context.Context, id int32, reqEditors ...RequestEditorFn) (*GetClusterAutoDiagnosticConfigResponse, error) { + rsp, err := c.GetClusterAutoDiagnosticConfig(ctx, id, reqEditors...) 
+ if err != nil { + return nil, err + } + return ParseGetClusterAutoDiagnosticConfigResponse(rsp) +} + +// UpdateClusterAutoDiagnosticConfigWithBodyWithResponse request with arbitrary body returning *UpdateClusterAutoDiagnosticConfigResponse +func (c *ClientWithResponses) UpdateClusterAutoDiagnosticConfigWithBodyWithResponse(ctx context.Context, id int32, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpdateClusterAutoDiagnosticConfigResponse, error) { + rsp, err := c.UpdateClusterAutoDiagnosticConfigWithBody(ctx, id, contentType, body, reqEditors...) + if err != nil { + return nil, err + } + return ParseUpdateClusterAutoDiagnosticConfigResponse(rsp) +} + +func (c *ClientWithResponses) UpdateClusterAutoDiagnosticConfigWithResponse(ctx context.Context, id int32, body UpdateClusterAutoDiagnosticConfigJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdateClusterAutoDiagnosticConfigResponse, error) { + rsp, err := c.UpdateClusterAutoDiagnosticConfig(ctx, id, body, reqEditors...) + if err != nil { + return nil, err + } + return ParseUpdateClusterAutoDiagnosticConfigResponse(rsp) +} + +// GetClusterDiagnosticWithResponse request returning *GetClusterDiagnosticResponse +func (c *ClientWithResponses) GetClusterDiagnosticWithResponse(ctx context.Context, id int32, diagnosticId int32, reqEditors ...RequestEditorFn) (*GetClusterDiagnosticResponse, error) { + rsp, err := c.GetClusterDiagnostic(ctx, id, diagnosticId, reqEditors...) + if err != nil { + return nil, err + } + return ParseGetClusterDiagnosticResponse(rsp) +} + +// RunRisectlCommandWithBodyWithResponse request with arbitrary body returning *RunRisectlCommandResponse +func (c *ClientWithResponses) RunRisectlCommandWithBodyWithResponse(ctx context.Context, id int32, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*RunRisectlCommandResponse, error) { + rsp, err := c.RunRisectlCommandWithBody(ctx, id, contentType, body, reqEditors...) 
+ if err != nil { + return nil, err + } + return ParseRunRisectlCommandResponse(rsp) +} + +func (c *ClientWithResponses) RunRisectlCommandWithResponse(ctx context.Context, id int32, body RunRisectlCommandJSONRequestBody, reqEditors ...RequestEditorFn) (*RunRisectlCommandResponse, error) { + rsp, err := c.RunRisectlCommand(ctx, id, body, reqEditors...) + if err != nil { + return nil, err + } + return ParseRunRisectlCommandResponse(rsp) +} + +// ListClusterSnapshotsWithResponse request returning *ListClusterSnapshotsResponse +func (c *ClientWithResponses) ListClusterSnapshotsWithResponse(ctx context.Context, id int32, reqEditors ...RequestEditorFn) (*ListClusterSnapshotsResponse, error) { + rsp, err := c.ListClusterSnapshots(ctx, id, reqEditors...) + if err != nil { + return nil, err + } + return ParseListClusterSnapshotsResponse(rsp) +} + +// CreateClusterSnapshotWithBodyWithResponse request with arbitrary body returning *CreateClusterSnapshotResponse +func (c *ClientWithResponses) CreateClusterSnapshotWithBodyWithResponse(ctx context.Context, id int32, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateClusterSnapshotResponse, error) { + rsp, err := c.CreateClusterSnapshotWithBody(ctx, id, contentType, body, reqEditors...) + if err != nil { + return nil, err + } + return ParseCreateClusterSnapshotResponse(rsp) +} + +func (c *ClientWithResponses) CreateClusterSnapshotWithResponse(ctx context.Context, id int32, body CreateClusterSnapshotJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateClusterSnapshotResponse, error) { + rsp, err := c.CreateClusterSnapshot(ctx, id, body, reqEditors...) 
+ if err != nil { + return nil, err + } + return ParseCreateClusterSnapshotResponse(rsp) +} + +// DeleteClusterSnapshotWithResponse request returning *DeleteClusterSnapshotResponse +func (c *ClientWithResponses) DeleteClusterSnapshotWithResponse(ctx context.Context, id int32, snapshotId int64, reqEditors ...RequestEditorFn) (*DeleteClusterSnapshotResponse, error) { + rsp, err := c.DeleteClusterSnapshot(ctx, id, snapshotId, reqEditors...) + if err != nil { + return nil, err + } + return ParseDeleteClusterSnapshotResponse(rsp) } -// GetClusterAutoDiagnosticConfigWithResponse request returning *GetClusterAutoDiagnosticConfigResponse -func (c *ClientWithResponses) GetClusterAutoDiagnosticConfigWithResponse(ctx context.Context, id int32, reqEditors ...RequestEditorFn) (*GetClusterAutoDiagnosticConfigResponse, error) { - rsp, err := c.GetClusterAutoDiagnosticConfig(ctx, id, reqEditors...) +// RestoreClusterSnapshotWithResponse request returning *RestoreClusterSnapshotResponse +func (c *ClientWithResponses) RestoreClusterSnapshotWithResponse(ctx context.Context, id int32, snapshotId int64, reqEditors ...RequestEditorFn) (*RestoreClusterSnapshotResponse, error) { + rsp, err := c.RestoreClusterSnapshot(ctx, id, snapshotId, reqEditors...) if err != nil { return nil, err } - return ParseGetClusterAutoDiagnosticConfigResponse(rsp) + return ParseRestoreClusterSnapshotResponse(rsp) } -// UpdateClusterAutoDiagnosticConfigWithBodyWithResponse request with arbitrary body returning *UpdateClusterAutoDiagnosticConfigResponse -func (c *ClientWithResponses) UpdateClusterAutoDiagnosticConfigWithBodyWithResponse(ctx context.Context, id int32, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpdateClusterAutoDiagnosticConfigResponse, error) { - rsp, err := c.UpdateClusterAutoDiagnosticConfigWithBody(ctx, id, contentType, body, reqEditors...) 
+// ListCollabNotebooksWithResponse request returning *ListCollabNotebooksResponse +func (c *ClientWithResponses) ListCollabNotebooksWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*ListCollabNotebooksResponse, error) { + rsp, err := c.ListCollabNotebooks(ctx, reqEditors...) if err != nil { return nil, err } - return ParseUpdateClusterAutoDiagnosticConfigResponse(rsp) + return ParseListCollabNotebooksResponse(rsp) } -func (c *ClientWithResponses) UpdateClusterAutoDiagnosticConfigWithResponse(ctx context.Context, id int32, body UpdateClusterAutoDiagnosticConfigJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdateClusterAutoDiagnosticConfigResponse, error) { - rsp, err := c.UpdateClusterAutoDiagnosticConfig(ctx, id, body, reqEditors...) +// CreateCollabNotebookWithBodyWithResponse request with arbitrary body returning *CreateCollabNotebookResponse +func (c *ClientWithResponses) CreateCollabNotebookWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateCollabNotebookResponse, error) { + rsp, err := c.CreateCollabNotebookWithBody(ctx, contentType, body, reqEditors...) if err != nil { return nil, err } - return ParseUpdateClusterAutoDiagnosticConfigResponse(rsp) + return ParseCreateCollabNotebookResponse(rsp) } -// GetClusterDiagnosticWithResponse request returning *GetClusterDiagnosticResponse -func (c *ClientWithResponses) GetClusterDiagnosticWithResponse(ctx context.Context, id int32, diagnosticId int32, reqEditors ...RequestEditorFn) (*GetClusterDiagnosticResponse, error) { - rsp, err := c.GetClusterDiagnostic(ctx, id, diagnosticId, reqEditors...) +func (c *ClientWithResponses) CreateCollabNotebookWithResponse(ctx context.Context, body CreateCollabNotebookJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateCollabNotebookResponse, error) { + rsp, err := c.CreateCollabNotebook(ctx, body, reqEditors...) 
if err != nil { return nil, err } - return ParseGetClusterDiagnosticResponse(rsp) + return ParseCreateCollabNotebookResponse(rsp) } -// RunRisectlCommandWithBodyWithResponse request with arbitrary body returning *RunRisectlCommandResponse -func (c *ClientWithResponses) RunRisectlCommandWithBodyWithResponse(ctx context.Context, id int32, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*RunRisectlCommandResponse, error) { - rsp, err := c.RunRisectlCommandWithBody(ctx, id, contentType, body, reqEditors...) +// DeleteCollabNotebookWithResponse request returning *DeleteCollabNotebookResponse +func (c *ClientWithResponses) DeleteCollabNotebookWithResponse(ctx context.Context, notebookID string, reqEditors ...RequestEditorFn) (*DeleteCollabNotebookResponse, error) { + rsp, err := c.DeleteCollabNotebook(ctx, notebookID, reqEditors...) if err != nil { return nil, err } - return ParseRunRisectlCommandResponse(rsp) + return ParseDeleteCollabNotebookResponse(rsp) } -func (c *ClientWithResponses) RunRisectlCommandWithResponse(ctx context.Context, id int32, body RunRisectlCommandJSONRequestBody, reqEditors ...RequestEditorFn) (*RunRisectlCommandResponse, error) { - rsp, err := c.RunRisectlCommand(ctx, id, body, reqEditors...) +// GetCollabNotebookWithResponse request returning *GetCollabNotebookResponse +func (c *ClientWithResponses) GetCollabNotebookWithResponse(ctx context.Context, notebookID string, reqEditors ...RequestEditorFn) (*GetCollabNotebookResponse, error) { + rsp, err := c.GetCollabNotebook(ctx, notebookID, reqEditors...) 
if err != nil { return nil, err } - return ParseRunRisectlCommandResponse(rsp) + return ParseGetCollabNotebookResponse(rsp) } -// ListClusterSnapshotsWithResponse request returning *ListClusterSnapshotsResponse -func (c *ClientWithResponses) ListClusterSnapshotsWithResponse(ctx context.Context, id int32, reqEditors ...RequestEditorFn) (*ListClusterSnapshotsResponse, error) { - rsp, err := c.ListClusterSnapshots(ctx, id, reqEditors...) +// UpdateCollabNotebookWithBodyWithResponse request with arbitrary body returning *UpdateCollabNotebookResponse +func (c *ClientWithResponses) UpdateCollabNotebookWithBodyWithResponse(ctx context.Context, notebookID string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpdateCollabNotebookResponse, error) { + rsp, err := c.UpdateCollabNotebookWithBody(ctx, notebookID, contentType, body, reqEditors...) if err != nil { return nil, err } - return ParseListClusterSnapshotsResponse(rsp) + return ParseUpdateCollabNotebookResponse(rsp) } -// CreateClusterSnapshotWithBodyWithResponse request with arbitrary body returning *CreateClusterSnapshotResponse -func (c *ClientWithResponses) CreateClusterSnapshotWithBodyWithResponse(ctx context.Context, id int32, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateClusterSnapshotResponse, error) { - rsp, err := c.CreateClusterSnapshotWithBody(ctx, id, contentType, body, reqEditors...) +func (c *ClientWithResponses) UpdateCollabNotebookWithResponse(ctx context.Context, notebookID string, body UpdateCollabNotebookJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdateCollabNotebookResponse, error) { + rsp, err := c.UpdateCollabNotebook(ctx, notebookID, body, reqEditors...) 
if err != nil { return nil, err } - return ParseCreateClusterSnapshotResponse(rsp) + return ParseUpdateCollabNotebookResponse(rsp) } -func (c *ClientWithResponses) CreateClusterSnapshotWithResponse(ctx context.Context, id int32, body CreateClusterSnapshotJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateClusterSnapshotResponse, error) { - rsp, err := c.CreateClusterSnapshot(ctx, id, body, reqEditors...) +// UploadCollabNotebookSnapshotWithBodyWithResponse request with arbitrary body returning *UploadCollabNotebookSnapshotResponse +func (c *ClientWithResponses) UploadCollabNotebookSnapshotWithBodyWithResponse(ctx context.Context, notebookID string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UploadCollabNotebookSnapshotResponse, error) { + rsp, err := c.UploadCollabNotebookSnapshotWithBody(ctx, notebookID, contentType, body, reqEditors...) if err != nil { return nil, err } - return ParseCreateClusterSnapshotResponse(rsp) + return ParseUploadCollabNotebookSnapshotResponse(rsp) } -// DeleteClusterSnapshotWithResponse request returning *DeleteClusterSnapshotResponse -func (c *ClientWithResponses) DeleteClusterSnapshotWithResponse(ctx context.Context, id int32, snapshotId int64, reqEditors ...RequestEditorFn) (*DeleteClusterSnapshotResponse, error) { - rsp, err := c.DeleteClusterSnapshot(ctx, id, snapshotId, reqEditors...) +// TransferCollabNotebookWithBodyWithResponse request with arbitrary body returning *TransferCollabNotebookResponse +func (c *ClientWithResponses) TransferCollabNotebookWithBodyWithResponse(ctx context.Context, notebookID string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*TransferCollabNotebookResponse, error) { + rsp, err := c.TransferCollabNotebookWithBody(ctx, notebookID, contentType, body, reqEditors...) 
if err != nil { return nil, err } - return ParseDeleteClusterSnapshotResponse(rsp) + return ParseTransferCollabNotebookResponse(rsp) } -// RestoreClusterSnapshotWithResponse request returning *RestoreClusterSnapshotResponse -func (c *ClientWithResponses) RestoreClusterSnapshotWithResponse(ctx context.Context, id int32, snapshotId int64, reqEditors ...RequestEditorFn) (*RestoreClusterSnapshotResponse, error) { - rsp, err := c.RestoreClusterSnapshot(ctx, id, snapshotId, reqEditors...) +func (c *ClientWithResponses) TransferCollabNotebookWithResponse(ctx context.Context, notebookID string, body TransferCollabNotebookJSONRequestBody, reqEditors ...RequestEditorFn) (*TransferCollabNotebookResponse, error) { + rsp, err := c.TransferCollabNotebook(ctx, notebookID, body, reqEditors...) if err != nil { return nil, err } - return ParseRestoreClusterSnapshotResponse(rsp) + return ParseTransferCollabNotebookResponse(rsp) } // ListDatabasesWithResponse request returning *ListDatabasesResponse @@ -4982,6 +5741,168 @@ func ParseRestoreClusterSnapshotResponse(rsp *http.Response) (*RestoreClusterSna return response, nil } +// ParseListCollabNotebooksResponse parses an HTTP response from a ListCollabNotebooksWithResponse call +func ParseListCollabNotebooksResponse(rsp *http.Response) (*ListCollabNotebooksResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &ListCollabNotebooksResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest []CollabNotebook + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + } + + return response, nil +} + +// ParseCreateCollabNotebookResponse parses an HTTP response from a CreateCollabNotebookWithResponse call +func ParseCreateCollabNotebookResponse(rsp *http.Response) 
(*CreateCollabNotebookResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &CreateCollabNotebookResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 201: + var dest CollabNotebook + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON201 = &dest + + } + + return response, nil +} + +// ParseDeleteCollabNotebookResponse parses an HTTP response from a DeleteCollabNotebookWithResponse call +func ParseDeleteCollabNotebookResponse(rsp *http.Response) (*DeleteCollabNotebookResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &DeleteCollabNotebookResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + return response, nil +} + +// ParseGetCollabNotebookResponse parses an HTTP response from a GetCollabNotebookWithResponse call +func ParseGetCollabNotebookResponse(rsp *http.Response) (*GetCollabNotebookResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &GetCollabNotebookResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest CollabNotebook + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + } + + return response, nil +} + +// ParseUpdateCollabNotebookResponse parses an HTTP response from a UpdateCollabNotebookWithResponse call +func ParseUpdateCollabNotebookResponse(rsp *http.Response) (*UpdateCollabNotebookResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + 
response := &UpdateCollabNotebookResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest CollabNotebook + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + } + + return response, nil +} + +// ParseUploadCollabNotebookSnapshotResponse parses an HTTP response from a UploadCollabNotebookSnapshotWithResponse call +func ParseUploadCollabNotebookSnapshotResponse(rsp *http.Response) (*UploadCollabNotebookSnapshotResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &UploadCollabNotebookSnapshotResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + return response, nil +} + +// ParseTransferCollabNotebookResponse parses an HTTP response from a TransferCollabNotebookWithResponse call +func ParseTransferCollabNotebookResponse(rsp *http.Response) (*TransferCollabNotebookResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &TransferCollabNotebookResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest CollabNotebook + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + } + + return response, nil +} + // ParseListDatabasesResponse parses an HTTP response from a ListDatabasesWithResponse call func ParseListDatabasesResponse(rsp *http.Response) (*ListDatabasesResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) @@ -5485,6 +6406,27 @@ type ServerInterface interface { // Restore snapshot // (POST /clusters/{ID}/snapshots/{snapshotId}) RestoreClusterSnapshot(c *fiber.Ctx, id int32, snapshotId int64) error + // List collaborative 
notebooks + // (GET /collab/notebooks) + ListCollabNotebooks(c *fiber.Ctx) error + // Create a collaborative notebook + // (POST /collab/notebooks) + CreateCollabNotebook(c *fiber.Ctx) error + // Delete a collaborative notebook + // (DELETE /collab/notebooks/{notebookID}) + DeleteCollabNotebook(c *fiber.Ctx, notebookID string) error + // Get collaborative notebook details + // (GET /collab/notebooks/{notebookID}) + GetCollabNotebook(c *fiber.Ctx, notebookID string) error + // Update a collaborative notebook + // (PUT /collab/notebooks/{notebookID}) + UpdateCollabNotebook(c *fiber.Ctx, notebookID string) error + // Upload a collaborative notebook snapshot + // (PUT /collab/notebooks/{notebookID}/snapshot) + UploadCollabNotebookSnapshot(c *fiber.Ctx, notebookID string) error + // Transfer collaborative notebook scope + // (POST /collab/notebooks/{notebookID}/transfer) + TransferCollabNotebook(c *fiber.Ctx, notebookID string) error // List all databases // (GET /databases) ListDatabases(c *fiber.Ctx) error @@ -5928,6 +6870,112 @@ func (siw *ServerInterfaceWrapper) RestoreClusterSnapshot(c *fiber.Ctx) error { return siw.Handler.RestoreClusterSnapshot(c, id, snapshotId) } +// ListCollabNotebooks operation middleware +func (siw *ServerInterfaceWrapper) ListCollabNotebooks(c *fiber.Ctx) error { + + c.Context().SetUserValue(BearerAuthScopes, []string{}) + + return siw.Handler.ListCollabNotebooks(c) +} + +// CreateCollabNotebook operation middleware +func (siw *ServerInterfaceWrapper) CreateCollabNotebook(c *fiber.Ctx) error { + + c.Context().SetUserValue(BearerAuthScopes, []string{}) + + return siw.Handler.CreateCollabNotebook(c) +} + +// DeleteCollabNotebook operation middleware +func (siw *ServerInterfaceWrapper) DeleteCollabNotebook(c *fiber.Ctx) error { + + var err error + + // ------------- Path parameter "notebookID" ------------- + var notebookID string + + err = runtime.BindStyledParameterWithOptions("simple", "notebookID", c.Params("notebookID"), ¬ebookID, 
runtime.BindStyledParameterOptions{Explode: false, Required: true}) + if err != nil { + return fiber.NewError(fiber.StatusBadRequest, fmt.Errorf("Invalid format for parameter notebookID: %w", err).Error()) + } + + c.Context().SetUserValue(BearerAuthScopes, []string{}) + + return siw.Handler.DeleteCollabNotebook(c, notebookID) +} + +// GetCollabNotebook operation middleware +func (siw *ServerInterfaceWrapper) GetCollabNotebook(c *fiber.Ctx) error { + + var err error + + // ------------- Path parameter "notebookID" ------------- + var notebookID string + + err = runtime.BindStyledParameterWithOptions("simple", "notebookID", c.Params("notebookID"), ¬ebookID, runtime.BindStyledParameterOptions{Explode: false, Required: true}) + if err != nil { + return fiber.NewError(fiber.StatusBadRequest, fmt.Errorf("Invalid format for parameter notebookID: %w", err).Error()) + } + + c.Context().SetUserValue(BearerAuthScopes, []string{}) + + return siw.Handler.GetCollabNotebook(c, notebookID) +} + +// UpdateCollabNotebook operation middleware +func (siw *ServerInterfaceWrapper) UpdateCollabNotebook(c *fiber.Ctx) error { + + var err error + + // ------------- Path parameter "notebookID" ------------- + var notebookID string + + err = runtime.BindStyledParameterWithOptions("simple", "notebookID", c.Params("notebookID"), ¬ebookID, runtime.BindStyledParameterOptions{Explode: false, Required: true}) + if err != nil { + return fiber.NewError(fiber.StatusBadRequest, fmt.Errorf("Invalid format for parameter notebookID: %w", err).Error()) + } + + c.Context().SetUserValue(BearerAuthScopes, []string{}) + + return siw.Handler.UpdateCollabNotebook(c, notebookID) +} + +// UploadCollabNotebookSnapshot operation middleware +func (siw *ServerInterfaceWrapper) UploadCollabNotebookSnapshot(c *fiber.Ctx) error { + + var err error + + // ------------- Path parameter "notebookID" ------------- + var notebookID string + + err = runtime.BindStyledParameterWithOptions("simple", "notebookID", 
c.Params("notebookID"), ¬ebookID, runtime.BindStyledParameterOptions{Explode: false, Required: true}) + if err != nil { + return fiber.NewError(fiber.StatusBadRequest, fmt.Errorf("Invalid format for parameter notebookID: %w", err).Error()) + } + + c.Context().SetUserValue(BearerAuthScopes, []string{}) + + return siw.Handler.UploadCollabNotebookSnapshot(c, notebookID) +} + +// TransferCollabNotebook operation middleware +func (siw *ServerInterfaceWrapper) TransferCollabNotebook(c *fiber.Ctx) error { + + var err error + + // ------------- Path parameter "notebookID" ------------- + var notebookID string + + err = runtime.BindStyledParameterWithOptions("simple", "notebookID", c.Params("notebookID"), ¬ebookID, runtime.BindStyledParameterOptions{Explode: false, Required: true}) + if err != nil { + return fiber.NewError(fiber.StatusBadRequest, fmt.Errorf("Invalid format for parameter notebookID: %w", err).Error()) + } + + c.Context().SetUserValue(BearerAuthScopes, []string{}) + + return siw.Handler.TransferCollabNotebook(c, notebookID) +} + // ListDatabases operation middleware func (siw *ServerInterfaceWrapper) ListDatabases(c *fiber.Ctx) error { @@ -6271,6 +7319,20 @@ func RegisterHandlersWithOptions(router fiber.Router, si ServerInterface, option router.Post(options.BaseURL+"/clusters/:ID/snapshots/:snapshotId", wrapper.RestoreClusterSnapshot) + router.Get(options.BaseURL+"/collab/notebooks", wrapper.ListCollabNotebooks) + + router.Post(options.BaseURL+"/collab/notebooks", wrapper.CreateCollabNotebook) + + router.Delete(options.BaseURL+"/collab/notebooks/:notebookID", wrapper.DeleteCollabNotebook) + + router.Get(options.BaseURL+"/collab/notebooks/:notebookID", wrapper.GetCollabNotebook) + + router.Put(options.BaseURL+"/collab/notebooks/:notebookID", wrapper.UpdateCollabNotebook) + + router.Put(options.BaseURL+"/collab/notebooks/:notebookID/snapshot", wrapper.UploadCollabNotebookSnapshot) + + router.Post(options.BaseURL+"/collab/notebooks/:notebookID/transfer", 
wrapper.TransferCollabNotebook) + router.Get(options.BaseURL+"/databases", wrapper.ListDatabases) router.Post(options.BaseURL+"/databases/import", wrapper.ImportDatabase) diff --git a/pkg/zgen/querier/collab.sql.gen.go b/pkg/zgen/querier/collab.sql.gen.go new file mode 100644 index 00000000..442d0598 --- /dev/null +++ b/pkg/zgen/querier/collab.sql.gen.go @@ -0,0 +1,301 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.29.0 +// source: collab.sql + +package querier + +import ( + "context" + "time" +) + +const createNotebook = `-- name: CreateNotebook :one +INSERT INTO notebooks (id, scope, org_id, owner_user_id, title, created_by) +VALUES ($1, $2, $3, $4, $5, $6) +RETURNING id, scope, org_id AS organization_id, owner_user_id, title, created_by, created_at, updated_at +` + +type CreateNotebookParams struct { + ID string + Scope NotebookScope + OrgID int32 + OwnerUserID *int32 + Title string + CreatedBy *int32 +} + +type CreateNotebookRow struct { + ID string + Scope NotebookScope + OrganizationID int32 + OwnerUserID *int32 + Title string + CreatedBy *int32 + CreatedAt time.Time + UpdatedAt time.Time +} + +func (q *Queries) CreateNotebook(ctx context.Context, arg CreateNotebookParams) (*CreateNotebookRow, error) { + row := q.db.QueryRow(ctx, createNotebook, + arg.ID, + arg.Scope, + arg.OrgID, + arg.OwnerUserID, + arg.Title, + arg.CreatedBy, + ) + var i CreateNotebookRow + err := row.Scan( + &i.ID, + &i.Scope, + &i.OrganizationID, + &i.OwnerUserID, + &i.Title, + &i.CreatedBy, + &i.CreatedAt, + &i.UpdatedAt, + ) + return &i, err +} + +const deleteNotebook = `-- name: DeleteNotebook :exec +DELETE FROM notebooks +WHERE id = $1 +` + +func (q *Queries) DeleteNotebook(ctx context.Context, id string) error { + _, err := q.db.Exec(ctx, deleteNotebook, id) + return err +} + +const getCollabDocSnapshot = `-- name: GetCollabDocSnapshot :one +SELECT snapshot FROM collab_doc_snapshots WHERE notebook_id = $1 +` + +func (q *Queries) GetCollabDocSnapshot(ctx 
context.Context, notebookID string) ([]byte, error) { + row := q.db.QueryRow(ctx, getCollabDocSnapshot, notebookID) + var snapshot []byte + err := row.Scan(&snapshot) + return snapshot, err +} + +const getNotebook = `-- name: GetNotebook :one +SELECT id, scope, org_id AS organization_id, owner_user_id, title, created_by, created_at, updated_at +FROM notebooks +WHERE id = $1 +` + +type GetNotebookRow struct { + ID string + Scope NotebookScope + OrganizationID int32 + OwnerUserID *int32 + Title string + CreatedBy *int32 + CreatedAt time.Time + UpdatedAt time.Time +} + +func (q *Queries) GetNotebook(ctx context.Context, id string) (*GetNotebookRow, error) { + row := q.db.QueryRow(ctx, getNotebook, id) + var i GetNotebookRow + err := row.Scan( + &i.ID, + &i.Scope, + &i.OrganizationID, + &i.OwnerUserID, + &i.Title, + &i.CreatedBy, + &i.CreatedAt, + &i.UpdatedAt, + ) + return &i, err +} + +const getUserOrganization = `-- name: GetUserOrganization :one +SELECT org_id AS organization_id +FROM anclax.user_default_orgs +WHERE user_id = $1 +` + +func (q *Queries) GetUserOrganization(ctx context.Context, userID int32) (int32, error) { + row := q.db.QueryRow(ctx, getUserOrganization, userID) + var organization_id int32 + err := row.Scan(&organization_id) + return organization_id, err +} + +const isOrgOwner = `-- name: IsOrgOwner :one +SELECT EXISTS( + SELECT 1 FROM anclax.org_owners WHERE user_id = $1 AND org_id = $2 +) +` + +type IsOrgOwnerParams struct { + UserID int32 + OrgID int32 +} + +func (q *Queries) IsOrgOwner(ctx context.Context, arg IsOrgOwnerParams) (bool, error) { + row := q.db.QueryRow(ctx, isOrgOwner, arg.UserID, arg.OrgID) + var exists bool + err := row.Scan(&exists) + return exists, err +} + +const listAccessibleNotebooks = `-- name: ListAccessibleNotebooks :many +SELECT id, scope, org_id AS organization_id, owner_user_id, title, created_by, created_at, updated_at +FROM notebooks +WHERE org_id = $1 + AND ( + scope = 'organization'::notebook_scope OR + (scope = 
'personal'::notebook_scope AND owner_user_id = $2) + ) +ORDER BY updated_at DESC +` + +type ListAccessibleNotebooksParams struct { + OrgID int32 + OwnerUserID *int32 +} + +type ListAccessibleNotebooksRow struct { + ID string + Scope NotebookScope + OrganizationID int32 + OwnerUserID *int32 + Title string + CreatedBy *int32 + CreatedAt time.Time + UpdatedAt time.Time +} + +func (q *Queries) ListAccessibleNotebooks(ctx context.Context, arg ListAccessibleNotebooksParams) ([]*ListAccessibleNotebooksRow, error) { + rows, err := q.db.Query(ctx, listAccessibleNotebooks, arg.OrgID, arg.OwnerUserID) + if err != nil { + return nil, err + } + defer rows.Close() + var items []*ListAccessibleNotebooksRow + for rows.Next() { + var i ListAccessibleNotebooksRow + if err := rows.Scan( + &i.ID, + &i.Scope, + &i.OrganizationID, + &i.OwnerUserID, + &i.Title, + &i.CreatedBy, + &i.CreatedAt, + &i.UpdatedAt, + ); err != nil { + return nil, err + } + items = append(items, &i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const updateNotebookScope = `-- name: UpdateNotebookScope :one +UPDATE notebooks +SET scope = $2, + owner_user_id = $3, + updated_at = CURRENT_TIMESTAMP +WHERE id = $1 +RETURNING id, scope, org_id AS organization_id, owner_user_id, title, created_by, created_at, updated_at +` + +type UpdateNotebookScopeParams struct { + ID string + Scope NotebookScope + OwnerUserID *int32 +} + +type UpdateNotebookScopeRow struct { + ID string + Scope NotebookScope + OrganizationID int32 + OwnerUserID *int32 + Title string + CreatedBy *int32 + CreatedAt time.Time + UpdatedAt time.Time +} + +func (q *Queries) UpdateNotebookScope(ctx context.Context, arg UpdateNotebookScopeParams) (*UpdateNotebookScopeRow, error) { + row := q.db.QueryRow(ctx, updateNotebookScope, arg.ID, arg.Scope, arg.OwnerUserID) + var i UpdateNotebookScopeRow + err := row.Scan( + &i.ID, + &i.Scope, + &i.OrganizationID, + &i.OwnerUserID, + &i.Title, + &i.CreatedBy, + &i.CreatedAt, 
+ &i.UpdatedAt, + ) + return &i, err +} + +const updateNotebookTitle = `-- name: UpdateNotebookTitle :one +UPDATE notebooks +SET title = $2, + updated_at = CURRENT_TIMESTAMP +WHERE id = $1 +RETURNING id, scope, org_id AS organization_id, owner_user_id, title, created_by, created_at, updated_at +` + +type UpdateNotebookTitleParams struct { + ID string + Title string +} + +type UpdateNotebookTitleRow struct { + ID string + Scope NotebookScope + OrganizationID int32 + OwnerUserID *int32 + Title string + CreatedBy *int32 + CreatedAt time.Time + UpdatedAt time.Time +} + +func (q *Queries) UpdateNotebookTitle(ctx context.Context, arg UpdateNotebookTitleParams) (*UpdateNotebookTitleRow, error) { + row := q.db.QueryRow(ctx, updateNotebookTitle, arg.ID, arg.Title) + var i UpdateNotebookTitleRow + err := row.Scan( + &i.ID, + &i.Scope, + &i.OrganizationID, + &i.OwnerUserID, + &i.Title, + &i.CreatedBy, + &i.CreatedAt, + &i.UpdatedAt, + ) + return &i, err +} + +const upsertCollabDocSnapshot = `-- name: UpsertCollabDocSnapshot :exec +INSERT INTO collab_doc_snapshots (notebook_id, snapshot) +VALUES ($1, $2) +ON CONFLICT (notebook_id) +DO UPDATE SET snapshot = EXCLUDED.snapshot, + updated_at = CURRENT_TIMESTAMP +` + +type UpsertCollabDocSnapshotParams struct { + NotebookID string + Snapshot []byte +} + +func (q *Queries) UpsertCollabDocSnapshot(ctx context.Context, arg UpsertCollabDocSnapshotParams) error { + _, err := q.db.Exec(ctx, upsertCollabDocSnapshot, arg.NotebookID, arg.Snapshot) + return err +} diff --git a/pkg/zgen/querier/models_gen.go b/pkg/zgen/querier/models_gen.go index 1e261f9b..f0b19a36 100644 --- a/pkg/zgen/querier/models_gen.go +++ b/pkg/zgen/querier/models_gen.go @@ -5,12 +5,157 @@ package querier import ( + "database/sql/driver" + "encoding/json" + "fmt" "time" "github.com/jackc/pgx/v5/pgtype" "github.com/risingwavelabs/risingwave-console/pkg/zgen/apigen" ) +type NotebookScope string + +const ( + NotebookScopePersonal NotebookScope = "personal" + 
NotebookScopeOrganization NotebookScope = "organization" +) + +func (e *NotebookScope) Scan(src interface{}) error { + switch s := src.(type) { + case []byte: + *e = NotebookScope(s) + case string: + *e = NotebookScope(s) + default: + return fmt.Errorf("unsupported scan type for NotebookScope: %T", src) + } + return nil +} + +type NullNotebookScope struct { + NotebookScope NotebookScope + Valid bool // Valid is true if NotebookScope is not NULL +} + +// Scan implements the Scanner interface. +func (ns *NullNotebookScope) Scan(value interface{}) error { + if value == nil { + ns.NotebookScope, ns.Valid = "", false + return nil + } + ns.Valid = true + return ns.NotebookScope.Scan(value) +} + +// Value implements the driver Valuer interface. +func (ns NullNotebookScope) Value() (driver.Value, error) { + if !ns.Valid { + return nil, nil + } + return string(ns.NotebookScope), nil +} + +type AnclaxAccessKeyPair struct { + AccessKey string + SecretKey string + CreatedAt time.Time + UpdatedAt time.Time +} + +type AnclaxAccessRule struct { + Name string + Description string + CreatedAt time.Time + UpdatedAt time.Time +} + +type AnclaxEvent struct { + ID int32 + Spec json.RawMessage + CreatedAt time.Time +} + +type AnclaxOpaqueKey struct { + ID int64 + Key []byte + UserID int32 + CreatedAt time.Time + UpdatedAt time.Time +} + +type AnclaxOrg struct { + ID int32 + Name string + Tz string + CreatedAt time.Time + UpdatedAt time.Time +} + +type AnclaxOrgOwner struct { + OrgID int32 + UserID int32 + CreatedAt time.Time +} + +type AnclaxOrgUser struct { + OrgID int32 + UserID int32 + CreatedAt time.Time + UpdatedAt time.Time +} + +type AnclaxRole struct { + ID int32 + OrgID int32 + Name string + Description string + CreatedAt time.Time + UpdatedAt time.Time +} + +type AnclaxRoleAccessRule struct { + RoleID int32 + AccessRuleName string + CreatedAt time.Time + UpdatedAt time.Time +} + +type AnclaxTask struct { + ID int32 + Attributes json.RawMessage + Spec json.RawMessage + Status 
string + UniqueTag *string + StartedAt *time.Time + CreatedAt time.Time + UpdatedAt time.Time + Attempts int32 +} + +type AnclaxUser struct { + ID int32 + Name string + PasswordHash string + PasswordSalt string + CreatedAt time.Time + UpdatedAt time.Time + DeletedAt *time.Time +} + +type AnclaxUserDefaultOrg struct { + UserID int32 + OrgID int32 + CreatedAt time.Time +} + +type AnclaxUsersRole struct { + UserID int32 + RoleID int32 + CreatedAt time.Time + UpdatedAt time.Time +} + type AutoBackupConfig struct { ClusterID int32 Enabled bool @@ -60,6 +205,13 @@ type ClusterSnapshot struct { UpdatedAt time.Time } +type CollabDocSnapshot struct { + NotebookID string + Snapshot []byte + CreatedAt time.Time + UpdatedAt time.Time +} + type DatabaseConnection struct { ID int32 OrgID int32 @@ -82,6 +234,17 @@ type MetricsStore struct { UpdatedAt pgtype.Timestamp } +type Notebook struct { + ID string + Scope NotebookScope + OrgID int32 + OwnerUserID *int32 + CreatedBy *int32 + Title string + CreatedAt time.Time + UpdatedAt time.Time +} + type OpaqueKey struct { ID int64 Key []byte diff --git a/pkg/zgen/querier/querier_gen.go b/pkg/zgen/querier/querier_gen.go index 378506fe..1f8cb8cc 100644 --- a/pkg/zgen/querier/querier_gen.go +++ b/pkg/zgen/querier/querier_gen.go @@ -16,11 +16,13 @@ type Querier interface { CreateClusterSnapshot(ctx context.Context, arg CreateClusterSnapshotParams) error CreateDatabaseConnection(ctx context.Context, arg CreateDatabaseConnectionParams) (*DatabaseConnection, error) CreateMetricsStore(ctx context.Context, arg CreateMetricsStoreParams) (*MetricsStore, error) + CreateNotebook(ctx context.Context, arg CreateNotebookParams) (*CreateNotebookRow, error) CreateOrgSettings(ctx context.Context, arg CreateOrgSettingsParams) error DeleteAllOrgDatabaseConnectionsByClusterID(ctx context.Context, arg DeleteAllOrgDatabaseConnectionsByClusterIDParams) error DeleteClusterDiagnostic(ctx context.Context, id int32) error DeleteClusterSnapshot(ctx context.Context, 
arg DeleteClusterSnapshotParams) error DeleteMetricsStore(ctx context.Context, arg DeleteMetricsStoreParams) error + DeleteNotebook(ctx context.Context, id string) error DeleteOrgCluster(ctx context.Context, arg DeleteOrgClusterParams) error DeleteOrgDatabaseConnection(ctx context.Context, arg DeleteOrgDatabaseConnectionParams) error GetAllOrgDatabseConnectionsByClusterID(ctx context.Context, arg GetAllOrgDatabseConnectionsByClusterIDParams) ([]*DatabaseConnection, error) @@ -28,16 +30,21 @@ type Querier interface { GetAutoDiagnosticsConfig(ctx context.Context, clusterID int32) (*AutoDiagnosticsConfig, error) GetClusterByID(ctx context.Context, id int32) (*Cluster, error) GetClusterDiagnostic(ctx context.Context, id int32) (*ClusterDiagnostic, error) + GetCollabDocSnapshot(ctx context.Context, notebookID string) ([]byte, error) GetDatabaseConnectionByID(ctx context.Context, id int32) (*DatabaseConnection, error) GetMetricsStore(ctx context.Context, id int32) (*MetricsStore, error) GetMetricsStoreByIDAndOrgID(ctx context.Context, arg GetMetricsStoreByIDAndOrgIDParams) (*MetricsStore, error) + GetNotebook(ctx context.Context, id string) (*GetNotebookRow, error) GetOrgCluster(ctx context.Context, arg GetOrgClusterParams) (*Cluster, error) GetOrgDatabaseByID(ctx context.Context, arg GetOrgDatabaseByIDParams) (*DatabaseConnection, error) GetOrgDatabaseConnection(ctx context.Context, arg GetOrgDatabaseConnectionParams) (*DatabaseConnection, error) GetOrgSettings(ctx context.Context, orgID int32) (*OrgSetting, error) + GetUserOrganization(ctx context.Context, userID int32) (int32, error) InitCluster(ctx context.Context, arg InitClusterParams) (*Cluster, error) InitDatabaseConnection(ctx context.Context, arg InitDatabaseConnectionParams) (*DatabaseConnection, error) InitMetricsStore(ctx context.Context, arg InitMetricsStoreParams) (*MetricsStore, error) + IsOrgOwner(ctx context.Context, arg IsOrgOwnerParams) (bool, error) + ListAccessibleNotebooks(ctx context.Context, arg 
ListAccessibleNotebooksParams) ([]*ListAccessibleNotebooksRow, error) ListClusterDiagnostics(ctx context.Context, clusterID int32) ([]*ListClusterDiagnosticsRow, error) ListClusterSnapshots(ctx context.Context, clusterID int32) ([]*ClusterSnapshot, error) ListClustersByMetricsStoreID(ctx context.Context, metricsStoreID *int32) ([]*Cluster, error) @@ -48,8 +55,11 @@ type Querier interface { UpdateAutoBackupConfig(ctx context.Context, arg UpdateAutoBackupConfigParams) error UpdateAutoDiagnosticsConfig(ctx context.Context, arg UpdateAutoDiagnosticsConfigParams) error UpdateMetricsStore(ctx context.Context, arg UpdateMetricsStoreParams) (*MetricsStore, error) + UpdateNotebookScope(ctx context.Context, arg UpdateNotebookScopeParams) (*UpdateNotebookScopeRow, error) + UpdateNotebookTitle(ctx context.Context, arg UpdateNotebookTitleParams) (*UpdateNotebookTitleRow, error) UpdateOrgCluster(ctx context.Context, arg UpdateOrgClusterParams) (*Cluster, error) UpdateOrgDatabaseConnection(ctx context.Context, arg UpdateOrgDatabaseConnectionParams) (*DatabaseConnection, error) + UpsertCollabDocSnapshot(ctx context.Context, arg UpsertCollabDocSnapshotParams) error } var _ Querier = (*Queries)(nil) diff --git a/pkg/zgen/taskgen/taskgen_gen.go b/pkg/zgen/taskgen/taskgen_gen.go index 530df3a8..4952a5c6 100644 --- a/pkg/zgen/taskgen/taskgen_gen.go +++ b/pkg/zgen/taskgen/taskgen_gen.go @@ -260,10 +260,10 @@ type DeleteClusterDiagnosticParameters struct { type DeleteSnapshotParameters struct { // - SnapshotID int64 `json:"snapshotID" yaml:"snapshotID"` + ClusterID int32 `json:"clusterID" yaml:"clusterID"` // - ClusterID int32 `json:"clusterID" yaml:"clusterID"` + SnapshotID int64 `json:"snapshotID" yaml:"snapshotID"` } func (r *AutoBackupParameters) Parse(spec json.RawMessage) error { diff --git a/sql/migrations/0008_collab_doc_snapshots.down.sql b/sql/migrations/0008_collab_doc_snapshots.down.sql new file mode 100644 index 00000000..6409104a --- /dev/null +++ 
b/sql/migrations/0008_collab_doc_snapshots.down.sql @@ -0,0 +1,7 @@ +BEGIN; + +DROP TABLE IF EXISTS collab_doc_snapshots; +DROP TABLE IF EXISTS notebooks; +DROP TYPE IF EXISTS notebook_scope; + +COMMIT; diff --git a/sql/migrations/0008_collab_doc_snapshots.up.sql b/sql/migrations/0008_collab_doc_snapshots.up.sql new file mode 100644 index 00000000..a269729d --- /dev/null +++ b/sql/migrations/0008_collab_doc_snapshots.up.sql @@ -0,0 +1,33 @@ +BEGIN; + +CREATE TYPE notebook_scope AS ENUM ('personal', 'organization'); + +CREATE TABLE notebooks ( + id TEXT NOT NULL, + scope notebook_scope NOT NULL, + org_id INTEGER NOT NULL REFERENCES anclax.orgs(id) ON DELETE CASCADE, + owner_user_id INTEGER REFERENCES anclax.users(id) ON DELETE CASCADE, + created_by INTEGER REFERENCES anclax.users(id) ON DELETE SET NULL, + title TEXT NOT NULL DEFAULT '', + created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP, + + PRIMARY KEY (id), + CHECK ( + (scope = 'personal' AND owner_user_id IS NOT NULL) OR + (scope = 'organization' AND owner_user_id IS NULL) + ) +); + +-- Stores the latest Yjs document snapshot for each collaborative notebook. +-- The snapshot is the raw binary produced by the client (e.g. Y.encodeStateAsUpdate(doc)). 
+CREATE TABLE collab_doc_snapshots ( + notebook_id TEXT NOT NULL REFERENCES notebooks(id) ON DELETE CASCADE, + snapshot BYTEA NOT NULL, + created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP, + + PRIMARY KEY (notebook_id) +); + +COMMIT; diff --git a/sql/queries/collab.sql b/sql/queries/collab.sql new file mode 100644 index 00000000..b0e85e4d --- /dev/null +++ b/sql/queries/collab.sql @@ -0,0 +1,58 @@ +-- name: CreateNotebook :one +INSERT INTO notebooks (id, scope, org_id, owner_user_id, title, created_by) +VALUES ($1, $2, $3, $4, $5, $6) +RETURNING id, scope, org_id AS organization_id, owner_user_id, title, created_by, created_at, updated_at; + +-- name: GetNotebook :one +SELECT id, scope, org_id AS organization_id, owner_user_id, title, created_by, created_at, updated_at +FROM notebooks +WHERE id = $1; + +-- name: ListAccessibleNotebooks :many +SELECT id, scope, org_id AS organization_id, owner_user_id, title, created_by, created_at, updated_at +FROM notebooks +WHERE org_id = $1 + AND ( + scope = 'organization'::notebook_scope OR + (scope = 'personal'::notebook_scope AND owner_user_id = $2) + ) +ORDER BY updated_at DESC; + +-- name: UpdateNotebookTitle :one +UPDATE notebooks +SET title = $2, + updated_at = CURRENT_TIMESTAMP +WHERE id = $1 +RETURNING id, scope, org_id AS organization_id, owner_user_id, title, created_by, created_at, updated_at; + +-- name: UpdateNotebookScope :one +UPDATE notebooks +SET scope = $2, + owner_user_id = $3, + updated_at = CURRENT_TIMESTAMP +WHERE id = $1 +RETURNING id, scope, org_id AS organization_id, owner_user_id, title, created_by, created_at, updated_at; + +-- name: DeleteNotebook :exec +DELETE FROM notebooks +WHERE id = $1; + +-- name: UpsertCollabDocSnapshot :exec +INSERT INTO collab_doc_snapshots (notebook_id, snapshot) +VALUES ($1, $2) +ON CONFLICT (notebook_id) +DO UPDATE SET snapshot = EXCLUDED.snapshot, + updated_at = CURRENT_TIMESTAMP; + +-- name: 
GetCollabDocSnapshot :one +SELECT snapshot FROM collab_doc_snapshots WHERE notebook_id = $1; + +-- name: IsOrgOwner :one +SELECT EXISTS( + SELECT 1 FROM anclax.org_owners WHERE user_id = $1 AND org_id = $2 +); + +-- name: GetUserOrganization :one +SELECT org_id AS organization_id +FROM anclax.user_default_orgs +WHERE user_id = $1; diff --git a/sql/sqlc.yaml b/sql/sqlc.yaml index 445c14e1..83c6930d 100644 --- a/sql/sqlc.yaml +++ b/sql/sqlc.yaml @@ -1,6 +1,8 @@ version: "2" sql: - - schema: "migrations" + - schema: + - "migrations" + - "../dev/anclax/sql/migrations" queries: "queries" engine: "postgresql" gen: diff --git a/web/.cursor/rules/styling.mdc b/web/.cursor/rules/styling.mdc index 95499c6a..8deb4431 100644 --- a/web/.cursor/rules/styling.mdc +++ b/web/.cursor/rules/styling.mdc @@ -38,7 +38,7 @@ const className = sizeClasses[size] ## Class Utility Function -Use the `cn()` utility from `src/lib/cn.ts` for combining classes: +Use the `cn()` utility from `lib/cn.ts` for combining classes: ```typescript import { cn } from '@/lib/cn' diff --git a/web/.gitignore b/web/.gitignore index 45e4bc10..13a53247 100644 --- a/web/.gitignore +++ b/web/.gitignore @@ -40,3 +40,6 @@ yarn-error.log* next-env.d.ts out/* !out/.gitkeep + +# monaco editor (copied from node_modules) +/public/monaco-editor diff --git a/web/CLAUDE.md b/web/CLAUDE.md new file mode 100644 index 00000000..b629704f --- /dev/null +++ b/web/CLAUDE.md @@ -0,0 +1,284 @@ +# CLAUDE.md + +This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository. + +## Development Commands + +**Start Development Server:** +```bash +npm run dev +``` +Uses Next.js with Turbopack for fast development. + +**Build Project:** +```bash +npm run build +``` +Builds static export with out/.gitkeep file. + +**Lint Code:** +```bash +npm run lint +``` +Runs Next.js ESLint configuration. 
+ +**Generate API Clients:** +```bash +npm run gen +``` +Generates API clients from OpenAPI specs for both local API (../api/v1.yaml) and Anchor API. + +## Architecture Overview + +This is a **Next.js 15** application with **static export** configuration built for RisingWave Console management. The application uses a modular architecture with strict component organization patterns. + +### Key Technologies +- **Next.js 15** with App Router and static export +- **React 19** with TypeScript +- **Jotai** for state management with custom utilities +- **TailwindCSS** for styling with strict static class requirements +- **Framer Motion** with LazyMotion optimization (using `m.` prefix) +- **Radix UI** primitives for accessible components +- **React Query** (@tanstack/react-query) for data fetching +- **Monaco Editor** for code editing capabilities + +### Directory Structure + +**`/app/`** - Next.js App Router pages +- Route-based pages (clusters, login, sqlconsole, settings, etc.) +- Main layout with RootProviders wrapper + +**`/modules/`** - Feature-specific components organized by domain +- `clusters/` - Cluster management UI +- `database/` - Database operations and insights +- `sqlconsole/` - SQL editor and query execution +- `tasks/` - Task execution history +- `notebook/` - Notebook functionality (new module) + +**`/components/`** - Shared component library +- `ui/` - Universal Radix-based primitives (Button, Input, Dialog, etc.) +- `common/` - App-specific shared components (AppSidebar, AuthGuard, etc.) 
+ +**`/lib/`** - Core utilities and configuration +- `jotai.ts` - Custom Jotai store and hook utilities +- `utils.ts` - TailwindCSS class merging utility (`cn`) +- `query-client.ts` - React Query configuration +- `framer-lazy-feature.ts` - LazyMotion feature loading + +**`/atoms/`** - Jotai state atoms +- `auth.ts` - Authentication state +- `context-menu.ts` - UI context menu state +- `viewport.ts` - Viewport/layout state + +**`/providers/`** - React context providers +- `root-providers.tsx` - Main provider composition +- `auth-provider.tsx` - Authentication context + +## Component Organization Rules + +**Universal Components** → `components/ui/` +- Reusable across any React application +- Based on Radix UI primitives +- No business logic + +**Feature Components** → `modules/{domain}/` +- Domain-specific business logic +- Examples: `FeedTimeline`, `ClusterList`, `SqlEditor` + +**App-Specific Shared** → `components/common/` +- Used across features but specific to this app +- Examples: `AppSidebar`, `AuthGuard` + +Always use `@/` path alias for imports (configured in tsconfig.json). + +## State Management with Jotai + +Use the custom utilities from `lib/jotai.ts`: + +```typescript +// Create atom with hooks +const [useMyAtom, useSetMyAtom, useMyAtomValue, myAtom] = createAtomHooks(atom(null)) + +// Use global store +import { jotaiStore } from '@/lib/jotai' +``` + +Store atoms in `/atoms/` directory and use the global `jotaiStore` instance configured in RootProviders. + +## Styling Guidelines + +**Critical:** All TailwindCSS classes must be **statically defined** - no dynamic className construction. 
+ +```typescript +// ❌ Wrong - Dynamic classes won't work +const className = `text-${size}` + +// ✅ Correct - Static conditional classes +const className = clsx({ + 'text-base': size === 'small', + 'text-lg': size === 'large', +}) +``` + +Always use the `cn()` utility from `@/lib/utils` for class composition: + +```typescript +import { cn } from '@/lib/utils' + + + + + + + + + + + ); + } + + return ( +
+ setIsHovered(true)} + onHoverEnd={() => setIsHovered(false)} + className="flex items-center gap-2" + > + + + + + {cellKind.toUpperCase()} + + + {isRunning && ( + + )} + + +
+ {/* Background DDL Switch */} + + + +
+ handleBackgroundDDLChange(checked === true)} + /> + + +
+
+ +

Execute DDL statements in the background

+
+
+
+ + + + + + + + + {position > 0 && ( + + )} + + {position < total - 1 && ( + + )} + + + + + + + + + + + + +
+
+ ); +} + +export { CellToolbar }; diff --git a/web/modules/notebook/components/Cell/common/CodePreview.tsx b/web/modules/notebook/components/Cell/common/CodePreview.tsx new file mode 100644 index 00000000..5cdc9abf --- /dev/null +++ b/web/modules/notebook/components/Cell/common/CodePreview.tsx @@ -0,0 +1,43 @@ +'use client' + +import { memo } from 'react' +import { ShikiCodeBlock } from '@/components/ui/shiki/ShikiCodeBlock' + +interface CodePreviewProps { + code: string + language?: string + className?: string + onDoubleClick?: () => void + loading?: string + emptyText?: string +} + +const CodePreview = memo(function CodePreview({ + code, + language = 'sql', + className, + onDoubleClick, + emptyText = 'Empty cell' +}: CodePreviewProps) { + return ( + + ) +}, (prevProps, nextProps) => { + return ( + prevProps.code === nextProps.code && + prevProps.language === nextProps.language && + prevProps.className === nextProps.className && + prevProps.loading === nextProps.loading && + prevProps.emptyText === nextProps.emptyText && + prevProps.onDoubleClick === nextProps.onDoubleClick + ) +}) + +export { CodePreview } \ No newline at end of file diff --git a/web/modules/notebook/components/Cell/index.tsx b/web/modules/notebook/components/Cell/index.tsx new file mode 100644 index 00000000..ed55d8a6 --- /dev/null +++ b/web/modules/notebook/components/Cell/index.tsx @@ -0,0 +1,63 @@ +"use client"; + +import { cn } from "@/lib/utils"; +import { useCellKind } from "@/modules/notebook/hooks/use-cell-snapshot"; +import { + useCellEditorModeForCell, + toggleCellEditorMode, +} from "@/atoms/notebook/cell-editor-modes"; +import { + useCellCollapsedForCell, +} from "@/atoms/notebook/cell-runtime"; +import { CellContent } from "./CellContent"; +import { CellOutput } from "./CellOutput"; +import { CellToolbar } from "./CellToolbar"; + +interface CellProps { + notebookId: string; + cellId: string; +} + +export const Cell = function Cell({ + notebookId, + cellId, +}: CellProps) { + const 
cellKind = useCellKind(cellId); + const editorMode = useCellEditorModeForCell(notebookId, cellId); + const isCollapsed = useCellCollapsedForCell(cellId); + + const toggleEditorMode = () => { + toggleCellEditorMode(notebookId, cellId); + }; + + const isSql = cellKind === "sql"; + + return ( +
+ + {!isCollapsed && ( + + )} + +
+ ); +}; diff --git a/web/modules/notebook/components/CollaborativeMonacoEditor.tsx b/web/modules/notebook/components/CollaborativeMonacoEditor.tsx new file mode 100644 index 00000000..dcb3e529 --- /dev/null +++ b/web/modules/notebook/components/CollaborativeMonacoEditor.tsx @@ -0,0 +1,129 @@ +"use client"; + +import { + forwardRef, + memo, + useCallback, + useEffect, + useImperativeHandle, + useMemo, + useRef, + useState, +} from "react"; +import * as Y from "yjs"; +import type { editor as MonacoEditorNS } from "monaco-editor"; +import { MonacoEditor, type MonacoEditorHandle, type MonacoEditorProps } from "@/components/ui/monaco-editor"; +import { useMonacoAwareness } from "@/modules/notebook/hooks/useMonacoAwareness"; +import { useSchemaCompletion } from "@/modules/notebook/hooks/useSchemaCompletion"; +import { useDatabaseSchemaQuery } from "@/modules/database/queries/databases"; +import { useNotebookSnapshot } from "@/modules/notebook/hooks/use-notebook-snapshot"; +import { NB_CELL_MAP, CELL_SOURCE } from "@/modules/notebook/collab/yjs/schema/core/keys"; +import type { YCell } from "@/modules/notebook/collab/yjs/schema/core/types"; +import { useNotebookRuntime } from "@/modules/notebook/providers/notebook-runtime-context"; +import { getNotebookRoot } from "@/modules/notebook/collab/yjs/schema/access/root"; +import { YjsMonacoBinding } from "../lib/collaborative-monaco/y-monaco-binding"; + +interface CollaborativeMonacoEditorProps extends Omit { + notebookId: string; + cellId: string; + onReady?: (editor: MonacoEditorNS.IStandaloneCodeEditor) => void; +} + +export type CollaborativeMonacoEditorHandle = MonacoEditorHandle; + +/** + * Collaborative Monaco Editor wrapper. 
+ * + * Architecture: + * - Uses YjsMonacoBinding for Yjs Y.Text ↔ Monaco Model text synchronization + * - Uses useMonacoAwareness for collaborative cursors, selections, and presence UI + * - Composes base MonacoEditor component + * + * Separation of concerns: + * - Text sync: YjsMonacoBinding (with USER_ACTION_ORIGIN for undo compatibility) + * - Awareness UI: useMonacoAwareness hook (decorations, widgets, Jotai integration) + */ +const CollaborativeMonacoEditorInner = ( + { cellId, onReady, ...rest }: CollaborativeMonacoEditorProps, + ref: React.Ref +) => { + const baseRef = useRef(null); + const [editor, setEditor] = useState(null); + const editorsRef = useRef>(new Set()); + const bindingRef = useRef(null); + const { resource } = useNotebookRuntime(); + + // Schema completion + const { databaseId } = useNotebookSnapshot(); + const databaseIdNum = databaseId ? parseInt(String(databaseId), 10) : null; + const { data: schemas } = useDatabaseSchemaQuery(databaseIdNum); + useSchemaCompletion({ schemas }); + + const yText = useMemo(() => { + const root = getNotebookRoot(resource.doc); + const cm = root?.get(NB_CELL_MAP) as Y.Map | undefined; + const cell = cm?.get(cellId); + const candidate = cell?.get(CELL_SOURCE); + return candidate instanceof Y.Text ? candidate : null; + }, [resource.doc, cellId]); + + // Awareness wiring + useMonacoAwareness({ editor, cellId }); + + const handleReady = useCallback( + (ed: MonacoEditorNS.IStandaloneCodeEditor) => { + setEditor(ed); + onReady?.(ed); + }, + [onReady] + ); + + useImperativeHandle( + ref, + () => ({ + focus: () => baseRef.current?.focus(), + getValue: () => baseRef.current?.getValue() ?? 
"", + setValue: (v: string) => baseRef.current?.setValue(v), + getEditor: () => editor, + }), + [editor] + ); + + // Text synchronization: Yjs Y.Text ↔ Monaco Model + // Note: awareness is NOT passed here - it's handled separately by useMonacoAwareness + useEffect(() => { + const monacoEditor = editor; + if (!monacoEditor || !yText) return; + + const model = monacoEditor.getModel(); + if (!model) return; + + const editors = editorsRef.current; + + editors.add(monacoEditor); + const binding = new YjsMonacoBinding({ + yText, + monacoModel: model, + editors, + }); + bindingRef.current = binding; + + return () => { + binding.destroy(); + if (bindingRef.current === binding) { + bindingRef.current = null; + } + editors.delete(monacoEditor); + }; + }, [editor, yText]); + + return ( + + ); +}; + +export const CollaborativeMonacoEditor = memo(forwardRef(CollaborativeMonacoEditorInner)); diff --git a/web/modules/notebook/components/DatabaseDrawer.tsx b/web/modules/notebook/components/DatabaseDrawer.tsx new file mode 100644 index 00000000..ff284d31 --- /dev/null +++ b/web/modules/notebook/components/DatabaseDrawer.tsx @@ -0,0 +1,39 @@ +'use client' + +import { memo, useState } from 'react' +import { DatabaseManagement } from '@/modules/database/database-management' + +interface DatabaseDrawerProps { + children: React.ReactNode + onDatabaseChange?: () => void +} + +const DatabaseDrawer = memo(function DatabaseDrawer({ + children, + onDatabaseChange, +}: DatabaseDrawerProps) { + const [isOpen, setIsOpen] = useState(false) + + const handleClose = () => { + setIsOpen(false) + } + + const handleDatabaseChange = () => { + onDatabaseChange?.() + } + + return ( + <> +
setIsOpen(true)}> + {children} +
+ + + ) +}) + +export { DatabaseDrawer } \ No newline at end of file diff --git a/web/modules/notebook/components/EditableTitle.tsx b/web/modules/notebook/components/EditableTitle.tsx new file mode 100644 index 00000000..205018a5 --- /dev/null +++ b/web/modules/notebook/components/EditableTitle.tsx @@ -0,0 +1,168 @@ +import { AnimatePresence, m } from "motion/react"; +import { Input } from "@/components/ui/input"; +import { cn } from "@/lib/utils"; +import { memo, useState, useRef, useEffect, useCallback, useMemo } from "react"; +import { Spring } from "@/lib/spring"; +import { useUpdateCollabNotebookMutation } from "@/modules/notebook/queries/notebook-mutations"; +import { useSelfEditing, useCellPresence } from "@/modules/notebook/awareness"; +import { NotebookPresenceAvatars } from "@/modules/notebook/components/common/PresenceAvatars"; +import { useNotebookSnapshot, useNotebookStoreState } from "../hooks/use-notebook-snapshot"; +import { useNotebookDirectoryEntry } from "@/modules/notebook/hooks/useNotebookSelection"; + +interface EditableTitleProps { + notebookId: string; + className?: string; +} + +export const EditableTitle = memo(function EditableTitle({ + notebookId, + className, +}: EditableTitleProps) { + const snap = useNotebookSnapshot(); + const title = snap.title; + const notebookState = useNotebookStoreState(); + const notebookEntry = useNotebookDirectoryEntry(notebookId); + const updateNotebook = useUpdateCollabNotebookMutation(); + + const [, setMyEditingState] = useSelfEditing(); + + const titlePresence = useCellPresence("title"); + + const [isEditing, setIsEditing] = useState(false); + const [editValue, setEditValue] = useState(title); + const inputRef = useRef(null); + + const otherEditors = useMemo( + () => titlePresence.filter((p) => !p.isSelf && p.editing?.cellId === "title"), + [titlePresence] + ); + + const isLockedByOther = otherEditors.length > 0; + + const presenceLabel = + otherEditors.length > 0 + ? 
`${otherEditors + .map((p) => p.user.name ?? p.user.id ?? `Client ${p.clientId}`) + .join(", ")} is editing...` + : null; + + useEffect(() => { + setEditValue(title); + }, [title]); + + useEffect(() => { + if (isEditing && inputRef.current) { + inputRef.current.focus(); + inputRef.current.select(); + } + }, [isEditing]); + + useEffect(() => { + const desiredState = isEditing ? { cellId: "title", origin: "title" } : null; + setMyEditingState(desiredState); + + return () => { + setMyEditingState(null); + }; + }, [isEditing, setMyEditingState]); + + const handleSubmit = useCallback(async () => { + // Immediately exit editing state to clear awareness + setIsEditing(false); + + const trimmedValue = editValue.trim(); + if (!trimmedValue) { + setEditValue(title); + return; + } + + if (trimmedValue === title) { + return; + } + + const previousTitle = title; + notebookState.title = trimmedValue; + + if (notebookEntry) { + try { + await updateNotebook.mutateAsync({ + entry: notebookEntry, + overrides: { title: trimmedValue }, + successMessage: "Notebook renamed", + }); + } catch { + notebookState.title = previousTitle; + setEditValue(previousTitle); + } + } + }, [editValue, notebookEntry, title, updateNotebook]); + + const handleKeyDown = useCallback( + (e: React.KeyboardEvent) => { + if (e.key === "Enter" && !e.nativeEvent.isComposing) { + void handleSubmit(); + } else if (e.key === "Escape" && !e.nativeEvent.isComposing) { + setEditValue(title); + setIsEditing(false); + } + }, + [handleSubmit, title] + ); + + if (isEditing) { + return ( +
+ setEditValue(e.target.value)} + onBlur={() => void handleSubmit()} + onKeyDown={handleKeyDown} + disabled={updateNotebook.isPending} + className={cn( + "h-8 text-2xl font-semibold bg-transparent border-transparent hover:border-input focus:border-input", + className + )} + placeholder="Notebook title..." + /> +
+ ); + } + + return ( +
+
+

{ + if (isLockedByOther) return; + setIsEditing(true); + }} + className={cn( + "w-full text-2xl font-semibold cursor-pointer hover:bg-muted/50 rounded px-2 py-1 transition-all", + isLockedByOther && "cursor-not-allowed opacity-70", + className + )} + title={isLockedByOther ? "Another user is editing the title" : "Click to edit title"} + aria-disabled={isLockedByOther || undefined} + > + {title} +

+ +
+
+ + {presenceLabel && ( + + {presenceLabel} + + )} + +
+
+ ); +}); diff --git a/web/modules/notebook/components/KeyboardShortcuts.tsx b/web/modules/notebook/components/KeyboardShortcuts.tsx new file mode 100644 index 00000000..e5e7fec4 --- /dev/null +++ b/web/modules/notebook/components/KeyboardShortcuts.tsx @@ -0,0 +1,56 @@ +'use client' + +import { useCallback } from 'react' +import { useHotkeys } from 'react-hotkeys-hook' +import toast from 'react-hot-toast' +import { useUndoRedoActions } from '@/modules/notebook/hooks/use-notebook-undo' +import { useNotebookRuntime } from '@/modules/notebook/providers/notebook-runtime-context' +import { uploadNotebookSnapshot } from '@/modules/notebook/collab/yjs/snapshot/upload' +import { resolveErrorMessage } from '@/lib/errors' +import { notebookSaveActions } from '@/atoms/notebook/save-status' + +export function KeyboardShortcuts() { + const { undo, redo } = useUndoRedoActions() + const { resource, notebookId } = useNotebookRuntime() + + const handleSave = useCallback(() => { + const doc = resource.doc; + notebookSaveActions.markSaving(notebookId) + void (async () => { + try { + const res = await uploadNotebookSnapshot(doc, notebookId) + if (res === 'uploaded') { + notebookSaveActions.markSaved(notebookId) + toast.success('Notebook saved') + } + } catch (err) { + notebookSaveActions.markUnsaved(notebookId) + toast.error(resolveErrorMessage(err, 'Failed to save notebook')) + } + })() + }, [resource.doc, notebookId]) + + useHotkeys('mod+z', () => { + undo() + }, { + preventDefault: true, + enableOnFormTags: false, + }, [undo]) + + useHotkeys('mod+y, mod+shift+z', () => { + redo() + }, { + preventDefault: true, + enableOnFormTags: false, + }, [redo]) + + useHotkeys('mod+s', () => { + handleSave() + }, { + preventDefault: true, + enableOnFormTags: true, + }, [handleSave]) + + + return null +} diff --git a/web/modules/notebook/components/NotebookBottomPanel.tsx b/web/modules/notebook/components/NotebookBottomPanel.tsx new file mode 100644 index 00000000..2ae6ac7a --- /dev/null +++ 
b/web/modules/notebook/components/NotebookBottomPanel.tsx @@ -0,0 +1,176 @@ +import React, { useRef, useCallback, useEffect } from "react"; +import { AnimatePresence, m } from "motion/react"; +import { ChevronUp, ChevronDown } from "lucide-react"; +import * as Tabs from "@radix-ui/react-tabs"; +import { cn } from "@/lib/utils"; +import { Spring } from "@/lib/spring"; +import { useResizable } from "@/hooks/useResizable"; +import { + notebookUIActions, + useNotebookBottomPanelState, + type NotebookBottomPanelTab, +} from "@/atoms/notebook"; + +// Tab content components +import { LatestStreamingGraphTab } from "./tabs/LatestStreamingGraphTab"; +import { HistoryTab } from "./tabs/HistoryTab"; +import { ExportTab } from "./tabs/ExportTab"; +import { ProgressTab } from "./tabs/ProgressTab"; + +interface NotebookBottomPanelProps { + className?: string; +} + +export function NotebookBottomPanel({ className }: NotebookBottomPanelProps) { + const bottomPanel = useNotebookBottomPanelState(); + + const { width: height, resizeHandleProps } = useResizable({ + defaultWidth: 300, + minWidth: 200, + maxWidth: 600, + storageKey: "notebook-bottom-panel-height", + direction: "vertical", + handle: "top", + }); + + const panelRef = useRef(null); + + const togglePanel = useCallback(() => { + notebookUIActions.toggleBottomPanel(); + }, []); + + const openPanel = useCallback((tab?: NotebookBottomPanelTab) => { + notebookUIActions.openBottomPanel(tab); + }, []); + + const closePanel = useCallback(() => { + notebookUIActions.closeBottomPanel(); + }, []); + + + // Handle keyboard shortcuts + useEffect(() => { + const handleKeyDown = (e: KeyboardEvent) => { + if ((e.ctrlKey || e.metaKey) && e.key === "j") { + e.preventDefault(); + togglePanel(); + } + // Escape to close panel + if (e.key === "Escape" && bottomPanel.isOpen) { + closePanel(); + } + }; + + document.addEventListener("keydown", handleKeyDown); + return () => { + document.removeEventListener("keydown", handleKeyDown); + }; + }, 
[togglePanel, closePanel, bottomPanel.isOpen]); + + const headerHeight = 42; + + return ( +
+ + {/* Resize handle - only show when expanded */} + {bottomPanel.isOpen && ( +
+ )} + + {/* Header with tabs and controls */} +
+ + + openPanel("history")} + value="history" + className="px-3 py-1 text-sm rounded-md transition-colors data-[state=active]:bg-muted data-[state=active]:text-foreground text-muted-foreground hover:text-foreground" + > + History + + openPanel("progress")} + value="progress" + className="px-3 py-1 text-sm rounded-md transition-colors data-[state=active]:bg-muted data-[state=active]:text-foreground text-muted-foreground hover:text-foreground" + > + Progress + + openPanel("streaming-graph")} + value="streaming-graph" + className="px-3 py-1 text-sm rounded-md transition-colors data-[state=active]:bg-muted data-[state=active]:text-foreground text-muted-foreground hover:text-foreground" + > + Latest Streaming Graph + + openPanel("export")} + value="export" + className="px-3 py-1 text-sm rounded-md transition-colors data-[state=active]:bg-muted data-[state=active]:text-foreground text-muted-foreground hover:text-foreground" + > + Export & Import + + + + +
+ +
+
+ + {/* Tab content - only show when expanded */} + + {bottomPanel.isOpen && ( + + + + + + + + + + + + + + + + + + )} + + +
+ ); +} diff --git a/web/modules/notebook/components/NotebookEditor.tsx b/web/modules/notebook/components/NotebookEditor.tsx new file mode 100644 index 00000000..177963c8 --- /dev/null +++ b/web/modules/notebook/components/NotebookEditor.tsx @@ -0,0 +1,138 @@ +"use client"; + +import { Fragment, memo, useCallback, useEffect } from "react"; +import { m } from "motion/react"; +import { cn } from "@/lib/utils"; +import { useNotebookReady, useNotebookState } from "@/atoms/notebook/hooks"; +import { + setCellEditorMode, + resetCellEditorModes, +} from "@/atoms/notebook/cell-editor-modes"; +import { useActiveNotebookId } from "@/modules/notebook/hooks/useNotebookSelection"; +import { EditableTitle } from "./EditableTitle"; +import { Cell } from "./Cell"; + +const EmptyState = () => ( +
+

No notebook selected

+

Create or select a notebook to get started.

+
+); + +interface AddCellButtonProps { + notebookId: string; + index?: number; +} + +const AddCellButton = memo(function AddCellButton({ notebookId, index }: AddCellButtonProps) { + const { addCell } = useNotebookState(); + + const switchToEditor = useCallback((cellId: string | null) => { + if (!cellId) return; + setCellEditorMode(notebookId, cellId, "editor"); + }, [notebookId]); + + const handleAddSql = useCallback(() => { + const newCellId = addCell("sql", "", index); + switchToEditor(newCellId); + }, [addCell, index, switchToEditor]); + + const handleAddMarkdown = useCallback(() => { + const newCellId = addCell("markdown", "", index); + switchToEditor(newCellId); + }, [addCell, index, switchToEditor]); + + return ( +
+
+
+
+ + +
+
+
+
+ ); +}); + +interface NotebookEditorInnerProps { + notebookId: string; +} + +const NotebookEditorInner = memo(function NotebookEditorInner({ notebookId }: NotebookEditorInnerProps) { + const { cellOrder } = useNotebookState(); + useEffect(() => { + resetCellEditorModes(notebookId); + return () => { + resetCellEditorModes(notebookId); + }; + }, [notebookId]); + + return ( +
+ + {cellOrder.map((cellId, index) => ( + +
+ +
+ +
+ ))} + {cellOrder.length === 0 && ( +
+

No cells yet

+

Use the buttons above to create your first cell.

+
+ )} +
+ ); +}); + +interface NotebookEditorProps { + className?: string; +} + +export const NotebookEditor = memo(function NotebookEditor({ className }: NotebookEditorProps) { + const isReady = useNotebookReady(); + const notebookId = useActiveNotebookId(); + + const renderContent = () => { + if (!notebookId) { + return ; + } + + return ( + <> + + + + + + ); + }; + + return ( +
+ {renderContent()} +
+ ); +}); diff --git a/web/modules/notebook/components/NotebookScopeBadge.tsx b/web/modules/notebook/components/NotebookScopeBadge.tsx new file mode 100644 index 00000000..20c39a42 --- /dev/null +++ b/web/modules/notebook/components/NotebookScopeBadge.tsx @@ -0,0 +1,48 @@ +import { Badge } from "@/components/ui/badge"; +import { cn } from "@/lib/utils"; +import { User, Users } from "lucide-react"; +import type { ComponentType, ReactNode } from "react"; + +export type NotebookScopeVariant = "personal" | "organization"; + +const SCOPE_META: Record = { + personal: { + label: "Personal", + className: "", + }, + organization: { + label: "Team", + className: "bg-sky-50 text-sky-700 border-sky-200 dark:bg-sky-500/10 dark:text-sky-200 dark:border-sky-400/30", + }, +}; + +const SCOPE_ICON: Record> = { + personal: User, + organization: Users, +}; + +interface NotebookScopeBadgeProps { + scope?: NotebookScopeVariant; + className?: string; + children?: ReactNode; +} + +export function NotebookScopeBadge({ scope, className, children }: NotebookScopeBadgeProps) { + const Icon = SCOPE_ICON[scope ?? "personal"]; + const meta = SCOPE_META[scope ?? 
"personal"]; + + return ( + + + {meta.label} + {children} + + ); +} diff --git a/web/modules/notebook/components/NotebookScopeMenu.tsx b/web/modules/notebook/components/NotebookScopeMenu.tsx new file mode 100644 index 00000000..998d8a97 --- /dev/null +++ b/web/modules/notebook/components/NotebookScopeMenu.tsx @@ -0,0 +1,477 @@ +"use client"; + +import { useCallback, useMemo, useState } from "react"; +import toast from "react-hot-toast"; +import { + Building2, + User, + Trash2, + AlertCircle, + ArrowRight, + Share2, + ChevronDown, +} from "lucide-react"; +import { Button } from "@/components/ui/button"; +import { Popover, PopoverContent, PopoverTrigger } from "@/components/ui/popover"; +import { + Dialog, + DialogContent, + DialogDescription, + DialogFooter, + DialogHeader, + DialogTitle, +} from "@/components/ui/dialog"; +import { Input } from "@/components/ui/input"; +import { Label } from "@/components/ui/label"; +import { Separator } from "@/components/ui/separator"; +import { + Tooltip, + TooltipContent, + TooltipProvider, + TooltipTrigger, +} from "@/components/ui/tooltip"; +import { NotebookScopeBadge, type NotebookScopeVariant } from "./NotebookScopeBadge"; +import { CollabNotebookTransferRequest } from "@/api-gen"; +import { useTransferCollabNotebookMutation, useDeleteCollabNotebookMutation } from "@/modules/notebook/hooks/useCollabNotebooks"; +import { cn } from "@/lib/utils"; +import { NotebookDirectoryEntry } from "../queries/notebook-directory"; + +interface NotebookScopeMenuProps { + notebookId: string; + entry: NotebookDirectoryEntry; + className?: string; +} + +const scopeDescription: Record = { + personal: { text: "Only you can access this notebook", icon: User }, + organization: { text: "Shared with your organization", icon: Building2 }, +}; + +const reasonPersonalToOrg = "Only the notebook owner can share it with the organization."; +const reasonOrgToPersonal = + "Only the notebook's creator can move it back to personal space."; + +export function 
NotebookScopeMenu({ notebookId, entry, className }: NotebookScopeMenuProps) { + const [popoverOpen, setPopoverOpen] = useState(false); + const [dialogOpen, setDialogOpen] = useState(false); + const [ownerCandidate, setOwnerCandidate] = useState(""); + const [deleteDialogOpen, setDeleteDialogOpen] = useState(false); + const [deleteStage, setDeleteStage] = useState<0 | 1>(0); + const [deleteConfirmation, setDeleteConfirmation] = useState(""); + // const userContext = useAuthState(); + // const currentUserId = useMemo(() => { + // if (!userContext?.userId) return null; + // const maybe = Number(userContext.userId); + // return Number.isFinite(maybe) ? maybe : null; + // }, [userContext?.userId]); + // TODO: replace with real user ID from auth context + const currentUserId = 1; + + const transferMutation = useTransferCollabNotebookMutation(); + const deleteMutation = useDeleteCollabNotebookMutation(); + + const scope = entry.scope as NotebookScopeVariant; + + const canPromoteToOrganization = useMemo(() => { + if (scope !== "personal") return false; + if (currentUserId === null) return false; + return entry.ownerUserID === currentUserId || entry.createdByUserID === currentUserId; + }, [currentUserId, entry.createdByUserID, entry.ownerUserID, scope]); + + const canMoveToPersonal = useMemo(() => { + if (scope !== "organization") return false; + if (currentUserId === null) return false; + return entry.createdByUserID === currentUserId; + }, [currentUserId, entry.createdByUserID, scope]); + + const handlePromote = useCallback(async () => { + if (!canPromoteToOrganization) return; + try { + await transferMutation.mutateAsync({ + notebookId, + payload: { + targetScope: CollabNotebookTransferRequest.targetScope.ORGANIZATION, + }, + successMessage: "Notebook is now shared with your organization", + }); + setPopoverOpen(false); + } catch { + /* handled by mutation */ + } + }, [canPromoteToOrganization, notebookId, transferMutation]); + + const handleMakePersonal = 
useCallback(async () => { + if (!canMoveToPersonal) return; + const trimmed = ownerCandidate.trim(); + let ownerId: number | undefined; + if (trimmed.length > 0) { + const parsed = Number(trimmed); + if (!Number.isFinite(parsed)) { + toast.error("Owner ID must be a number"); + return; + } + ownerId = parsed; + } else if (currentUserId !== null) { + ownerId = currentUserId; + } + + try { + await transferMutation.mutateAsync({ + notebookId, + payload: { + targetScope: CollabNotebookTransferRequest.targetScope.PERSONAL, + ownerUserID: ownerId, + }, + successMessage: ownerId && ownerId !== currentUserId + ? "Notebook transferred to the selected teammate" + : "Notebook moved to your personal workspace", + }); + setDialogOpen(false); + setPopoverOpen(false); + setOwnerCandidate(""); + } catch { + /* handled by mutation */ + } + }, [canMoveToPersonal, currentUserId, notebookId, ownerCandidate, transferMutation]); + + const resetDeleteDialog = useCallback(() => { + setDeleteStage(0); + setDeleteConfirmation(""); + }, []); + + const openDeleteDialog = useCallback(() => { + resetDeleteDialog(); + setDeleteDialogOpen(true); + setPopoverOpen(false); + }, [resetDeleteDialog]); + + const closeDeleteDialog = useCallback(() => { + setDeleteDialogOpen(false); + resetDeleteDialog(); + }, [resetDeleteDialog]); + + const handleDeleteNotebook = useCallback(async () => { + try { + await deleteMutation.mutateAsync({ notebookId }); + closeDeleteDialog(); + } catch { + /* handled by mutation */ + } + }, [deleteMutation, notebookId, resetDeleteDialog]); + + const moveToPersonalDisabledReason = useMemo(() => { + if (scope !== "organization") return "Already personal"; + if (currentUserId === null) return "Sign in to change scope."; + if (!canMoveToPersonal) return reasonOrgToPersonal; + return null; + }, [canMoveToPersonal, currentUserId, scope]); + + const deleteConfirmationMatches = useMemo( + () => deleteConfirmation.trim() === entry.title.trim(), + [deleteConfirmation, entry.title], + ); 
+ + const promoteDisabledReason = useMemo(() => { + if (scope !== "personal") return "Already shared"; + if (currentUserId === null) return "Sign in to change scope."; + if (!canPromoteToOrganization) return reasonPersonalToOrg; + return null; + }, [canPromoteToOrganization, currentUserId, scope]); + + const ScopeIcon = scopeDescription[scope].icon; + + return ( + + + + + + + {/* Header Section */} +
+
+
+ +
+
+
+

+ {scope === "personal" ? "Personal" : "Organization"} +

+ +
+

+ {scopeDescription[scope].text} +

+
+
+ +
+ + {/* Actions Section */} + +
+ {/* Share to Organization */} + + + + + {promoteDisabledReason && ( + +

{promoteDisabledReason}

+
+ )} +
+ + {/* Move to Personal */} + + + + + {moveToPersonalDisabledReason && ( + +

{moveToPersonalDisabledReason}

+
+ )} +
+
+ + {/* Danger Zone */} + +
+ +
+
+
+ + + + +
+
+ +
+ Move to personal workspace +
+ + Transfer this notebook to a personal workspace. You can take ownership yourself or assign it to a teammate. + +
+ +
+
+ + setOwnerCandidate(event.target.value)} + disabled={transferMutation.isPending} + inputMode="numeric" + className="h-10" + /> +
+ +

+ Leave blank to assign the notebook to yourself. Otherwise, enter a valid user ID. +

+
+
+
+ + + + + +
+
+ { + if (open) { + setDeleteDialogOpen(true); + } else { + closeDeleteDialog(); + } + }} + > + + {deleteStage === 0 ? ( + <> + +
+
+ +
+ Delete notebook +
+ + You're about to permanently delete "{entry.title}". This action cannot be undone. + +
+ +
+ +
+

What will be deleted:

+
    +
  • All notebook content and cells
  • +
  • Complete collaborative history
  • +
  • Associated metadata and settings
  • +
+
+
+ + + + + + + ) : ( + <> + +
+
+ +
+ Confirm deletion +
+ + Type the notebook title exactly as shown below to confirm this permanent action. + +
+ +
+
+ +
+

{entry.title}

+
+ setDeleteConfirmation(event.target.value)} + disabled={deleteMutation.isPending} + className={cn( + "h-10", + deleteConfirmation.length > 0 && !deleteConfirmationMatches && "border-destructive/50" + )} + /> + {deleteConfirmation.length > 0 && !deleteConfirmationMatches && ( +
+ + Notebook title doesn't match +
+ )} +
+
+ + + + + + + )} +
+
+
+ ); +} diff --git a/web/modules/notebook/components/NotebookSidebar/SchemaBrowser/SchemaSearch.tsx b/web/modules/notebook/components/NotebookSidebar/SchemaBrowser/SchemaSearch.tsx new file mode 100644 index 00000000..a4b8ebf2 --- /dev/null +++ b/web/modules/notebook/components/NotebookSidebar/SchemaBrowser/SchemaSearch.tsx @@ -0,0 +1,56 @@ +"use client" + +import { memo, useCallback } from "react" +import { Search, X } from "lucide-react" +import { m, AnimatePresence } from "motion/react" +import { Input } from "@/components/ui/input" +import { cn } from "@/lib/utils" +import { Spring } from "@/lib/spring" +import { notebookUIActions, useNotebookSidebarState } from "@/atoms/notebook/ui" + +interface SchemaSearchProps { + className?: string +} + +export const SchemaSearch = memo(function SchemaSearch({ + className, +}: SchemaSearchProps) { + const { schemaSearchQuery } = useNotebookSidebarState() + + const handleChange = useCallback( + (e: React.ChangeEvent) => { + notebookUIActions.setSchemaSearchQuery(e.target.value) + }, + [] + ) + + const handleClear = useCallback(() => { + notebookUIActions.setSchemaSearchQuery("") + }, []) + + return ( +
+ + + + {schemaSearchQuery && ( + + + + )} + +
+ ) +}) diff --git a/web/modules/notebook/components/NotebookSidebar/SchemaBrowser/SchemaTree.tsx b/web/modules/notebook/components/NotebookSidebar/SchemaBrowser/SchemaTree.tsx new file mode 100644 index 00000000..bbc3d754 --- /dev/null +++ b/web/modules/notebook/components/NotebookSidebar/SchemaBrowser/SchemaTree.tsx @@ -0,0 +1,141 @@ +"use client" + +import { memo, useMemo } from "react" +import { m } from "motion/react" +import type { Schema } from "@/api-gen/models/Schema" +import { Relation } from "@/api-gen/models/Relation" +import { Spring } from "@/lib/spring" +import { SchemaTreeNode } from "./SchemaTreeNode" +import type { SchemaTreeNodeData, SchemaNodeType, SchemaTreeProps } from "./types" + +// Map API relation type to our node type +function getRelationType(type: Relation.type): SchemaNodeType { + switch (type) { + case Relation.type.TABLE: + return "table" + case Relation.type.SOURCE: + return "source" + case Relation.type.SINK: + return "sink" + case Relation.type.MATERIALIZED_VIEW: + return "materializedView" + case Relation.type.SYSTEM_TABLE: + return "systemTable" + default: + return "table" + } +} + +function buildTreeData(schemas: Schema[]): SchemaTreeNodeData[] { + // Sort schemas: 'public' first, system schemas last, others alphabetically + const sortedSchemas = [...schemas].sort((a, b) => { + if (a.name === "public") return -1 + if (b.name === "public") return 1 + const aIsSystem = a.name.startsWith("pg_") || a.name === "information_schema" || a.name === "rw_catalog" + const bIsSystem = b.name.startsWith("pg_") || b.name === "information_schema" || b.name === "rw_catalog" + if (aIsSystem && !bIsSystem) return 1 + if (!aIsSystem && bIsSystem) return -1 + return a.name.localeCompare(b.name) + }) + + return sortedSchemas.map((schema) => ({ + id: `schema:${schema.name}`, + name: schema.name, + type: "schema" as const, + fullPath: schema.name, + children: [...schema.relations] + .sort((a, b) => a.name.localeCompare(b.name)) + .map((relation) => ({ 
+ id: `relation:${schema.name}.${relation.name}`, + name: relation.name, + type: getRelationType(relation.type), + fullPath: `${schema.name}.${relation.name}`, + children: relation.columns + .filter((col) => !col.isHidden) + .map((column) => ({ + id: `column:${schema.name}.${relation.name}.${column.name}`, + name: column.name, + type: "column" as const, + dataType: column.type, + isPrimaryKey: column.isPrimaryKey, + isHidden: column.isHidden, + fullPath: column.name, + })), + })), + })) +} + +function filterTree( + nodes: SchemaTreeNodeData[], + query: string +): SchemaTreeNodeData[] { + if (!query.trim()) return nodes + + const lowerQuery = query.toLowerCase() + const result: SchemaTreeNodeData[] = [] + + for (const node of nodes) { + const nameMatches = node.name.toLowerCase().includes(lowerQuery) + const filteredChildren = node.children + ? filterTree(node.children, query) + : undefined + + if (nameMatches || (filteredChildren && filteredChildren.length > 0)) { + result.push({ + ...node, + children: + filteredChildren && filteredChildren.length > 0 + ? filteredChildren + : node.children, + }) + } + } + + return result +} + +export const SchemaTree = memo(function SchemaTree({ + schemas, + searchQuery, + onCopyName, + onInsertIntoEditor, +}: SchemaTreeProps) { + const treeData = useMemo(() => buildTreeData(schemas), [schemas]) + const filteredData = useMemo( + () => filterTree(treeData, searchQuery), + [treeData, searchQuery] + ) + + if (filteredData.length === 0) { + return ( + +

+ {searchQuery ? "No matching tables or columns" : "No schemas found"} +

+
+ ) + } + + return ( + + {filteredData.map((node) => ( + + ))} + + ) +}) diff --git a/web/modules/notebook/components/NotebookSidebar/SchemaBrowser/SchemaTreeNode.tsx b/web/modules/notebook/components/NotebookSidebar/SchemaBrowser/SchemaTreeNode.tsx new file mode 100644 index 00000000..c636a5fa --- /dev/null +++ b/web/modules/notebook/components/NotebookSidebar/SchemaBrowser/SchemaTreeNode.tsx @@ -0,0 +1,177 @@ +"use client" + +import { memo, useCallback } from "react" +import { m, AnimatePresence } from "motion/react" +import { + ChevronRight, + Table2, + Database, + Columns3, + Key, + ArrowRightLeft, + ArrowDownToLine, + Layers, + Server, + Copy, + Eye, +} from "lucide-react" +import { cn } from "@/lib/utils" +import { Spring } from "@/lib/spring" +import { notebookUIActions, useNotebookSidebarState } from "@/atoms/notebook/ui" +import type { SchemaTreeNodeData, SchemaNodeType } from "./types" +import { + Tooltip, + TooltipContent, + TooltipProvider, + TooltipTrigger, +} from "@/components/ui/tooltip" + +const NODE_ICONS: Record = { + schema: Database, + table: Table2, + source: ArrowRightLeft, + sink: ArrowDownToLine, + materializedView: Layers, + systemTable: Server, + view: Eye, + column: Columns3, +} + +const NODE_COLORS: Record = { + schema: "text-blue-500", + table: "text-emerald-500", + source: "text-orange-500", + sink: "text-purple-500", + materializedView: "text-cyan-500", + systemTable: "text-gray-500", + view: "text-indigo-500", + column: "text-muted-foreground", +} + +interface SchemaTreeNodeProps { + node: SchemaTreeNodeData + depth: number + onCopyName: (name: string) => void + onInsertIntoEditor?: (text: string) => void +} + +export const SchemaTreeNode = memo(function SchemaTreeNode({ + node, + depth, + onCopyName, + onInsertIntoEditor, +}: SchemaTreeNodeProps) { + const { schemaExpandedNodes } = useNotebookSidebarState() + const isExpanded = schemaExpandedNodes[node.id] ?? 
false + const hasChildren = node.children && node.children.length > 0 + const Icon = NODE_ICONS[node.type] + const iconColor = NODE_COLORS[node.type] + + const handleToggle = useCallback(() => { + if (hasChildren) { + notebookUIActions.toggleSchemaNode(node.id) + } + }, [hasChildren, node.id]) + + const handleCopy = useCallback( + (e: React.MouseEvent) => { + e.stopPropagation() + onCopyName(node.fullPath) + }, + [node.fullPath, onCopyName] + ) + + const handleDoubleClick = useCallback(() => { + onInsertIntoEditor?.(node.fullPath) + }, [node.fullPath, onInsertIntoEditor]) + + return ( +
+ + {/* Expand/collapse chevron */} + {hasChildren ? ( + + + + ) : ( +
+ )} + + {/* Icon */} + + + {/* Name */} + + {node.name} + {node.isPrimaryKey && ( + + )} + + + {/* Data type for columns */} + {node.dataType && ( + + {node.dataType} + + )} + + {/* Copy button */} + + + + + + + + + Copy name + + + + + + {/* Children */} + + {isExpanded && hasChildren && ( + + {node.children!.map((child) => ( + + ))} + + )} + +
+ ) +}) diff --git a/web/modules/notebook/components/NotebookSidebar/SchemaBrowser/index.tsx b/web/modules/notebook/components/NotebookSidebar/SchemaBrowser/index.tsx new file mode 100644 index 00000000..f8dbd678 --- /dev/null +++ b/web/modules/notebook/components/NotebookSidebar/SchemaBrowser/index.tsx @@ -0,0 +1,200 @@ +"use client" + +import { memo, useCallback } from "react" +import { m, AnimatePresence } from "motion/react" +import { Database, Loader2, RefreshCcw, AlertCircle, FileText } from "lucide-react" +import toast from "react-hot-toast" +import { cn } from "@/lib/utils" +import { Button } from "@/components/ui/button" +import { ScrollArea } from "@/components/ui/scroll-area" +import { useDatabaseSchemaQuery } from "@/modules/database/queries/databases" +import { useNotebookSnapshot } from "@/modules/notebook/hooks/use-notebook-snapshot" +import { useNotebookSidebarState } from "@/atoms/notebook/ui" +import { useActiveNotebookId } from "@/modules/notebook/hooks/useNotebookSelection" +import { SchemaSearch } from "./SchemaSearch" +import { SchemaTree } from "./SchemaTree" + +interface SchemaBrowserProps { + className?: string +} + +export const SchemaBrowser = memo(function SchemaBrowser({ + className, +}: SchemaBrowserProps) { + const activeNotebookId = useActiveNotebookId() + + // If no notebook is selected, show a placeholder + if (!activeNotebookId) { + return + } + + return +}) + +/** + * Placeholder when no notebook is selected + */ +function SchemaBrowserNoNotebook({ className }: { className?: string }) { + return ( + + + + +

+ No notebook selected +

+

+ Select a notebook to browse its database schema +

+
+ ) +} + +/** + * Schema browser when a notebook is selected (has NotebookProvider context) + */ +const SchemaBrowserReady = memo(function SchemaBrowserReady({ + className, +}: { + className?: string +}) { + const { databaseId } = useNotebookSnapshot() + const { schemaSearchQuery } = useNotebookSidebarState() + + const databaseIdNum = databaseId ? parseInt(String(databaseId), 10) : null + const { + data: schemas, + isLoading, + isError, + refetch, + isFetching, + } = useDatabaseSchemaQuery(databaseIdNum) + + const handleCopyName = useCallback(async (name: string) => { + try { + await navigator.clipboard.writeText(name) + toast.success(`Copied: ${name}`) + } catch { + toast.error("Failed to copy") + } + }, []) + + // No database selected + if (!databaseId) { + return ( + + + + +

+ No database selected +

+

+ Select a database from the toolbar to browse schemas +

+
+ ) + } + + // Loading state + if (isLoading) { + return ( + + +

Loading schemas...

+
+ ) + } + + // Error state + if (isError) { + return ( + + +

+ Failed to load schemas +

+ +
+ ) + } + + return ( +
+ {/* Search */} +
+ +
+ + {/* Refresh indicator */} + + {isFetching && !isLoading && ( + +
+ + Refreshing... +
+
+ )} +
+ + {/* Tree */} + +
+ +
+
+
+ ) +}) diff --git a/web/modules/notebook/components/NotebookSidebar/SchemaBrowser/types.ts b/web/modules/notebook/components/NotebookSidebar/SchemaBrowser/types.ts new file mode 100644 index 00000000..76220be6 --- /dev/null +++ b/web/modules/notebook/components/NotebookSidebar/SchemaBrowser/types.ts @@ -0,0 +1,29 @@ +import type { Schema } from "@/api-gen/models/Schema" + +export type SchemaNodeType = + | "schema" + | "table" + | "source" + | "sink" + | "materializedView" + | "systemTable" + | "view" + | "column" + +export interface SchemaTreeNodeData { + id: string // "schema:public" | "relation:public.users" | "column:public.users.id" + name: string + type: SchemaNodeType + dataType?: string // For columns + isPrimaryKey?: boolean + isHidden?: boolean + children?: SchemaTreeNodeData[] + fullPath: string // For copy: "public.users" or column name +} + +export interface SchemaTreeProps { + schemas: Schema[] + searchQuery: string + onCopyName: (name: string) => void + onInsertIntoEditor?: (text: string) => void +} diff --git a/web/modules/notebook/components/NotebookSidebar/SidebarTabs.tsx b/web/modules/notebook/components/NotebookSidebar/SidebarTabs.tsx new file mode 100644 index 00000000..d130824a --- /dev/null +++ b/web/modules/notebook/components/NotebookSidebar/SidebarTabs.tsx @@ -0,0 +1,65 @@ +"use client" + +import { memo } from "react" +import * as Tabs from "@radix-ui/react-tabs" +import { m } from "motion/react" +import { FileText, Database } from "lucide-react" +import { cn } from "@/lib/utils" +import { Spring } from "@/lib/spring" +import { + notebookUIActions, + useNotebookSidebarState, + type NotebookSidebarTab, +} from "@/atoms/notebook/ui" + +interface SidebarTabsProps { + className?: string +} + +const tabs: { value: NotebookSidebarTab; label: string; icon: typeof FileText }[] = [ + { value: "notebooks", label: "Notebooks", icon: FileText }, + { value: "schema", label: "Schema", icon: Database }, +] + +export const SidebarTabs = memo(function 
SidebarTabs({ + className, +}: SidebarTabsProps) { + const { activeTab } = useNotebookSidebarState() + + return ( + + + {tabs.map((tab) => { + const Icon = tab.icon + const isActive = activeTab === tab.value + + return ( + notebookUIActions.setSidebarTab(tab.value)} + className={cn( + "relative flex-1 flex items-center justify-center gap-1.5 px-3 py-1.5 text-xs font-medium rounded-md transition-colors", + isActive + ? "text-foreground" + : "text-muted-foreground hover:text-foreground" + )} + > + {isActive && ( + + )} + + + {tab.label} + + + ) + })} + + + ) +}) diff --git a/web/modules/notebook/components/NotebookSidebar/index.tsx b/web/modules/notebook/components/NotebookSidebar/index.tsx new file mode 100644 index 00000000..8731b226 --- /dev/null +++ b/web/modules/notebook/components/NotebookSidebar/index.tsx @@ -0,0 +1,411 @@ +"use client"; + +import { memo, useCallback, useMemo, useRef, useLayoutEffect } from "react"; +import { FileText, Loader2, Plus, RefreshCcw, Sparkles } from "lucide-react"; +import { + AnimatePresence, + m, + useMotionValue, + useSpring, + useTransform, +} from "motion/react"; +import { cn } from "@/lib/utils"; +import { Button } from "@/components/ui/button"; +import { ScrollArea } from "@/components/ui/scroll-area"; +import { useResizable } from "@/hooks/useResizable"; +import { Spring } from "@/lib/spring"; +import { useCreateCollabNotebookMutation } from "@/modules/notebook/queries/notebook-mutations"; +import { useActiveNotebookId, useSelectNotebook } from "@/modules/notebook/hooks/useNotebookSelection"; +import { NotebookListItem } from "./item"; +import { useNotebookDirectory } from "../../queries/notebook-directory"; +import { SidebarTabs } from "./SidebarTabs"; +import { SchemaBrowser } from "./SchemaBrowser"; +import { useNotebookSidebarState } from "@/atoms/notebook/ui"; + +interface NotebookSidebarProps { + className?: string; +} + +const INDICATOR_H = 40; + +export const NotebookSidebar = memo(function NotebookSidebar({ + 
className, +}: NotebookSidebarProps) { + const { activeTab } = useNotebookSidebarState(); + const { entries: directory, refetch, isFetching } = useNotebookDirectory(); + const activeNotebookId = useActiveNotebookId(); + const createNotebookMutation = useCreateCollabNotebookMutation(); + const selectNotebook = useSelectNotebook(); + const handleSelectNotebook = useCallback((notebookId: string) => { + selectNotebook(notebookId, { history: "push" }); + }, [selectNotebook]); + + // refs + const itemRefs = useRef>(new Map()); + const listContainerRef = useRef(null); + + // observers + const resizeObserverRef = useRef(null); + const mutationObserverRef = useRef(null); + + const rafIdRef = useRef(null); + const pendingMeasureRef = useRef(false); + + const yTarget = useMotionValue(0); + const hTarget = useMotionValue(0); + + const y = useSpring(yTarget, { stiffness: 900, damping: 48, mass: 0.24 }); + const height = useSpring(hTarget, { stiffness: 900, damping: 48, mass: 0.24 }); + + const indicatorY = useTransform([y, height], ([yv, hv]: number[]) => yv + Math.max(0, hv - INDICATOR_H - 5) / 2); + + const handleCreate = useCallback(() => { + createNotebookMutation.mutate({ makeActive: true }); + }, [createNotebookMutation]); + + const scheduleMeasure = useCallback(() => { + if (pendingMeasureRef.current) return; + pendingMeasureRef.current = true; + rafIdRef.current = requestAnimationFrame(() => { + pendingMeasureRef.current = false; + + if (!activeNotebookId) return; + const activeEl = itemRefs.current.get(activeNotebookId); + const containerEl = listContainerRef.current; + if (!activeEl || !containerEl) return; + + const itemRect = activeEl.getBoundingClientRect(); + const containerRect = containerEl.getBoundingClientRect(); + const nextY = itemRect.top - containerRect.top; + const nextH = itemRect.height; + + yTarget.set(nextY); + hTarget.set(nextH); + }); + }, [activeNotebookId, yTarget, hTarget]); + + const cancelScheduledMeasure = useCallback(() => { + if 
(rafIdRef.current != null) { + cancelAnimationFrame(rafIdRef.current); + rafIdRef.current = null; + pendingMeasureRef.current = false; + } + }, []); + const handleItemRef = useCallback( + (entryId: string) => (el: HTMLButtonElement | null) => { + if (el) { + itemRefs.current.set(entryId, el); + if (entryId === activeNotebookId) { + scheduleMeasure(); + } + } else { + itemRefs.current.delete(entryId); + } + }, + [activeNotebookId, scheduleMeasure] + ); + useLayoutEffect(() => { + scheduleMeasure(); + + resizeObserverRef.current?.disconnect(); + mutationObserverRef.current?.disconnect(); + cancelScheduledMeasure(); + + const activeEl = activeNotebookId ? itemRefs.current.get(activeNotebookId) : null; + const containerEl = listContainerRef.current; + + const ro = new ResizeObserver(() => { + scheduleMeasure(); + }); + resizeObserverRef.current = ro; + if (activeEl) ro.observe(activeEl); + if (containerEl) ro.observe(containerEl); + + if (activeEl) { + const mo = new MutationObserver(() => { + scheduleMeasure(); + }); + mutationObserverRef.current = mo; + mo.observe(activeEl, { + subtree: true, + characterData: true, + childList: true, + attributes: true, + }); + } + + return () => { + resizeObserverRef.current?.disconnect(); + mutationObserverRef.current?.disconnect(); + cancelScheduledMeasure(); + }; + }, [activeNotebookId, scheduleMeasure, cancelScheduledMeasure]); + + const { width: sidebarWidth, resizeHandleProps } = useResizable({ + defaultWidth: 320, + minWidth: 220, + maxWidth: 520, + storageKey: "notebook-sidebar-width", + direction: "horizontal", + handle: "right", + }); + + const isCreating = createNotebookMutation.isPending; + const isEmpty = directory.length === 0; + const isLoading = isFetching && isEmpty; + + const containerVariants = useMemo( + () => ({ + hidden: { opacity: 0 }, + show: { + opacity: 1, + transition: { staggerChildren: 0.05, delayChildren: 0.1 }, + }, + }), + [] + ); + + const itemVariants = useMemo( + () => ({ + hidden: { opacity: 0, y: 
10, scale: 0.95 }, + show: { + opacity: 1, + y: 0, + scale: 1, + transition: Spring.smooth(0.3), + }, + exit: { + opacity: 0, + scale: 0.95, + transition: Spring.smooth(0.2), + }, + }), + [] + ); + + return ( + + {/* Header with Tabs */} + + {/* Tabs */} +
+ +
+ + {/* Actions - only show for notebooks tab */} + + {activeTab === "notebooks" && ( + +
+
+ + {isFetching && ( + + + + )} + +
+ +
+ + + + + + + +
+
+
+ )} +
+
+ + {/* Content */} + + {activeTab === "notebooks" ? ( + + +
+ + {isLoading ? ( + + + + +
+

Loading notebooks…

+

Please wait while we fetch your notebooks

+
+
+ ) : isEmpty ? ( + + +
+ + + +
+
+ +

No notebooks yet

+
+

+ Create your first notebook to start collaborating +

+
+ + + + + + ) : ( + + {activeNotebookId && ( + <> + + + + )} + + + {directory.map((entry) => { + const isActive = entry.id === activeNotebookId; + return ( + { + if (isActive) { + scheduleMeasure(); + } + }} + > + + + ); + })} + + + )} + +
+ +
+ ) : ( + + + + )} +
+ + {/* Resize Handle */} +
+ +
+ + ); +}); diff --git a/web/modules/notebook/components/NotebookSidebar/item.tsx b/web/modules/notebook/components/NotebookSidebar/item.tsx new file mode 100644 index 00000000..224d23c1 --- /dev/null +++ b/web/modules/notebook/components/NotebookSidebar/item.tsx @@ -0,0 +1,169 @@ +import { NotebookDirectoryEntry } from "../../queries/notebook-directory"; +import { Spring } from "@/lib/spring"; +import { cn } from "@/lib/utils"; +import { FileText, Trash2, Copy } from "lucide-react"; +import { m } from "motion/react"; +import { memo, useCallback, useState, forwardRef } from "react"; +import { NotebookScopeBadge } from "../NotebookScopeBadge"; +import { + ContextMenu, + ContextMenuContent, + ContextMenuItem, + ContextMenuSeparator, + ContextMenuTrigger, +} from "@/components/ui/context-menu"; +import { Button } from "@/components/ui/button"; +import { + Dialog, + DialogContent, + DialogDescription, + DialogFooter, + DialogHeader, + DialogTitle, +} from "@/components/ui/dialog"; +import toast from "react-hot-toast"; +import { useDeleteCollabNotebookMutation } from "@/modules/notebook/hooks/useCollabNotebooks"; + +const normalizeNotebookTitle = (title?: string | null): string => { + const trimmed = title?.trim() ?? ""; + return trimmed.length > 0 ? 
trimmed : ""; +}; + + +interface NotebookListItemProps { + entry: NotebookDirectoryEntry; + isActive: boolean; + onSelect: (notebookId: string) => void; +} + +export const NotebookListItem = memo( + forwardRef(function NotebookListItem( + { entry, isActive, onSelect }, + ref, + ) { + const deleteNotebook = useDeleteCollabNotebookMutation(); + + const [deleteOpen, setDeleteOpen] = useState(false); + + const handleSelect = useCallback(() => { + onSelect(entry.id); + }, [entry.id, onSelect]); + + const handleCopyId = useCallback(async () => { + try { + await navigator.clipboard.writeText(entry.id); + toast.success("Notebook ID copied"); + } catch { + toast.error("Failed to copy ID"); + } + }, [entry.id]); + + const confirmDelete = useCallback(async () => { + try { + await deleteNotebook.mutateAsync({ notebookId: entry.id }); + setDeleteOpen(false); + } catch { + /* toast handled in mutation */ + } + }, [deleteNotebook, entry.id]); + + return ( + + + + {/* Content wrapper with padding */} +
+ {/* Icon */} + + + + + {/* Title and badges - flex-1 with min-w-0 for proper truncation */} +
+ + {normalizeNotebookTitle(entry.title)} + + + {/* Badges row - flex-wrap for responsiveness */} +
+ + + +
+
+
+
+
+ + + + Open + + + + Copy ID + + + setDeleteOpen(true)} + className="text-destructive focus:text-destructive" + > + + Delete… + + + + {/* Delete confirmation dialog */} + + + + Delete notebook + + This will permanently delete "{entry.title}". This action cannot be undone. + + + + + + + + +
+ ); + }), +); diff --git a/web/modules/notebook/components/NotebookToolbar.tsx b/web/modules/notebook/components/NotebookToolbar.tsx new file mode 100644 index 00000000..b0d482cb --- /dev/null +++ b/web/modules/notebook/components/NotebookToolbar.tsx @@ -0,0 +1,375 @@ +"use client"; + +import { useEffect, useState, useCallback, useRef } from "react"; +import { m } from "motion/react"; +import { Button } from "@/components/ui/button"; +import { + Select, + SelectContent, + SelectItem, + SelectTrigger, + SelectValue, +} from "@/components/ui/select"; +import { Popover, PopoverContent, PopoverTrigger } from "@/components/ui/popover"; +import { + Tooltip, + TooltipContent, + TooltipProvider, + TooltipTrigger, +} from "@/components/ui/tooltip"; +import { useNotebookOperations } from "../../../atoms/notebook/hooks"; +import { useDatabasesQuery } from "@/modules/database/queries/databases"; +import { DatabaseDrawer } from "./DatabaseDrawer"; +import type { Database as DatabaseType } from "@/api-gen/models/Database"; +import { + FileText, + Wifi, + WifiOff, + Settings2, + Plus, + Play, + ChevronDown, + Eraser, + Undo, + Redo, + Loader2, +} from "lucide-react"; +import { useCreateCollabNotebookMutation } from "@/modules/notebook/hooks/useCollabNotebooks"; +import { NotebookScopeMenu } from "./NotebookScopeMenu"; +import { NotebookScopeBadge } from "./NotebookScopeBadge"; +import { NotebookPresenceAvatars } from "./common/PresenceAvatars"; +import { useNotebookSnapshot, useNotebookStoreState } from "../hooks/use-notebook-snapshot"; +import { useActiveNotebookId, useNotebookDirectoryEntry } from "@/modules/notebook/hooks/useNotebookSelection"; +import { useUndoRedoActions, useUndoRedoState } from "@/modules/notebook/hooks/use-notebook-undo"; +import { SaveStatusIndicator } from "./SaveStatusIndicator"; + +interface NotebookToolbarProps { + className?: string; +} + +export function NotebookToolbar({ className }: NotebookToolbarProps) { + const notebookId = 
useActiveNotebookId(); + + const createNotebookMutation = useCreateCollabNotebookMutation(); + const handleCreateNotebook = useCallback(() => { + createNotebookMutation.mutate({ makeActive: true }); + }, [createNotebookMutation]); + + if (!notebookId) { + return ( +
+ +
+ ); + } + + return ( +
+ +
+ ); +} + +function NoNotebookToolbar({ onCreateNotebook, isCreating }: { onCreateNotebook: () => void; isCreating: boolean }) { + return ( +
+
+ + Select a notebook to begin. +
+ +
+ ); +} + +function NotebookToolbarReady({ + notebookId, + onCreateNotebook, + isCreating, +}: { + notebookId: string; + onCreateNotebook: () => void; + isCreating: boolean; +}) { + // const { title, databaseId, order } = useNotebookSnapshot(); + const snap = useNotebookSnapshot(); + const title = snap.title; + const databaseId = snap.databaseId; + const order = snap.order || []; + const notebookState = useNotebookStoreState(); + const { clearOutputs, executeCells } = useNotebookOperations(); + const { data: databases, isLoading: isLoadingDatabases, refetch } = useDatabasesQuery(); + const notebookEntry = useNotebookDirectoryEntry(notebookId); + + const [isRunAllInProgress, setIsRunAllInProgress] = useState(false); + const [isTitleTruncated, setIsTitleTruncated] = useState(false); + const titleRef = useRef(null); + + useEffect(() => { + const checkTruncation = () => { + if (titleRef.current) { + setIsTitleTruncated(titleRef.current.scrollWidth > titleRef.current.clientWidth); + } + }; + + checkTruncation(); + window.addEventListener('resize', checkTruncation); + + return () => { + window.removeEventListener('resize', checkTruncation); + }; + }, [title]); + + const handleDatabaseSelect = (value: string) => { + notebookState.databaseId = value; + }; + + const handleClearOutputs = () => { + clearOutputs(); + }; + + const handleRunAll = useCallback(async () => { + if (order.length === 0) return; + setIsRunAllInProgress(true); + try { + // await executeCells(order, { stopOnError: true }); + } finally { + setIsRunAllInProgress(false); + } + }, [order, executeCells]); + + const connectionStatus = databaseId ? "connected" : "disconnected"; + const disableRunActions = order.length === 0 || !databaseId || isRunAllInProgress; + + return ( +
+
+
+ +
+
+ + + + {title} + + +

{title}

+
+
+
+ {notebookEntry ? ( + + ) : ( + + )} + · + +
+
+
+ + + refetch()} + /> +
+ +
+ + + + + +
+
+ ); +} + +function DatabaseSection({ + connectionStatus, + selectedDatabaseId, + isLoading, + databases, + onSelectDatabase, + onDatabaseChange, +}: { + connectionStatus: "connected" | "disconnected"; + selectedDatabaseId: string | null; + isLoading: boolean; + databases: DatabaseType[]; + onSelectDatabase: (value: string) => void; + onDatabaseChange: () => void; +}) { + return ( +
+ + + + + +
+ ); +} + +function UndoRedoControls() { + const { undo, redo } = useUndoRedoActions(); + const { canUndo, canRedo } = useUndoRedoState(); + + return ( +
+ + +
+ ); +} + +function RunAllControls({ + disableRunActions, + isRunAllInProgress, + onRunAll, + onClearOutputs, +}: { + disableRunActions: boolean; + isRunAllInProgress: boolean; + onRunAll: () => void | Promise; + onClearOutputs: () => void; +}) { + const [isMenuOpen, setIsMenuOpen] = useState(false); + + const handleClearOutputsClick = () => { + onClearOutputs(); + setIsMenuOpen(false); + }; + + const hoverAnimation = disableRunActions ? undefined : { scale: 1.02 }; + const tapAnimation = disableRunActions ? undefined : { scale: 0.97 }; + + return ( + + + + + + + + + + + + + + + ); +} diff --git a/web/modules/notebook/components/OutlineFloating/OutlineItem.tsx b/web/modules/notebook/components/OutlineFloating/OutlineItem.tsx new file mode 100644 index 00000000..883286d0 --- /dev/null +++ b/web/modules/notebook/components/OutlineFloating/OutlineItem.tsx @@ -0,0 +1,34 @@ +'use client' + +import { memo } from 'react' +import { m } from 'motion/react' +import { cn } from '@/lib/utils' +import { Spring } from '@/lib/spring' +import type { OutlineItemProps } from './types' + +/** + * Individual clickable heading item in the outline panel + */ +export const OutlineItem = memo(function OutlineItem({ + heading, + isActive, + indent, + onClick, +}: OutlineItemProps) { + return ( + + {heading.text} + + ) +}) diff --git a/web/modules/notebook/components/OutlineFloating/OutlinePanel.tsx b/web/modules/notebook/components/OutlineFloating/OutlinePanel.tsx new file mode 100644 index 00000000..f2e6ae01 --- /dev/null +++ b/web/modules/notebook/components/OutlineFloating/OutlinePanel.tsx @@ -0,0 +1,55 @@ +'use client' + +import { memo } from 'react' +import { m } from 'motion/react' +import { OutlineItem } from './OutlineItem' +import type { OutlinePanelProps } from './types' + +/** + * Indentation per heading level (pixels) + */ +const INDENT_PER_LEVEL = 12 + +/** + * Calculate indentation based on heading level + */ +function getIndent(level: number): number { + return (level - 1) * 
INDENT_PER_LEVEL +} + +/** + * Expanded state of the outline showing the full heading text + */ +export const OutlinePanel = memo(function OutlinePanel({ + headings, + activeId, + onHeadingClick, +}: OutlinePanelProps) { + return ( + +
+ {headings.map((heading) => ( + onHeadingClick(heading)} + /> + ))} +
+
+ ) +}) diff --git a/web/modules/notebook/components/OutlineFloating/OutlineTrigger.tsx b/web/modules/notebook/components/OutlineFloating/OutlineTrigger.tsx new file mode 100644 index 00000000..3ded38ef --- /dev/null +++ b/web/modules/notebook/components/OutlineFloating/OutlineTrigger.tsx @@ -0,0 +1,51 @@ +'use client' + +import { memo } from 'react' +import { m } from 'motion/react' +import { cn } from '@/lib/utils' +import type { OutlineTriggerProps } from './types' + +/** + * Line widths based on heading level. + * Higher level (h1) = longer line, lower level (h6) = shorter line. + */ +const LINE_WIDTHS: Record = { + 1: 24, + 2: 20, + 3: 16, + 4: 12, + 5: 8, + 6: 6, +} + +/** + * Collapsed state of the outline showing vertical lines + * representing the heading hierarchy. + */ +export const OutlineTrigger = memo(function OutlineTrigger({ + headings, + activeId, +}: OutlineTriggerProps) { + return ( + + {headings.map((heading) => ( +
+ ))} + + ) +}) diff --git a/web/modules/notebook/components/OutlineFloating/index.tsx b/web/modules/notebook/components/OutlineFloating/index.tsx new file mode 100644 index 00000000..1d79c193 --- /dev/null +++ b/web/modules/notebook/components/OutlineFloating/index.tsx @@ -0,0 +1,134 @@ +'use client' + +import { memo, useState, useRef, useCallback } from 'react' +import { AnimatePresence } from 'motion/react' +import { useNotebookHeadings } from '@/modules/notebook/hooks/useNotebookHeadings' +import { useActiveHeading } from '@/modules/notebook/hooks/useActiveHeading' +import { OutlineTrigger } from './OutlineTrigger' +import { OutlinePanel } from './OutlinePanel' +import type { HeadingItem, OutlineFloatingProps } from './types' + +/** + * Delay before collapsing the panel after mouse leaves (ms) + */ +const COLLAPSE_DELAY = 200 + +/** + * Scroll to a heading element with smooth animation + */ +function scrollToHeading( + heading: HeadingItem, + scrollContainerRef: React.RefObject +) { + const container = scrollContainerRef.current + if (!container) { + console.warn('[OutlineFloating] No scroll container') + return + } + + const el = container.querySelector(`[data-heading-id="${heading.id}"]`) + if (!el) { + console.warn('[OutlineFloating] Heading element not found:', heading.id) + // Try to find by querying the whole document + const docEl = document.querySelector(`[data-heading-id="${heading.id}"]`) + if (docEl) { + console.log('[OutlineFloating] Found in document, scrolling via scrollIntoView') + docEl.scrollIntoView({ behavior: 'smooth', block: 'start' }) + } + return + } + + // Calculate the target scroll position + const containerRect = container.getBoundingClientRect() + const elementRect = el.getBoundingClientRect() + const relativeTop = elementRect.top - containerRect.top + const targetScrollTop = container.scrollTop + relativeTop - 20 // 20px offset from top + + container.scrollTo({ + top: targetScrollTop, + behavior: 'smooth', + }) +} + +/** + * 
Floating outline sidebar that shows document structure. + * Appears as minimal lines when collapsed, expands to full TOC on hover. + */ +export const OutlineFloating = memo(function OutlineFloating({ + scrollContainerRef, +}: OutlineFloatingProps) { + const headings = useNotebookHeadings() + const activeId = useActiveHeading(headings, scrollContainerRef) + const [isExpanded, setIsExpanded] = useState(false) + const collapseTimeoutRef = useRef(null) + + // Clear any pending collapse timer + const clearCollapseTimeout = useCallback(() => { + if (collapseTimeoutRef.current) { + clearTimeout(collapseTimeoutRef.current) + collapseTimeoutRef.current = null + } + }, []) + + // Handle mouse entering the trigger/panel area + const handleMouseEnter = useCallback(() => { + clearCollapseTimeout() + setIsExpanded(true) + }, [clearCollapseTimeout]) + + // Handle mouse leaving the trigger/panel area with delay + const handleMouseLeave = useCallback(() => { + clearCollapseTimeout() + collapseTimeoutRef.current = setTimeout(() => { + setIsExpanded(false) + }, COLLAPSE_DELAY) + }, [clearCollapseTimeout]) + + // Handle heading click - scroll to heading, keep panel open + const handleHeadingClick = useCallback( + (heading: HeadingItem) => { + scrollToHeading(heading, scrollContainerRef) + }, + [scrollContainerRef] + ) + + // Don't render if there are no headings + if (headings.length === 0) return null + + return ( +
+
+ {/* Trigger lines - fades out when expanded */} + + {!isExpanded && ( + + )} + + + {/* Panel - absolutely positioned to overlay from the right */} + + {isExpanded && ( +
+ +
+ )} +
+
+
+ ) +}) + +export type { HeadingItem, OutlineFloatingProps } diff --git a/web/modules/notebook/components/OutlineFloating/types.ts b/web/modules/notebook/components/OutlineFloating/types.ts new file mode 100644 index 00000000..4684a112 --- /dev/null +++ b/web/modules/notebook/components/OutlineFloating/types.ts @@ -0,0 +1,61 @@ +import type { RefObject } from 'react' + +/** + * Represents a heading extracted from a markdown cell + */ +export interface HeadingItem { + /** Unique ID for the heading: `${cellId}-h-${index}` */ + id: string + /** Parent cell ID */ + cellId: string + /** Heading level (h1-h6) */ + level: 1 | 2 | 3 | 4 | 5 | 6 + /** Heading text content */ + text: string + /** Position in the document (0-indexed) */ + index: number +} + +/** + * Props for OutlineFloating component + */ +export interface OutlineFloatingProps { + /** Reference to the scrollable container */ + scrollContainerRef: RefObject +} + +/** + * Props for OutlineTrigger component + */ +export interface OutlineTriggerProps { + /** List of headings to display as lines */ + headings: HeadingItem[] + /** Currently active heading ID */ + activeId: string | null +} + +/** + * Props for OutlinePanel component + */ +export interface OutlinePanelProps { + /** List of headings to display */ + headings: HeadingItem[] + /** Currently active heading ID */ + activeId: string | null + /** Callback when a heading is clicked */ + onHeadingClick: (heading: HeadingItem) => void +} + +/** + * Props for OutlineItem component + */ +export interface OutlineItemProps { + /** The heading to display */ + heading: HeadingItem + /** Whether this heading is currently active/visible */ + isActive: boolean + /** Indentation in pixels */ + indent: number + /** Callback when clicked */ + onClick: () => void +} diff --git a/web/modules/notebook/components/SaveStatusIndicator.tsx b/web/modules/notebook/components/SaveStatusIndicator.tsx new file mode 100644 index 00000000..a31b4849 --- /dev/null +++ 
b/web/modules/notebook/components/SaveStatusIndicator.tsx @@ -0,0 +1,121 @@ +'use client' + +import { useEffect, useState } from 'react' +import { m, AnimatePresence } from 'motion/react' +import { Check, Cloud, Loader2 } from 'lucide-react' +import { cn } from '@/lib/utils' +import { + useNotebookSaveStatus, + useNotebookLastSavedAt, + type SaveStatus, +} from '@/atoms/notebook/save-status' +import { + Tooltip, + TooltipContent, + TooltipProvider, + TooltipTrigger, +} from '@/components/ui/tooltip' +import { formatRelativeTime } from '@/lib/time' + +interface SaveStatusIndicatorProps { + notebookId: string + className?: string +} + + +const STATUS_CONFIG: Record = { + saved: { + icon: Check, + label: 'Saved', + className: 'text-muted-foreground', + }, + saving: { + icon: Loader2, + label: 'Saving...', + className: 'text-muted-foreground', + }, + unsaved: { + icon: Cloud, + label: 'Unsaved', + className: 'text-amber-500', + }, +} + +export function SaveStatusIndicator({ notebookId, className }: SaveStatusIndicatorProps) { + const status = useNotebookSaveStatus(notebookId) + const lastSavedAt = useNotebookLastSavedAt(notebookId) + const [relativeTime, setRelativeTime] = useState(() => formatRelativeTime(lastSavedAt)) + + useEffect(() => { + if (!lastSavedAt) return + + setRelativeTime(formatRelativeTime(lastSavedAt)) + + const interval = setInterval(() => { + setRelativeTime(formatRelativeTime(lastSavedAt)) + }, 10000) + + return () => clearInterval(interval) + }, [lastSavedAt]) + + const config = STATUS_CONFIG[status] + const Icon = config.icon + + const tooltipContent = status === 'saved' && relativeTime + ? `Saved ${relativeTime}` + : config.label + + return ( + + + + + + + + + + + + {config.label} + + + + + +

{tooltipContent}

+
+
+
+ ) +} diff --git a/web/modules/notebook/components/SqlConsoleMigrationBanner.tsx b/web/modules/notebook/components/SqlConsoleMigrationBanner.tsx new file mode 100644 index 00000000..9a16bd9b --- /dev/null +++ b/web/modules/notebook/components/SqlConsoleMigrationBanner.tsx @@ -0,0 +1,91 @@ +"use client"; +import { useCallback, useState } from 'react'; +import { migrateSqlConsoleTabs, readSqlConsoleTabsFromLocalStorage, markSqlConsoleMigrationDone, isSqlConsoleMigrationDone } from '@/modules/notebook/lib/migration/sqlconsole-to-notebook'; +import { CollabNotebook } from '@/api-gen'; +import { Button } from '@/components/ui/button'; +import { toast } from 'react-hot-toast'; +import { useSelectNotebook } from '@/modules/notebook/hooks/useNotebookSelection'; +import { useNotebookDirectory } from '../queries/notebook-directory'; + +interface SqlConsoleMigrationBannerProps { + className?: string; +} + +export function SqlConsoleMigrationBanner({ className }: SqlConsoleMigrationBannerProps) { + const [running, setRunning] = useState(false); + const [done, setDone] = useState(0); + const [total, setTotal] = useState(0); + const [results, setResults] = useState(null); + const [aborted, setAborted] = useState(false); + const { refetch } = useNotebookDirectory(); + const selectNotebook = useSelectNotebook(); + + const tabs = readSqlConsoleTabsFromLocalStorage(); + const already = isSqlConsoleMigrationDone(); + const nonEmptyCount = tabs.filter(t => (t.content ?? '').trim().length > 0).length; + + const handleMigrate = useCallback(async () => { + if (running) return; + setRunning(true); + setResults(null); + setAborted(false); + const controller = new AbortController(); + const localTabs = readSqlConsoleTabsFromLocalStorage(); + setTotal(localTabs.filter(t => (t.content ?? 
'').trim().length > 0).length); + try { + const summary = await migrateSqlConsoleTabs(localTabs, { + scope: CollabNotebook.scope.PERSONAL, + onProgress: (d, tot) => { + setDone(d); + setTotal(tot); + }, + signal: controller.signal, + }); + setResults(summary.migrated); + if (!summary.aborted) { + markSqlConsoleMigrationDone(); + toast.success(`迁移完成:成功 ${summary.migrated.filter(r => r.status==='ok').length},失败 ${summary.migrated.filter(r => r.status==='error').length},空白 ${summary.skippedEmpty.length}`); + refetch(); + const firstOk = summary.migrated.find(r => r.status === 'ok'); + if (firstOk) { + selectNotebook(firstOk.notebookId, { history: 'replace' }); + } + } else { + setAborted(true); + toast('迁移已中止'); + } + } catch (err: any) { + toast.error(`迁移过程中出现错误: ${err?.message || String(err)}`); + } finally { + setRunning(false); + } + }, [running, selectNotebook, refetch]); + + if (already || nonEmptyCount === 0) return null; + + return ( +
+
+
+
+

检测到 {nonEmptyCount} 条旧 SQLConsole 查询

+

一键迁移:每条查询将转换成一个个人 Notebook(含单个 SQL cell)。迁移后会标记完成。

+ {running && ( +
+

进度 {done}/{total} ({total === 0 ? 0 : Math.round((done / total) * 100)}%){aborted && '(已中止)'}

+
+ )} + {results && !running && ( +

成功 {results.filter(r => r.status==='ok').length},失败 {results.filter(r => r.status==='error').length}。

+ )} +
+
+ +
+
+
+
+ ); +} diff --git a/web/modules/notebook/components/common/PresenceAvatars.tsx b/web/modules/notebook/components/common/PresenceAvatars.tsx new file mode 100644 index 00000000..d179549a --- /dev/null +++ b/web/modules/notebook/components/common/PresenceAvatars.tsx @@ -0,0 +1,146 @@ +"use client"; + +import { memo, useMemo } from "react"; +import { + useCellPresence, + useNotebookPresence, + type AwarenessPresence, +} from "@/modules/notebook/awareness"; +import { Avatar, AvatarFallback } from "@/components/ui/avatar"; +import { cn } from "@/lib/utils"; +import { + Tooltip, + TooltipContent, + TooltipProvider, + TooltipTrigger, +} from "@/components/ui/tooltip"; + +interface BasePresenceAvatarsProps { + maxVisible?: number; + className?: string; + includeSelf?: boolean; +} + +interface PresenceAvatarsProps extends BasePresenceAvatarsProps { + cellId: string; +} + +interface NotebookPresenceAvatarsProps extends BasePresenceAvatarsProps { + cellId?: string; + onlyEditing?: boolean; +} + +const PresenceAvatarList = memo(function PresenceAvatarList({ + presences, + maxVisible = 3, + className, + includeSelf = false, +}: { + presences: AwarenessPresence[]; +} & BasePresenceAvatarsProps) { + const { visible, overflow } = useMemo(() => { + const filtered = presences.filter((presence) => includeSelf || !presence.isSelf); + return { + visible: filtered.slice(0, maxVisible), + overflow: Math.max(0, filtered.length - maxVisible), + }; + }, [presences, maxVisible, includeSelf]); + + if (!visible.length) return null; + + return ( + +
+
+ {visible.map((presence) => { + const displayName = presence.user.name ?? presence.user.id ?? `Client ${presence.clientId}`; + const initials = (displayName || "CL").slice(0, 2); + const color = presence.user.color ?? ""; + const fallbackClass = color && !color.startsWith("#") ? `bg-${color}` : "bg-muted"; + const fallbackStyle = color && color.startsWith("#") ? { backgroundColor: color } : undefined; + + return ( + + + + + {initials} + + + + + {displayName} + + + ); + })} +
+ {overflow > 0 && ( + +{overflow} + )} +
+
+ ); +}); + +/** + * Compact presence indicator scoped to a specific cell. + */ +export const PresenceAvatars = memo(function PresenceAvatars({ + cellId, + maxVisible = 3, + className, + includeSelf, +}: PresenceAvatarsProps) { + const presences = useCellPresence(cellId); + + return ( + + ); +}); + +/** + * Presence indicator for everyone connected to the notebook. + */ +export const NotebookPresenceAvatars = memo(function NotebookPresenceAvatars({ + cellId, + onlyEditing = false, + maxVisible = 4, + className, + includeSelf, +}: NotebookPresenceAvatarsProps) { + const presences = useNotebookPresence(); + + const scoped = useMemo(() => { + let next = presences; + if (cellId) { + next = next.filter((presence) => presence.editing?.cellId === cellId); + } + if (onlyEditing) { + next = next.filter((presence) => presence.editing !== null); + } + return next; + }, [cellId, onlyEditing, presences]); + + return ( + + ); +}); + +export default PresenceAvatars; diff --git a/web/modules/notebook/components/debug/debug-bus.ts b/web/modules/notebook/components/debug/debug-bus.ts new file mode 100644 index 00000000..f52f8cbf --- /dev/null +++ b/web/modules/notebook/components/debug/debug-bus.ts @@ -0,0 +1,483 @@ +"use client"; + +import { useEffect, useMemo, useState } from "react"; +import * as decoding from "lib0/decoding"; +import * as Y from "yjs"; +import { snapshot as valtioSnapshot, subscribe as subscribeToProxy } from "valtio"; +import type { NotebookResource } from "@/modules/notebook/collab/yjs/utils/notebook-resource"; +import { + getCellMap, + getNotebookRoot, + ROOT_NOTEBOOK_KEY, + SCHEMA_META_KEY, + yCellToModel, + yNotebookToModel, + yOutputsToModel, +} from "@/modules/notebook/collab/yjs/schema"; +import type { CellModel, NotebookModel, YNotebook } from "@/modules/notebook/collab/yjs/schema/core/types"; +import { awarenessState, type AwarenessPresence } from "@/modules/notebook/awareness"; +import { + messageAwareness, + messageAuth, + messageQueryAwareness, + 
messageSync, + WebsocketProvider, +} from "y-websocket"; + +type NotebookOutputsModel = ReturnType; + +export interface NotebookDebugViewModel extends Omit { + cells: Record; + outputs: NotebookOutputsModel; +} + +export type TrafficDirection = "in" | "out"; + +export type TrafficMessageType = + | "sync" + | "awareness" + | "auth" + | "queryAwareness" + | "meta" + | "unknown"; + +export interface TrafficEvent { + id: string; + ts: number; + direction: TrafficDirection; + type: TrafficMessageType; + size: number; + preview: string; + detail: string; + parsedData?: any; + rawBytes: Uint8Array; +} + +export interface DebugBusSnapshot { + traffic: TrafficEvent[]; + awareness: AwarenessPresence[]; + notebook: NotebookDebugViewModel | null; + ydocJson: string; + ydocBytes: number; + schemaMeta: Record | null; +} + +const TRAFFIC_LIMIT = 200; +const PREVIEW_BYTE_LIMIT = 64; + +function formatPreview(bytes: Uint8Array): string { + const slice = bytes.slice(0, PREVIEW_BYTE_LIMIT); + const hex = Array.from(slice) + .map((b) => b.toString(16).padStart(2, "0")) + .join(" "); + if (bytes.length <= PREVIEW_BYTE_LIMIT) return hex; + const rest = bytes.length - PREVIEW_BYTE_LIMIT; + return `${hex} … (+${rest} bytes)`; +} + +function truncate(text: string, limit = 160): string { + if (text.length <= limit) return text; + return `${text.slice(0, limit)}… (+${text.length - limit} chars)`; +} + +function normalizePayload(data: unknown): Uint8Array | null { + if (data instanceof ArrayBuffer) return new Uint8Array(data); + if (ArrayBuffer.isView(data)) { + const view = data as ArrayBufferView; + return new Uint8Array(view.buffer, view.byteOffset, view.byteLength); + } + if (typeof data === "string") { + return new TextEncoder().encode(data); + } + return null; +} + +function summarizeContent(content: any): string { + const name = content?.constructor?.name ?? 
"Unknown"; + if (name === "ContentString") return `text:${truncate(JSON.stringify(content.str))}`; + if (name === "ContentBinary") return `binary:${content.content?.length ?? 0} bytes`; + if (name === "ContentJSON") return `json:${truncate(JSON.stringify(content.arr ?? content.content ?? []))}`; + if (name === "ContentAny") return `any:${truncate(JSON.stringify(content.arr ?? []))}`; + if (name === "ContentEmbed") return `embed:${truncate(JSON.stringify(content.embed ?? content.content ?? {}))}`; + if (name === "ContentType") { + const tName = content.type?.constructor?.name ?? "type"; + return `type:${tName}`; + } + return name; +} + +function summarizeStruct(struct: any): string { + const name = struct?.constructor?.name ?? "Struct"; + const id = struct?.id ? `${struct.id.client}:${struct.id.clock}` : "?"; + const len = typeof struct?.length === "number" ? struct.length : "?"; + if (name === "Item") { + const contentSummary = summarizeContent(struct.content); + return `Item ${id}+${len} ${contentSummary}`; + } + if (name === "GC") { + return `GC ${id}+${len}`; + } + return `${name} ${id}+${len}`; +} + +function summarizeUpdate(update: Uint8Array): string { + try { + const decoded = Y.decodeUpdateV2(update); + const lines = decoded.structs.slice(0, 8).map((s: any) => summarizeStruct(s)); + const more = decoded.structs.length > 8 ? `… (+${decoded.structs.length - 8} more structs)` : ""; + const dsSize = decoded.ds?.clients?.size ?? 0; + const dsLabel = dsSize ? ` | DeleteSet clients:${dsSize}` : ""; + return [...lines, more].filter(Boolean).join(" | ") + dsLabel; + } catch { + return `update ${update.length} bytes`; + } +} + +function formatAwarenessState(stateStr: string): string { + try { + const obj = JSON.parse(stateStr) as Record; + const user = obj.user ?? {}; + const userLabel = user.name ? `${user.name}#${user.id ?? "?"}` : "user:?"; + const color = user.color ? ` (${user.color})` : ""; + const editing = obj.editing?.cellId ? 
` editing:${obj.editing.cellId}` : ""; + const cursor = obj.cursor?.cellId ? ` cursor:${obj.cursor.cellId}` : ""; + return `${userLabel}${color}${editing}${cursor}`; + } catch { + return truncate(stateStr, 200); + } +} + +function buildDocDebugJson(root: YNotebook): { + docJson: Record; + schemaMetaJson: Record | null; +} { + const rootJson = root.toJSON() as Record; + const { [SCHEMA_META_KEY]: schemaMetaRaw, schemaMeta: schemaMetaAlias, ...rest } = rootJson as { + [key: string]: unknown; + }; + + const schemaMetaJson = + (schemaMetaRaw as Record | undefined) ?? + (schemaMetaAlias as Record | undefined) ?? + null; + + const docJson: Record = { + [ROOT_NOTEBOOK_KEY]: rest, + [SCHEMA_META_KEY]: schemaMetaJson, + }; + + return { docJson, schemaMetaJson }; +} + +function interpretMessage(bytes: Uint8Array): { type: TrafficMessageType; detail: string; parsedData?: any } { + try { + const decoder = decoding.createDecoder(bytes); + const messageType = decoding.readVarUint(decoder); + + if (messageType === 101) { + const clientId = decoding.readVarUint(decoder); + const clock = decoding.readVarUint(decoder); + const parsedData = { clientId, clock }; + return { + type: "meta", + detail: `UpdateMeta client:${clientId} clock:${clock}`, + parsedData, + }; + } + + if (messageType === 100) { + return { type: "meta", detail: "Request client upload snapshot" }; + } + + if (messageType === messageSync) { + const syncType = decoding.readVarUint(decoder); + const syncLabel = + syncType === 0 ? "SyncStep1" : syncType === 1 ? "SyncStep2" : syncType === 2 ? 
"Update" : `Sync:${syncType}`; + + if (syncType === 1 || syncType === 2) { + const update = decoding.readVarUint8Array(decoder); + const summary = summarizeUpdate(update); + + // 完整解析 update + try { + const decoded = Y.decodeUpdate(update); + const ds = Array.from(decoded.ds?.clients?.entries() || []).map(([client, ranges]) => ({ + client, + ranges: Array.from(ranges as any), + })); + const structs = decoded.structs.map((s: any) => ({ + type: s.constructor.name, + id: s.id ? { client: s.id.client, clock: s.id.clock } : null, + length: s.length, + content: s.content ? { + type: s.content.constructor.name, + ...(s.content.str !== undefined ? { str: s.content.str } : {}), + ...(s.content.arr !== undefined ? { arr: s.content.arr } : {}), + ...(s.content.embed !== undefined ? { embed: s.content.embed } : {}), + } : null, + })); + + const parsedData = { + payloadLen: update.length, + ds, + structs, + }; + + return { type: "sync", detail: `${syncLabel} ${summary}`, parsedData }; + } catch { + return { type: "sync", detail: `${syncLabel} ${summary}` }; + } + } + + const remaining = bytes.length - decoder.pos; + return { type: "sync", detail: `${syncLabel} (payload ${remaining} bytes)` }; + } + + if (messageType === messageAwareness) { + const raw = decoding.readVarUint8Array(decoder); + const inner = decoding.createDecoder(raw); + const len = decoding.readVarUint(inner); + const entries: string[] = []; + const parsedData: any[] = []; + + for (let i = 0; i < len; i++) { + const clientId = decoding.readVarUint(inner); + const clock = decoding.readVarUint(inner); + const stateStr = decoding.readVarString(inner); + entries.push(`#${clientId}@${clock}: ${formatAwarenessState(stateStr)}`); + + try { + parsedData.push({ + clientId, + clock, + state: JSON.parse(stateStr), + }); + } catch { + parsedData.push({ + clientId, + clock, + state: stateStr, + }); + } + } + + return { type: "awareness", detail: entries.join(" | ") || "(empty)", parsedData }; + } + + if (messageType === 
messageQueryAwareness) { + return { type: "queryAwareness", detail: "query-awareness" }; + } + + if (messageType === messageAuth) { + const remaining = bytes.length - decoder.pos; + return { type: "auth", detail: `auth message (${remaining} bytes)` }; + } + + return { type: "unknown", detail: "unknown message" }; + } catch { + // console.warn("[DebugBus] Failed to interpret message", err); + return { type: "unknown", detail: "decode error" }; + } +} + +export class DebugBus { + private listeners = new Set<(snapshot: DebugBusSnapshot) => void>(); + private traffic: TrafficEvent[] = []; + private awareness: AwarenessPresence[] = []; + private notebook: NotebookDebugViewModel | null = null; + private ydocJson = ""; + private ydocBytes = 0; + private schemaMeta: Record | null = null; + private disposeFns: Array<() => void> = []; + private currentWs: WebSocket | null = null; + private teardownSocket: (() => void) | null = null; + private seq = 0; + + constructor(private resource: NotebookResource) { + this.attachDoc(resource.doc); + this.attachAwareness(); + this.attachProvider(resource.provider); + this.emit(); + } + + subscribe(listener: (snapshot: DebugBusSnapshot) => void): () => void { + this.listeners.add(listener); + listener(this.getSnapshot()); + return () => { + this.listeners.delete(listener); + }; + } + + destroy() { + this.detachSocketTap(); + this.disposeFns.forEach((fn) => { + try { + fn(); + } catch { + // console.error("[DebugBus] dispose error", err); + } + }); + this.disposeFns = []; + this.listeners.clear(); + } + + getSnapshot(): DebugBusSnapshot { + return { + traffic: this.traffic, + awareness: this.awareness, + notebook: this.notebook, + ydocJson: this.ydocJson, + ydocBytes: this.ydocBytes, + schemaMeta: this.schemaMeta, + }; + } + + private emit() { + const snapshot = this.getSnapshot(); + this.listeners.forEach((listener) => listener(snapshot)); + } + + private attachDoc(doc: NotebookResource["doc"]) { + const compute = () => { + try { + const 
root = getNotebookRoot(doc); + const base = yNotebookToModel(root); + const cellMap = getCellMap(root); + const cells: Record = {}; + cellMap.forEach((value, key) => { + cells[key] = yCellToModel(value); + }); + const outputs = yOutputsToModel(root); + + this.notebook = { + ...base, + cells, + outputs, + }; + + const { docJson, schemaMetaJson } = buildDocDebugJson(root as YNotebook); + this.ydocJson = JSON.stringify(docJson, null, 2); + this.ydocBytes = new Blob([this.ydocJson]).size; + this.schemaMeta = schemaMetaJson; + } catch { + // console.error("[DebugBus] Failed to compute Y.Doc notebook model", err); + } + this.emit(); + }; + + compute(); + doc.on("update", compute); + this.disposeFns.push(() => doc.off("update", compute)); + } + + private attachAwareness() { + const update = () => { + const snap = valtioSnapshot(awarenessState); + this.awareness = Array.from(snap.presences.values()); + this.emit(); + }; + + update(); + const unsub = subscribeToProxy(awarenessState, update); + this.disposeFns.push(unsub); + } + + private attachProvider(provider: WebsocketProvider) { + const handleStatus = () => { + this.refreshSocketTap(); + }; + + provider.on("status", handleStatus); + provider.on("connection-close", handleStatus); + this.disposeFns.push(() => { + provider.off("status", handleStatus); + provider.off("connection-close", handleStatus); + }); + + this.refreshSocketTap(); + } + + private refreshSocketTap() { + if (!this.resource.provider.ws) { + this.detachSocketTap(); + return; + } + + if (this.resource.provider.ws === this.currentWs) { + return; + } + + this.detachSocketTap(); + this.currentWs = this.resource.provider.ws; + + if (!this.currentWs) return; + + const ws = this.currentWs; + const originalSend = ws.send.bind(ws); + + const sendProxy: WebSocket["send"] = (data) => { + this.recordTraffic("out", data); + return originalSend(data); + }; + + const handleMessage = (event: MessageEvent) => { + this.recordTraffic("in", event.data); + }; + + ws.send = 
sendProxy; + ws.addEventListener("message", handleMessage); + + this.teardownSocket = () => { + ws.removeEventListener("message", handleMessage); + ws.send = originalSend; + this.currentWs = null; + }; + } + + private detachSocketTap() { + if (this.teardownSocket) { + this.teardownSocket(); + } + this.teardownSocket = null; + } + + private recordTraffic(direction: TrafficDirection, data: unknown) { + const bytes = normalizePayload(data); + if (!bytes) return; + + const { type, detail, parsedData } = interpretMessage(bytes); + const event: TrafficEvent = { + id: `${Date.now()}-${++this.seq}`, + ts: Date.now(), + direction, + type, + size: bytes.byteLength, + preview: formatPreview(bytes), + detail, + rawBytes: bytes, + ...(parsedData ? { parsedData } : {}), + }; + + const next = [...this.traffic, event]; + if (next.length > TRAFFIC_LIMIT) { + next.splice(0, next.length - TRAFFIC_LIMIT); + } + this.traffic = next; + this.emit(); + } +} + +export function useDebugBus(resource: NotebookResource): DebugBusSnapshot { + const bus = useMemo(() => new DebugBus(resource), [resource]); + const [snapshot, setSnapshot] = useState(bus.getSnapshot()); + + useEffect(() => { + const unsub = bus.subscribe(setSnapshot); + return () => { + unsub(); + bus.destroy(); + }; + }, [bus]); + + return snapshot; +} diff --git a/web/modules/notebook/components/debug/index.tsx b/web/modules/notebook/components/debug/index.tsx new file mode 100644 index 00000000..7eb0ae4e --- /dev/null +++ b/web/modules/notebook/components/debug/index.tsx @@ -0,0 +1,475 @@ +"use client"; + +import { useMemo, useState } from "react"; +import { m } from "motion/react"; +import { + Activity, + ArrowDownLeft, + ArrowUpRight, + Braces, + CheckCircle2, + ChevronDown, + ChevronUp, + Copy, + Database, + FileCode, + Users, +} from "lucide-react"; + +import { Badge } from "@/components/ui/badge"; +import { Popover, PopoverTrigger, PopoverContent } from "@/components/ui/popover"; +import { ScrollArea } from 
"@/components/ui/scroll-area"; +import { cn } from "@/lib/utils"; +import { useNotebookRuntime } from "@/modules/notebook/providers/notebook-runtime-context"; +import { + useDebugBus, + type DebugBusSnapshot, + type TrafficEvent, + type TrafficMessageType, +} from "./debug-bus"; +import { coordinatorState } from "@/modules/notebook/coordination"; +import { useSnapshot } from "valtio"; + +type DebugTab = "traffic" | "awareness" | "state"; + +const tabs: Array<{ id: DebugTab; label: string }> = [ + { id: "traffic", label: "Traffic" }, + { id: "awareness", label: "Awareness" }, + { id: "state", label: "Y.Doc" }, +]; + +const trafficLabel: Record = { + sync: "Sync", + awareness: "Awareness", + auth: "Auth", + queryAwareness: "Awareness?", + meta: "Meta", + unknown: "Unknown", +}; + +const trafficTone: Record = { + sync: "bg-emerald-100 text-emerald-900 border-emerald-200 dark:bg-emerald-900/30 dark:text-emerald-50", + awareness: "bg-blue-100 text-blue-900 border-blue-200 dark:bg-blue-900/30 dark:text-blue-50", + auth: "bg-amber-100 text-amber-900 border-amber-200 dark:bg-amber-900/30 dark:text-amber-50", + queryAwareness: "bg-cyan-100 text-cyan-900 border-cyan-200 dark:bg-cyan-900/30 dark:text-cyan-50", + meta: "bg-purple-100 text-purple-900 border-purple-200 dark:bg-purple-900/30 dark:text-purple-50", + unknown: "bg-slate-200 text-slate-900 border-slate-300 dark:bg-slate-800 dark:text-slate-100", +}; + +/** + * Helper component for vertical separators typical in Linear-style footers + */ +const Separator = () =>
; + +function formatBytes(size: number): string { + if (size < 1024) return `${size} B`; + if (size < 1024 * 1024) return `${(size / 1024).toFixed(1)} KB`; + return `${(size / (1024 * 1024)).toFixed(1)} MB`; +} + +function formatTime(ts: number): string { + const date = new Date(ts); + const base = date + .toLocaleTimeString("en-US", { hour12: false, hour: "2-digit", minute: "2-digit", second: "2-digit" }) + .replace(/^24:/, "00:"); + return `${base}.${String(date.getMilliseconds()).padStart(3, "0")}`; +} + +function copyToClipboard(text: string) { + navigator.clipboard.writeText(text).catch((err) => { + console.error("Failed to copy:", err); + }); +} + +function bytesToHex(bytes: Uint8Array): string { + return Array.from(bytes) + .map((b) => b.toString(16).padStart(2, "0")) + .join(""); +} + +function TrafficList({ events }: { events: TrafficEvent[] }) { + const [expandedIds, setExpandedIds] = useState>(new Set()); + const [filterTypes, setFilterTypes] = useState>(new Set()); + + const toggleExpanded = (id: string) => { + setExpandedIds((prev) => { + const next = new Set(prev); + if (next.has(id)) { + next.delete(id); + } else { + next.add(id); + } + return next; + }); + }; + + const toggleFilter = (type: TrafficMessageType) => { + setFilterTypes((prev) => { + const next = new Set(prev); + if (next.has(type)) { + next.delete(type); + } else { + next.add(type); + } + return next; + }); + }; + + const filteredEvents = useMemo(() => { + if (filterTypes.size === 0) return events; + return events.filter((event) => filterTypes.has(event.type)); + }, [events, filterTypes]); + + const rows = useMemo(() => [...filteredEvents].reverse(), [filteredEvents]); + + const copyAllTraffics = () => { + const allData = filteredEvents.map((event) => ({ + ts: formatTime(event.ts), + direction: event.direction, + type: event.type, + size: event.size, + detail: event.detail, + hex: bytesToHex(event.rawBytes), + })); + copyToClipboard(JSON.stringify(allData, null, 2)); + }; + + const 
allTypes: TrafficMessageType[] = ["sync", "awareness", "auth", "queryAwareness", "meta", "unknown"]; + + return ( +
+
+ Y.js WebSocket Traffic +
+ {filteredEvents.length} / {events.length} events + +
+
+ +
+ {allTypes.map((type) => { + const isActive = filterTypes.has(type); + return ( + + ); + })} +
+ + +
+ {rows.length === 0 && ( +
No traffic yet
+ )} + {rows.map((event) => { + const isExpanded = expandedIds.has(event.id); + return ( +
+
+ {event.direction === "in" ? ( + + ) : ( + + )} + {formatTime(event.ts)} +
+ +
+ + {trafficLabel[event.type]} + + {formatBytes(event.size)} +
+ +
+
+
{event.detail}
+ +
+ {event.parsedData ? ( +
+ + {isExpanded && ( +
+ {JSON.stringify(event.parsedData, null, 2)} +
+ )} +
+ ) : ( +
+ {event.preview} +
+ )} +
+
+ ); + })} +
+
+
+ ); +} + +function AwarenessList({ snapshot }: { snapshot: DebugBusSnapshot }) { + const presences = snapshot.awareness; + const coordinator = useSnapshot(coordinatorState); + + return ( +
+
+ Awareness presences +
+ {presences.length} online + {coordinator.coordinatorId && ( + + Coordinator: #{coordinator.coordinatorId} + + )} +
+
+ +
+ {presences.length === 0 && ( +
No connected peers
+ )} + + {presences.map((presence) => { + const isCoordinator = presence.clientId === coordinator.coordinatorId; + + return ( +
+
+
+
+ + {presence.user.name || "Unknown"} + + {presence.isSelf && ( + + You + + )} + {isCoordinator && ( + + Coordinator + + )} + #{presence.clientId} +
+ +
+
+ Editing: + {presence.editing?.cellId ?? "—"} + {presence.editing?.origin && ( + ({presence.editing.origin}) + )} +
+
+ Cursor: + {presence.cursor?.cellId ?? "—"} +
+
+
+
+ ); + })} +
+
+ ); +} + +function StateView({ snapshot }: { snapshot: DebugBusSnapshot }) { + return ( +
+
+ Current Y.Doc snapshot + {formatBytes(snapshot.ydocBytes)} +
+
+ +
+            {snapshot.ydocJson}
+          
+
+
+ Synced +
+
+
+ ); +} + +/** + * Linear-style Debug Panel upgraded to "god mode" with traffic + awareness + state. + * Acts as an external tap: listens to ws + awareness without mutating business logic. + */ +export function DebugPanel() { + const { resource } = useNotebookRuntime(); + const snapshot = useDebugBus(resource); + const [open, setOpen] = useState(false); + const [activeTab, setActiveTab] = useState("traffic"); + + const cellCount = snapshot.notebook?.order.length ?? 0; + const notebookId = snapshot.notebook?.id ?? ""; + const schemaVersion = + snapshot.schemaMeta && typeof snapshot.schemaMeta === "object" + ? (snapshot.schemaMeta as { [key: string]: unknown }).version ?? undefined + : undefined; + + return ( + + {/* LEFT: Context Info */} +
+
+ + Debug +
+ + + +
+ + {snapshot.notebook?.title || "Untitled"} +
+ + + +
+ {schemaVersion != null && ( +
+ + Schema v{schemaVersion as number} + +
+ )} +
+ + {cellCount} cells +
+ {notebookId && ( +
+ ID: {notebookId} +
+ )} +
+ + {snapshot.awareness.length} peers +
+
+
+ + {/* RIGHT: Actions */} +
+ + + + + + event.preventDefault()} + onEscapeKeyDown={(event) => event.preventDefault()} + > + +
+
+
+ Debug Inspector + WS tap · Awareness · State +
+
+ {tabs.map((tab) => ( + + ))} +
+
+ +
+ {activeTab === "traffic" && } + {activeTab === "awareness" && } + {activeTab === "state" && } +
+ + + +
+
+ ); +} diff --git a/web/modules/notebook/components/outputs/ErrorOutput.tsx b/web/modules/notebook/components/outputs/ErrorOutput.tsx new file mode 100644 index 00000000..a01504ac --- /dev/null +++ b/web/modules/notebook/components/outputs/ErrorOutput.tsx @@ -0,0 +1,37 @@ +import { memo } from "react"; +import { AlertCircle } from "lucide-react"; +import { cn } from "@/lib/utils"; + +interface ErrorOutputProps { + error: string; + className?: string; + details?: string; +} + +const ErrorOutput = memo(function ErrorOutput({ error, className, details }: ErrorOutputProps) { + const message = error?.trim().length ? error : "An unknown error occurred"; + return ( +
+
+ +
+
Execution Error
+
{message}
+ {details && ( +
+ {details} +
+ )} +
+
+
+ ); +}); + +export { ErrorOutput }; diff --git a/web/modules/notebook/components/outputs/OutputRenderer.tsx b/web/modules/notebook/components/outputs/OutputRenderer.tsx new file mode 100644 index 00000000..5cbedb5a --- /dev/null +++ b/web/modules/notebook/components/outputs/OutputRenderer.tsx @@ -0,0 +1,18 @@ +import { memo } from "react"; +import type { QueryResponse } from "@/api-gen"; +import { SuccessOutput } from "./SuccessOutput"; +import { ErrorOutput } from "./ErrorOutput"; + +interface OutputRendererProps { + result: QueryResponse; + className?: string; +} + +const OutputRenderer = memo(function OutputRenderer({ result, className }: OutputRendererProps) { + if (typeof result.error === "string" && result.error.length > 0) { + return ; + } + return ; +}); + +export { OutputRenderer }; diff --git a/web/modules/notebook/components/outputs/SuccessOutput.tsx b/web/modules/notebook/components/outputs/SuccessOutput.tsx new file mode 100644 index 00000000..6738a77f --- /dev/null +++ b/web/modules/notebook/components/outputs/SuccessOutput.tsx @@ -0,0 +1,197 @@ +import React, { memo, useMemo, useState } from "react"; +import { useVirtualizer } from "@tanstack/react-virtual"; +import type { QueryResponse } from "@/api-gen"; +import { cn } from "@/lib/utils"; + +interface TooltipState { + visible: boolean; + content: string; + x: number; + y: number; +} + +interface SuccessOutputProps { + result: QueryResponse; + className?: string; +} + +const SuccessOutput = memo(function SuccessOutput({ + result, + className, +}: SuccessOutputProps) { + const rows = Array.isArray(result.rows) ? result.rows : []; + const columns = Array.isArray(result.columns) ? result.columns : []; + const columnNames = useMemo( + () => columns.map((column) => column.name ?? 
""), + [columns] + ); + + const [tooltip, setTooltip] = useState({ + visible: false, + content: "", + x: 0, + y: 0, + }); + + const parentRef = React.useRef(null); + + const rowVirtualizer = useVirtualizer({ + count: rows.length, + getScrollElement: () => parentRef.current, + estimateSize: () => 40, + overscan: 5, + }); + + const handleMouseEnter = (event: React.MouseEvent, content: string) => { + const element = event.currentTarget as HTMLElement; + if (element.scrollWidth <= element.clientWidth) return; + const rect = element.getBoundingClientRect(); + setTooltip({ + visible: true, + content, + x: rect.left + rect.width / 2, + y: rect.top - 8, + }); + }; + + const handleMouseLeave = () => { + setTooltip((prev) => ({ ...prev, visible: false })); + }; + + const hasTabularData = columnNames.length > 0; + const rowCountLabel = rows.length.toLocaleString(); + + return ( +
+
+
+
+
Query executed successfully
+
+ {rowCountLabel} row{rows.length === 1 ? "" : "s"} +
+ {typeof result.rowsAffected === "number" && ( +
+ Rows affected: {result.rowsAffected.toLocaleString()} +
+ )} +
+
+ + {hasTabularData ? ( +
+
+ {rows.length === 0 ? ( +
+
+ {columnNames.map((columnName) => { + if (columnName === "?column?") return null; + return ( +
handleMouseEnter(event, columnName)} + onMouseLeave={handleMouseLeave} + > + {columnName} +
+ ) + })} +
+
No data returned
+
+ ) : ( +
+
+ {columnNames.map((columnName) => { + if (columnName === "?column?") return null; + return ( +
handleMouseEnter(event, columnName)} + onMouseLeave={handleMouseLeave} + > + {columnName} +
+ ) + })} +
+ + {rowVirtualizer.getVirtualItems().map((virtualItem) => { + const row = rows[virtualItem.index] ?? {}; + return ( +
+ {columnNames.map((columnName) => { + const rawValue = (row as Record)[columnName]; + const cellValue = + rawValue === null || rawValue === undefined + ? "NULL" + : String(rawValue); + return ( +
handleMouseEnter(event, cellValue)} + onMouseLeave={handleMouseLeave} + > + {cellValue} +
+ ); + })} +
+ ); + })} +
+ )} +
+
+ ) : ( +
+

No tabular data returned for this statement.

+ {typeof result.rowsAffected === "number" && ( +

+ Rows affected: {result.rowsAffected} +

+ )} +
+ )} +
+ + {tooltip.visible && ( +
+ {tooltip.content} +
+ )} +
+ ); +}); + +export { SuccessOutput }; diff --git a/web/modules/notebook/components/outputs/index.ts b/web/modules/notebook/components/outputs/index.ts new file mode 100644 index 00000000..70ee0814 --- /dev/null +++ b/web/modules/notebook/components/outputs/index.ts @@ -0,0 +1,3 @@ +export { OutputRenderer } from './OutputRenderer' +export { SuccessOutput } from './SuccessOutput' +export { ErrorOutput } from './ErrorOutput' \ No newline at end of file diff --git a/web/modules/notebook/components/tabs/ExportTab.tsx b/web/modules/notebook/components/tabs/ExportTab.tsx new file mode 100644 index 00000000..f90408e9 --- /dev/null +++ b/web/modules/notebook/components/tabs/ExportTab.tsx @@ -0,0 +1,13 @@ +import { Download } from "lucide-react"; +import { memo } from "react"; + +export const ExportTab = memo(function ExportTab() { + return ( +
+ +
+ Notebook export and import tooling is under construction. +
+
+ ); +}); diff --git a/web/modules/notebook/components/tabs/HistoryTab.tsx b/web/modules/notebook/components/tabs/HistoryTab.tsx new file mode 100644 index 00000000..6e4a8d73 --- /dev/null +++ b/web/modules/notebook/components/tabs/HistoryTab.tsx @@ -0,0 +1,275 @@ +import { memo, useCallback, useEffect, useMemo, useState, type KeyboardEvent, type MouseEvent } from "react"; +import { History, Plus, Minus, Edit, Move, Play, Database, Settings, RotateCcw, type LucideIcon } from "lucide-react"; +import { cn } from "@/lib/utils"; +import { NOTEBOOK_HISTORY_ENTRY_ORIGIN } from "@/atoms/notebook/constants"; +import { generateChangeSummary, getRelativeTime, type HistoryChange } from "./utils/history-analyzer"; +import { useActiveNotebookId } from "@/modules/notebook/hooks/useNotebookSelection"; +import { useNotebookHistoryState, useRestoreNotebookHistoryEntry } from "@/modules/notebook/hooks/use-notebook-history"; +import type { NotebookHistoryEntry } from "@/modules/notebook/collab/yjs/undo/history-types"; + +export const HistoryTab = memo(function HistoryTab() { + const notebookId = useActiveNotebookId(); + const historyState = useNotebookHistoryState(notebookId); + const { entries, cursor } = historyState; + + const timeline = useMemo(() => { + if (entries.length === 0) { + return [] as Array<{ + entry: NotebookHistoryEntry; + isCurrent: boolean; + isFuture: boolean; + }>; + } + + return entries + .map((entry, index) => ({ + entry, + isCurrent: index === cursor, + isFuture: cursor !== -1 ? index > cursor : false, + })) + .reverse(); + }, [entries, cursor]); + + if (!notebookId) { + return ( + + ); + } + + if (timeline.length === 0) { + return ( + + ); + } + + const hasFutureEntries = cursor >= 0 && cursor < entries.length - 1; + + return ( +
+
+
+

Timeline

+

+ In-memory session history. Newest actions appear first. +

+
+ + {entries.length} entries + +
+ +
+
    + {timeline.map(({ entry, isCurrent, isFuture }) => ( + + ))} +
+
+
+ ); +}); + +const changeTypeConfig = { + cell_added: { label: "Cell added", icon: Plus, tone: "success" }, + cell_deleted: { label: "Cell deleted", icon: Minus, tone: "destructive" }, + cell_modified: { label: "Cell modified", icon: Edit, tone: "muted" }, + cell_moved: { label: "Cell moved", icon: Move, tone: "muted" }, + cell_executed: { label: "Cell executed", icon: Play, tone: "warning" }, + notebook_modified: { label: "Notebook updated", icon: Database, tone: "info" }, + metadata_changed: { label: "Metadata changed", icon: Settings, tone: "info" }, +} as const; + +const toneClassName: Record = { + success: "bg-emerald-50 text-emerald-600 dark:bg-emerald-950 dark:text-emerald-400", + destructive: "bg-rose-50 text-rose-600 dark:bg-rose-950 dark:text-rose-400", + warning: "bg-amber-50 text-amber-600 dark:bg-amber-950 dark:text-amber-400", + info: "bg-sky-50 text-sky-600 dark:bg-sky-950 dark:text-sky-400", + muted: "bg-muted text-muted-foreground", +}; + +function HistoryTimelineItem({ + notebookId, + entry, + isCurrent, + isFuture, + canRestore, +}: { + notebookId: string; + entry: NotebookHistoryEntry; + isCurrent?: boolean; + isFuture?: boolean; + canRestore: boolean; +}) { + const [expanded, setExpanded] = useState(isCurrent ?? false); + const restoreHistoryEntry = useRestoreNotebookHistoryEntry(); + + useEffect(() => { + setExpanded(isCurrent ?? 
false); + }, [isCurrent]); + + const summary = useMemo(() => generateChangeSummary(entry.changes), [entry.changes]); + const relativeTime = useMemo(() => getRelativeTime(entry.timestamp), [entry.timestamp]); + + const handleToggle = useCallback(() => { + setExpanded((prev) => !prev); + }, []); + + const handleKeyDown = useCallback( + (event: KeyboardEvent) => { + if (event.key === "Enter" || event.key === " ") { + event.preventDefault(); + handleToggle(); + } + }, + [handleToggle] + ); + + const handleRestore = useCallback( + (event: MouseEvent) => { + event.stopPropagation(); + if (!canRestore) return; + restoreHistoryEntry({ notebookId, entryId: entry.id }); + }, + [canRestore, entry.id, notebookId, restoreHistoryEntry] + ); + + return ( +
  • + + +
    +
    +
    +
    + + {summary} +
    +

    + {new Date(entry.timestamp).toLocaleString()} ・ {relativeTime} +

    +
    +
    + + {entry.origin === NOTEBOOK_HISTORY_ENTRY_ORIGIN.INITIAL ? "Initial state" : "User"} + + +
    +
    + + {expanded && ( +
    + {entry.changes.map((change) => { + const config = changeTypeConfig[change.type as keyof typeof changeTypeConfig] ?? changeTypeConfig.metadata_changed; + const Icon = config.icon; + return ( +
    +
    + + + {config.label} + + {change.description} +
    + {change.details && Object.keys(change.details).length > 0 && ( +
    + +
    + )} +
    + ); + })} +
    + )} +
    +
  • + ); +} + +function DetailList({ details }: { details: HistoryChange["details"] }) { + if (!details) return null; + + return ( +
    + {Object.entries(details).map(([key, value]) => ( +
    +
    {key.replace(/([A-Z])/g, " $1").toLowerCase()}
    +
    {String(value)}
    +
    + ))} +
    + ); +} + +function EmptyState({ + icon: Icon, + title, + description, +}: { + icon: LucideIcon; + title: string; + description: string; +}) { + return ( +
    + + + +
    +

    {title}

    +

    {description}

    +
    +
    + ); +} diff --git a/web/modules/notebook/components/tabs/LatestStreamingGraphTab.tsx b/web/modules/notebook/components/tabs/LatestStreamingGraphTab.tsx new file mode 100644 index 00000000..df40f507 --- /dev/null +++ b/web/modules/notebook/components/tabs/LatestStreamingGraphTab.tsx @@ -0,0 +1,177 @@ +import React, { useEffect, useCallback, useMemo } from 'react' +import { RefreshCw, AlertCircle } from 'lucide-react' +import { StreamingGraph } from '@/components/common/streaming-graph' +import { cn } from '@/lib/utils' +import { convertSchemasToStreamingGraph } from '@/modules/notebook/utils/sql-convert' +import { useNotebookState } from '@/atoms/notebook/hooks' +import { useDatabaseInfoQuery, useDatabaseSchemaQuery } from '@/modules/database/queries/databases' + +export function LatestStreamingGraphTab() { + const { databaseId } = useNotebookState(); + const numericDatabaseId = useMemo(() => (databaseId ? Number(databaseId) : null), [databaseId]) + + const { + data: schemas = [], + isLoading, + isFetching, + error, + refetch, + dataUpdatedAt, + } = useDatabaseSchemaQuery(numericDatabaseId) + + const { data: databaseInfo } = useDatabaseInfoQuery(numericDatabaseId) + + const lastUpdated = dataUpdatedAt ? new Date(dataUpdatedAt) : null + + // Convert schema to streaming graph data + const graphData = useMemo(() => { + if (!schemas || schemas.length === 0) { + return [] + } + return convertSchemasToStreamingGraph(schemas) + }, [schemas]) + + // Manual refresh function + const handleRefresh = useCallback(() => { + if (!isFetching) { + refetch() + } + }, [isFetching, refetch]) + + // Auto-refresh effect + useEffect(() => { + if (!numericDatabaseId) { + return + } + const intervalId = setInterval(() => { + refetch() + }, 30000) + return () => clearInterval(intervalId) + }, [numericDatabaseId, refetch]) + + // Show no database connection state + if (!numericDatabaseId) { + return ( +
    +
    +

    No database connected

    +

    + Please select a database connection in the toolbar to view the latest streaming graph. +

    +
    +
    + ) + } + + // Show loading state for initial load + if (isLoading && graphData.length === 0) { + return ( +
    +
    + + Loading latest streaming graph... +
    +
    + ) + } + + // Show error state + if (error) { + return ( +
    +
    + + Failed to load streaming graph +
    +

    + {error instanceof Error ? error.message : String(error)} +

    + +
    + ) + } + + // Show empty state + if (graphData.length === 0) { + return ( +
    +
    +

    No streaming graph data

    +

    + No tables, materialized views, sources, or sinks found in the current database. + Execute some DDL operations to create streaming objects. +

    +
    + +
    + ) + } + + return ( +
    + {/* Header with refresh controls */} +
    +
    +

    Latest Streaming Graph

    + {lastUpdated && ( + + Updated {lastUpdated.toLocaleTimeString()} + + )} +
    + +
    + + Auto-refresh: 30s + + +
    +
    + + {/* Streaming graph content */} +
    + {isFetching && !isLoading && ( +
    + + Updating... +
    + )} + + +
    +
    + ) +} diff --git a/web/modules/notebook/components/tabs/ProgressTab.tsx b/web/modules/notebook/components/tabs/ProgressTab.tsx new file mode 100644 index 00000000..a0b7c4a8 --- /dev/null +++ b/web/modules/notebook/components/tabs/ProgressTab.tsx @@ -0,0 +1,42 @@ +import React, { useCallback } from "react"; +import toast from "react-hot-toast"; +import { AlertCircle } from "lucide-react"; +import { useNotebookState } from "@/atoms/notebook/hooks"; +import { ProgressView } from "@/components/common/progress-view"; +import { DefaultService } from "@/api-gen"; + +export function ProgressTab() { + const { databaseId } = useNotebookState(); + + const handleCancel = useCallback( + async (ddlId: number) => { + if (!databaseId) return; + try { + await DefaultService.cancelDdlProgress(databaseId, ddlId); + } catch (error) { + console.error("Error canceling DDL:", error); + toast.error("Failed to cancel operation"); + } + }, + [databaseId] + ); + + if (!databaseId) { + return ( +
    + +
    + Connect a database in the toolbar to monitor DDL progress. +
    +
    + ); + } + + return ( + + ); +} + diff --git a/web/modules/notebook/components/tabs/utils/history-analyzer.ts b/web/modules/notebook/components/tabs/utils/history-analyzer.ts new file mode 100644 index 00000000..fd14bf43 --- /dev/null +++ b/web/modules/notebook/components/tabs/utils/history-analyzer.ts @@ -0,0 +1,56 @@ +export interface HistoryChange { + type: + | "cell_added" + | "cell_deleted" + | "cell_modified" + | "cell_moved" + | "cell_executed" + | "notebook_modified" + | "metadata_changed"; + timestamp: string; + description: string; + details?: { + cellId?: string; + cellType?: string; + cellIndex?: number; + oldIndex?: number; + newIndex?: number; + content?: string; + executionResult?: string; + changes?: string[]; + }; +} + + +export function generateChangeSummary(changes: HistoryChange[]): string { + if (changes.length === 0) return "No changes"; + if (changes.length === 1) return changes[0].description; + + const typeCounts = changes.reduce>((acc, change) => { + acc[change.type] = (acc[change.type] ?? 
0) + 1; + return acc; + }, {}); + + const summaryParts = Object.entries(typeCounts).map(([type, count]) => + `${count} ${type.replace(/_/g, " ")}` + ); + + return summaryParts.join(", "); +} + +export function getRelativeTime(timestamp: string): string { + const now = new Date(); + const past = new Date(timestamp); + const diffMs = now.getTime() - past.getTime(); + const diffSecs = Math.floor(diffMs / 1000); + const diffMins = Math.floor(diffSecs / 60); + const diffHours = Math.floor(diffMins / 60); + const diffDays = Math.floor(diffHours / 24); + + if (diffSecs < 60) return "just now"; + if (diffMins < 60) return `${diffMins}m ago`; + if (diffHours < 24) return `${diffHours}h ago`; + if (diffDays < 7) return `${diffDays}d ago`; + + return past.toLocaleDateString(); +} diff --git a/web/modules/notebook/coordination/coordinator-election.ts b/web/modules/notebook/coordination/coordinator-election.ts new file mode 100644 index 00000000..f8bc58e9 --- /dev/null +++ b/web/modules/notebook/coordination/coordinator-election.ts @@ -0,0 +1,27 @@ +export interface CoordinatorElectionResult { + coordinatorId: number | null; + isSelf: boolean; + activeClients: number[]; +} + +/** + * Elect Coordinator from Awareness state using deterministic algorithm. + * Rule: Client with minimum clientId becomes Coordinator. + * + * @param clientIds - Array of active client IDs (pass snapshot, not proxy) + * @param selfId - Current client's ID + */ +export function electCoordinator( + clientIds: number[], + selfId: number | null +): CoordinatorElectionResult { + const activeClients = [...clientIds].sort((a, b) => a - b); + const coordinatorId = activeClients.length > 0 ? 
activeClients[0] : null; + const isSelf = coordinatorId !== null && coordinatorId === selfId; + + return { + coordinatorId, + isSelf, + activeClients, + }; +} diff --git a/web/modules/notebook/coordination/coordinator-state.ts b/web/modules/notebook/coordination/coordinator-state.ts new file mode 100644 index 00000000..730ed3ff --- /dev/null +++ b/web/modules/notebook/coordination/coordinator-state.ts @@ -0,0 +1,13 @@ +import { proxy } from "valtio"; + +export interface CoordinatorState { + coordinatorId: number | null; + isCoordinator: boolean; + lastElectionAt: number; +} + +export const coordinatorState = proxy({ + coordinatorId: null, + isCoordinator: false, + lastElectionAt: 0, +}); diff --git a/web/modules/notebook/coordination/index.ts b/web/modules/notebook/coordination/index.ts new file mode 100644 index 00000000..fbe97e77 --- /dev/null +++ b/web/modules/notebook/coordination/index.ts @@ -0,0 +1,4 @@ +export { electCoordinator } from "./coordinator-election"; +export type { CoordinatorElectionResult } from "./coordinator-election"; +export { coordinatorState } from "./coordinator-state"; +export type { CoordinatorState } from "./coordinator-state"; diff --git a/web/modules/notebook/hooks/use-cell-snapshot.ts b/web/modules/notebook/hooks/use-cell-snapshot.ts new file mode 100644 index 00000000..e24ce45e --- /dev/null +++ b/web/modules/notebook/hooks/use-cell-snapshot.ts @@ -0,0 +1,158 @@ +import { useMemo, useSyncExternalStore } from "react"; +import { useSnapshot } from "valtio"; + +import { useNotebookRuntime } from "../providers/notebook-runtime-context"; +import type { CellMetadataModel } from "../collab/yjs/schema/core/types"; +import type { CellOutput, SQLNotebook } from "../collab/yjs/schema/types"; +import { NB_CELL_MAP, NB_OUTPUTS } from "../collab/yjs/schema/core/keys"; +import * as Y from "yjs"; +import { getNotebookRoot } from "../collab/yjs/schema/access/root"; + +/** + * Cell state derived from Valtio snapshot. 
+ * Replaces the old useCellState hook that used manual Y.js subscriptions. + */ +export interface CellSnapshotState { + /** Cell source code */ + source: string; + /** Cell kind: 'sql' | 'markdown' */ + kind: "sql" | "markdown"; + /** Cell metadata (e.g., backgroundDDL) */ + metadata: CellMetadataModel; + /** Cell execution output */ + output: CellOutput | null; +} + +const DEFAULT_METADATA: CellMetadataModel = { backgroundDDL: false }; + +function useCellText(cellId: string): string { + const { resource } = useNotebookRuntime(); + + const yText = useMemo(() => { + const root = getNotebookRoot(resource.doc); + const cellMap = root.get(NB_CELL_MAP) as Y.Map | undefined; + const cell = cellMap?.get(cellId) as Y.Map | undefined; + const text = cell?.get("source"); + return text instanceof Y.Text ? text : null; + }, [resource.doc, cellId]); + + const subscribe = useMemo(() => { + return (cb: () => void) => { + if (!yText) return () => {}; + const handler = () => cb(); + yText.observe(handler); + return () => yText.unobserve(handler); + }; + }, [yText]); + + return useSyncExternalStore( + subscribe, + () => (yText ? yText.toString() : ""), + () => "", + ); +} + +/** + * Hook to get reactive cell state from Valtio snapshot. + * + * This hook replaces the old useCellState which used manual Y.js subscriptions. + * It reads from the Valtio store that is bound to Y.js via valtio-yjs. 
+ * + * @param cellId - The cell ID to get state for + * @returns Cell state including source, kind, metadata, and output + */ +export function useCellSnapshot(cellId: string): CellSnapshotState { + const { store } = useNotebookRuntime(); + const snapshot = useSnapshot(store.state); + const source = useCellText(cellId); + + return useMemo(() => { + const cellMap = snapshot[NB_CELL_MAP] as SQLNotebook[typeof NB_CELL_MAP]; + const outputs = snapshot[NB_OUTPUTS] as SQLNotebook[typeof NB_OUTPUTS]; + + const cell = cellMap?.[cellId]; + const rawOutput = outputs?.[cellId]; + + // Normalize output with defaults + const output: CellOutput | null = rawOutput + ? { + running: rawOutput.running ?? false, + stale: rawOutput.stale ?? false, + startedAt: rawOutput.startedAt, + completedAt: rawOutput.completedAt, + runId: rawOutput.runId, + executedBy: rawOutput.executedBy, + result: rawOutput.result, + } + : null; + + return { + source, + kind: cell?.kind ?? "sql", + metadata: cell?.meta as CellMetadataModel ?? DEFAULT_METADATA, + output, + }; + }, [snapshot, cellId, source]); +} + +/** + * Hook to get just the cell's source code. + * Useful when only the source is needed to minimize re-renders. + */ +export function useCellSource(cellId: string): string { + return useCellText(cellId); +} + +/** + * Hook to get just the cell's kind. + * Useful when only the kind is needed to minimize re-renders. + */ +export function useCellKind(cellId: string): "sql" | "markdown" { + const { store } = useNotebookRuntime(); + const snapshot = useSnapshot(store.state); + + return useMemo(() => { + const cellMap = snapshot[NB_CELL_MAP] as SQLNotebook[typeof NB_CELL_MAP]; + return cellMap?.[cellId]?.kind ?? "sql"; + }, [snapshot, cellId]); +} + +/** + * Hook to get just the cell's output. + * Useful when only the output is needed to minimize re-renders. 
+ */ +export function useCellOutput(cellId: string): CellOutput | null { + const { store } = useNotebookRuntime(); + const snapshot = useSnapshot(store.state); + + return useMemo(() => { + const outputs = snapshot[NB_OUTPUTS] as SQLNotebook[typeof NB_OUTPUTS]; + const rawOutput = outputs?.[cellId]; + if (!rawOutput) return null; + + return { + running: rawOutput.running ?? false, + stale: rawOutput.stale ?? false, + startedAt: rawOutput.startedAt, + completedAt: rawOutput.completedAt, + runId: rawOutput.runId, + executedBy: rawOutput.executedBy, + result: rawOutput.result, + }; + }, [snapshot, cellId]); +} + +/** + * Hook to get just the cell's metadata. + * Useful when only the metadata is needed to minimize re-renders. + */ +export function useCellMetadata(cellId: string): CellMetadataModel { + const { store } = useNotebookRuntime(); + const snapshot = useSnapshot(store.state); + + return useMemo(() => { + const cellMap = snapshot[NB_CELL_MAP] as SQLNotebook[typeof NB_CELL_MAP]; + const meta = cellMap?.[cellId]?.meta; + return (meta as CellMetadataModel) ?? 
DEFAULT_METADATA; + }, [snapshot, cellId]); +} diff --git a/web/modules/notebook/hooks/use-notebook-history.ts b/web/modules/notebook/hooks/use-notebook-history.ts new file mode 100644 index 00000000..fa8d5310 --- /dev/null +++ b/web/modules/notebook/hooks/use-notebook-history.ts @@ -0,0 +1,44 @@ +import { useEffect, useState, useCallback } from "react"; +import { + getNotebookUndoHistory, + subscribeNotebookUndoHistory, +} from "@/modules/notebook/collab/yjs/undo/history-registry"; +import { + createEmptyHistoryState, + type NotebookHistoryState, +} from "@/modules/notebook/collab/yjs/undo/history-types"; + +export function useNotebookHistoryState(notebookId: string | null | undefined): NotebookHistoryState { + const [state, setState] = useState(createEmptyHistoryState()); + + useEffect(() => { + if (!notebookId) { + setState(createEmptyHistoryState()); + return; + } + + const unsubscribe = subscribeNotebookUndoHistory(notebookId, (history) => { + setState(history?.getSnapshot() ?? createEmptyHistoryState()); + }); + + return () => { + unsubscribe(); + }; + }, [notebookId]); + + return state; +} + +export const useNotebookHistoryEntries = (notebookId: string | null | undefined) => + useNotebookHistoryState(notebookId).entries; + +export const useNotebookHistoryCursor = (notebookId: string | null | undefined) => + useNotebookHistoryState(notebookId).cursor; + +export function useRestoreNotebookHistoryEntry() { + return useCallback(({ notebookId, entryId }: { notebookId: string; entryId: string }) => { + if (!notebookId || !entryId) return; + const history = getNotebookUndoHistory(notebookId); + history?.restore(entryId); + }, []); +} diff --git a/web/modules/notebook/hooks/use-notebook-snapshot.ts b/web/modules/notebook/hooks/use-notebook-snapshot.ts new file mode 100644 index 00000000..155b8f30 --- /dev/null +++ b/web/modules/notebook/hooks/use-notebook-snapshot.ts @@ -0,0 +1,13 @@ +import { useSnapshot } from "valtio"; + +import { useNotebookRuntime } from 
"../providers/notebook-runtime-context"; + +export function useNotebookStoreState() { + const { store } = useNotebookRuntime(); + return store.state; +} + +export function useNotebookSnapshot() { + const state = useNotebookStoreState(); + return useSnapshot(state); +} \ No newline at end of file diff --git a/web/modules/notebook/hooks/use-notebook-undo.ts b/web/modules/notebook/hooks/use-notebook-undo.ts new file mode 100644 index 00000000..6340e2d6 --- /dev/null +++ b/web/modules/notebook/hooks/use-notebook-undo.ts @@ -0,0 +1,70 @@ +import { useEffect, useState, useCallback } from "react"; +import type { UndoManager } from "yjs"; +import { useNotebookRuntime } from "@/modules/notebook/providers/notebook-runtime-context"; + +interface UndoState { + canUndo: boolean; + canRedo: boolean; +} + +const UNDO_EVENTS: Array< + "stack-item-added" | "stack-item-updated" | "stack-item-popped" | "stack-cleared" +> = [ + "stack-item-added", + "stack-item-updated", + "stack-item-popped", + "stack-cleared", +] as const; + +export function useNotebookUndoManager(): UndoManager | null { + const { undoManager } = useNotebookRuntime(); + return undoManager; +} + +export function useUndoRedoState(): UndoState { + const undoManager = useNotebookUndoManager(); + const [state, setState] = useState({ canUndo: false, canRedo: false }); + + useEffect(() => { + if (!undoManager) { + setState({ canUndo: false, canRedo: false }); + return; + } + + const update = () => { + const undoStack = undoManager.undoStack as unknown[] | undefined; + const redoStack = undoManager.redoStack as unknown[] | undefined; + setState({ + canUndo: Array.isArray(undoStack) && undoStack.length > 0, + canRedo: Array.isArray(redoStack) && redoStack.length > 0, + }); + }; + + update(); + UNDO_EVENTS.forEach((event) => { + undoManager.on(event, update); + }); + + return () => { + UNDO_EVENTS.forEach((event) => { + undoManager.off(event, update); + }); + }; + }, [undoManager]); + + return state; +} + +export function 
useUndoRedoActions() { + const undoManager = useNotebookUndoManager(); + + const undo = useCallback(() => { + undoManager?.undo(); + }, [undoManager]); + + const redo = useCallback(() => { + undoManager?.redo(); + }, [undoManager]); + + return { undo, redo, undoManager }; +} diff --git a/web/modules/notebook/hooks/use-yjs-cell-sources.ts b/web/modules/notebook/hooks/use-yjs-cell-sources.ts new file mode 100644 index 00000000..4284404b --- /dev/null +++ b/web/modules/notebook/hooks/use-yjs-cell-sources.ts @@ -0,0 +1,88 @@ +'use client' + +import { useMemo, useSyncExternalStore, useRef } from 'react' +import * as Y from 'yjs' +import { useNotebookRuntime } from '../providers/notebook-runtime-context' +import { NB_CELL_MAP } from '../collab/yjs/schema/core/keys' +import { getNotebookRoot } from '../collab/yjs/schema/access/root' + +/** + * Hook to subscribe to Y.Text sources of multiple cells. + * Updates in real-time when any cell's source changes (including from collaborators). + * + * This is a lower-level hook that directly subscribes to Y.js changes, + * bypassing the valtio-yjs binding for more immediate updates. 
+ * + * @param cellIds - Array of cell IDs to subscribe to + * @returns Map of cellId -> source string + */ +export function useYjsCellSources(cellIds: readonly string[]): Map { + const { resource } = useNotebookRuntime() + + // Use ref to store memoization state to avoid recreating getSnapshot + const cacheRef = useRef<{ + lastResult: Map + lastKey: string + }>({ + lastResult: new Map(), + lastKey: '', + }) + + // Stable subscribe function - subscribes to deep changes on cellMap + const subscribe = useMemo(() => { + return (callback: () => void) => { + const root = getNotebookRoot(resource.doc) + const cellMap = root.get(NB_CELL_MAP) as Y.Map | undefined + + if (!cellMap) return () => {} + + // observeDeep captures all nested changes including Y.Text modifications + cellMap.observeDeep(callback) + + return () => cellMap.unobserveDeep(callback) + } + }, [resource.doc]) + + // Stable getSnapshot function + // Must return same reference if data unchanged to prevent infinite loops + const getSnapshot = useMemo(() => { + return (): Map => { + const root = getNotebookRoot(resource.doc) + const cellMap = root.get(NB_CELL_MAP) as Y.Map> | undefined + + if (!cellMap) { + return cacheRef.current.lastResult + } + + // Build current snapshot + const entries: [string, string][] = [] + for (const cellId of cellIds) { + const cell = cellMap.get(cellId) + if (!cell) continue + + const sourceYText = cell.get('source') + const source = sourceYText instanceof Y.Text ? 
sourceYText.toString() : '' + entries.push([cellId, source]) + } + + // Create a stable key for comparison + // Include both length and content hash for accuracy + const newKey = entries + .map(([id, src]) => `${id}:${src.length}:${src}`) + .join('|') + + // Return cached result if unchanged + if (newKey === cacheRef.current.lastKey) { + return cacheRef.current.lastResult + } + + // Update cache and return new Map + cacheRef.current.lastKey = newKey + cacheRef.current.lastResult = new Map(entries) + + return cacheRef.current.lastResult + } + }, [resource.doc, cellIds]) + + return useSyncExternalStore(subscribe, getSnapshot, getSnapshot) +} diff --git a/web/modules/notebook/hooks/useActiveHeading.ts b/web/modules/notebook/hooks/useActiveHeading.ts new file mode 100644 index 00000000..56f9d460 --- /dev/null +++ b/web/modules/notebook/hooks/useActiveHeading.ts @@ -0,0 +1,94 @@ +'use client' + +import { useState, useEffect, useCallback, type RefObject } from 'react' +import type { HeadingItem } from '../components/OutlineFloating/types' + +/** + * Hook to track which heading is currently visible in the viewport. + * Uses scroll events for reliable tracking. 
+ * + * @param headings - Array of headings to track + * @param scrollContainerRef - Reference to the scrollable container + * @returns The ID of the currently active heading, or null if none + */ +export function useActiveHeading( + headings: HeadingItem[], + scrollContainerRef: RefObject +): string | null { + const [activeId, setActiveId] = useState(null) + + const updateActiveHeading = useCallback(() => { + const container = scrollContainerRef.current + if (!container || headings.length === 0) { + return + } + + const containerRect = container.getBoundingClientRect() + // Threshold: consider a heading "active" when it's in the top 20% of the container + const threshold = containerRect.height * 0.2 + + let currentActiveId: string | null = null + let smallestPositiveOffset = Infinity + + for (const heading of headings) { + const el = container.querySelector(`[data-heading-id="${heading.id}"]`) + if (!el) continue + + const rect = el.getBoundingClientRect() + // Calculate position relative to container's visible area + const offsetFromTop = rect.top - containerRect.top + + // If heading is above the threshold line, it could be the active one + // We want the last heading that has scrolled past the threshold + if (offsetFromTop <= threshold) { + currentActiveId = heading.id + } else if (offsetFromTop > 0 && offsetFromTop < smallestPositiveOffset) { + // Track the first heading below threshold as fallback + smallestPositiveOffset = offsetFromTop + if (!currentActiveId) { + currentActiveId = heading.id + } + } + } + + // If no heading found yet, use the first one + if (!currentActiveId && headings.length > 0) { + const firstEl = container.querySelector(`[data-heading-id="${headings[0].id}"]`) + if (firstEl) { + currentActiveId = headings[0].id + } + } + + if (currentActiveId !== activeId) { + setActiveId(currentActiveId) + } + }, [headings, scrollContainerRef, activeId]) + + useEffect(() => { + const container = scrollContainerRef.current + if (!container || 
headings.length === 0) { + setActiveId(null) + return + } + + // Initial update with a small delay to wait for markdown rendering + const initialTimeout = setTimeout(updateActiveHeading, 100) + + // Listen to scroll events + const handleScroll = () => { + requestAnimationFrame(updateActiveHeading) + } + + container.addEventListener('scroll', handleScroll, { passive: true }) + + // Also update when headings change + updateActiveHeading() + + return () => { + clearTimeout(initialTimeout) + container.removeEventListener('scroll', handleScroll) + } + }, [headings, scrollContainerRef, updateActiveHeading]) + + return activeId +} diff --git a/web/modules/notebook/hooks/useCollabNotebooks.ts b/web/modules/notebook/hooks/useCollabNotebooks.ts new file mode 100644 index 00000000..3e5ff227 --- /dev/null +++ b/web/modules/notebook/hooks/useCollabNotebooks.ts @@ -0,0 +1,6 @@ +export { + useCreateCollabNotebookMutation, + useDeleteCollabNotebookMutation, + useUpdateCollabNotebookMutation, + useTransferCollabNotebookMutation, +} from "@/modules/notebook/queries/notebook-mutations"; diff --git a/web/modules/notebook/hooks/useInjectAwarenessStyle.ts b/web/modules/notebook/hooks/useInjectAwarenessStyle.ts new file mode 100644 index 00000000..87979a42 --- /dev/null +++ b/web/modules/notebook/hooks/useInjectAwarenessStyle.ts @@ -0,0 +1,45 @@ +import { useEffect, useRef } from 'react'; +import type { AwarenessPresence } from '@/modules/notebook/awareness'; +import { useInjectStyle } from './useInjectStyle'; +import { getAwarenessSelectionClass } from '@/modules/notebook/awareness'; + +/** + * Injects remote selection and cursor styles based on collaborative presence. + * Only injects styles for colors that have appeared, avoiding duplicates. 
+ */ +export function useInjectAwarenessStyle(presences: AwarenessPresence[]) { + const injectedColorsRef = useRef>(new Set()); + const { appendRule } = useInjectStyle('rw-notebook-awareness-style'); + + useEffect(() => { + presences.forEach(p => { + const colorRaw = p.user.color || '#6366f1'; + const color = colorRaw.trim(); + // const username = (p.user.name || String(p.user.id || `Client ${p.clientId}`)).slice(0, 24); + + if (!injectedColorsRef.current.has(color)) { + injectedColorsRef.current.add(color); + + const selClass = getAwarenessSelectionClass(color); + const rgba = hexToRGBA(color, 0.28); + appendRule(`.monaco-editor .${selClass} { background: ${rgba}; } + `); + } + }); + }, [presences, appendRule]); +} + +// Simple hex -> rgba conversion (supports #RGB/#RRGGBB), returns original color on failure +function hexToRGBA(hex: string, alpha: number): string { + if (!hex.startsWith('#')) return hex; + let h = hex.slice(1); + if (h.length === 3) { + h = h.split('').map(x => x + x).join(''); + } + if (h.length !== 6) return hex; + const num = parseInt(h, 16); + const r = (num >> 16) & 255; + const g = (num >> 8) & 255; + const b = num & 255; + return `rgba(${r}, ${g}, ${b}, ${alpha})`; +} \ No newline at end of file diff --git a/web/modules/notebook/hooks/useInjectStyle.ts b/web/modules/notebook/hooks/useInjectStyle.ts new file mode 100644 index 00000000..14bf868f --- /dev/null +++ b/web/modules/notebook/hooks/useInjectStyle.ts @@ -0,0 +1,37 @@ +import { useRef, useEffect, useCallback } from 'react'; + +/** + * General style injection Hook. + * Responsible for creating a