@@ -253,11 +288,15 @@ export function SyncAuthDialog({ isOpen, onClose, onSuccess }: SyncAuthDialogPro
) : (
<>
-
🔐 End-to-end encrypted
+
End-to-end encrypted
Your tasks are encrypted on your device before syncing. After signing in,
you'll create a separate encryption passphrase for maximum security.
@@ -283,9 +322,7 @@ export function SyncAuthDialog({ isOpen, onClose, onSuccess }: SyncAuthDialogPro
isOpen={showEncryptionDialog}
isNewUser={isNewUser}
onComplete={handleEncryptionComplete}
- onCancel={() => {
- setShowEncryptionDialog(false);
- }}
+ onCancel={() => setShowEncryptionDialog(false)}
serverEncryptionSalt={serverEncryptionSalt}
/>
diff --git a/components/sync/supabase-oauth-buttons.tsx b/components/sync/supabase-oauth-buttons.tsx
new file mode 100644
index 00000000..158ca6b8
--- /dev/null
+++ b/components/sync/supabase-oauth-buttons.tsx
@@ -0,0 +1,70 @@
+"use client";
+
+import { useState } from "react";
+import { Button } from "@/components/ui/button";
+import { getSupabaseClient, isSupabaseConfigured } from "@/lib/supabase";
+import { createLogger } from "@/lib/logger";
+
+const logger = createLogger('OAUTH');
+
+interface SupabaseOAuthButtonsProps {
+ onError?: (error: Error) => void;
+ onStart?: (provider: "google" | "apple") => void;
+}
+
+export function SupabaseOAuthButtons({ onError, onStart }: SupabaseOAuthButtonsProps) {
+ const [loading, setLoading] = useState<"google" | "apple" | null>(null);
+
+ const handleOAuth = async (provider: "google" | "apple") => {
+ setLoading(provider);
+ onStart?.(provider);
+
+ try {
+ if (!isSupabaseConfigured()) {
+ throw new Error('Supabase is not configured. Set NEXT_PUBLIC_SUPABASE_URL and NEXT_PUBLIC_SUPABASE_ANON_KEY.');
+ }
+ const supabase = getSupabaseClient();
+
+ const { error } = await supabase.auth.signInWithOAuth({
+ provider,
+ options: {
+ redirectTo: window.location.origin,
+ },
+ });
+
+ if (error) {
+ throw new Error(error.message);
+ }
+
+ logger.info('OAuth flow initiated', { provider });
+ // Supabase handles the redirect — loading state clears on page reload
+ } catch (err) {
+ setLoading(null);
+ const error = err instanceof Error ? err : new Error('OAuth failed');
+ logger.error('OAuth flow failed', error, { provider });
+ onError?.(error);
+ }
+ };
+
+ return (
+
+
+
+
+
+ );
+}
diff --git a/components/sync/sync-button.tsx b/components/sync/sync-button.tsx
index 2fa1cf9d..22d19357 100644
--- a/components/sync/sync-button.tsx
+++ b/components/sync/sync-button.tsx
@@ -6,7 +6,7 @@ import { Tooltip, TooltipContent, TooltipProvider, TooltipTrigger } from '@/comp
import { useSync } from '@/lib/hooks/use-sync';
import { useToast } from '@/components/ui/toast';
import { useState } from 'react';
-import { SyncAuthDialog } from '@/components/sync/sync-auth-dialog';
+import { SupabaseAuthDialog } from '@/components/sync/supabase-auth-dialog';
import { getCryptoManager } from '@/lib/sync/crypto';
import { SYNC_TOAST_DURATION } from '@/lib/constants/sync';
import { useSyncHealth } from '@/components/sync/use-sync-health';
@@ -184,7 +184,7 @@ export function SyncButton() {
-      <SyncAuthDialog
+      <SupabaseAuthDialog
        isOpen={authDialogOpen}
        onClose={() => setAuthDialogOpen(false)}
onSuccess={handleAuthSuccess}
diff --git a/components/sync/use-sync-health.ts b/components/sync/use-sync-health.ts
index f5db4b64..1f76985c 100644
--- a/components/sync/use-sync-health.ts
+++ b/components/sync/use-sync-health.ts
@@ -1,8 +1,9 @@
"use client";
import { useEffect, useState } from 'react';
-import { getHealthMonitor, type HealthIssue } from '@/lib/sync/health-monitor';
import { SYNC_CONFIG, SYNC_TOAST_DURATION } from '@/lib/constants/sync';
+import { getSyncQueue } from '@/lib/sync/queue';
+import { getConnectionState } from '@/lib/sync/realtime-listener';
interface SyncHealthOptions {
isEnabled: boolean;
@@ -10,79 +11,53 @@ interface SyncHealthOptions {
onSync: () => void;
}
+const STALE_THRESHOLD_MS = 60 * 60 * 1000; // 1 hour
+
/**
* Hook for monitoring sync health and showing notifications
- * Checks health periodically and displays toasts for issues
+ * Checks for stale queue operations and connection issues
*/
export function useSyncHealth({ isEnabled, onHealthIssue, onSync }: SyncHealthOptions) {
const [lastNotificationTime, setLastNotificationTime] = useState(0);
useEffect(() => {
- if (!isEnabled) {
- return;
- }
+ if (!isEnabled) return;
- const checkHealthAndNotify = async () => {
+ const checkHealth = async () => {
const now = Date.now();
// Avoid notification spam
- if (now - lastNotificationTime < SYNC_CONFIG.NOTIFICATION_COOLDOWN_MS) {
+ if (now - lastNotificationTime < SYNC_CONFIG.NOTIFICATION_COOLDOWN_MS) return;
+
+ // Check for stale queue operations
+ const queue = getSyncQueue();
+ const pending = await queue.getPending();
+ const staleOps = pending.filter(op => now - op.timestamp > STALE_THRESHOLD_MS);
+
+ if (staleOps.length > 0) {
+ onHealthIssue(
+ `${staleOps.length} sync operation${staleOps.length > 1 ? 's' : ''} pending for over an hour`,
+ { label: 'Sync Now', onClick: onSync },
+ SYNC_TOAST_DURATION.LONG
+ );
+ setLastNotificationTime(now);
return;
}
- const healthMonitor = getHealthMonitor();
- const report = await healthMonitor.check();
-
- // Show toast for health issues
- if (!report.healthy && report.issues.length > 0) {
- handleHealthIssues(report.issues, now);
+ // Check Realtime connection
+ const connectionState = getConnectionState();
+ if (connectionState === 'disconnected') {
+ onHealthIssue(
+ 'Real-time sync is disconnected. Changes may not sync automatically.',
+ undefined,
+ SYNC_TOAST_DURATION.LONG
+ );
+ setLastNotificationTime(now);
}
};
- const handleHealthIssues = (issues: HealthIssue[], now: number) => {
- for (const issue of issues) {
- if (shouldShowErrorIssue(issue)) {
- showErrorIssue(issue);
- setLastNotificationTime(now);
- } else if (shouldShowStaleQueueWarning(issue)) {
- showStaleQueueWarning(issue);
- setLastNotificationTime(now);
- }
- }
- };
-
- const shouldShowErrorIssue = (issue: HealthIssue) => {
- return issue.severity === 'error';
- };
-
- const shouldShowStaleQueueWarning = (issue: HealthIssue) => {
- return issue.severity === 'warning' && issue.type === 'stale_queue';
- };
-
- const showErrorIssue = (issue: HealthIssue) => {
- const message = `${issue.message}. ${issue.suggestedAction}`;
- onHealthIssue(message, undefined, SYNC_TOAST_DURATION.LONG);
- };
-
- const showStaleQueueWarning = (issue: HealthIssue) => {
- onHealthIssue(
- issue.message,
- {
- label: 'Sync Now',
- onClick: onSync,
- },
- SYNC_TOAST_DURATION.LONG
- );
- };
-
- // Check health periodically
- const interval = setInterval(checkHealthAndNotify, SYNC_CONFIG.HEALTH_CHECK_INTERVAL_MS);
-
- // Run initial check after delay
- const initialTimeout = setTimeout(
- checkHealthAndNotify,
- SYNC_CONFIG.INITIAL_HEALTH_CHECK_DELAY_MS
- );
+ const interval = setInterval(checkHealth, SYNC_CONFIG.HEALTH_CHECK_INTERVAL_MS);
+ const initialTimeout = setTimeout(checkHealth, SYNC_CONFIG.INITIAL_HEALTH_CHECK_DELAY_MS);
return () => {
clearInterval(interval);
diff --git a/lib/archive.ts b/lib/archive.ts
index 4853b07b..79109025 100644
--- a/lib/archive.ts
+++ b/lib/archive.ts
@@ -8,8 +8,6 @@
import { getDb } from "@/lib/db";
import type { TaskRecord, ArchiveSettings } from "@/lib/types";
import { getSyncQueue } from "@/lib/sync/queue";
-import { incrementVectorClock } from "@/lib/sync/vector-clock";
-import { getSyncConfig } from "@/lib/sync/config";
/**
* Get archive settings from database
@@ -69,7 +67,7 @@ export async function archiveOldTasks(
// Enqueue delete operations for sync before archiving
const queue = getSyncQueue();
for (const task of tasksToArchive) {
- await queue.enqueue('delete', task.id, task, task.vectorClock || {});
+ await queue.enqueue('delete', task.id, task);
}
// Move tasks to archive table
@@ -106,24 +104,16 @@ export async function restoreTask(taskId: string): Promise<void> {
throw new Error("Task not found in archive");
}
- // Get device ID and increment vector clock for sync
- const syncConfig = await getSyncConfig();
- const deviceId = syncConfig?.deviceId || 'local';
- const vectorClock = incrementVectorClock(archivedTask.vectorClock || {}, deviceId);
-
- // Remove archivedAt timestamp and update vector clock
+ // Remove archivedAt timestamp
// eslint-disable-next-line @typescript-eslint/no-unused-vars
- const { archivedAt: _archivedAt, ...taskWithoutArchive } = {
- ...archivedTask,
- vectorClock
- };
+ const { archivedAt: _archivedAt, ...taskWithoutArchive } = archivedTask;
// Move back to main tasks table
await db.tasks.add(taskWithoutArchive);
// Enqueue update operation for sync
const queue = getSyncQueue();
- await queue.enqueue('update', taskWithoutArchive.id, taskWithoutArchive, vectorClock);
+ await queue.enqueue('update', taskWithoutArchive.id, taskWithoutArchive);
// Remove from archive
await db.archivedTasks.delete(taskId);
diff --git a/lib/constants/sync.ts b/lib/constants/sync.ts
index d336daa9..3247638a 100644
--- a/lib/constants/sync.ts
+++ b/lib/constants/sync.ts
@@ -1,31 +1,12 @@
/**
- * Sync-related constants
+ * Sync-related constants (Supabase backend)
* Centralizes sync configuration values for consistency and maintenance
*/
-/**
- * Token management configuration
- */
-export const TOKEN_CONFIG = {
- /** Refresh token when within 5 minutes of expiry */
- REFRESH_THRESHOLD_MS: 5 * 60 * 1000,
-
- /**
- * Token expiration normalization threshold
- * JWT tokens use Unix seconds (e.g., 1735689600) while JavaScript uses milliseconds
- * Threshold of 10 billion distinguishes formats: seconds are < 10B, milliseconds are > 10B
- * This normalization ensures consistent millisecond storage regardless of API format
- */
- EXPIRATION_NORMALIZATION_THRESHOLD: 10_000_000_000,
-} as const;
-
/**
* Sync operation configuration
*/
export const SYNC_CONFIG = {
- /** Maximum tasks to fetch in a single pull request */
- MAX_TASKS_PER_PULL: 100,
-
/** Polling interval for pending operation count (2 seconds) */
PENDING_COUNT_POLL_INTERVAL_MS: 2000,
@@ -48,14 +29,6 @@ export const SYNC_CONFIG = {
DEFAULT_HISTORY_LIMIT: 50,
} as const;
-/**
- * OAuth handshake configuration
- */
-export const OAUTH_CONFIG = {
- /** Delay before processing OAuth result to allow subscribers to register (100ms) */
- LISTENER_REGISTRATION_DELAY_MS: 100,
-} as const;
-
/**
* Encryption configuration
*/
diff --git a/lib/db.ts b/lib/db.ts
index 1f187b92..869fc3ae 100644
--- a/lib/db.ts
+++ b/lib/db.ts
@@ -2,7 +2,6 @@ import Dexie, { Table } from "dexie";
import type { TaskRecord, NotificationSettings, ArchiveSettings, SyncHistoryRecord, AppPreferences } from "@/lib/types";
import type { SmartView } from "@/lib/filters";
import type { SyncQueueItem, SyncConfig, DeviceInfo, EncryptionConfig } from "@/lib/sync/types";
-import { ENV_CONFIG } from "@/lib/env-config";
class GsdDatabase extends Dexie {
tasks!: Table<TaskRecord, string>;
@@ -106,6 +105,8 @@ class GsdDatabase extends Dexie {
// Initialize sync metadata with defaults
const deviceId = crypto.randomUUID();
+ // Note: v7 created old-format config with token/vectorClock/serverUrl fields.
+ // The v13 migration cleans this up for the Supabase backend.
trans.table("syncMetadata").add({
key: "sync_config",
enabled: false,
@@ -118,7 +119,7 @@ class GsdDatabase extends Dexie {
lastSyncAt: null,
vectorClock: {},
conflictStrategy: "last_write_wins",
- serverUrl: ENV_CONFIG.apiBaseUrl
+ serverUrl: ""
});
// Add deviceInfo
@@ -129,11 +130,10 @@ class GsdDatabase extends Dexie {
createdAt: new Date().toISOString()
});
- // Migrate existing tasks to have empty vectorClock
- return trans.table("tasks").toCollection().modify((task: TaskRecord) => {
- if (!task.vectorClock) {
- task.vectorClock = {};
- }
+ // Legacy migration: vectorClock was removed in the Supabase migration.
+ // This no-op upgrade keeps the Dexie version chain intact for existing databases.
+ return trans.table("tasks").toCollection().modify(() => {
+ // No-op: vectorClock field is no longer used
});
});
@@ -260,6 +260,50 @@ class GsdDatabase extends Dexie {
}
});
});
+
+ // Version 13: Migrate sync config for Supabase backend
+ // Removes vector clock, token, and serverUrl fields; resets sync state for clean re-sync
+ this.version(13)
+ .stores({
+ tasks: "id, quadrant, completed, dueDate, recurrence, *tags, createdAt, updatedAt, [quadrant+completed], notificationSent, *dependencies, completedAt",
+ archivedTasks: "id, quadrant, completed, dueDate, completedAt, archivedAt",
+ smartViews: "id, name, isBuiltIn, createdAt",
+ notificationSettings: "id",
+ syncQueue: "id, taskId, operation, timestamp, retryCount",
+ syncMetadata: "key",
+ deviceInfo: "key",
+ archiveSettings: "id",
+ syncHistory: "id, timestamp, status, deviceId",
+ appPreferences: "id"
+ })
+ .upgrade(async (trans) => {
+ // Migrate sync config: remove Cloudflare-specific fields
+ const syncMeta = trans.table("syncMetadata");
+ const existing = await syncMeta.get("sync_config");
+ if (existing) {
+ const config = existing as SyncConfig;
+ await syncMeta.put({
+ key: "sync_config",
+ enabled: false, // Disable sync — user must re-authenticate with Supabase
+ userId: null,
+ deviceId: config.deviceId,
+ deviceName: config.deviceName,
+ email: null,
+ lastSyncAt: null,
+ conflictStrategy: "last_write_wins",
+ provider: null,
+ consecutiveFailures: 0,
+ lastFailureAt: null,
+ lastFailureReason: null,
+ nextRetryAt: null,
+ autoSyncEnabled: true,
+ autoSyncIntervalMinutes: 2,
+ } satisfies SyncConfig);
+ }
+
+ // Clear the sync queue for a fresh start
+ await trans.table("syncQueue").clear();
+ });
}
}
diff --git a/lib/docs/architecture-diagrams.ts b/lib/docs/architecture-diagrams.ts
index cf661c7b..81e1fd7f 100644
--- a/lib/docs/architecture-diagrams.ts
+++ b/lib/docs/architecture-diagrams.ts
@@ -20,12 +20,12 @@ export interface DiagramSection {
export const syncArchitectureDiagrams: DiagramSection = {
id: "sync",
title: "Sync Engine Architecture",
- description: "End-to-end encrypted sync with vector clock-based conflict resolution",
+ description: "End-to-end encrypted sync with timestamp-based LWW conflict resolution",
diagrams: [
{
id: "sync-state-machine",
title: "Sync Engine State Machine",
- description: "6-phase state flow from idle through push/pull to completion",
+ description: "State flow from idle through push/pull to completion",
code: `stateDiagram-v2
[*] --> Idle: Initial State
@@ -35,23 +35,16 @@ export const syncArchitectureDiagrams: DiagramSection = {
Backoff --> Idle: Return error
- Preparing --> Pushing: Prerequisites ready
- Preparing --> AuthError: Token invalid
+ Preparing --> Pushing: Session valid
+ Preparing --> AuthError: Session expired
AuthError --> Idle: Return error
Pushing --> Pulling: Push complete
- Pushing --> Retrying: Unauthorized (401)
-
- Retrying --> Pushing: Token refreshed
- Retrying --> AuthError: Refresh failed
Pulling --> Resolving: Pull complete
- Resolving --> Finalizing: Conflicts resolved
- Resolving --> Manual: Strategy = manual
-
- Manual --> Idle: Return conflicts
+ Resolving --> Finalizing: Conflicts resolved (LWW)
Finalizing --> Idle: Return success`,
},
@@ -64,51 +57,24 @@ export const syncArchitectureDiagrams: DiagramSection = {
UA[User Action] --> TA[Task Modified]
TA --> QA[Add to Queue]
QA --> EA[Encrypt AES-256]
- EA --> PA[Push to Worker]
+ EA --> PA[Push to Supabase]
end
- subgraph Worker[Cloudflare Worker]
- PA --> AUTH[Auth Middleware]
- AUTH --> PUSH[Push Handler]
- PUSH --> D1[(D1 SQLite)]
- PUSH --> R2[(R2 Storage)]
- PULL[Pull Handler] --> D1
- PULL --> R2
+ subgraph Supabase[Supabase Backend]
+ PA --> RLS[RLS Policy Check]
+ RLS --> PG[(Postgres)]
+ PG --> RT[Realtime Broadcast]
+ PULL[PostgREST Query] --> PG
end
subgraph DeviceB[Device B]
- PLB[Pull] --> DB[Decrypt]
+ RT --> WS[WebSocket Event]
+ WS --> DB[Decrypt]
DB --> MB[Merge Local]
MB --> VB[Update View]
end
- PULL --> PLB`,
- },
- {
- id: "vector-clock",
- title: "Vector Clock Comparison",
- description: "Distributed conflict detection algorithm",
- code: `flowchart TD
- START([Compare VC_A vs VC_B]) --> INIT[Initialize counters]
- INIT --> UNION[Get all device IDs]
- UNION --> LOOP[For each device ID]
- LOOP --> GET[Get versions from A and B]
- GET --> CMP{Compare}
-
- CMP -->|A > B| INC_A[aGreater++]
- CMP -->|B > A| INC_B[bGreater++]
- CMP -->|Equal| NEXT
-
- INC_A --> NEXT{More devices?}
- INC_B --> NEXT
-
- NEXT -->|Yes| LOOP
- NEXT -->|No| EVAL{Evaluate}
-
- EVAL -->|aGreater only| A_WINS[A is newer]
- EVAL -->|bGreater only| B_WINS[B is newer]
- EVAL -->|Both > 0| CONFLICT[CONFLICT]
- EVAL -->|Both = 0| SAME[Identical]`,
+ PULL --> DB`,
},
{
id: "encryption-flow",
@@ -140,10 +106,10 @@ export const syncArchitectureDiagrams: DiagramSection = {
],
};
-export const workerArchitectureDiagrams: DiagramSection = {
- id: "worker",
- title: "Worker Backend Architecture",
- description: "Cloudflare Workers with D1, R2, and KV storage",
+export const supabaseArchitectureDiagrams: DiagramSection = {
+ id: "supabase",
+ title: "Supabase Backend Architecture",
+ description: "Supabase with Postgres, Auth, Realtime, and RLS",
diagrams: [
{
id: "system-overview",
@@ -155,119 +121,68 @@ export const workerArchitectureDiagrams: DiagramSection = {
MCP[MCP Server]
end
- subgraph Edge[Cloudflare Edge]
- WORKER[Worker - Hono Router]
+ subgraph Supabase[Supabase Platform]
+ AUTH[Auth - Google/Apple OAuth]
+ REST[PostgREST API]
+ REALTIME[Realtime WebSocket]
subgraph Storage
- D1[(D1 SQLite)]
- R2[(R2 Bucket)]
- KV[(Workers KV)]
+ PG[(Postgres 17)]
+ RLS[Row Level Security]
end
- WORKER --> D1
- WORKER --> R2
- WORKER --> KV
- end
-
- subgraph OAuth[OAuth Providers]
- GOOGLE[Google OIDC]
- APPLE[Apple Sign In]
+ REST --> PG
+ REALTIME --> PG
+ PG --> RLS
end
- PWA --> WORKER
- MCP --> WORKER
- WORKER <--> GOOGLE
- WORKER <--> APPLE`,
+ PWA --> AUTH
+ PWA --> REST
+ PWA --> REALTIME
+ MCP --> REST`,
},
{
- id: "api-routes",
- title: "API Route Structure",
- description: "All API endpoints organized by category",
- code: `flowchart TD
- ROOT[/] --> HEALTH[GET /health]
- ROOT --> AUTH[/api/auth/*]
- ROOT --> SYNC[/api/sync/*]
- ROOT --> DEVICES[/api/devices/*]
-
- subgraph OAuth[OAuth - No Auth]
- AUTH --> START[GET /oauth/:provider/start]
- AUTH --> CALLBACK[POST /oauth/callback]
- AUTH --> RESULT[GET /oauth/result]
- end
-
- subgraph Protected[Auth Required]
- AUTH --> SALT[GET/POST /encryption-salt]
- AUTH --> LOGOUT[POST /logout]
- AUTH --> REFRESH[POST /refresh]
- end
-
- subgraph SyncRoutes[Sync - JWT + Rate Limit]
- SYNC --> PUSH[POST /push]
- SYNC --> PULL[POST /pull]
- SYNC --> STATUS[GET /status]
- end
-
- subgraph DeviceRoutes[Devices]
- DEVICES --> LIST[GET /]
- DEVICES --> REVOKE[DELETE /:id]
- end`,
- },
- {
- id: "oauth-flow",
- title: "OAuth Desktop Flow",
- description: "Popup-based authentication sequence",
+ id: "auth-flow",
+ title: "OAuth Authentication Flow",
+ description: "Supabase Auth handles Google/Apple OAuth automatically",
code: `sequenceDiagram
participant User
participant App
- participant Popup
- participant Worker
- participant KV
+ participant Supabase as Supabase Auth
participant Google
User->>App: Click Sign in
- App->>Worker: GET /oauth/google/start
- Worker->>KV: Store state + PKCE
- Worker-->>App: authUrl + state
-
- App->>Popup: window.open(authUrl)
- Popup->>Google: Authorization Request
+ App->>Supabase: signInWithOAuth(google)
+ Supabase-->>App: Redirect URL
+ App->>Google: Authorization Request
User->>Google: Consent
- Google->>Popup: Redirect with code
-
- Popup->>Worker: GET /callback?code&state
- Worker->>KV: Validate state
- Worker->>Google: Exchange code for tokens
- Google-->>Worker: tokens
- Worker->>KV: Store result
- Worker-->>Popup: Redirect to callback.html
-
- Popup->>App: postMessage
- App->>Worker: GET /oauth/result
- Worker-->>App: token + userId`,
+ Google->>Supabase: Callback with code
+ Supabase->>Supabase: Exchange code, create session
+ Supabase-->>App: onAuthStateChange(SIGNED_IN)
+ App->>App: Check encryption salt
+ App->>App: Show passphrase dialog`,
},
{
- id: "middleware-pipeline",
- title: "Middleware Pipeline",
- description: "Request processing through CORS, Auth, and Rate Limiting",
- code: `flowchart LR
- REQ([Request]) --> CORS[CORS Handler]
-
- CORS --> ROUTE{Route Type}
- ROUTE -->|Public| RATE[Rate Limiter]
- ROUTE -->|Protected| AUTH[Auth Middleware]
-
- AUTH --> JWT{JWT Valid?}
- JWT -->|No| REJECT[401]
- JWT -->|Yes| REVOKE{Revoked?}
-
- REVOKE -->|Yes| REJECT
- REVOKE -->|No| RATE
-
- RATE --> LIMIT{Under Limit?}
- LIMIT -->|No| TOO_MANY[429]
- LIMIT -->|Yes| HANDLER[Route Handler]
+ id: "rls-policies",
+ title: "Row Level Security",
+ description: "User isolation via RLS policies on all tables",
+ code: `flowchart TD
+ REQ([API Request]) --> JWT[Extract JWT from header]
+ JWT --> UID[auth.uid from JWT]
+ UID --> RLS{RLS Policy Check}
+
+ RLS -->|user_id = auth.uid| ALLOW[Allow Query]
+ RLS -->|user_id != auth.uid| DENY[Deny - Empty Result]
+
+ subgraph Tables[Protected Tables]
+ T1[encrypted_tasks]
+ T2[profiles]
+ T3[devices]
+ T4[sync_metadata]
+ T5[conflict_log]
+ end
- HANDLER --> RESP([Response])`,
+ ALLOW --> Tables`,
},
],
};
@@ -280,7 +195,7 @@ export const mcpArchitectureDiagrams: DiagramSection = {
{
id: "mcp-system",
title: "MCP System Architecture",
- description: "Claude Desktop to GSD Worker data flow",
+ description: "Claude Desktop to Supabase data flow",
code: `flowchart TB
subgraph Claude[Claude Desktop]
AI[Claude AI]
@@ -293,39 +208,38 @@ export const mcpArchitectureDiagrams: DiagramSection = {
HANDLERS[Tool Handlers]
subgraph Services
- API[API Client]
+ SUPA[Supabase Client]
CRYPTO[CryptoManager]
CACHE[TTL Cache]
end
TRANSPORT --> ROUTER
ROUTER --> HANDLERS
- HANDLERS --> API
+ HANDLERS --> SUPA
HANDLERS --> CRYPTO
HANDLERS --> CACHE
end
- subgraph Backend[GSD Backend]
- WORKER[Cloudflare Worker]
+ subgraph Backend[Supabase]
+ PG[(Postgres)]
end
AI <--> CLIENT
CLIENT <--> TRANSPORT
- API <--> WORKER`,
+ SUPA <--> PG`,
},
{
id: "tool-organization",
title: "Tool Organization",
- description: "20 tools in 4 categories",
+ description: "19 tools in 4 categories",
code: `flowchart TD
- subgraph Read[Read Tools - 7]
+ subgraph Read[Read Tools - 6]
R1[list_tasks]
R2[get_task]
R3[search_tasks]
R4[get_sync_status]
R5[list_devices]
R6[get_task_stats]
- R7[get_token_status]
end
subgraph Write[Write Tools - 5]
@@ -353,16 +267,15 @@ export const mcpArchitectureDiagrams: DiagramSection = {
{
id: "request-lifecycle",
title: "Request Lifecycle",
- description: "Tool call processing from Claude to API response",
+ description: "Tool call processing from Claude to Supabase response",
code: `sequenceDiagram
participant Claude
participant Transport as stdio
participant Router
participant Handler
participant Cache
- participant API
+ participant Supabase
participant Crypto
- participant Worker
Claude->>Transport: JSON-RPC Request
Transport->>Router: Parse & route
@@ -375,14 +288,10 @@ export const mcpArchitectureDiagrams: DiagramSection = {
Cache-->>Handler: Return cached
end
- Handler->>Crypto: encrypt(data)
- Crypto-->>Handler: encrypted
- Handler->>API: HTTPS + JWT
- API->>Worker: Request
- Worker-->>API: Response
- API-->>Handler: Encrypted response
+ Handler->>Supabase: Query encrypted_tasks
+ Supabase-->>Handler: Encrypted blobs
Handler->>Crypto: decrypt(blob)
- Crypto-->>Handler: decrypted
+ Crypto-->>Handler: decrypted task
Handler->>Cache: Store result
Handler-->>Router: Tool result
Router-->>Transport: JSON-RPC Response
@@ -409,14 +318,10 @@ export const mcpArchitectureDiagrams: DiagramSection = {
CIRCULAR -->|No| ENCRYPT
DEPS -->|No| ENCRYPT[Encrypt task data]
- ENCRYPT --> API_CALL[Call Worker API]
-
- API_CALL --> SUCCESS{Success?}
- SUCCESS -->|No| RETRY{Retryable?}
- RETRY -->|Yes| BACKOFF[Exponential backoff]
- BACKOFF --> API_CALL
- RETRY -->|No| ERROR
+ ENCRYPT --> UPSERT[Upsert to Supabase]
+ UPSERT --> SUCCESS{Success?}
+ SUCCESS -->|No| ERROR
SUCCESS -->|Yes| INVALIDATE[Invalidate cache]
INVALIDATE --> RESP[Return success]`,
},
@@ -425,6 +330,6 @@ export const mcpArchitectureDiagrams: DiagramSection = {
export const allDiagramSections: DiagramSection[] = [
syncArchitectureDiagrams,
- workerArchitectureDiagrams,
+ supabaseArchitectureDiagrams,
mcpArchitectureDiagrams,
];
diff --git a/lib/env-config.ts b/lib/env-config.ts
index a270f702..f64658a7 100644
--- a/lib/env-config.ts
+++ b/lib/env-config.ts
@@ -6,10 +6,10 @@
export type Environment = 'development' | 'staging' | 'production';
export interface EnvironmentConfig {
- /** API base URL for sync worker */
- apiBaseUrl: string;
- /** OAuth callback URL for auth flow */
- oauthCallbackUrl: string;
+ /** Supabase project URL */
+ supabaseUrl: string;
+ /** Supabase anon (publishable) key */
+ supabaseAnonKey: string;
/** Whether running in development mode */
isDevelopment: boolean;
/** Whether running in production mode */
@@ -47,30 +47,10 @@ function detectEnvironment(): Environment {
*/
export function getEnvironmentConfig(): EnvironmentConfig {
const environment = detectEnvironment();
- const apiBaseUrlOverride = process.env.NEXT_PUBLIC_API_BASE_URL;
-
- // API Base URL configuration
- const apiBaseUrl =
- apiBaseUrlOverride ||
- (environment === 'development'
- ? 'http://localhost:8787'
- : environment === 'staging'
- ? 'https://api-dev.vinny.dev'
- : typeof window !== 'undefined'
- ? window.location.origin // Use same-origin (CloudFront proxies /api/* to worker)
- : 'https://gsd.vinny.dev');
-
- // OAuth Callback URL configuration
- const oauthCallbackUrl =
- environment === 'development'
- ? 'http://localhost:3000/auth/callback'
- : environment === 'staging'
- ? 'https://gsd-dev.vinny.dev/auth/callback'
- : 'https://gsd.vinny.dev/auth/callback';
return {
- apiBaseUrl,
- oauthCallbackUrl,
+ supabaseUrl: process.env.NEXT_PUBLIC_SUPABASE_URL ?? '',
+ supabaseAnonKey: process.env.NEXT_PUBLIC_SUPABASE_ANON_KEY ?? '',
isDevelopment: environment === 'development',
isProduction: environment === 'production',
isStaging: environment === 'staging',
diff --git a/lib/hooks/use-sync.ts b/lib/hooks/use-sync.ts
index 7b4449ed..c5ec6005 100644
--- a/lib/hooks/use-sync.ts
+++ b/lib/hooks/use-sync.ts
@@ -3,10 +3,8 @@
import { useState, useCallback, useEffect } from 'react';
import { getSyncEngine } from '@/lib/sync/engine';
import { getSyncCoordinator } from '@/lib/sync/sync-coordinator';
-import { getHealthMonitor } from '@/lib/sync/health-monitor';
import { getBackgroundSyncManager } from '@/lib/sync/background-sync';
import { getAutoSyncConfig } from '@/lib/sync/config';
-import { SYNC_CONFIG } from '@/lib/constants/sync';
import { UI_TIMING } from '@/lib/constants/ui';
import { createLogger } from '@/lib/logger';
import type { SyncResult } from '@/lib/sync/types';
@@ -40,24 +38,13 @@ export function useSync(): UseSyncResult {
const [autoSyncEnabled, setAutoSyncEnabled] = useState(true);
const [autoSyncInterval, setAutoSyncInterval] = useState(2);
- // Check if sync is enabled on mount and periodically
- // Start/stop health monitor and background sync manager based on sync enabled state
+ // Check if sync is enabled on mount and manage background sync lifecycle
useEffect(() => {
const checkEnabled = async () => {
const engine = getSyncEngine();
const enabled = await engine.isEnabled();
setIsEnabled(enabled);
- // Start or stop health monitor based on sync state
- const healthMonitor = getHealthMonitor();
- if (enabled && !healthMonitor.isActive()) {
- logger.debug('Starting health monitor (sync enabled)');
- healthMonitor.start();
- } else if (!enabled && healthMonitor.isActive()) {
- logger.debug('Stopping health monitor (sync disabled)');
- healthMonitor.stop();
- }
-
// Start or stop background sync manager
const bgSyncManager = getBackgroundSyncManager();
if (enabled) {
@@ -83,12 +70,6 @@ export function useSync(): UseSyncResult {
return () => {
clearInterval(interval);
- // Stop health monitor on unmount
- const healthMonitor = getHealthMonitor();
- if (healthMonitor.isActive()) {
- healthMonitor.stop();
- }
-
// Stop background sync manager on unmount
const bgSyncManager = getBackgroundSyncManager();
if (bgSyncManager.isRunning()) {
@@ -131,61 +112,6 @@ export function useSync(): UseSyncResult {
return () => clearInterval(interval);
}, []);
- // Listen for health check results and show notifications
- // This effect runs periodically to check health status
- useEffect(() => {
- if (!isEnabled) {
- return;
- }
-
- let lastHealthCheckTime = 0;
-
- const checkHealth = async () => {
- const now = Date.now();
-
- // Only check once per interval to avoid spam
- if (now - lastHealthCheckTime < SYNC_CONFIG.NOTIFICATION_COOLDOWN_MS) {
- return;
- }
-
- lastHealthCheckTime = now;
-
- const healthMonitor = getHealthMonitor();
- const report = await healthMonitor.check();
-
- // Log health check results
- logger.debug('Health check result', {
- healthy: report.healthy,
- issuesCount: report.issues.length,
- });
-
- // Note: Toast notifications would be shown here if we had access to the toast context
- // For now, we just log the issues. The health monitor integration is complete,
- // and toast notifications can be added by components that use this hook.
- if (!report.healthy && report.issues.length > 0) {
- for (const issue of report.issues) {
- logger.warn('Health issue detected', {
- type: issue.type,
- severity: issue.severity,
- message: issue.message,
- suggestedAction: issue.suggestedAction,
- });
- }
- }
- };
-
- // Run initial check after a short delay
- const initialTimeout = setTimeout(checkHealth, UI_TIMING.INITIAL_HEALTH_CHECK_DELAY_MS);
-
- // Check periodically
- const interval = setInterval(checkHealth, SYNC_CONFIG.NOTIFICATION_COOLDOWN_MS);
-
- return () => {
- clearTimeout(initialTimeout);
- clearInterval(interval);
- };
- }, [isEnabled]);
-
const sync = useCallback(async () => {
setStatus('syncing');
setError(null);
diff --git a/lib/logger.ts b/lib/logger.ts
index 1dfcb265..03a3ed8d 100644
--- a/lib/logger.ts
+++ b/lib/logger.ts
@@ -34,6 +34,7 @@ export type LogContext =
| 'SYNC_CRYPTO'
| 'SYNC_RETRY'
| 'SYNC_HEALTH'
+ | 'SYNC_REALTIME'
| 'SYNC_TOKEN'
| 'SYNC_ERROR'
| 'SYNC_HISTORY'
diff --git a/lib/oauth-config.ts b/lib/oauth-config.ts
deleted file mode 100644
index b3c14d20..00000000
--- a/lib/oauth-config.ts
+++ /dev/null
@@ -1,81 +0,0 @@
-/**
- * OAuth Security Configuration
- *
- * Centralizes allowed origins and security constants for OAuth flows.
- * Following OAuth 2.0 security best practices with defense-in-depth approach.
- */
-
-// Allowed origins for OAuth callback postMessages
-// These are the only origins we trust to send authentication data
-export const ALLOWED_OAUTH_ORIGINS = [
- // Production
- 'https://gsd.vinny.dev',
- 'https://gsd-sync-worker-production.vscarpenter.workers.dev',
-
- // Development/Staging
- 'https://gsd-dev.vinny.dev',
-
- // Worker domains (production)
- 'https://gsd-sync-worker.vscarpenter.workers.dev',
-
- // Worker domains (development/staging)
- 'https://gsd-sync-worker-dev.vscarpenter.workers.dev',
- 'https://gsd-sync-worker-staging.vscarpenter.workers.dev',
-
- // Local development
- 'http://localhost:3000',
- 'http://127.0.0.1:3000',
- 'http://localhost:8787', // Local worker
- 'http://127.0.0.1:8787', // Local worker
-] as const;
-
-// State token configuration
-export const OAUTH_STATE_CONFIG = {
- // Maximum age for OAuth state tokens (30 minutes - matches backend TTL for iPad PWA compatibility)
- MAX_STATE_AGE_MS: 30 * 60 * 1000,
-
- // Minimum length for state tokens
- MIN_STATE_LENGTH: 32,
-
- // Interval for cleaning up expired states (1 minute)
- CLEANUP_INTERVAL_MS: 60 * 1000,
-} as const;
-
-/**
- * Check if an origin is allowed to send OAuth callbacks
- */
-export function isOAuthOriginAllowed(origin: string): boolean {
- // Reject null, undefined, or empty strings
- if (!origin) {
- return false;
- }
-
- // Check exact match in allowed list
- if ((ALLOWED_OAUTH_ORIGINS as readonly string[]).includes(origin)) {
- return true;
- }
-
- // Allow localhost/127.0.0.1 with any port for local development
- // This is safe because these origins are only reachable locally
- const localhostPattern = /^http:\/\/(localhost|127\.0\.0\.1):\d+$/;
- if (localhostPattern.test(origin)) {
- return true;
- }
-
- return false;
-}
-
-/**
- * Get current environment for logging
- */
-export function getOAuthEnvironment(): 'production' | 'development' | 'staging' | 'local' {
- if (typeof window === 'undefined') return 'local';
-
- const { hostname } = window.location;
-
- if (hostname === 'gsd.vinny.dev') return 'production';
- if (hostname === 'gsd-dev.vinny.dev') return 'staging';
- if (hostname === 'localhost' || hostname === '127.0.0.1') return 'local';
-
- return 'development';
-}
diff --git a/lib/oauth-schemas.ts b/lib/oauth-schemas.ts
deleted file mode 100644
index d7adc5e7..00000000
--- a/lib/oauth-schemas.ts
+++ /dev/null
@@ -1,83 +0,0 @@
-/**
- * OAuth Message Validation Schemas
- *
- * Validates postMessage payloads from OAuth callback popup.
- * Prevents malicious payloads and ensures data integrity.
- */
-
-import { z } from 'zod';
-import { OAUTH_STATE_CONFIG } from './oauth-config';
-
-/**
- * OAuth Success Message Schema
- * Validates the authData structure from successful OAuth flows
- */
-export const OAuthSuccessMessageSchema = z.object({
- type: z.literal('oauth_success'),
- state: z.string().min(OAUTH_STATE_CONFIG.MIN_STATE_LENGTH, {
- message: 'State token too short - potential security issue',
- }),
- authData: z.object({
- userId: z.string().min(1, { message: 'User ID is required' }),
- deviceId: z.string().min(1, { message: 'Device ID is required' }),
- email: z.string().email({ message: 'Invalid email format' }),
- token: z.string().min(1, { message: 'Auth token is required' }),
- expiresAt: z.number().positive({ message: 'Invalid expiration timestamp' }),
- requiresEncryptionSetup: z.boolean(),
- provider: z.enum(['google', 'apple'], {
- error: 'Invalid OAuth provider',
- }),
- encryptionSalt: z.string().optional(),
- }),
-});
-
-/**
- * OAuth Error Message Schema
- * Validates error messages from failed OAuth flows
- */
-export const OAuthErrorMessageSchema = z.object({
- type: z.literal('oauth_error'),
- error: z.string().min(1, { message: 'Error message is required' }),
- state: z.string().optional(),
-});
-
-/**
- * Union type for all valid OAuth messages
- */
-export const OAuthMessageSchema = z.discriminatedUnion('type', [
- OAuthSuccessMessageSchema,
- OAuthErrorMessageSchema,
-]);
-
-/**
- * Type exports for TypeScript
- */
-export type OAuthSuccessMessage = z.infer<typeof OAuthSuccessMessageSchema>;
-export type OAuthErrorMessage = z.infer<typeof OAuthErrorMessageSchema>;
-export type OAuthMessage = z.infer<typeof OAuthMessageSchema>;
-
-/**
- * Validate OAuth message with detailed error reporting
- */
-export function validateOAuthMessage(data: unknown): {
- success: boolean;
- data?: OAuthMessage;
- error?: string;
-} {
- try {
- const parsed = OAuthMessageSchema.parse(data);
- return { success: true, data: parsed };
- } catch (error) {
- if (error instanceof z.ZodError) {
- const errorDetails = error.issues.map((e) => `${e.path.join('.')}: ${e.message}`).join(', ');
- return {
- success: false,
- error: `Invalid OAuth message structure: ${errorDetails}`,
- };
- }
- return {
- success: false,
- error: 'Failed to validate OAuth message',
- };
- }
-}
diff --git a/lib/reset-everything.ts b/lib/reset-everything.ts
index bbad17f3..e7fe0188 100644
--- a/lib/reset-everything.ts
+++ b/lib/reset-everything.ts
@@ -13,7 +13,6 @@
import { getDb } from "@/lib/db";
import { disableSync, getSyncConfig } from "@/lib/sync/config";
import { createLogger } from "@/lib/logger";
-import { ENV_CONFIG } from "@/lib/env-config";
const logger = createLogger("DB");
@@ -87,12 +86,8 @@ async function clearIndexedDB(): Promise<{ tables: string[]; errors: string[] }>
deviceId, // Preserve for future sync
deviceName: "Device",
email: null,
- token: null,
- tokenExpiresAt: null,
lastSyncAt: null,
- vectorClock: {},
conflictStrategy: "last_write_wins",
- serverUrl: ENV_CONFIG.apiBaseUrl,
consecutiveFailures: 0,
lastFailureAt: null,
lastFailureReason: null,
diff --git a/lib/schema.ts b/lib/schema.ts
index 753d21f9..ccac0f60 100644
--- a/lib/schema.ts
+++ b/lib/schema.ts
@@ -55,15 +55,17 @@ export const taskRecordSchema = taskDraftSchema
notificationSent: z.boolean().default(false),
lastNotificationAt: z.string().datetime({ offset: true }).optional(),
snoozedUntil: z.string().datetime({ offset: true }).optional(),
- vectorClock: z.record(z.string(), z.number()).default({}),
// Time tracking fields
timeSpent: z.number().int().min(0).optional(), // Total minutes spent (calculated)
timeEntries: z.array(timeEntrySchema).default([]),
})
.strict();
+/** Lenient task schema for imports — strips unknown keys (e.g. vectorClock from older exports) */
+const importTaskSchema = taskRecordSchema.strip();
+
export const importPayloadSchema = z.object({
- tasks: z.array(taskRecordSchema),
+ tasks: z.array(importTaskSchema),
exportedAt: z.string().datetime({ offset: true }),
version: z.string(),
});
diff --git a/lib/supabase.ts b/lib/supabase.ts
new file mode 100644
index 00000000..f28bc8e2
--- /dev/null
+++ b/lib/supabase.ts
@@ -0,0 +1,37 @@
+/**
+ * Supabase client singleton
+ * Provides authenticated access to the Supabase backend for sync operations.
+ * Auth session management (JWT refresh, persistence) is handled automatically.
+ */
+
+import { createClient, type SupabaseClient } from '@supabase/supabase-js';
+
+const SUPABASE_URL = process.env.NEXT_PUBLIC_SUPABASE_URL ?? '';
+const SUPABASE_ANON_KEY = process.env.NEXT_PUBLIC_SUPABASE_ANON_KEY ?? '';
+
+let client: SupabaseClient | null = null;
+
+export function getSupabaseClient(): SupabaseClient {
+ if (client) return client;
+
+ if (!SUPABASE_URL || !SUPABASE_ANON_KEY) {
+ throw new Error(
+ 'Missing Supabase configuration. Set NEXT_PUBLIC_SUPABASE_URL and NEXT_PUBLIC_SUPABASE_ANON_KEY environment variables.'
+ );
+ }
+
+ client = createClient(SUPABASE_URL, SUPABASE_ANON_KEY, {
+ auth: {
+ persistSession: true,
+ autoRefreshToken: true,
+ detectSessionInUrl: true,
+ },
+ });
+
+ return client;
+}
+
+/** Check if Supabase is configured (env vars present) */
+export function isSupabaseConfigured(): boolean {
+ return Boolean(SUPABASE_URL && SUPABASE_ANON_KEY);
+}
diff --git a/lib/sync/api-client.ts b/lib/sync/api-client.ts
deleted file mode 100644
index 38bac706..00000000
--- a/lib/sync/api-client.ts
+++ /dev/null
@@ -1,215 +0,0 @@
-/**
- * API client for Cloudflare Worker sync backend
- */
-
-import { createLogger } from '@/lib/logger';
-import {
- SyncNetworkError,
- SyncAuthError,
- SyncValidationError,
-} from './errors';
-import type {
- PushRequest,
- PushResponse,
- PullRequest,
- PullResponse,
- SyncStatusResponse,
- DeviceInfo,
-} from './types';
-
-const logger = createLogger('SYNC_API');
-
-export class SyncApiClient {
- private baseUrl: string;
- private token: string | null = null;
-
- constructor(baseUrl: string) {
- this.baseUrl = baseUrl.replace(/\/$/, ''); // Remove trailing slash
- }
-
- /**
- * Set authentication token
- */
- setToken(token: string | null): void {
- this.token = token;
- }
-
- /**
- * Get authentication headers
- */
- private getHeaders(includeAuth = false): HeadersInit {
- const headers: HeadersInit = {
- 'Content-Type': 'application/json',
- };
-
- if (includeAuth && this.token) {
- headers['Authorization'] = `Bearer ${this.token}`;
- }
-
- return headers;
- }
-
- /**
- * Make API request with comprehensive error handling
- */
-  private async request<T>(
-    endpoint: string,
-    options: RequestInit = {},
-    requiresAuth = false
-  ): Promise<T> {
- const url = `${this.baseUrl}${endpoint}`;
- const headers = this.getHeaders(requiresAuth);
-
- try {
- logger.debug('API request initiated', {
- endpoint,
- method: options.method || 'GET',
- hasAuth: requiresAuth && !!this.token,
- });
-
- const response = await fetch(url, {
- ...options,
- headers: {
- ...headers,
- ...options.headers,
- },
- });
-
- if (!response.ok) {
- const errorData = await response.json().catch(() => ({ error: 'Unknown error' }));
- const errorMessage = errorData.error || `HTTP ${response.status}: ${response.statusText}`;
-
- logger.error('API request failed', undefined, {
- endpoint,
- status: response.status,
- error: errorMessage,
- });
-
- // Categorize error by status code
- if (response.status === 401 || response.status === 403) {
- throw new SyncAuthError(
- errorMessage || 'Authentication failed - please sign in again',
- response.status
- );
- }
-
- if (response.status >= 500) {
- throw new SyncNetworkError(
- errorMessage || `Server error: ${response.status}`,
- response.status
- );
- }
-
- if (response.status === 400 || response.status === 422) {
- throw new SyncValidationError(
- errorMessage || 'Request validation failed',
- errorData
- );
- }
-
- // Default to network error for other 4xx errors
- throw new SyncNetworkError(errorMessage, response.status);
- }
-
- const data = await response.json();
-
- logger.debug('API request successful', {
- endpoint,
- status: response.status,
- });
-
- return data;
- } catch (error) {
- // Re-throw typed errors
- if (
- error instanceof SyncAuthError ||
- error instanceof SyncNetworkError ||
- error instanceof SyncValidationError
- ) {
- throw error;
- }
-
- // Handle network/fetch errors
- logger.error('API request threw error', error instanceof Error ? error : undefined, {
- endpoint,
- errorType: error instanceof Error ? error.constructor.name : 'unknown',
- });
-
- throw new SyncNetworkError(
- `Network request failed: ${error instanceof Error ? error.message : 'Unknown error'}`
- );
- }
- }
-
- // Authentication endpoints
- async logout(): Promise<{ success: boolean }> {
- return this.request<{ success: boolean }>('/api/auth/logout', {
- method: 'POST',
- }, true);
- }
-
- async refreshToken(): Promise<{ token: string; expiresAt: number }> {
- return this.request<{ token: string; expiresAt: number }>('/api/auth/refresh', {
- method: 'POST',
- }, true);
- }
-
- // Sync endpoints
-
-  async push(data: PushRequest): Promise<PushResponse> {
-    return this.request<PushResponse>('/api/sync/push', {
-      method: 'POST',
-      body: JSON.stringify(data),
-    }, true);
-  }
-
-  async pull(data: PullRequest): Promise<PullResponse> {
-    return this.request<PullResponse>('/api/sync/pull', {
-      method: 'POST',
-      body: JSON.stringify(data),
-    }, true);
-  }
-
-  async getStatus(): Promise<SyncStatusResponse> {
-    return this.request<SyncStatusResponse>('/api/sync/status', {
-      method: 'GET',
-    }, true);
-  }
-
- // Device management endpoints
-
- async listDevices(): Promise<{ devices: DeviceInfo[] }> {
- return this.request<{ devices: DeviceInfo[] }>('/api/devices', {
- method: 'GET',
- }, true);
- }
-
- async revokeDevice(deviceId: string): Promise<{ success: boolean }> {
- return this.request<{ success: boolean }>(`/api/devices/${deviceId}`, {
- method: 'DELETE',
- }, true);
- }
-}
-
-// Singleton instance
-let apiClientInstance: SyncApiClient | null = null;
-
-/**
- * Get or create API client instance
- */
-export function getApiClient(serverUrl?: string): SyncApiClient {
- if (!apiClientInstance) {
- if (!serverUrl) {
- throw new Error('Server URL required for initial API client creation');
- }
- apiClientInstance = new SyncApiClient(serverUrl);
- }
- return apiClientInstance;
-}
-
-/**
- * Clear API client instance
- */
-export function clearApiClient(): void {
- apiClientInstance = null;
-}
diff --git a/lib/sync/config-migration.ts b/lib/sync/config-migration.ts
deleted file mode 100644
index 5cf936f1..00000000
--- a/lib/sync/config-migration.ts
+++ /dev/null
@@ -1,92 +0,0 @@
-/**
- * Sync configuration migration utilities
- * Handles legacy config updates and initialization
- */
-
-import { getDb } from '@/lib/db';
-import { ENV_CONFIG } from '@/lib/env-config';
-import type { SyncConfig } from './types';
-
-/**
- * Generate a default device name based on user agent
- */
-function getDefaultDeviceName(): string {
- if (typeof navigator === 'undefined') {
- return 'Desktop';
- }
-
- const ua = navigator.userAgent;
- if (ua.includes('Mac')) return 'Mac';
- if (ua.includes('Windows')) return 'Windows';
- if (ua.includes('Linux')) return 'Linux';
- if (ua.includes('iPhone')) return 'iPhone';
- if (ua.includes('iPad')) return 'iPad';
- if (ua.includes('Android')) return 'Android';
-
- return 'Desktop';
-}
-
-/**
- * Create initial sync configuration
- */
-async function createInitialConfig(): Promise<void> {
- const db = getDb();
- const deviceId = crypto.randomUUID();
- const deviceName = getDefaultDeviceName();
-
- await db.syncMetadata.add({
- key: 'sync_config',
- enabled: false,
- userId: null,
- deviceId,
- deviceName,
- email: null,
- token: null,
- tokenExpiresAt: null,
- lastSyncAt: null,
- vectorClock: {},
- conflictStrategy: 'last_write_wins' as const,
- serverUrl: ENV_CONFIG.apiBaseUrl,
- consecutiveFailures: 0,
- lastFailureAt: null,
- lastFailureReason: null,
- nextRetryAt: null,
- });
-}
-
-/**
- * Initialize sync configuration if it doesn't exist
- */
-export async function ensureSyncConfigInitialized(): Promise<void> {
- const db = getDb();
- const config = await db.syncMetadata.get('sync_config');
-
- if (!config) {
- await createInitialConfig();
- }
-}
-
-/**
- * Migrate legacy config to include retry tracking fields
- */
-export function migrateLegacyConfig(config: SyncConfig): SyncConfig {
- const migratedConfig = { ...config };
-
- if (migratedConfig.consecutiveFailures === undefined) {
- migratedConfig.consecutiveFailures = 0;
- }
-
- if (migratedConfig.lastFailureAt === undefined) {
- migratedConfig.lastFailureAt = null;
- }
-
- if (migratedConfig.lastFailureReason === undefined) {
- migratedConfig.lastFailureReason = null;
- }
-
- if (migratedConfig.nextRetryAt === undefined) {
- migratedConfig.nextRetryAt = null;
- }
-
- return migratedConfig;
-}
diff --git a/lib/sync/config/disable.ts b/lib/sync/config/disable.ts
index 8ebebae4..7215db56 100644
--- a/lib/sync/config/disable.ts
+++ b/lib/sync/config/disable.ts
@@ -1,40 +1,17 @@
/**
- * Sync disable functionality
+ * Sync disable functionality (Supabase backend)
*/
import { getDb } from "@/lib/db";
import { getCryptoManager } from "../crypto";
-import { getApiClient } from "../api-client";
import type { SyncConfig } from "../types";
import { getSyncConfig } from "./get-set";
+import { stopRealtimeListener } from "../realtime-listener";
+import { getSupabaseClient } from "@/lib/supabase";
import { createLogger } from "@/lib/logger";
const logger = createLogger('SYNC_CONFIG');
-/**
- * Stop health monitoring
- */
-async function stopHealthMonitor(): Promise<void> {
- const { getHealthMonitor } = await import("../health-monitor");
- const healthMonitor = getHealthMonitor();
-
- if (healthMonitor.isActive()) {
- logger.info('Stopping health monitor (sync disabled)');
- healthMonitor.stop();
- }
-}
-
-/**
- * Clear crypto and API credentials
- */
-function clearCredentials(serverUrl: string): void {
- const crypto = getCryptoManager();
- crypto.clear();
-
- const api = getApiClient(serverUrl);
- api.setToken(null);
-}
-
/**
* Reset sync config to disabled state
*/
@@ -46,12 +23,9 @@ async function resetSyncConfigState(current: SyncConfig): Promise<void> {
enabled: false,
userId: null,
email: null,
- token: null,
- tokenExpiresAt: null,
lastSyncAt: null,
- vectorClock: {},
key: "sync_config",
- });
+ } satisfies SyncConfig);
// Clear sync queue
await db.syncQueue.clear();
@@ -67,12 +41,23 @@ export async function disableSync(): Promise<void> {
return;
}
- // Stop health monitor
- await stopHealthMonitor();
+ // Stop Realtime listener
+ stopRealtimeListener();
- // Clear credentials
- clearCredentials(current.serverUrl);
+ // Clear crypto key
+ const crypto = getCryptoManager();
+ crypto.clear();
+
+ // Sign out of Supabase
+ try {
+ const supabase = getSupabaseClient();
+ await supabase.auth.signOut();
+ } catch {
+ logger.warn('Supabase sign out failed (may not have been signed in)');
+ }
// Reset config
await resetSyncConfigState(current);
+
+ logger.info('Sync disabled');
}
diff --git a/lib/sync/config/enable.ts b/lib/sync/config/enable.ts
index 5fa7d8ff..991c4fd6 100644
--- a/lib/sync/config/enable.ts
+++ b/lib/sync/config/enable.ts
@@ -1,10 +1,9 @@
/**
- * Sync enable functionality
+ * Sync enable functionality (Supabase backend)
*/
import { getDb } from "@/lib/db";
import { getCryptoManager } from "../crypto";
-import { getApiClient } from "../api-client";
import { getSyncQueue } from "../queue";
import type { SyncConfig } from "../types";
import { getSyncConfig, updateAutoSyncConfig } from "./get-set";
@@ -35,51 +34,14 @@ async function queueExistingTasks(): Promise {
}
/**
- * Start health monitoring
- */
-async function startHealthMonitor(): Promise<void> {
- const { getHealthMonitor } = await import("../health-monitor");
- const healthMonitor = getHealthMonitor();
-
- if (!healthMonitor.isActive()) {
- logger.info('Starting health monitor (sync enabled)');
- healthMonitor.start();
- }
-}
-
-/**
- * Update sync config with auth credentials
- */
-async function updateAuthCredentials(
- current: SyncConfig,
- userId: string,
- email: string,
- token: string,
- expiresAt: number
-): Promise<void> {
- const db = getDb();
-
- await db.syncMetadata.put({
- ...current,
- enabled: true,
- userId,
- email,
- token,
- tokenExpiresAt: expiresAt,
- key: "sync_config",
- });
-}
-
-/**
- * Enable sync (typically called after successful auth)
+ * Enable sync (called after successful Supabase OAuth)
*/
export async function enableSync(
userId: string,
email: string,
- token: string,
- expiresAt: number,
salt: string,
- password: string
+ password: string,
+ provider?: string
): Promise<void> {
const current = await getSyncConfig();
@@ -91,20 +53,23 @@ export async function enableSync(
await initializeCrypto(password, salt);
// Update config with auth credentials
- await updateAuthCredentials(current, userId, email, token, expiresAt);
+ const db = getDb();
+ await db.syncMetadata.put({
+ ...current,
+ enabled: true,
+ userId,
+ email,
+ provider: provider ?? null,
+ key: "sync_config",
+ } satisfies SyncConfig);
// Set default auto-sync config if not present
if (current.autoSyncEnabled === undefined) {
- await updateAutoSyncConfig(true, 2); // Default: enabled, 2 min interval
+ await updateAutoSyncConfig(true, 2);
}
- // Set token in API client
- const api = getApiClient(current.serverUrl);
- api.setToken(token);
-
// Queue existing tasks for initial sync
await queueExistingTasks();
- // Start health monitor
- await startHealthMonitor();
+ logger.info('Sync enabled', { userId, email });
}
diff --git a/lib/sync/config/get-set.ts b/lib/sync/config/get-set.ts
index 0a12a6e3..ae20e697 100644
--- a/lib/sync/config/get-set.ts
+++ b/lib/sync/config/get-set.ts
@@ -3,22 +3,20 @@
*/
import { getDb } from "@/lib/db";
-import { ensureSyncConfigInitialized, migrateLegacyConfig } from "../config-migration";
import type { SyncConfig, BackgroundSyncConfig } from "../types";
/**
* Get sync configuration
*/
export async function getSyncConfig(): Promise<SyncConfig | null> {
- await ensureSyncConfigInitialized();
const db = getDb();
const config = await db.syncMetadata.get("sync_config");
- if (!config) {
+ if (!config || config.key !== "sync_config") {
return null;
}
- return migrateLegacyConfig(config as SyncConfig);
+ return config as SyncConfig;
}
/**
@@ -93,6 +91,5 @@ export async function getSyncStatus() {
lastSyncAt: config?.lastSyncAt || null,
pendingCount,
deviceId: config?.deviceId || null,
- serverUrl: config?.serverUrl || null,
};
}
diff --git a/lib/sync/config/reset.ts b/lib/sync/config/reset.ts
index efb788d5..67dd6708 100644
--- a/lib/sync/config/reset.ts
+++ b/lib/sync/config/reset.ts
@@ -33,17 +33,16 @@ async function resetSyncMetadata(config: SyncConfig): Promise {
await db.syncMetadata.put({
...config,
- lastSyncAt: 0,
- vectorClock: {},
+ lastSyncAt: null,
key: "sync_config",
});
- logger.info('Reset sync metadata', { lastSyncAt: 0, vectorClock: {} });
+ logger.info('Reset sync metadata for full pull');
}
/**
* Reset sync state and perform full sync from server
- * This clears lastSyncAt and vector clocks to force a complete pull
- * Useful for debugging sync issues or recovering from inconsistent state
+ * Clears lastSyncAt to force a complete pull.
+ * Useful for debugging sync issues or recovering from inconsistent state.
*/
export async function resetAndFullSync(): Promise<void> {
const config = await getSyncConfig();
@@ -55,7 +54,6 @@ export async function resetAndFullSync(): Promise<void> {
logger.info('Starting full sync reset', {
lastSyncAt: config.lastSyncAt ? new Date(config.lastSyncAt).toISOString() : null,
-    vectorClock: config.vectorClock as Record<string, number>,
pendingOps: await db.syncQueue.count(),
});
diff --git a/lib/sync/debug.ts b/lib/sync/debug.ts
index 4d50fe83..3b3cdefb 100644
--- a/lib/sync/debug.ts
+++ b/lib/sync/debug.ts
@@ -47,7 +47,7 @@ export async function debugSyncQueue() {
lastSyncAt: config?.lastSyncAt ? new Date(config.lastSyncAt).toISOString() : null,
consecutiveFailures: config?.consecutiveFailures,
nextRetryAt: config?.nextRetryAt ? new Date(config.nextRetryAt).toISOString() : null,
- vectorClock: config?.vectorClock,
+ deviceId: config?.deviceId,
});
// Get all tasks
diff --git a/lib/sync/encryption-helpers.ts b/lib/sync/encryption-helpers.ts
index a844b361..a8912fe2 100644
--- a/lib/sync/encryption-helpers.ts
+++ b/lib/sync/encryption-helpers.ts
@@ -59,29 +59,16 @@ export function getOrCreateSalt(serverEncryptionSalt?: string | null): Uint8Arra
return generateEncryptionSalt();
}
-/** Build API URL for encryption salt endpoint */
-export function buildSaltApiUrl(): string {
- return window.location.hostname === 'localhost'
- ? 'http://localhost:8787/api/auth/encryption-salt'
- : `${window.location.origin}/api/auth/encryption-salt`;
-}
-
-/** Upload salt to server for new users */
+/** Upload salt to Supabase profile for multi-device support */
export async function uploadSaltToServer(salt: Uint8Array): Promise<void> {
const { getDb } = await import('@/lib/db');
const db = getDb();
const config = await db.syncMetadata.get('sync_config');
- if (config && config.key === 'sync_config' && config.token) {
+ if (config && config.key === 'sync_config' && config.userId) {
+ const { setEncryptionSalt } = await import('@/lib/sync/supabase-sync-client');
const saltString = Array.from(salt).join(',');
- await fetch(buildSaltApiUrl(), {
- method: 'POST',
- headers: {
- 'Content-Type': 'application/json',
- 'Authorization': `Bearer ${config.token}`,
- },
- body: JSON.stringify({ encryptionSalt: saltString }),
- });
+ await setEncryptionSalt(config.userId, saltString);
}
}
@@ -113,9 +100,9 @@ export async function queueAndTriggerSync(
// Trigger automatic sync after dialog close animation
syncTimeoutRef.current = setTimeout(async () => {
try {
- const { getSyncCoordinator } = await import('@/lib/sync/sync-coordinator');
- const coordinator = getSyncCoordinator();
- await coordinator.requestSync('auto');
+ const { getSyncEngine } = await import('@/lib/sync/engine');
+ const engine = getSyncEngine();
+ await engine.sync('auto');
} catch (err) {
logger.error('Auto-sync after encryption setup failed', err instanceof Error ? err : undefined);
}
diff --git a/lib/sync/engine.ts b/lib/sync/engine.ts
index 7cc37fef..1e6f8867 100644
--- a/lib/sync/engine.ts
+++ b/lib/sync/engine.ts
@@ -3,19 +3,4 @@
* All sync logic is now modularized in lib/sync/engine/
*/
-import { SyncEngine as SyncEngineClass } from './engine/coordinator';
-
-export { SyncEngineClass as SyncEngine };
-
-// Singleton instance
-let engineInstance: SyncEngineClass | null = null;
-
-/**
- * Get or create sync engine instance
- */
-export function getSyncEngine() {
- if (!engineInstance) {
- engineInstance = new SyncEngineClass();
- }
- return engineInstance;
-}
+export { SyncEngine, getSyncEngine } from './engine/coordinator';
diff --git a/lib/sync/engine/conflict-resolver.ts b/lib/sync/engine/conflict-resolver.ts
index fd7efd2b..de239a44 100644
--- a/lib/sync/engine/conflict-resolver.ts
+++ b/lib/sync/engine/conflict-resolver.ts
@@ -1,17 +1,20 @@
/**
- * Conflict resolver - auto-resolves conflicts using last-write-wins strategy
- * Compares timestamps and applies the most recent version
+ * Conflict resolver - auto-resolves conflicts using timestamp-based LWW strategy
+ * Compares updatedAt timestamps and writes the winner to IndexedDB.
+ * Remote wins on tie to maintain consistency across devices.
*/
import { getDb } from '@/lib/db';
-import { mergeVectorClocks } from '../vector-clock';
import { createLogger } from '@/lib/logger';
import type { ConflictInfo } from '../types';
const logger = createLogger('SYNC_CONFLICT');
/**
- * Auto-resolve conflicts using last-write-wins strategy
+ * Auto-resolve conflicts using last-write-wins (LWW) strategy.
+ * Compares localUpdatedAt vs remoteUpdatedAt from each ConflictInfo.
+ * Remote wins on tie to ensure deterministic resolution across devices.
+ *
* @param conflicts - Array of conflicts to resolve
* @returns Number of conflicts successfully resolved
*/
@@ -21,7 +24,6 @@ export async function autoResolveConflicts(conflicts: ConflictInfo[]): Promise<number> {
+      // Take the newer version (remote wins the >= comparison) for deterministic resolution
+ const winner = conflict.remoteUpdatedAt >= conflict.localUpdatedAt
+ ? conflict.remote
+ : conflict.local;
- const winner = remoteTime > localTime ? conflict.remote : conflict.local;
+ const winnerLabel = winner === conflict.remote ? 'remote' : 'local';
logger.debug('Resolving conflict', {
taskId: conflict.taskId,
- localTime: new Date(localTime).toISOString(),
- remoteTime: new Date(remoteTime).toISOString(),
- winner: winner === conflict.remote ? 'remote' : 'local',
- });
-
- await db.tasks.put({
- ...winner,
- vectorClock: mergeVectorClocks(conflict.localClock, conflict.remoteClock),
+ localTime: new Date(conflict.localUpdatedAt).toISOString(),
+ remoteTime: new Date(conflict.remoteUpdatedAt).toISOString(),
+ winner: winnerLabel,
});
+ await db.tasks.put(winner);
resolved++;
} catch (error) {
const resolveError = error instanceof Error ? error : new Error('Conflict resolution failed');
- logger.error('Failed to resolve conflict', resolveError, { taskId: conflict.taskId });
+ logger.error('Failed to resolve conflict', resolveError, {
+ taskId: conflict.taskId,
+ });
}
}
diff --git a/lib/sync/engine/coordinator.ts b/lib/sync/engine/coordinator.ts
index 25697763..159b3acd 100644
--- a/lib/sync/engine/coordinator.ts
+++ b/lib/sync/engine/coordinator.ts
@@ -1,18 +1,24 @@
/**
- * Sync coordinator - orchestrates push/pull operations
- * Handles sync state, error handling, and metadata updates
+ * Sync coordinator - orchestrates push/pull operations against Supabase.
+ * Handles sync state, error recovery, and metadata updates.
+ *
+ * Key changes from the Cloudflare Workers version:
+ * - No token management / 401 retry loops (Supabase Auth handles JWTs internally)
+ * - Supabase session check replaces manual token validation
+ * - Start/stop RealtimeListener on enable/disable
+ * - Uses timestamp-based LWW instead of vector clocks
*/
+"use client";
+
+import { getSupabaseClient } from '@/lib/supabase';
import { getCryptoManager } from '../crypto';
-import { getApiClient } from '../api-client';
-import { getTokenManager } from '../token-manager';
import { getRetryManager } from '../retry-manager';
import { getQueueOptimizer } from '../queue-optimizer';
import { createLogger } from '@/lib/logger';
-import type { SyncResult, ConflictInfo, SyncConfig, VectorClock, RejectedOperation } from '../types';
+import type { SyncResult, SyncConfig } from '../types';
import { pushLocalChanges } from './push-handler';
import { pullRemoteChanges } from './pull-handler';
-import { autoResolveConflicts } from './conflict-resolver';
import { handleSyncError } from './error-handler';
import {
getSyncConfig,
@@ -24,84 +30,36 @@ import {
} from './metadata-manager';
import { recordSyncSuccess } from '@/lib/sync-history';
import {
- notifyRejectedOperations,
- notifyConflicts,
notifySyncSuccess,
} from '@/lib/sync/notifications';
+import { startRealtimeListener, stopRealtimeListener } from '../realtime-listener';
const logger = createLogger('SYNC_ENGINE');
-// ============================================================================
-// Types for internal sync operations
-// ============================================================================
-
-interface SyncContext {
-  crypto: ReturnType<typeof getCryptoManager>;
-  api: ReturnType<typeof getApiClient>;
-}
-
-/** Rejected operation with enriched operation info for notifications */
-interface RejectedOpNotification {
- taskId: string;
- reason: string;
- details: string;
- operation?: string;
-}
-
-/** Conflicted operation with enriched operation info for notifications */
-interface ConflictedOpNotification {
- taskId: string;
- reason: string;
- operation?: string;
-}
-
-/** Result from push operation */
-interface PushResult {
- accepted: string[];
- rejected: RejectedOperation[];
- conflicts: ConflictInfo[];
- serverVectorClock: VectorClock;
- rejectedOps?: RejectedOpNotification[];
- conflictedOps?: ConflictedOpNotification[];
-}
-
-/** Result from pull operation - tasks are encrypted blobs used for counting */
-interface PullResult {
- tasks: { id: string }[];
- deletedTaskIds: string[];
- conflicts: ConflictInfo[];
- serverVectorClock: VectorClock;
-}
-
-interface SyncOperationResult {
- pushResult: PushResult;
- pullResult: PullResult;
-  updatedConfig: NonNullable<Awaited<ReturnType<typeof getSyncConfig>>>;
-}
-
// ============================================================================
// SyncEngine Class
// ============================================================================
export class SyncEngine {
private isRunning = false;
- private tokenManager = getTokenManager();
private retryManager = getRetryManager();
- private queueOptimizer = getQueueOptimizer();
- // ============================================================================
- // Private Helper Methods
- // ============================================================================
+ // ==========================================================================
+ // Private Helpers
+ // ==========================================================================
- /** Check if sync can proceed based on backoff rules */
-  private async checkBackoffStatus(priority: 'user' | 'auto', config: SyncConfig): Promise<SyncResult | null> {
- if (priority === 'auto') {
+ /** Check whether backoff rules allow syncing now */
+ private async checkBackoffStatus(
+ triggeredBy: 'user' | 'auto',
+ config: SyncConfig
+  ): Promise<SyncResult | null> {
+ if (triggeredBy === 'auto') {
const canSync = await this.retryManager.canSyncNow();
if (!canSync) {
- const retryCount = await this.retryManager.getRetryCount();
logger.debug('Automatic sync blocked by retry backoff', {
- consecutiveFailures: retryCount,
- nextRetryAt: config.nextRetryAt ? new Date(config.nextRetryAt).toISOString() : null,
+ nextRetryAt: config.nextRetryAt
+ ? new Date(config.nextRetryAt).toISOString()
+ : null,
});
return { status: 'error', error: 'Sync in backoff period. Please wait before retrying.' };
}
@@ -111,14 +69,20 @@ export class SyncEngine {
return null;
}
- /** Validate and prepare sync prerequisites: token, queue optimization, crypto */
-  private async prepareSyncPrerequisites(config: SyncConfig): Promise<SyncContext> {
- const tokenValid = await this.tokenManager.ensureValidToken();
- if (!tokenValid) {
- throw new Error('Failed to refresh authentication token. Please sign in again.');
+ /** Verify that a valid Supabase auth session exists */
+  private async ensureSupabaseSession(): Promise<void> {
+ const supabase = getSupabaseClient();
+ const { data, error } = await supabase.auth.getSession();
+
+ if (error || !data.session) {
+ throw new Error('Authentication expired. Please sign in again.');
}
+ }
- const removedCount = await this.queueOptimizer.consolidateAll();
+ /** Consolidate the sync queue and verify crypto readiness */
+  private async preparePrerequisites(): Promise<void> {
+ const optimizer = getQueueOptimizer();
+ const removedCount = await optimizer.consolidateAll();
if (removedCount > 0) {
logger.debug('Queue optimization complete', { removedCount });
}
@@ -127,191 +91,131 @@ export class SyncEngine {
if (!crypto.isInitialized()) {
throw new Error('Encryption not initialized');
}
-
- const api = getApiClient(config.serverUrl);
- api.setToken(config.token);
-
- return { crypto, api };
}
- /** Execute push and pull with automatic 401 retry */
- private async executeSyncOperations(
- config: SyncConfig,
- syncContext: SyncContext
- ): Promise {
- const { crypto, api } = syncContext;
-
- try {
- const pushResult = await pushLocalChanges(config, syncContext);
- const updatedConfig = await getSyncConfig();
- if (!updatedConfig || !updatedConfig.enabled) {
- throw new Error('Sync config lost or disabled after push');
- }
-
- const pullResult = await pullRemoteChanges(updatedConfig, syncContext);
- return { pushResult, pullResult, updatedConfig };
- } catch (error: unknown) {
- return this.handleAuthRetry(error, { crypto, api });
- }
- }
-
- /** Handle 401 errors with token refresh and retry */
- private async handleAuthRetry(
- error: unknown,
- syncContext: SyncContext
- ): Promise {
- const { crypto, api } = syncContext;
-
- const errorMessage = error instanceof Error ? error.message : String(error);
- if (!errorMessage.includes('401') && !errorMessage.toLowerCase().includes('unauthorized')) {
- throw error;
- }
-
- logger.info('Received 401 error, attempting token refresh');
- const refreshed = await this.tokenManager.handleUnauthorized();
-
- if (!refreshed) {
- throw new Error('Authentication expired. Please sign in again.');
- }
-
- logger.info('Token refreshed, retrying sync operations');
- const refreshedConfig = await getSyncConfig();
- if (!refreshedConfig || !refreshedConfig.enabled) {
- throw new Error('Sync config lost after token refresh');
- }
-
- api.setToken(refreshedConfig.token);
-
- const pushResult = await pushLocalChanges(refreshedConfig, { crypto, api });
- const updatedConfig = await getSyncConfig();
- if (!updatedConfig || !updatedConfig.enabled) {
- throw new Error('Sync config lost or disabled after push');
- }
-
- const pullResult = await pullRemoteChanges(updatedConfig, { crypto, api });
- return { pushResult, pullResult, updatedConfig };
- }
-
- /** Resolve conflicts if needed based on strategy */
- private async resolveConflicts(pullResult: PullResult, config: SyncConfig): Promise<number> {
- if (pullResult.conflicts.length === 0 || config.conflictStrategy !== 'last_write_wins') {
- return 0;
- }
-
- const conflictLogger = createLogger('SYNC_CONFLICT');
- conflictLogger.info('Auto-resolving conflicts', {
- conflictCount: pullResult.conflicts.length,
- taskIds: pullResult.conflicts.map((c: ConflictInfo) => c.taskId),
- strategy: config.conflictStrategy,
- });
-
- const conflictsResolved = await autoResolveConflicts(pullResult.conflicts);
-
- conflictLogger.info('Conflicts resolved', {
- resolvedCount: conflictsResolved,
- totalConflicts: pullResult.conflicts.length,
- });
-
- return conflictsResolved;
- }
-
- /** Send user notifications about sync results */
- private notifySyncResults(pushResult: PushResult, result: SyncResult, priority: 'user' | 'auto'): void {
- if (priority !== 'user') return;
-
- if (pushResult.rejectedOps && pushResult.rejectedOps.length > 0) {
- notifyRejectedOperations(pushResult.rejectedOps, { enabled: true });
- }
- if (pushResult.conflictedOps && pushResult.conflictedOps.length > 0) {
- notifyConflicts(pushResult.conflictedOps, { enabled: true });
- }
- if (result.status === 'success') {
- notifySyncSuccess(result.pushedCount || 0, result.pulledCount || 0, result.conflictsResolved || 0, { enabled: true });
- }
- }
-
- /** Record sync to history (best-effort) */
- private async recordToHistory(
- result: SyncResult,
- deviceId: string,
- priority: 'user' | 'auto',
- syncDuration: number
- ): Promise<void> {
- try {
- await recordSyncSuccess(
- result.pushedCount || 0,
- result.pulledCount || 0,
- result.conflictsResolved || 0,
- deviceId,
- priority,
- syncDuration
- );
- } catch (historyError) {
- logger.error('Failed to record sync success to history', historyError instanceof Error ? historyError : new Error(String(historyError)));
- }
+ /** Build and return the final SyncResult */
+ private buildSyncResult(
+ pushAccepted: number,
+ pulledCount: number,
+ conflictsResolved: number,
+ syncEndTime: number
+ ): SyncResult {
+ return {
+ status: 'success',
+ pushedCount: pushAccepted,
+ pulledCount,
+ conflictsResolved,
+ conflicts: [],
+ timestamp: syncEndTime,
+ };
}
- // ============================================================================
+ // ==========================================================================
// Public API
- // ============================================================================
+ // ==========================================================================
/**
- * One-button sync - push local changes, pull remote changes
- * @param priority - 'user' for manual sync (bypasses backoff), 'auto' for automatic sync (respects backoff)
+ * One-button sync: push local changes, then pull remote changes.
+ * @param triggeredBy - 'user' for manual sync (bypasses backoff), 'auto' for background sync
*/
- async sync(priority: 'user' | 'auto' = 'auto'): Promise {
+ async sync(triggeredBy: 'user' | 'auto' = 'auto'): Promise<SyncResult> {
if (this.isRunning) {
logger.debug('Sync already running, skipping');
return { status: 'already_running' };
}
- let pushResult: PushResult | null = null;
- let pullResult: PullResult | null = null;
let syncStartTime = Date.now();
try {
this.isRunning = true;
- logger.info('Starting sync operation', { priority });
+ logger.info('Starting sync operation', { triggeredBy });
- // Phase 1: Validate config and check backoff
+ // Phase 1: Validate config
const config = await getSyncConfig();
if (!config || !config.enabled) {
throw new Error('Sync not configured');
}
+ if (!config.userId) {
+ throw new Error('Sync user ID not set');
+ }
- const backoffResult = await this.checkBackoffStatus(priority, config);
+ const backoffResult = await this.checkBackoffStatus(triggeredBy, config);
if (backoffResult) return backoffResult;
- this.logConfigStatus(config);
-
- // Phase 2: Prepare prerequisites (token, queue optimization, crypto)
- const syncContext = await this.prepareSyncPrerequisites(config);
- syncStartTime = Date.now();
- this.logTimingWindow(config, syncStartTime);
+ logger.debug('Sync config loaded', {
+ deviceId: config.deviceId,
+ userId: config.userId,
+ lastSyncAt: config.lastSyncAt
+ ? new Date(config.lastSyncAt).toISOString()
+ : null,
+ });
- // Phase 3: Execute push/pull operations
- const operationResult = await this.executeSyncOperations(config, syncContext);
- pushResult = operationResult.pushResult;
- pullResult = operationResult.pullResult;
- const { updatedConfig } = operationResult;
+ // Phase 2: Verify Supabase session and prepare prerequisites
+ await this.ensureSupabaseSession();
+ await this.preparePrerequisites();
- // Phase 4: Resolve conflicts
- const conflictsResolved = await this.resolveConflicts(pullResult, updatedConfig);
- const conflicts: ConflictInfo[] = [...pullResult.conflicts];
-
- // Phase 5: Finalize sync
- await updateSyncMetadata(updatedConfig, pullResult.serverVectorClock, syncStartTime);
+ syncStartTime = Date.now();
+ logger.debug('Sync timing window captured', {
+ syncStartTime,
+ previousLastSyncAt: config.lastSyncAt,
+ timeSinceLastSync: config.lastSyncAt
+ ? `${syncStartTime - config.lastSyncAt}ms`
+ : 'initial sync',
+ });
+
+ // Phase 3: Push local pending changes
+ const crypto = getCryptoManager();
+ const pushResult = await pushLocalChanges({
+ crypto,
+ userId: config.userId,
+ deviceId: config.deviceId,
+ });
+
+ // Phase 4: Pull remote changes
+ const lastSyncIso = config.lastSyncAt
+ ? new Date(config.lastSyncAt).toISOString()
+ : null;
+
+ const pullResult = await pullRemoteChanges(lastSyncIso, {
+ crypto,
+ userId: config.userId,
+ });
+
+ // Phase 5: Update metadata and record success
+ await updateSyncMetadata(config, syncStartTime);
await this.retryManager.recordSuccess();
const syncEndTime = Date.now();
const syncDuration = syncEndTime - syncStartTime;
- const result = this.buildSyncResult(pushResult, pullResult, conflicts, conflictsResolved, updatedConfig, syncEndTime);
- this.logSyncComplete(result, syncDuration);
+ const totalConflicts = pushResult.conflictCount + pullResult.conflictCount;
+ const result = this.buildSyncResult(
+ pushResult.acceptedCount,
+ pullResult.pulledCount,
+ totalConflicts,
+ syncEndTime
+ );
+
+ logger.info('Sync operation complete', {
+ status: result.status,
+ pushedCount: result.pushedCount,
+ pulledCount: result.pulledCount,
+ deletedCount: pullResult.deletedCount,
+ conflictsResolved: result.conflictsResolved,
+ syncDuration: `${syncDuration}ms`,
+ });
// Phase 6: Notifications and history
- this.notifySyncResults(pushResult, result, priority);
- await this.recordToHistory(result, config.deviceId, priority, syncDuration);
+ if (triggeredBy === 'user') {
+ notifySyncSuccess(
+ result.pushedCount || 0,
+ result.pulledCount || 0,
+ result.conflictsResolved || 0,
+ { enabled: true }
+ );
+ }
+
+ await this.recordToHistory(result, config.deviceId, triggeredBy, syncDuration);
return result;
} catch (error) {
@@ -320,12 +224,9 @@ export class SyncEngine {
return await handleSyncError(
error,
- pushResult,
- pullResult,
this.retryManager,
- this.tokenManager,
deviceId,
- priority,
+ triggeredBy,
syncStartTime
);
} finally {
@@ -333,84 +234,71 @@ export class SyncEngine {
}
}
- /** Log current config status for debugging */
- private logConfigStatus(config: SyncConfig): void {
- logger.debug('Sync config loaded', {
- deviceId: config.deviceId,
- userId: config.userId || undefined,
- lastSyncAt: config.lastSyncAt ? new Date(config.lastSyncAt).toISOString() : undefined,
- vectorClock: config.vectorClock,
- consecutiveFailures: config.consecutiveFailures,
- });
+ /** Record sync to history (best-effort, never throws) */
+ private async recordToHistory(
+ result: SyncResult,
+ deviceId: string,
+ triggeredBy: 'user' | 'auto',
+ syncDuration: number
+ ): Promise<void> {
+ try {
+ await recordSyncSuccess(
+ result.pushedCount || 0,
+ result.pulledCount || 0,
+ result.conflictsResolved || 0,
+ deviceId,
+ triggeredBy,
+ syncDuration
+ );
+ } catch (historyError) {
+ logger.error(
+ 'Failed to record sync success to history',
+ historyError instanceof Error ? historyError : new Error(String(historyError))
+ );
+ }
}
- /** Log sync timing window for debugging */
- private logTimingWindow(config: SyncConfig, syncStartTime: number): void {
- logger.debug('Sync timing window captured', {
- syncStartTime,
- previousLastSyncAt: config.lastSyncAt,
- timeSinceLastSync: config.lastSyncAt ? `${syncStartTime - config.lastSyncAt}ms` : 'initial sync',
- });
+ /** Check if sync is enabled */
+ async isEnabled(): Promise<boolean> {
+ return isSyncEnabled();
}
- /** Build the final SyncResult object */
- private buildSyncResult(
- pushResult: PushResult,
- pullResult: PullResult,
- conflicts: ConflictInfo[],
- conflictsResolved: number,
- config: SyncConfig,
- syncEndTime: number
- ): SyncResult {
- return {
- status: conflicts.length > 0 && config.conflictStrategy === 'manual' ? 'conflict' : 'success',
- pushedCount: pushResult.accepted.length,
- pulledCount: pullResult.tasks.length,
- conflictsResolved,
- conflicts: config.conflictStrategy === 'manual' ? conflicts : [],
- timestamp: syncEndTime,
- };
+ /** Get current sync status */
+ async getStatus() {
+ return getSyncStatus(this.isRunning);
}
- /** Log sync completion details */
- private logSyncComplete(result: SyncResult, syncDuration: number): void {
- logger.info('Sync operation complete', {
- status: result.status,
- pushedCount: result.pushedCount,
- pulledCount: result.pulledCount,
- conflictsResolved: result.conflictsResolved,
- conflictsRemaining: result.conflicts?.length || 0,
- syncDuration: `${syncDuration}ms`,
- });
+ /** Update sync configuration */
+ async updateConfig(updates: Partial<SyncConfig>): Promise<void> {
+ return updateConfigMetadata(updates);
}
- /**
- * Update sync configuration
- */
- async updateConfig(updates: Partial): Promise {
- return updateConfigMetadata(updates);
+ /** Queue all existing tasks for initial sync */
+ async queueExistingTasks(): Promise<number> {
+ return queueAllExistingTasks();
}
- /**
- * Check if sync is enabled
- */
- async isEnabled(): Promise {
- return isSyncEnabled();
+ /** Start Realtime listener for cross-device push notifications */
+ startRealtime(userId: string, deviceId: string): void {
+ startRealtimeListener(userId, deviceId);
}
- /**
- * Get current sync status
- */
- async getStatus() {
- return getSyncStatus(this.isRunning);
+ /** Stop Realtime listener */
+ stopRealtime(): void {
+ stopRealtimeListener();
}
+}
- /**
- * Queue all existing tasks for initial sync
- * Called when sync is first enabled or re-enabled
- * @returns Number of tasks queued
- */
- async queueExistingTasks(): Promise {
- return queueAllExistingTasks();
+// ============================================================================
+// Singleton
+// ============================================================================
+
+let syncEngineInstance: SyncEngine | null = null;
+
+/** Get or create singleton SyncEngine instance */
+export function getSyncEngine(): SyncEngine {
+ if (!syncEngineInstance) {
+ syncEngineInstance = new SyncEngine();
}
+ return syncEngineInstance;
}
diff --git a/lib/sync/engine/error-handler.ts b/lib/sync/engine/error-handler.ts
index 9dfd7e4d..f218eecd 100644
--- a/lib/sync/engine/error-handler.ts
+++ b/lib/sync/engine/error-handler.ts
@@ -1,38 +1,29 @@
/**
* Error handler - handles sync errors with categorized recovery strategies
- * Manages transient, auth, and permanent errors with appropriate retry logic
+ * Manages transient, auth, and permanent errors with appropriate retry logic.
+ * No token management — Supabase Auth handles JWT refresh internally.
*/
import { getSyncQueue } from '../queue';
import { categorizeError } from '../error-categorizer';
import { createLogger } from '@/lib/logger';
import type { RetryManager } from '../retry-manager';
-import type { TokenManager } from '../token-manager';
import type { SyncResult } from '../types';
import { recordSyncError } from '@/lib/sync-history';
import { notifySyncError } from '@/lib/sync/notifications';
const logger = createLogger('SYNC_ERROR');
-/** Partial push result for error logging context */
-interface PartialPushResult {
- accepted: string[];
-}
-
-/** Partial pull result for error logging context */
-interface PartialPullResult {
- tasks: unknown[];
-}
-
/**
- * Handle sync error with categorized recovery strategy
+ * Handle sync error with categorized recovery strategy.
+ *
+ * - Transient errors: record failure and schedule retry with exponential backoff.
+ * - Auth errors: prompt user to sign in again (Supabase handles token refresh).
+ * - Permanent errors: log and surface to user without retry.
*/
export async function handleSyncError(
error: unknown,
- pushResult: PartialPushResult | null,
- pullResult: PartialPullResult | null,
retryManager: RetryManager,
- tokenManager: TokenManager,
deviceId: string,
triggeredBy: 'user' | 'auto',
syncStartTime: number
@@ -40,21 +31,16 @@ export async function handleSyncError(
const syncError = error instanceof Error ? error : new Error('Sync failed');
const errorCategory = categorizeError(syncError);
- // Get operation counts for logging
const queue = getSyncQueue();
const pendingCount = await queue.getPendingCount();
- // Log error with context
logger.error(`Sync operation failed: ${syncError.message}`, syncError, {
category: errorCategory,
- pushed: pushResult?.accepted.length || 0,
- pulled: pullResult?.tasks.length || 0,
pendingCount,
});
- // Record sync error to history (best-effort, don't block retry logic if history write fails)
- const syncEndTime = Date.now();
- const syncDuration = syncEndTime - syncStartTime;
+ // Record sync error to history (best-effort)
+ const syncDuration = Date.now() - syncStartTime;
try {
await recordSyncError(syncError.message, deviceId, triggeredBy, syncDuration);
} catch (historyError) {
@@ -64,110 +50,94 @@ export async function handleSyncError(
);
}
- // Handle transient errors: log, record failure, schedule retry
if (errorCategory === 'transient') {
- await retryManager.recordFailure(syncError);
-
- const retryCount = await retryManager.getRetryCount();
- const shouldRetry = await retryManager.shouldRetry();
-
- logger.error(`Transient error - will retry with backoff: ${syncError.message}`, syncError, {
- consecutiveFailures: retryCount,
- shouldRetry,
- nextRetryDelay: shouldRetry ? `${retryManager.getNextRetryDelay(retryCount) / 1000}s` : 'max retries exceeded',
- });
-
- if (shouldRetry) {
- const delay = retryManager.getNextRetryDelay(retryCount);
- const errorMsg = `Network error. Will retry automatically in ${Math.round(delay / 1000)}s.`;
-
- // Notify user for manual syncs
- if (triggeredBy === 'user') {
- notifySyncError(errorMsg, false, { enabled: true });
- }
-
- return {
- status: 'error',
- error: errorMsg,
- };
- } else {
- const errorMsg = 'Sync failed after multiple retries. Please check your connection and try again.';
-
- // Notify user for manual syncs (permanent failure)
- if (triggeredBy === 'user') {
- notifySyncError(errorMsg, true, { enabled: true });
- }
-
- return {
- status: 'error',
- error: errorMsg,
- };
- }
+ return handleTransientError(syncError, retryManager, triggeredBy);
}
- // Handle auth errors: log, attempt token refresh, retry once
if (errorCategory === 'auth') {
- logger.info('Authentication error - attempting token refresh');
-
- const refreshed = await tokenManager.handleUnauthorized();
-
- if (refreshed) {
- logger.info('Token refreshed successfully - user should retry sync');
- const errorMsg = 'Authentication refreshed. Please try syncing again.';
-
- // Notify user for manual syncs
- if (triggeredBy === 'user') {
- notifySyncError(errorMsg, false, { enabled: true });
- }
-
- return {
- status: 'error',
- error: errorMsg,
- };
- } else {
- logger.warn('Token refresh failed - user must re-authenticate');
- const errorMsg = 'Authentication expired. Please sign in again.';
-
- // Notify user (permanent auth failure)
- if (triggeredBy === 'user') {
- notifySyncError(errorMsg, true, { enabled: true });
- }
-
- return {
- status: 'error',
- error: errorMsg,
- };
- }
+ return handleAuthError(triggeredBy);
}
- // Handle permanent errors: log, notify user, don't retry
if (errorCategory === 'permanent') {
- logger.error(`Permanent error - will not retry: ${syncError.message}`, syncError, {
- category: 'permanent',
- errorType: syncError.constructor.name,
- });
+ return handlePermanentError(syncError, triggeredBy);
+ }
+
+ // Fallback for uncategorized errors (treat as transient)
+ logger.warn('Uncategorized error — treating as transient');
+ await retryManager.recordFailure(syncError);
+
+ return { status: 'error', error: syncError.message };
+}
+
+/**
+ * Handle transient errors: record failure and optionally schedule retry
+ */
+async function handleTransientError(
+ syncError: Error,
+ retryManager: RetryManager,
+ triggeredBy: 'user' | 'auto'
+): Promise<SyncResult> {
+ await retryManager.recordFailure(syncError);
+
+ const retryCount = await retryManager.getRetryCount();
+ const shouldRetry = await retryManager.shouldRetry();
+
+ logger.error(`Transient error — will retry with backoff: ${syncError.message}`, syncError, {
+ consecutiveFailures: retryCount,
+ shouldRetry,
+ });
- const errorMsg = `Sync error: ${syncError.message}. Please check your data and try again.`;
+ if (shouldRetry) {
+ const delay = retryManager.getNextRetryDelay(retryCount);
+ const errorMsg = `Network error. Will retry automatically in ${Math.round(delay / 1000)}s.`;
- // Notify user (permanent error)
if (triggeredBy === 'user') {
- notifySyncError(errorMsg, true, { enabled: true });
+ notifySyncError(errorMsg, false, { enabled: true });
}
- // Don't record failure for permanent errors (no retry needed)
- return {
- status: 'error',
- error: errorMsg,
- };
+ return { status: 'error', error: errorMsg };
}
- // Fallback for uncategorized errors (treat as transient)
- logger.warn('Uncategorized error - treating as transient');
+ const errorMsg = 'Sync failed after multiple retries. Please check your connection and try again.';
- await retryManager.recordFailure(syncError);
+ if (triggeredBy === 'user') {
+ notifySyncError(errorMsg, true, { enabled: true });
+ }
+
+ return { status: 'error', error: errorMsg };
+}
+
+/**
+ * Handle auth errors: prompt user to re-authenticate.
+ * No token refresh — Supabase Auth handles JWT lifecycle internally.
+ */
+function handleAuthError(triggeredBy: 'user' | 'auto'): SyncResult {
+ logger.warn('Authentication error — user must sign in again');
+ const errorMsg = 'Authentication expired. Please sign in again.';
+
+ if (triggeredBy === 'user') {
+ notifySyncError(errorMsg, true, { enabled: true });
+ }
+
+ return { status: 'error', error: errorMsg };
+}
+
+/**
+ * Handle permanent errors: log, notify, no retry
+ */
+function handlePermanentError(
+ syncError: Error,
+ triggeredBy: 'user' | 'auto'
+): SyncResult {
+ logger.error(`Permanent error — will not retry: ${syncError.message}`, syncError, {
+ category: 'permanent',
+ });
+
+ const errorMsg = `Sync error: ${syncError.message}. Please check your data and try again.`;
+
+ if (triggeredBy === 'user') {
+ notifySyncError(errorMsg, true, { enabled: true });
+ }
- return {
- status: 'error',
- error: syncError.message,
- };
+ return { status: 'error', error: errorMsg };
}
diff --git a/lib/sync/engine/metadata-manager.ts b/lib/sync/engine/metadata-manager.ts
index 951d54c7..51643937 100644
--- a/lib/sync/engine/metadata-manager.ts
+++ b/lib/sync/engine/metadata-manager.ts
@@ -1,31 +1,27 @@
/**
* Metadata manager - handles sync configuration and metadata updates
- * Manages IndexedDB sync_config record and task queueing
+ * Manages IndexedDB sync_config record and task queueing.
+ * Vector clocks have been removed — timestamps drive conflict resolution.
*/
import { getDb } from '@/lib/db';
import { getSyncQueue } from '../queue';
-import { mergeVectorClocks } from '../vector-clock';
import { createLogger } from '@/lib/logger';
-import type { SyncConfig, VectorClock } from '../types';
+import type { SyncConfig } from '../types';
const logger = createLogger('SYNC_METADATA');
/**
- * Update sync metadata after successful sync
+ * Update sync metadata after a successful sync cycle.
+ * Uses the sync START time as lastSyncAt to avoid a race condition:
+ * tasks modified after sync starts will be caught in the next cycle.
*/
export async function updateSyncMetadata(
config: SyncConfig,
- serverClock: VectorClock,
syncStartTime: number
): Promise<void> {
const db = getDb();
- const mergedClock = mergeVectorClocks(config.vectorClock, serverClock);
-
- // Use sync START time to prevent race condition
- // Tasks modified after sync starts will be caught in the next sync
- // Server uses >= comparison, so no adjustment needed
logger.debug('Updating sync metadata', {
previousLastSyncAt: config.lastSyncAt ? new Date(config.lastSyncAt).toISOString() : null,
newLastSyncAt: new Date(syncStartTime).toISOString(),
@@ -35,7 +31,6 @@ export async function updateSyncMetadata(
await db.syncMetadata.put({
...config,
lastSyncAt: syncStartTime,
- vectorClock: mergedClock,
key: 'sync_config',
});
}
export async function getSyncConfig(): Promise<SyncConfig | null> {
}
/**
- * Update sync configuration
+ * Update sync configuration with partial changes
*/
export async function updateConfig(updates: Partial<SyncConfig>): Promise<void> {
const db = getDb();
export async function isEnabled(): Promise<boolean> {
}
/**
- * Get current sync status
+ * Get current sync status (combines config, queue state, and running flag)
*/
export async function getStatus(isRunning: boolean) {
const config = await getSyncConfig();
@@ -92,8 +87,10 @@ export async function getStatus(isRunning: boolean) {
}
/**
- * Queue all existing tasks for initial sync
- * Called when sync is first enabled or re-enabled
+ * Queue all existing tasks for initial sync.
+ * Called when sync is first enabled or re-enabled.
+ * Skips tasks that are already queued to avoid duplicates.
+ *
* @returns Number of tasks queued
*/
export async function queueExistingTasks(): Promise<number> {
@@ -108,11 +105,9 @@ export async function queueExistingTasks(): Promise {
logger.info('Queueing existing tasks for initial sync');
- // Get all tasks from IndexedDB
const allTasks = await db.tasks.toArray();
logger.debug('Found tasks in IndexedDB', { taskCount: allTasks.length });
- // Get all pending operations to check for duplicates
const pendingOps = await queue.getPending();
const queuedTaskIds = new Set(pendingOps.map(op => op.taskId));
@@ -120,28 +115,17 @@ export async function queueExistingTasks(): Promise {
let skippedCount = 0;
for (const task of allTasks) {
- // Skip if already in queue
if (queuedTaskIds.has(task.id)) {
- logger.debug('Skipping task - already in queue', { taskId: task.id });
+ logger.debug('Skipping task — already in queue', { taskId: task.id });
skippedCount++;
continue;
}
- // Queue as 'create' operation with current vector clock
- await queue.enqueue(
- 'create',
- task.id,
- task,
- task.vectorClock || {}
- );
-
+ await queue.enqueue('create', task.id, task);
queuedCount++;
}
- logger.info('Initial task queueing complete', {
- queuedCount,
- skippedCount,
- });
+ logger.info('Initial task queueing complete', { queuedCount, skippedCount });
return queuedCount;
}
diff --git a/lib/sync/engine/pull-handler.ts b/lib/sync/engine/pull-handler.ts
index 78b46915..0768b82a 100644
--- a/lib/sync/engine/pull-handler.ts
+++ b/lib/sync/engine/pull-handler.ts
@@ -1,15 +1,13 @@
/**
- * Pull handler - pulls remote changes from server
- * Handles decryption, conflict detection, and local database updates
+ * Pull handler - pulls remote changes from Supabase
+ * Handles decryption, LWW conflict resolution, and local database updates
*/
import { getDb } from '@/lib/db';
import { taskRecordSchema } from '@/lib/schema';
-import { mergeVectorClocks } from '../vector-clock';
+import { pullTasksSince, pullDeletedTaskIds } from '../supabase-sync-client';
import { createLogger } from '@/lib/logger';
import type { CryptoManager } from '../crypto';
-import type { SyncApiClient } from '../api-client';
-import type { SyncConfig, ConflictInfo } from '../types';
const logger = createLogger('SYNC_PULL');
@@ -18,129 +16,80 @@ const logger = createLogger('SYNC_PULL');
*/
export interface PullContext {
crypto: CryptoManager;
- api: SyncApiClient;
+ userId: string;
}
/**
- * Pull remote changes from server
+ * Pull remote changes from Supabase.
+ * Fetches updated and deleted tasks since lastSyncAt, decrypts each payload,
+ * applies LWW resolution against local state, and removes locally-deleted tasks.
*/
export async function pullRemoteChanges(
- config: SyncConfig,
+ lastSyncAt: string | null,
context: PullContext
-) {
- const { crypto, api } = context;
+): Promise<{
+ pulledCount: number;
+ deletedCount: number;
+ conflictCount: number;
+}> {
+ const { crypto, userId } = context;
const db = getDb();
- logger.debug('Starting pull phase', {
- deviceId: config.deviceId,
- sinceTimestamp: config.lastSyncAt,
- });
+ logger.debug('Starting pull phase', { sinceTimestamp: lastSyncAt });
- let response;
- try {
- response = await api.pull({
- deviceId: config.deviceId,
- lastVectorClock: config.vectorClock,
- sinceTimestamp: config.lastSyncAt || undefined,
- limit: 50,
- });
-
- logger.info('Pull response received', {
- tasksCount: response.tasks.length,
- deletedCount: response.deletedTaskIds.length,
- conflictsCount: response.conflicts.length,
- });
- } catch (error) {
- const pullError = error instanceof Error ? error : new Error('Pull failed');
- logger.error('Pull operation failed', pullError, {
- url: config.serverUrl,
- });
- throw error;
- }
+ const [encryptedTasks, deletedIds] = await Promise.all([
+ pullTasksSince(userId, lastSyncAt),
+ pullDeletedTaskIds(userId, lastSyncAt),
+ ]);
- const conflicts: ConflictInfo[] = [];
+ logger.info('Pull response received', {
+ tasksCount: encryptedTasks.length,
+ deletedCount: deletedIds.length,
+ });
- logger.debug('Processing tasks from server', { taskCount: response.tasks.length });
+ let pulledCount = 0;
+ let conflictCount = 0;
- for (const encTask of response.tasks) {
+ for (const encTask of encryptedTasks) {
try {
- logger.debug('Processing task', {
- taskId: encTask.id,
- vectorClock: encTask.vectorClock,
- updatedAt: new Date(encTask.updatedAt).toISOString(),
- });
-
- const decrypted = await crypto.decrypt(encTask.encryptedBlob, encTask.nonce);
+ const decrypted = await crypto.decrypt(encTask.encrypted_blob, encTask.nonce);
const task = taskRecordSchema.parse(JSON.parse(decrypted));
- logger.debug('Task decrypted', {
- taskId: task.id,
- title: task.title,
- completed: task.completed,
- });
-
- // Check for local conflicts
const localTask = await db.tasks.get(task.id);
if (localTask) {
- logger.debug('Found local version of task', {
- taskId: task.id,
- localUpdatedAt: localTask.updatedAt,
- remoteUpdatedAt: task.updatedAt,
- });
-
- // BULLETPROOF: Use timestamp comparison for conflict detection
const localTime = new Date(localTask.updatedAt).getTime();
- const remoteTime = new Date(task.updatedAt).getTime();
+ const remoteTime = new Date(encTask.updated_at).getTime();
- // If remote is newer OR same time (use remote as source of truth), apply it
if (remoteTime >= localTime) {
logger.debug('Applying remote version (newer or equal)', { taskId: task.id });
} else {
logger.debug('Keeping local version (newer)', { taskId: task.id });
- continue; // Skip this task, keep local version
+ conflictCount++;
+ continue;
}
- } else {
- logger.debug('Creating new task from remote', { taskId: task.id });
}
- // Apply the remote version
- const existingClock = localTask?.vectorClock || {};
- const mergedClock = mergeVectorClocks(existingClock, encTask.vectorClock);
-
- await db.tasks.put({
- ...task,
- vectorClock: mergedClock,
- });
-
- logger.debug('Task saved successfully', { taskId: task.id });
+ await db.tasks.put(task);
+ pulledCount++;
+ logger.debug('Task saved', { taskId: task.id });
} catch (error) {
const processError = error instanceof Error ? error : new Error('Task processing failed');
- logger.error('Failed to process task', processError, { taskId: encTask.id });
+ logger.error('Failed to process pulled task', processError, {
+ taskId: encTask.id,
+ });
}
}
- logger.debug('Finished processing tasks', { processedCount: response.tasks.length });
-
// Apply deletions
- if (response.deletedTaskIds.length > 0) {
- logger.debug('Deleting tasks', {
- deleteCount: response.deletedTaskIds.length,
- taskIds: response.deletedTaskIds,
- });
- await db.tasks.bulkDelete(response.deletedTaskIds);
+ let deletedCount = 0;
+ if (deletedIds.length > 0) {
+ logger.debug('Deleting locally-cached tasks', { deleteCount: deletedIds.length });
+ await db.tasks.bulkDelete(deletedIds);
+ deletedCount = deletedIds.length;
}
- logger.debug('Pull phase complete', {
- tasksProcessed: response.tasks.length,
- tasksDeleted: response.deletedTaskIds.length,
- conflictsCount: conflicts.length,
- });
+ logger.info('Pull phase complete', { pulledCount, deletedCount, conflictCount });
- return {
- tasks: response.tasks,
- deletedTaskIds: response.deletedTaskIds,
- serverVectorClock: response.serverVectorClock,
- conflicts,
- };
+ return { pulledCount, deletedCount, conflictCount };
}
diff --git a/lib/sync/engine/push-handler.ts b/lib/sync/engine/push-handler.ts
index e1575675..0770a384 100644
--- a/lib/sync/engine/push-handler.ts
+++ b/lib/sync/engine/push-handler.ts
@@ -1,12 +1,11 @@
/**
- * Push handler - pushes local pending changes to server
- * Handles encryption, queue management, and server response processing
+ * Push handler - pushes local pending changes to Supabase
+ * Handles encryption, queue management, and conflict detection
*/
import type { CryptoManager } from '../crypto';
-import type { SyncApiClient } from '../api-client';
-import type { SyncConfig, SyncOperation } from '../types';
import { getSyncQueue } from '../queue';
+import { pushEncryptedTask, softDeleteTask } from '../supabase-sync-client';
import { createLogger } from '@/lib/logger';
const logger = createLogger('SYNC_PUSH');
@@ -16,207 +15,139 @@ const logger = createLogger('SYNC_PUSH');
*/
export interface PushContext {
crypto: CryptoManager;
- api: SyncApiClient;
+ userId: string;
+ deviceId: string;
}
/**
- * Push local pending changes to server
+ * Push local pending changes to Supabase
+ * Processes each operation individually: encrypts and upserts for create/update,
+ * soft-deletes for delete operations.
*/
-export async function pushLocalChanges(
- config: SyncConfig,
- context: PushContext
-) {
- const { crypto, api } = context;
+export async function pushLocalChanges(context: PushContext): Promise<{
+ acceptedCount: number;
+ conflictCount: number;
+ errorCount: number;
+}> {
+ const { crypto, userId, deviceId } = context;
const queue = getSyncQueue();
const pendingOps = await queue.getPending();
logger.debug('Starting push phase', { pendingCount: pendingOps.length });
- if (pendingOps.length > 0) {
- logger.debug('Pending operations', {
- operations: pendingOps.map(op => ({
- operation: op.operation,
- taskId: op.taskId,
- queueId: op.id,
- })),
- });
- }
-
if (pendingOps.length === 0) {
logger.debug('No pending operations, skipping push');
- return { accepted: [], rejected: [], conflicts: [], serverVectorClock: {} };
+ return { acceptedCount: 0, conflictCount: 0, errorCount: 0 };
}
- // Encrypt and prepare operations
- // Track mapping between taskId and queue item IDs for proper cleanup
- const operations: SyncOperation[] = [];
- const taskIdToQueueIds = new Map<string, string[]>();
+ let acceptedCount = 0;
+ let conflictCount = 0;
+ let errorCount = 0;
for (const op of pendingOps) {
try {
- logger.debug('Preparing operation', {
- operation: op.operation,
- taskId: op.taskId,
- vectorClock: op.vectorClock,
- });
-
- // Track which queue items correspond to this taskId
- if (!taskIdToQueueIds.has(op.taskId)) {
- taskIdToQueueIds.set(op.taskId, []);
+ if (op.operation === 'delete') {
+ await processDeleteOp(op.id, op.taskId, userId, deviceId, queue);
+ acceptedCount++;
+ continue;
}
- taskIdToQueueIds.get(op.taskId)!.push(op.id);
- if (op.operation === 'delete') {
- operations.push({
- type: 'delete',
+ if (!op.payload) {
+ logger.warn('Skipping operation with no payload', {
taskId: op.taskId,
- vectorClock: op.vectorClock,
+ operation: op.operation,
});
- } else if (op.payload) {
- const plaintext = JSON.stringify(op.payload);
- const { ciphertext, nonce } = await crypto.encrypt(plaintext);
- const checksum = await crypto.hash(plaintext);
+ errorCount++;
+ continue;
+ }
- operations.push({
- type: op.operation,
- taskId: op.taskId,
- encryptedBlob: ciphertext,
- nonce,
- vectorClock: op.vectorClock,
- checksum,
- });
+ const result = await processUpsertOp(
+ op.id,
+ op.taskId,
+ op.payload,
+ crypto,
+ userId,
+ deviceId,
+ queue
+ );
+
+ if (result === 'accepted') {
+ acceptedCount++;
+ } else if (result === 'conflict') {
+ conflictCount++;
}
} catch (error) {
- const encryptError = error instanceof Error ? error : new Error('Encryption failed');
- logger.error('Failed to encrypt task', encryptError, {
+ const opError = error instanceof Error ? error : new Error('Operation failed');
+ logger.error('Failed to push operation', opError, {
taskId: op.taskId,
operation: op.operation,
});
- continue;
+ await queue.incrementRetry(op.id);
+ errorCount++;
}
}
- logger.info('Pushing operations to server', { operationCount: operations.length });
+ logger.info('Push phase complete', { acceptedCount, conflictCount, errorCount });
- // Push to server
- let response;
- try {
- response = await api.push({
- deviceId: config.deviceId,
- operations,
- clientVectorClock: config.vectorClock,
- });
-
- logger.info('Push response received', {
- accepted: response.accepted.length,
- rejected: response.rejected.length,
- conflicts: response.conflicts.length,
- });
- } catch (error) {
- const pushError = error instanceof Error ? error : new Error('Push failed');
- logger.error('Push operation failed', pushError, {
- operationCount: operations.length,
- url: config.serverUrl,
- });
- throw error;
- }
+ return { acceptedCount, conflictCount, errorCount };
+}
- // Remove accepted operations from queue
- // IMPORTANT: Remove ALL queue items that correspond to accepted taskIds
- // This handles cases where consolidation merged multiple operations
- if (response.accepted.length > 0) {
- const acceptedQueueIds: string[] = [];
-
- for (const acceptedTaskId of response.accepted) {
- const queueIds = taskIdToQueueIds.get(acceptedTaskId);
- if (queueIds) {
- acceptedQueueIds.push(...queueIds);
- } else {
- logger.warn('Server accepted taskId but no queue items found', {
- taskId: acceptedTaskId,
- });
- }
- }
+/**
+ * Process a delete operation: soft-delete on Supabase, then dequeue
+ */
+async function processDeleteOp(
+ queueId: string,
+ taskId: string,
+ userId: string,
+ deviceId: string,
+ queue: ReturnType<typeof getSyncQueue>
+): Promise<void> {
+ logger.debug('Pushing delete operation', { taskId });
+ await softDeleteTask(taskId, userId, deviceId);
+ await queue.dequeue(queueId);
+ logger.debug('Delete operation accepted', { taskId });
+}
- logger.debug('Removing accepted operations from queue', {
- queueIdsCount: acceptedQueueIds.length,
- acceptedTaskIds: response.accepted,
+/**
+ * Process a create/update operation: encrypt, push to Supabase, handle conflicts
+ * Returns 'accepted' or 'conflict' based on the server response.
+ */
+async function processUpsertOp(
+ queueId: string,
+ taskId: string,
+ payload: NonNullable,
+ crypto: CryptoManager,
+ userId: string,
+ deviceId: string,
+ queue: ReturnType<typeof getSyncQueue>
+): Promise<'accepted' | 'conflict'> {
+ const plaintext = JSON.stringify(payload);
+ const { ciphertext, nonce } = await crypto.encrypt(plaintext);
+ const checksum = await crypto.hash(plaintext);
+
+ logger.debug('Pushing upsert operation', { taskId });
+
+ const result = await pushEncryptedTask({
+ id: taskId,
+ userId,
+ encryptedBlob: ciphertext,
+ nonce,
+ checksum,
+ deviceId,
+ });
+
+ if (result.conflict) {
+ // Version mismatch — remove from queue; the pull phase will fetch
+ // the authoritative remote version and apply LWW resolution.
+ logger.debug('Conflict detected, deferring to pull phase', {
+ taskId,
+ serverVersion: result.newVersion,
});
-
- if (acceptedQueueIds.length > 0) {
- await queue.dequeueBulk(acceptedQueueIds);
- }
-
- // Verify removal
- const remainingCount = await queue.getPendingCount();
- logger.debug('Queue cleanup complete', { remainingCount });
-
- // Double-check: log any remaining operations for accepted tasks
- if (remainingCount > 0) {
- const remaining = await queue.getPending();
- const orphanedOps = remaining.filter(op => response.accepted.includes(op.taskId));
- if (orphanedOps.length > 0) {
- logger.error('Found orphaned operations for accepted tasks', undefined, {
- orphanedCount: orphanedOps.length,
- orphanedOps: orphanedOps.map(op => ({
- id: op.id,
- taskId: op.taskId,
- operation: op.operation,
- })),
- });
- }
- }
- }
-
- // Handle rejections (increment retry count)
- for (const rejected of response.rejected) {
- logger.debug('Operation rejected', { rejection: rejected });
- const op = pendingOps.find(o => o.taskId === rejected.taskId);
- if (op) {
- await queue.incrementRetry(op.id);
- }
- }
-
- // Handle conflicts - remove from queue since server has authoritative version
- // The server's version will be pulled in the pull phase
- if (response.conflicts.length > 0) {
- const conflictedQueueIds: string[] = [];
-
- for (const conflict of response.conflicts) {
- const queueIds = taskIdToQueueIds.get(conflict.taskId);
- if (queueIds) {
- conflictedQueueIds.push(...queueIds);
- }
- }
-
- if (conflictedQueueIds.length > 0) {
- logger.debug('Removing conflicted operations from queue', {
- conflictedCount: conflictedQueueIds.length,
- conflictedTaskIds: response.conflicts.map((c: { taskId: string }) => c.taskId),
- });
- await queue.dequeueBulk(conflictedQueueIds);
- }
+ await queue.dequeue(queueId);
+ return 'conflict';
}
- logger.debug('Push phase complete');
-
- // Return enriched response with operation details for notifications
- return {
- ...response,
- rejectedOps: response.rejected.map(r => {
- const op = pendingOps.find(o => o.taskId === r.taskId);
- return {
- ...r,
- operation: op?.operation,
- };
- }),
- conflictedOps: response.conflicts.map((c: { taskId: string }) => {
- const op = pendingOps.find(o => o.taskId === c.taskId);
- return {
- taskId: c.taskId,
- operation: op?.operation,
- reason: 'concurrent_edit',
- };
- }),
- };
+ await queue.dequeue(queueId);
+ logger.debug('Upsert operation accepted', { taskId, newVersion: result.newVersion });
+ return 'accepted';
}
diff --git a/lib/sync/errors.ts b/lib/sync/errors.ts
index 8e2dd18f..be139db9 100644
--- a/lib/sync/errors.ts
+++ b/lib/sync/errors.ts
@@ -54,7 +54,7 @@ export class SyncCryptoError extends SyncError {
}
/**
- * Conflict errors when vector clocks diverge (needs manual resolution)
+ * Conflict errors when concurrent edits diverge (needs resolution via LWW or manual)
*/
export class SyncConflictError extends SyncError {
constructor(message: string, public conflictingTasks?: unknown) {
diff --git a/lib/sync/health-monitor.ts b/lib/sync/health-monitor.ts
deleted file mode 100644
index 2ebc78b2..00000000
--- a/lib/sync/health-monitor.ts
+++ /dev/null
@@ -1,273 +0,0 @@
-/**
- * Health Monitor - periodically checks sync health and triggers corrective actions
- * Detects stale operations, token expiration, and server connectivity issues
- */
-
-import { getSyncQueue } from './queue';
-import { getTokenManager } from './token-manager';
-import { getApiClient } from './api-client';
-import { getDb } from '@/lib/db';
-import type { SyncConfig } from './types';
-
-const HEALTH_CHECK_INTERVAL_MS = 5 * 60 * 1000; // 5 minutes
-const STALE_OPERATION_THRESHOLD_MS = 60 * 60 * 1000; // 1 hour
-
-export interface HealthIssue {
- type: 'stale_queue' | 'token_expired' | 'server_unreachable';
- severity: 'warning' | 'error';
- message: string;
- suggestedAction: string;
-}
-
-export interface HealthReport {
- healthy: boolean;
- issues: HealthIssue[];
- timestamp: number;
-}
-
-export class HealthMonitor {
- private intervalId: NodeJS.Timeout | null = null;
- private isRunning = false;
-
- /**
- * Start periodic health checks (5-minute interval)
- */
- start(): void {
- if (this.isRunning) {
- console.log('[HEALTH] Health monitor already running');
- return;
- }
-
- console.log('[HEALTH] Starting health monitor with 5-minute interval');
- this.isRunning = true;
-
- // Run initial check immediately
- this.check().catch(error => {
- console.error('[HEALTH] Initial health check failed:', error);
- });
-
- // Schedule periodic checks
- this.intervalId = setInterval(() => {
- this.check().catch(error => {
- console.error('[HEALTH] Periodic health check failed:', error);
- });
- }, HEALTH_CHECK_INTERVAL_MS);
- }
-
- /**
- * Stop health checks and clear interval
- */
- stop(): void {
- if (!this.isRunning) {
- console.log('[HEALTH] Health monitor not running');
- return;
- }
-
- console.log('[HEALTH] Stopping health monitor');
- this.isRunning = false;
-
- if (this.intervalId) {
- clearInterval(this.intervalId);
- this.intervalId = null;
- }
- }
-
- /**
- * Run immediate health check and return report
- */
- async check(): Promise<HealthReport> {
- console.log('[HEALTH] Running health check...');
- const timestamp = Date.now();
-
- try {
- const config = await this.getSyncConfig();
- if (!config || !config.enabled) {
- console.log('[HEALTH] Sync not enabled, skipping health check');
- return { healthy: true, issues: [], timestamp };
- }
-
- const issues = await this.runAllHealthChecks(config);
- this.logHealthCheckResults(issues);
-
- return { healthy: issues.length === 0, issues, timestamp };
- } catch (error) {
- console.error('[HEALTH] Health check error:', error);
- return this.createErrorReport(timestamp);
- }
- }
-
- /** Run all health checks and collect issues */
- private async runAllHealthChecks(config: SyncConfig): Promise<HealthIssue[]> {
- const issues: HealthIssue[] = [];
- const staleIssue = await this.checkStaleOperations();
- const tokenIssue = await this.checkTokenExpiration();
- const connectivityIssue = await this.checkServerConnectivity(config);
-
- if (staleIssue) issues.push(staleIssue);
- if (tokenIssue) issues.push(tokenIssue);
- if (connectivityIssue) issues.push(connectivityIssue);
-
- return issues;
- }
-
- /** Log health check results */
- private logHealthCheckResults(issues: HealthIssue[]): void {
- console.log('[HEALTH] Health check complete:', {
- healthy: issues.length === 0,
- issuesFound: issues.length,
- issues: issues.map(i => ({ type: i.type, severity: i.severity })),
- });
- }
-
- /** Create error report for failed health check */
- private createErrorReport(timestamp: number): HealthReport {
- return {
- healthy: false,
- issues: [{
- type: 'server_unreachable',
- severity: 'error',
- message: 'Health check failed',
- suggestedAction: 'Check your internet connection and try again',
- }],
- timestamp,
- };
- }
-
- /**
- * Check for stale queue operations (>1 hour old)
- */
- private async checkStaleOperations(): Promise<HealthIssue | null> {
- const queue = getSyncQueue();
- const pendingOps = await queue.getPending();
-
- if (pendingOps.length === 0) {
- return null;
- }
-
- const now = Date.now();
- const staleOps = pendingOps.filter(op => {
- const age = now - op.timestamp;
- return age > STALE_OPERATION_THRESHOLD_MS;
- });
-
- if (staleOps.length === 0) {
- return null;
- }
-
- console.log('[HEALTH] Found stale operations:', {
- total: pendingOps.length,
- stale: staleOps.length,
- oldestAge: Math.max(...staleOps.map(op => now - op.timestamp)),
- });
-
- return {
- type: 'stale_queue',
- severity: 'warning',
- message: `${staleOps.length} pending operations are older than 1 hour`,
- suggestedAction: 'Try syncing manually to clear pending operations',
- };
- }
-
- /** Create expired token issue */
- private createExpiredTokenIssue(): HealthIssue {
- return {
- type: 'token_expired',
- severity: 'error',
- message: 'Authentication token has expired',
- suggestedAction: 'Sign in again to continue syncing',
- };
- }
-
- /** Create token refresh failed issue */
- private createRefreshFailedIssue(): HealthIssue {
- return {
- type: 'token_expired',
- severity: 'warning',
- message: 'Authentication token is expiring soon and refresh failed',
- suggestedAction: 'Sign in again to continue syncing',
- };
- }
-
- /**
- * Check token expiration and attempt refresh if needed
- */
- private async checkTokenExpiration(): Promise<HealthIssue | null> {
- const tokenManager = getTokenManager();
- const timeUntilExpiry = await tokenManager.getTimeUntilExpiry();
-
- if (timeUntilExpiry < 0) {
- console.log('[HEALTH] Token has expired');
- return this.createExpiredTokenIssue();
- }
-
- const needsRefresh = await tokenManager.needsRefresh();
- if (!needsRefresh) return null;
-
- console.log('[HEALTH] Token needs refresh, attempting automatic refresh...');
- const refreshed = await tokenManager.ensureValidToken();
-
- if (!refreshed) {
- console.log('[HEALTH] Token refresh failed');
- return this.createRefreshFailedIssue();
- }
-
- console.log('[HEALTH] Token refreshed successfully');
- return null;
- }
-
- /**
- * Check server connectivity with lightweight ping
- */
- private async checkServerConnectivity(config: SyncConfig): Promise<HealthIssue | null> {
- try {
- const api = getApiClient(config.serverUrl);
- api.setToken(config.token);
-
- // Use the status endpoint as a lightweight health check
- await api.getStatus();
-
- console.log('[HEALTH] Server connectivity OK');
- return null;
- } catch (error) {
- console.error('[HEALTH] Server connectivity check failed:', error);
-
- const errorMessage = error instanceof Error ? error.message : 'Unknown error';
-
- return {
- type: 'server_unreachable',
- severity: 'error',
- message: `Cannot reach sync server: ${errorMessage}`,
- suggestedAction: 'Check your internet connection and try again',
- };
- }
- }
-
- /**
- * Get sync configuration from IndexedDB
- */
- private async getSyncConfig(): Promise<SyncConfig | null> {
- const db = getDb();
- const config = await db.syncMetadata.get('sync_config');
- return config as SyncConfig | null;
- }
-
- /**
- * Check if health monitor is running
- */
- isActive(): boolean {
- return this.isRunning;
- }
-}
-
-// Singleton instance
-let healthMonitorInstance: HealthMonitor | null = null;
-
-/**
- * Get or create health monitor instance
- */
-export function getHealthMonitor(): HealthMonitor {
- if (!healthMonitorInstance) {
- healthMonitorInstance = new HealthMonitor();
- }
- return healthMonitorInstance;
-}
diff --git a/lib/sync/oauth-handshake.ts b/lib/sync/oauth-handshake.ts
deleted file mode 100644
index 1ca9507b..00000000
--- a/lib/sync/oauth-handshake.ts
+++ /dev/null
@@ -1,30 +0,0 @@
-"use client";
-
-/**
- * OAuth handshake utilities - Re-export layer
- *
- * This file provides backward compatibility for existing imports.
- * The actual implementation has been modularized into:
- * - oauth-handshake/types.ts - Type definitions
- * - oauth-handshake/state.ts - State management
- * - oauth-handshake/fetcher.ts - OAuth result fetching
- * - oauth-handshake/broadcaster.ts - Multi-channel broadcasting
- * - oauth-handshake/initializer.ts - Module initialization
- * - oauth-handshake/subscriber.ts - Event subscription
- *
- * @see lib/sync/oauth-handshake/index.ts for the main module entry point
- */
-
-export type {
- OAuthAuthData,
- OAuthHandshakeSuccess,
- OAuthHandshakeError,
- OAuthHandshakeEvent,
-} from './oauth-handshake/types';
-
-export {
- subscribeToOAuthHandshake,
- retryOAuthHandshake,
-} from './oauth-handshake/subscriber';
-
-export { announceOAuthState } from './oauth-handshake/broadcaster';
diff --git a/lib/sync/oauth-handshake/broadcaster.ts b/lib/sync/oauth-handshake/broadcaster.ts
deleted file mode 100644
index 5c546d95..00000000
--- a/lib/sync/oauth-handshake/broadcaster.ts
+++ /dev/null
@@ -1,113 +0,0 @@
-/**
- * OAuth state broadcasting
- * Handles multi-channel communication for OAuth state
- */
-
-import type { BroadcastPayload } from './types';
-import {
- STORAGE_KEY,
- RESULT_KEY,
- storage,
- processedStates,
- notifyListeners,
- getBroadcastChannel,
-} from './state';
-import { fetchOAuthResult } from './fetcher';
-import { createLogger } from '@/lib/logger';
-
-const logger = createLogger('OAUTH');
-
-/**
- * Broadcast OAuth state to all channels (used by OAuth callback page)
- */
-export function announceOAuthState(state: string, success: boolean, error?: string): void {
- if (typeof window === 'undefined') {
- return;
- }
-
- (async () => {
- const payload: BroadcastPayload = {
- type: 'oauth_handshake',
- state,
- success,
- error: error || null,
- timestamp: Date.now(),
- };
-
- const result = await fetchOAuthResult(state);
- processedStates.add(state);
-
- payload.result = result;
-
- // Store result in sessionStorage for same-tab recovery
- storeResult(result);
-
- // Broadcast via BroadcastChannel for cross-tab communication
- broadcastViaChannel(payload);
-
- // Post to opener window for popup flows
- postToOpener(payload);
-
- // Store payload for storage event listeners
- storePayload(payload);
-
- notifyListeners(result);
- })().catch((err) => {
- logger.error('Failed to broadcast handshake result', err instanceof Error ? err : new Error(String(err)));
- // Notify listeners so they can show error feedback to user
- notifyListeners({
- status: 'error',
- state,
- error: 'Failed to complete OAuth handshake. Please try again.',
- });
- });
-}
-
-/**
- * Store OAuth result in sessionStorage
- */
-function storeResult(result: unknown): void {
- try {
- storage?.setItem(RESULT_KEY, JSON.stringify(result));
- } catch (err) {
- logger.warn('Failed to write handshake result to storage', { error: String(err) });
- }
-}
-
-/**
- * Broadcast via BroadcastChannel
- */
-function broadcastViaChannel(payload: BroadcastPayload): void {
- try {
- const channel = getBroadcastChannel();
- if (channel) {
- channel.postMessage(payload);
- }
- } catch (err) {
- logger.warn('Failed to post via BroadcastChannel', { error: String(err) });
- }
-}
-
-/**
- * Post message to opener window (for popup flows)
- */
-function postToOpener(payload: BroadcastPayload): void {
- if (window.opener && typeof window.opener.postMessage === 'function') {
- try {
- window.opener.postMessage(payload, window.location.origin);
- } catch (err) {
- logger.warn('Failed to postMessage to opener', { error: String(err) });
- }
- }
-}
-
-/**
- * Store payload in sessionStorage for storage event listeners
- */
-function storePayload(payload: BroadcastPayload): void {
- try {
- storage?.setItem(STORAGE_KEY, JSON.stringify(payload));
- } catch (err) {
- logger.warn('Failed to write sessionStorage payload', { error: String(err) });
- }
-}
diff --git a/lib/sync/oauth-handshake/fetcher.ts b/lib/sync/oauth-handshake/fetcher.ts
deleted file mode 100644
index 57c85a61..00000000
--- a/lib/sync/oauth-handshake/fetcher.ts
+++ /dev/null
@@ -1,142 +0,0 @@
-/**
- * OAuth result fetching
- * Handles fetching OAuth results from the worker API
- */
-
-import { ENV_CONFIG } from '@/lib/env-config';
-import { HTTP_STATUS } from '@/lib/constants/ui';
-import type { OAuthHandshakeEvent, OAuthAuthData } from './types';
-import { processedStates, pendingFetches, notifyListeners } from './state';
-import { createLogger } from '@/lib/logger';
-
-const logger = createLogger('OAUTH');
-
-/**
- * Fetch OAuth result from worker API
- */
-export async function fetchOAuthResult(state: string): Promise<OAuthHandshakeEvent> {
- try {
- const workerUrl = ENV_CONFIG.apiBaseUrl;
-
- logger.info('Fetching result from worker', {
- state: state.substring(0, 8) + '...',
- workerUrl,
- });
-
- const response = await fetch(`${workerUrl}/api/auth/oauth/result?state=${encodeURIComponent(state)}`, {
- method: 'GET',
- headers: {
- Accept: 'application/json',
- },
- credentials: 'include',
- });
-
- const data = await response.json().catch(() => ({}));
-
- if (!response.ok) {
- return handleFetchError(state, response.status, data);
- }
-
- if (data.status === 'success' && data.authData) {
- logger.info('Result received', {
- state: state.substring(0, 8) + '...',
- });
-
- return {
- status: 'success',
- state,
- authData: data.authData as OAuthAuthData,
- };
- }
-
- return createErrorResult(state, data);
- } catch (error) {
- logger.error('Fetch threw error', error instanceof Error ? error : new Error(String(error)));
- return {
- status: 'error',
- state,
- error: error instanceof Error ? error.message : 'Network error while completing OAuth.',
- };
- }
-}
-
-/**
- * Handle HTTP error responses
- */
-function handleFetchError(
- state: string,
- status: number,
- data: Record<string, unknown>
-): OAuthHandshakeEvent {
- logger.warn('Fetch failed', {
- state: state.substring(0, 8) + '...',
- status,
- body: data,
- });
-
- const message =
- (data && (data.message as string)) ||
- (status === HTTP_STATUS.GONE ? 'OAuth result expired. Please try again.' : 'Failed to complete OAuth.');
-
- return {
- status: 'error',
- state,
- error: message,
- };
-}
-
-/**
- * Create error result from response data
- */
-function createErrorResult(
- state: string,
- data: Record<string, unknown>
-): OAuthHandshakeEvent {
- const errorMessage =
- (data && (data.error as string)) ||
- (data && (data.message as string)) ||
- 'OAuth failed. Please try again.';
-
- return {
- status: 'error',
- state,
- error: errorMessage,
- };
-}
-
-/**
- * Initiate OAuth result fetch if not already in progress
- */
-export function initiateHandshakeFetch(
- state: string,
- initialError?: string
-): void {
- if (processedStates.has(state) || pendingFetches.has(state)) {
- return;
- }
-
- const fetchPromise = (async () => {
- const result = await fetchOAuthResult(state);
-
- // If the worker result expired and we received an initial error message, surface it
- if (result.status === 'error' && initialError && !result.error) {
- result.error = initialError;
- }
-
- processedStates.add(state);
- notifyListeners(result);
- })()
- .catch((error) => {
- processedStates.add(state);
- notifyListeners({
- status: 'error',
- state,
- error: error instanceof Error ? error.message : 'OAuth handshake failed.',
- });
- })
- .finally(() => {
- pendingFetches.delete(state);
- });
-
- pendingFetches.set(state, fetchPromise);
-}
diff --git a/lib/sync/oauth-handshake/index.ts b/lib/sync/oauth-handshake/index.ts
deleted file mode 100644
index 5e9d7083..00000000
--- a/lib/sync/oauth-handshake/index.ts
+++ /dev/null
@@ -1,28 +0,0 @@
-/**
- * OAuth handshake module
- *
- * Unifies popup and redirect OAuth flows by broadcasting the OAuth state token
- * and fetching the authenticated session data from the worker.
- *
- * Module structure:
- * - types.ts: Type definitions
- * - state.ts: State management (listeners, processed states)
- * - fetcher.ts: OAuth result fetching from worker API
- * - broadcaster.ts: Multi-channel broadcasting for cross-tab/popup communication
- * - initializer.ts: Module initialization and event listener setup
- * - subscriber.ts: Public subscription API
- */
-
-"use client";
-
-// Re-export types
-export type {
- OAuthAuthData,
- OAuthHandshakeSuccess,
- OAuthHandshakeError,
- OAuthHandshakeEvent,
-} from './types';
-
-// Re-export public API
-export { subscribeToOAuthHandshake, retryOAuthHandshake } from './subscriber';
-export { announceOAuthState } from './broadcaster';
diff --git a/lib/sync/oauth-handshake/initializer.ts b/lib/sync/oauth-handshake/initializer.ts
deleted file mode 100644
index b4f0eded..00000000
--- a/lib/sync/oauth-handshake/initializer.ts
+++ /dev/null
@@ -1,207 +0,0 @@
-/**
- * OAuth handshake initialization
- * Handles module setup, event listeners, and existing result recovery
- */
-
-import { OAUTH_CONFIG } from '@/lib/constants/sync';
-import type { BroadcastPayload, OAuthHandshakeEvent } from './types';
-import {
- STORAGE_KEY,
- RESULT_KEY,
- CHANNEL_NAME,
- storage,
- safeLocalStorage,
- processedStates,
- notifyListeners,
- isInitialized,
- setInitialized,
- setBroadcastChannel,
-} from './state';
-import { initiateHandshakeFetch } from './fetcher';
-import { createLogger } from '@/lib/logger';
-
-const logger = createLogger('OAUTH');
-
-/**
- * Ensure the OAuth handshake module is initialized
- * Sets up event listeners and checks for existing results
- */
-export function ensureInitialized(): void {
- if (isInitialized() || typeof window === 'undefined') return;
- setInitialized();
-
- initBroadcastChannel();
- setupEventListeners();
- recoverExistingResult();
-}
-
-/**
- * Initialize BroadcastChannel for cross-tab communication
- */
-function initBroadcastChannel(): void {
- if ('BroadcastChannel' in window) {
- try {
- const channel = new BroadcastChannel(CHANNEL_NAME);
- channel.addEventListener('message', (event) => {
- handleBroadcastPayload(event.data as BroadcastPayload);
- });
- setBroadcastChannel(channel);
- } catch (error) {
- logger.warn('Failed to initialize BroadcastChannel', { error: String(error) });
- setBroadcastChannel(null);
- }
- }
-}
-
-/**
- * Set up window event listeners
- */
-function setupEventListeners(): void {
- window.addEventListener('message', handleMessageEvent);
- window.addEventListener('storage', handleStorageEvent);
-}
-
-/**
- * Handle postMessage events
- * Validates origin to prevent cross-origin message injection attacks (CWE-346)
- */
-function handleMessageEvent(event: MessageEvent): void {
- // Validate origin - only accept messages from same origin
- // OAuth callback page (public/oauth-callback.html) is hosted on same origin
- if (event.origin !== window.location.origin) {
- return;
- }
-
- if (!event.data || event.data.type !== 'oauth_handshake') return;
- handleBroadcastPayload(event.data as BroadcastPayload);
-}
-
-/**
- * Handle storage events (cross-tab communication fallback)
- */
-function handleStorageEvent(event: StorageEvent): void {
- if (!event.newValue) return;
-
- try {
- if (event.key === STORAGE_KEY) {
- const payload = JSON.parse(event.newValue) as BroadcastPayload;
- handleBroadcastPayload(payload);
- } else if (event.key === RESULT_KEY) {
- const result = JSON.parse(event.newValue) as OAuthHandshakeEvent;
- if (!processedStates.has(result.state)) {
- processedStates.add(result.state);
- notifyListeners(result);
- }
- }
- } catch (error) {
- logger.warn('Failed to parse storage payload', { error: String(error) });
- }
-}
-
-/**
- * Handle incoming broadcast payload
- */
-export function handleBroadcastPayload(payload: BroadcastPayload | null): void {
- if (!payload) return;
-
- // Handle error-only messages (e.g., when popup redirects to main app on OAuth failure)
- // These messages have success=false and error but may not have a state
- if (!payload.state && payload.success === false && payload.error) {
- logger.info('Error-only broadcast received', {
- error: payload.error,
- });
- // Notify listeners with a generic error event using a placeholder state
- notifyListeners({
- status: 'error',
- state: '__error_only__',
- error: payload.error,
- });
- return;
- }
-
- // Regular state-based messages require a valid state
- if (!payload.state) return;
-
- // Ignore duplicate notifications
- if (processedStates.has(payload.state)) {
- logger.debug('Duplicate state ignored', { state: payload.state.substring(0, 8) + '...' });
- return;
- }
-
- logger.info('Broadcast received', {
- state: payload.state.substring(0, 8) + '...',
- success: payload.success,
- });
-
- if (payload.result && !processedStates.has(payload.state)) {
- processedStates.add(payload.state);
- notifyListeners(payload.result);
- return;
- }
-
- initiateHandshakeFetch(payload.state, payload.error ?? undefined);
-}
-
-/**
- * Check for existing OAuth result in storage
- * Critical for PWA standalone mode where result may already be stored
- */
-function recoverExistingResult(): void {
- try {
- // Try sessionStorage first (preferred for security)
- let existingResult = storage?.getItem(RESULT_KEY);
- let storageSource = 'sessionStorage';
-
- // Fall back to localStorage if not found in sessionStorage
- if (!existingResult) {
- existingResult = safeLocalStorage?.getItem(RESULT_KEY) ?? null;
- storageSource = 'localStorage';
- }
-
- if (existingResult) {
- processExistingResult(existingResult, storageSource);
- }
- } catch (error) {
- logger.warn('Failed to check for existing result', { error: String(error) });
- }
-}
-
-/**
- * Process an existing OAuth result from storage
- */
-function processExistingResult(existingResult: string, storageSource: string): void {
- logger.info(`Found existing result in ${storageSource} on init`);
- const result = JSON.parse(existingResult) as OAuthHandshakeEvent;
-
- // Only process if we haven't already processed this state
- if (!processedStates.has(result.state)) {
- logger.info('Processing existing result', {
- state: result.state.substring(0, 8) + '...',
- status: result.status,
- source: storageSource,
- });
- processedStates.add(result.state);
-
- clearStoredResults();
-
- // Delay slightly to allow subscribers to register first
- setTimeout(() => {
- notifyListeners(result);
- }, OAUTH_CONFIG.LISTENER_REGISTRATION_DELAY_MS);
- }
-}
-
-/**
- * Clear OAuth results from both storage locations
- */
-function clearStoredResults(): void {
- try {
- storage?.removeItem(RESULT_KEY);
- storage?.removeItem(STORAGE_KEY);
- safeLocalStorage?.removeItem(RESULT_KEY);
- safeLocalStorage?.removeItem(STORAGE_KEY);
- logger.info('Cleared result from both storage locations');
- } catch (e) {
- logger.warn('Failed to clear processed result from storage', { error: String(e) });
- }
-}
diff --git a/lib/sync/oauth-handshake/state.ts b/lib/sync/oauth-handshake/state.ts
deleted file mode 100644
index e2dfa1be..00000000
--- a/lib/sync/oauth-handshake/state.ts
+++ /dev/null
@@ -1,90 +0,0 @@
-/**
- * OAuth handshake state management
- * Manages listeners, processed states, and pending fetches
- */
-
-import type { OAuthHandshakeEvent } from './types';
-import { createLogger } from '@/lib/logger';
-
-const logger = createLogger('OAUTH');
-
-/** Storage key for OAuth handshake state */
-export const STORAGE_KEY = 'oauth_handshake_state';
-
-/** Storage key for OAuth result */
-export const RESULT_KEY = 'oauth_handshake_result';
-
-/** BroadcastChannel name */
-export const CHANNEL_NAME = 'oauth-handshake';
-
-/** Registered event listeners */
-export const listeners = new Set<(event: OAuthHandshakeEvent) => void>();
-
-/** States that have already been processed (prevents duplicates) */
-export const processedStates = new Set<string>();
-
-/** Pending fetch promises by state */
-export const pendingFetches = new Map<string, Promise<void>>();
-
-/** Whether the module has been initialized */
-let _isInitialized = false;
-
-/** BroadcastChannel instance */
-let _broadcastChannel: BroadcastChannel | null = null;
-
-/** Storage instance (sessionStorage for security) */
-export const storage = typeof window !== 'undefined' ? sessionStorage : null;
-
-/**
- * Safe localStorage reference
- * In Safari private browsing or restricted iframes, accessing localStorage can throw
- */
-export const safeLocalStorage: Storage | null = (() => {
- if (typeof window === 'undefined') return null;
- try {
- return window.localStorage;
- } catch {
- return null;
- }
-})();
-
-/**
- * Get initialization state
- */
-export function isInitialized(): boolean {
- return _isInitialized;
-}
-
-/**
- * Mark as initialized
- */
-export function setInitialized(): void {
- _isInitialized = true;
-}
-
-/**
- * Get BroadcastChannel instance
- */
-export function getBroadcastChannel(): BroadcastChannel | null {
- return _broadcastChannel;
-}
-
-/**
- * Set BroadcastChannel instance
- */
-export function setBroadcastChannel(channel: BroadcastChannel | null): void {
- _broadcastChannel = channel;
-}
-
-/**
- * Notify all registered listeners of an OAuth event
- */
-export function notifyListeners(event: OAuthHandshakeEvent): void {
- for (const listener of listeners) {
- try {
- listener(event);
- } catch (error) {
- logger.error('Listener threw an error', error instanceof Error ? error : new Error(String(error)));
- }
- }
-}
diff --git a/lib/sync/oauth-handshake/subscriber.ts b/lib/sync/oauth-handshake/subscriber.ts
deleted file mode 100644
index 8b728d30..00000000
--- a/lib/sync/oauth-handshake/subscriber.ts
+++ /dev/null
@@ -1,60 +0,0 @@
-/**
- * OAuth handshake event subscription
- */
-
-import { toast } from 'sonner';
-import type { OAuthHandshakeEvent } from './types';
-import { listeners, processedStates } from './state';
-import { ensureInitialized } from './initializer';
-import { initiateHandshakeFetch } from './fetcher';
-import { createLogger } from '@/lib/logger';
-
-const logger = createLogger('OAUTH');
-
-/**
- * Subscribe to OAuth handshake events
- * @param listener Callback function to receive OAuth events
- * @returns Unsubscribe function
- */
-export function subscribeToOAuthHandshake(
- listener: (event: OAuthHandshakeEvent) => void
-): () => void {
- if (typeof window === 'undefined') {
- return () => {};
- }
-
- ensureInitialized();
- listeners.add(listener);
-
- return () => {
- listeners.delete(listener);
- };
-}
-
-/**
- * Retry OAuth handshake for a given state
- * Useful for manual retries or diagnostics
- */
-export async function retryOAuthHandshake(state: string): Promise<void> {
- if (processedStates.has(state)) {
- processedStates.delete(state);
- }
- initiateHandshakeFetch(state);
-}
-
-/**
- * Default listener that shows toast errors when no other listeners are registered
- * Ensures user gets feedback even if app forgot to subscribe
- */
-subscribeToOAuthHandshake((event) => {
- logger.info('Event delivered to default listener', {
- status: event.status,
- state: event.state.substring(0, 8) + '...',
- });
- // Only show toast if this is the only listener (the default one)
- if (listeners.size <= 1) {
- if (event.status === 'error') {
- toast.error(event.error);
- }
- }
-});
diff --git a/lib/sync/oauth-handshake/types.ts b/lib/sync/oauth-handshake/types.ts
deleted file mode 100644
index 19b7e931..00000000
--- a/lib/sync/oauth-handshake/types.ts
+++ /dev/null
@@ -1,37 +0,0 @@
-/**
- * OAuth handshake type definitions
- */
-
-export interface OAuthAuthData {
- userId: string;
- deviceId: string;
- email: string;
- token: string;
- expiresAt: number;
- requiresEncryptionSetup?: boolean;
- encryptionSalt?: string;
- provider: string;
-}
-
-export interface OAuthHandshakeSuccess {
- status: 'success';
- state: string;
- authData: OAuthAuthData;
-}
-
-export interface OAuthHandshakeError {
- status: 'error';
- state: string;
- error: string;
-}
-
-export type OAuthHandshakeEvent = OAuthHandshakeSuccess | OAuthHandshakeError;
-
-export interface BroadcastPayload {
- type: 'oauth_handshake';
- state: string;
- success: boolean;
- error?: string | null;
- timestamp: number;
- result?: OAuthHandshakeEvent;
-}
diff --git a/lib/sync/queue-optimizer.ts b/lib/sync/queue-optimizer.ts
index f6edb61f..22ee42ba 100644
--- a/lib/sync/queue-optimizer.ts
+++ b/lib/sync/queue-optimizer.ts
@@ -4,135 +4,119 @@
*/
import { getDb } from '@/lib/db';
-import { mergeVectorClocks } from './vector-clock';
import { createLogger } from '@/lib/logger';
const logger = createLogger('SYNC_QUEUE');
export class QueueOptimizer {
/**
- * Consolidate operations for a specific task
- * Merges multiple updates into single operation with latest payload
+ * Consolidate operations for a specific task.
+ * Merges multiple operations into a single operation with the latest payload.
*/
  async consolidateTask(taskId: string): Promise<void> {
const db = getDb();
-
- // Get all operations for this task, ordered by timestamp
+
const operations = await db.syncQueue
.where('taskId')
.equals(taskId)
.sortBy('timestamp');
-
+
if (operations.length <= 1) {
- // Nothing to consolidate
return;
}
-
- logger.debug('Consolidating operations for task', { taskId, operationCount: operations.length });
- // Check if there's a delete operation (it supersedes everything)
+ logger.debug('Consolidating operations for task', {
+ taskId,
+ operationCount: operations.length,
+ });
+
+ // Delete supersedes everything
const deleteOp = operations.find(op => op.operation === 'delete');
if (deleteOp) {
- // Delete supersedes all previous operations
- // Remove all operations except the delete
const idsToRemove = operations
.filter(op => op.id !== deleteOp.id)
.map(op => op.id);
if (idsToRemove.length > 0) {
- logger.debug('Delete operation found, removing superseded operations', { taskId, removedCount: idsToRemove.length });
+ logger.debug('Delete operation found, removing superseded operations', {
+ taskId,
+ removedCount: idsToRemove.length,
+ });
await db.syncQueue.bulkDelete(idsToRemove);
-
- // Update delete operation to track what was consolidated
+
await db.syncQueue.update(deleteOp.id, {
consolidatedFrom: [...(deleteOp.consolidatedFrom || []), ...idsToRemove],
});
}
-
+
return;
}
-
- // No delete operation - consolidate creates and updates
+
+ // No delete — consolidate creates and updates
const createOp = operations.find(op => op.operation === 'create');
const updateOps = operations.filter(op => op.operation === 'update');
-
+
if (createOp && updateOps.length > 0) {
- // Consolidate create + updates into single create with final state
- logger.debug('Consolidating create + updates into single create', { taskId, updateCount: updateOps.length });
-
- // Use the latest update's payload (most recent state)
- // updateOps are already sorted by timestamp from the query
+ // Collapse create + updates into a single create with the final state
+ logger.debug('Consolidating create + updates into single create', {
+ taskId,
+ updateCount: updateOps.length,
+ });
+
const latestUpdate = updateOps[updateOps.length - 1];
-
- // Merge all vector clocks
- let mergedClock = createOp.vectorClock;
- for (const update of updateOps) {
- mergedClock = mergeVectorClocks(mergedClock, update.vectorClock);
- }
-
- // Track all consolidated operation IDs
const consolidatedIds = updateOps.map(op => op.id);
-
- // Update the create operation with latest payload and merged clock
+
await db.syncQueue.update(createOp.id, {
payload: latestUpdate.payload,
- vectorClock: mergedClock,
- timestamp: latestUpdate.timestamp, // Use latest timestamp
+ timestamp: latestUpdate.timestamp,
consolidatedFrom: [...(createOp.consolidatedFrom || []), ...consolidatedIds],
});
-
- // Remove the update operations
+
await db.syncQueue.bulkDelete(consolidatedIds);
-
} else if (updateOps.length > 1) {
- // Multiple updates - consolidate into single update with latest payload
- logger.debug('Consolidating multiple updates into single update', { taskId, updateCount: updateOps.length });
-
- // Keep the first update, merge others into it
+ // Multiple updates — keep the first, merge others into it
+ logger.debug('Consolidating multiple updates into single update', {
+ taskId,
+ updateCount: updateOps.length,
+ });
+
const firstUpdate = updateOps[0];
const laterUpdates = updateOps.slice(1);
const latestUpdate = updateOps[updateOps.length - 1];
-
- // Track all consolidated operation IDs
const consolidatedIds = laterUpdates.map(op => op.id);
-
- // Update the first operation with latest payload and vector clock
+
await db.syncQueue.update(firstUpdate.id, {
payload: latestUpdate.payload,
- vectorClock: latestUpdate.vectorClock, // Use latest vector clock
- timestamp: latestUpdate.timestamp, // Use latest timestamp
+ timestamp: latestUpdate.timestamp,
consolidatedFrom: [...(firstUpdate.consolidatedFrom || []), ...consolidatedIds],
});
-
- // Remove the later update operations
+
await db.syncQueue.bulkDelete(consolidatedIds);
}
}
-
+
/**
- * Consolidate all pending operations in the queue
- * Returns the number of operations removed
+ * Consolidate all pending operations in the queue.
+ * @returns Number of operations removed
*/
  async consolidateAll(): Promise<number> {
const db = getDb();
-
- // Get count before consolidation
+
const countBefore = await db.syncQueue.count();
-
+
if (countBefore === 0) {
return 0;
}
-
+
logger.debug('Starting queue consolidation', { operationCount: countBefore });
- // Get all unique task IDs in the queue
const allOperations = await db.syncQueue.toArray();
const taskIds = [...new Set(allOperations.map(op => op.taskId))];
logger.debug('Found unique tasks in queue', { taskCount: taskIds.length });
- // Log operations per task to detect duplicates
+ // Log tasks with multiple operations (potential consolidation targets)
    const taskCounts = new Map<string, number>();
for (const op of allOperations) {
taskCounts.set(op.taskId, (taskCounts.get(op.taskId) || 0) + 1);
@@ -142,68 +126,67 @@ export class QueueOptimizer {
if (duplicateTasks.length > 0) {
logger.debug('Tasks with multiple operations found', {
duplicateTaskCount: duplicateTasks.length,
- details: duplicateTasks.map(([taskId, count]) => ({ taskId, count }))
+ details: duplicateTasks.map(([taskId, count]) => ({ taskId, count })),
});
}
-
- // Consolidate operations for each task
+
for (const taskId of taskIds) {
await this.consolidateTask(taskId);
}
-
- // Get count after consolidation
+
const countAfter = await db.syncQueue.count();
const removed = countBefore - countAfter;
logger.info('Queue consolidation complete', { removed, countBefore, countAfter });
- // Verify no duplicate taskIds remain
+ // Verify no duplicate taskIds remain after consolidation
const afterOps = await db.syncQueue.toArray();
const afterTaskIds = afterOps.map(op => op.taskId);
- const afterDuplicates = afterTaskIds.filter((id, index) => afterTaskIds.indexOf(id) !== index);
+ const afterDuplicates = afterTaskIds.filter(
+ (id, index) => afterTaskIds.indexOf(id) !== index
+ );
if (afterDuplicates.length > 0) {
logger.error('Duplicate taskIds still exist after consolidation', undefined, {
- duplicates: [...new Set(afterDuplicates)]
+ duplicates: [...new Set(afterDuplicates)],
});
}
-
+
return removed;
}
-
+
/**
- * Remove operations superseded by a delete operation
- * This is called when a task is deleted to clean up any pending operations
+ * Remove operations superseded by a delete operation.
+ * Called when a task is deleted to clean up any pending operations.
*/
  async pruneDeleted(taskId: string): Promise<void> {
const db = getDb();
-
- // Get all operations for this task
+
const operations = await db.syncQueue
.where('taskId')
.equals(taskId)
.toArray();
-
+
if (operations.length === 0) {
return;
}
-
- // Check if there's a delete operation
+
const deleteOp = operations.find(op => op.operation === 'delete');
-
+
if (!deleteOp) {
return;
}
-
- // Remove all operations except the delete
+
const idsToRemove = operations
.filter(op => op.id !== deleteOp.id)
.map(op => op.id);
-
+
if (idsToRemove.length > 0) {
- logger.debug('Pruning operations superseded by delete', { taskId, prunedCount: idsToRemove.length });
+ logger.debug('Pruning operations superseded by delete', {
+ taskId,
+ prunedCount: idsToRemove.length,
+ });
await db.syncQueue.bulkDelete(idsToRemove);
-
- // Update delete operation to track what was pruned
+
await db.syncQueue.update(deleteOp.id, {
consolidatedFrom: [...(deleteOp.consolidatedFrom || []), ...idsToRemove],
});
diff --git a/lib/sync/queue.ts b/lib/sync/queue.ts
index a983043e..052b286f 100644
--- a/lib/sync/queue.ts
+++ b/lib/sync/queue.ts
@@ -5,7 +5,7 @@
import { getDb } from '@/lib/db';
import type { TaskRecord } from '@/lib/types';
-import type { SyncQueueItem, VectorClock } from './types';
+import type { SyncQueueItem } from './types';
import { generateId } from '@/lib/id-generator';
export class SyncQueue {
@@ -16,7 +16,6 @@ export class SyncQueue {
operation: 'create' | 'update' | 'delete',
taskId: string,
payload: TaskRecord | null,
- vectorClock: VectorClock
  ): Promise<void> {
const db = getDb();
@@ -27,7 +26,6 @@ export class SyncQueue {
timestamp: Date.now(),
retryCount: 0,
payload,
- vectorClock,
};
await db.syncQueue.add(item);
@@ -115,7 +113,7 @@ export class SyncQueue {
// Check if this task is already in the queue
const existing = await this.getForTask(task.id);
if (existing.length === 0) {
- await this.enqueue('create', task.id, task, task.vectorClock || {});
+ await this.enqueue('create', task.id, task);
count++;
}
}
diff --git a/lib/sync/realtime-listener.ts b/lib/sync/realtime-listener.ts
new file mode 100644
index 00000000..83584f43
--- /dev/null
+++ b/lib/sync/realtime-listener.ts
@@ -0,0 +1,150 @@
+/**
+ * Supabase Realtime listener for instant cross-device sync
+ * Subscribes to encrypted_tasks changes and merges into local IndexedDB.
+ */
+
+"use client";
+
+import { getSupabaseClient } from '@/lib/supabase';
+import { getCryptoManager } from '@/lib/sync/crypto';
+import { getDb } from '@/lib/db';
+import { taskRecordSchema } from '@/lib/schema';
+import { createLogger } from '@/lib/logger';
+import type { RealtimeChannel } from '@supabase/supabase-js';
+import type { EncryptedTaskRow } from './types';
+
+const logger = createLogger('SYNC_REALTIME');
+
+export type RealtimeConnectionState = 'connecting' | 'connected' | 'disconnected' | 'reconnecting';
+
+type ConnectionStateListener = (state: RealtimeConnectionState) => void;
+
+let channel: RealtimeChannel | null = null;
+let currentUserId: string | null = null;
+let currentDeviceId: string | null = null;
+let connectionState: RealtimeConnectionState = 'disconnected';
+const stateListeners = new Set<ConnectionStateListener>();
+
+function setConnectionState(state: RealtimeConnectionState): void {
+ connectionState = state;
+ for (const listener of stateListeners) {
+ listener(state);
+ }
+}
+
+/** Subscribe to connection state changes */
+export function onConnectionStateChange(listener: ConnectionStateListener): () => void {
+ stateListeners.add(listener);
+ return () => stateListeners.delete(listener);
+}
+
+/** Get current connection state */
+export function getConnectionState(): RealtimeConnectionState {
+ return connectionState;
+}
+
+/** Start listening for real-time task changes */
+export function startRealtimeListener(userId: string, deviceId: string): void {
+ // Already listening for this user
+ if (channel && currentUserId === userId) return;
+
+ // Clean up existing subscription
+ stopRealtimeListener();
+
+ currentUserId = userId;
+ currentDeviceId = deviceId;
+ setConnectionState('connecting');
+
+ const supabase = getSupabaseClient();
+
+ channel = supabase
+ .channel(`sync:${userId}`)
+ .on(
+ 'postgres_changes',
+ {
+ event: 'INSERT',
+ schema: 'public',
+ table: 'encrypted_tasks',
+ filter: `user_id=eq.${userId}`,
+ },
+ (payload) => handleTaskChange(payload.new as EncryptedTaskRow)
+ )
+ .on(
+ 'postgres_changes',
+ {
+ event: 'UPDATE',
+ schema: 'public',
+ table: 'encrypted_tasks',
+ filter: `user_id=eq.${userId}`,
+ },
+ (payload) => handleTaskChange(payload.new as EncryptedTaskRow)
+ )
+ .subscribe((status) => {
+ if (status === 'SUBSCRIBED') {
+ setConnectionState('connected');
+ logger.info('Realtime subscription active');
+ } else if (status === 'CLOSED') {
+ setConnectionState('disconnected');
+ logger.info('Realtime subscription closed');
+ } else if (status === 'CHANNEL_ERROR') {
+ setConnectionState('reconnecting');
+ logger.warn('Realtime channel error, will attempt reconnect');
+ }
+ });
+}
+
+/** Stop listening for real-time changes */
+export function stopRealtimeListener(): void {
+ if (channel) {
+ const supabase = getSupabaseClient();
+ supabase.removeChannel(channel);
+ channel = null;
+ }
+ currentUserId = null;
+ currentDeviceId = null;
+ setConnectionState('disconnected');
+}
+
+/** Handle an incoming task change from Realtime */
+async function handleTaskChange(row: EncryptedTaskRow): Promise<void> {
+ try {
+ // Skip changes made by this device to avoid infinite loops
+ if (row.last_modified_device === currentDeviceId) return;
+
+ // Handle soft-deletes
+ if (row.deleted_at) {
+ const db = getDb();
+ await db.tasks.delete(row.id);
+ logger.info(`Realtime: deleted task ${row.id}`);
+ return;
+ }
+
+ // Decrypt the task
+ const crypto = getCryptoManager();
+ if (!crypto.isInitialized()) {
+ logger.warn('Realtime: crypto not initialized, skipping change');
+ return;
+ }
+
+ const decryptedJson = await crypto.decrypt(row.encrypted_blob, row.nonce);
+ const task = taskRecordSchema.parse(JSON.parse(decryptedJson));
+
+ // Apply LWW — only update if remote is newer
+ const db = getDb();
+ const existing = await db.tasks.get(row.id);
+
+ if (existing) {
+ const remoteTime = new Date(row.updated_at).getTime();
+ const localTime = new Date(existing.updatedAt).getTime();
+ if (remoteTime < localTime) {
+ logger.info(`Realtime: skipping older remote version of task ${row.id}`);
+ return;
+ }
+ }
+
+ await db.tasks.put(task);
+ logger.info(`Realtime: merged task ${row.id}`);
+ } catch (err) {
+ logger.error(`Realtime: failed to process change for task ${row.id}`, err as Error);
+ }
+}
diff --git a/lib/sync/supabase-sync-client.ts b/lib/sync/supabase-sync-client.ts
new file mode 100644
index 00000000..c5a6d231
--- /dev/null
+++ b/lib/sync/supabase-sync-client.ts
@@ -0,0 +1,221 @@
+/**
+ * Supabase sync client
+ * Wraps Supabase SDK for encrypted task sync operations.
+ * All task data is encrypted client-side before transmission.
+ */
+
+import { getSupabaseClient } from '@/lib/supabase';
+import type { EncryptedTaskRow, DeviceRow } from './types';
+
+/** Push an encrypted task (upsert with optimistic version locking) */
+export async function pushEncryptedTask(task: {
+ id: string;
+ userId: string;
+ encryptedBlob: string;
+ nonce: string;
+ checksum: string;
+ deviceId: string;
+ expectedVersion?: number;
+}): Promise<{ success: boolean; newVersion: number; conflict: boolean }> {
+ const supabase = getSupabaseClient();
+
+ // Check current version for optimistic locking
+ const { data: existing } = await supabase
+ .from('encrypted_tasks')
+ .select('version')
+ .eq('id', task.id)
+ .eq('user_id', task.userId)
+ .maybeSingle();
+
+ const currentVersion = existing?.version ?? 0;
+
+ // If caller expected a specific version and it doesn't match, conflict
+ if (task.expectedVersion !== undefined && existing && currentVersion !== task.expectedVersion) {
+ return { success: false, newVersion: currentVersion, conflict: true };
+ }
+
+ const newVersion = currentVersion + 1;
+
+ const { error } = await supabase
+ .from('encrypted_tasks')
+ .upsert({
+ id: task.id,
+ user_id: task.userId,
+ encrypted_blob: task.encryptedBlob,
+ nonce: task.nonce,
+ version: newVersion,
+ checksum: task.checksum,
+ last_modified_device: task.deviceId,
+ deleted_at: null, // Clear any soft-delete on re-push
+ }, {
+ onConflict: 'id,user_id',
+ });
+
+ if (error) throw new Error(`Failed to push task ${task.id}: ${error.message}`);
+
+ return { success: true, newVersion, conflict: false };
+}
+
+/** Soft-delete a task */
+export async function softDeleteTask(
+ taskId: string,
+ userId: string,
+ deviceId: string
+): Promise<void> {
+ const supabase = getSupabaseClient();
+
+ const { error } = await supabase
+ .from('encrypted_tasks')
+ .update({
+ deleted_at: new Date().toISOString(),
+ last_modified_device: deviceId,
+ })
+ .eq('id', taskId)
+ .eq('user_id', userId);
+
+ if (error) throw new Error(`Failed to delete task ${taskId}: ${error.message}`);
+}
+
+/** Pull tasks updated since a given timestamp */
+export async function pullTasksSince(
+ userId: string,
+ sinceTimestamp: string | null
+): Promise<EncryptedTaskRow[]> {
+ const supabase = getSupabaseClient();
+
+ let query = supabase
+ .from('encrypted_tasks')
+ .select('*')
+ .eq('user_id', userId)
+ .is('deleted_at', null)
+ .order('updated_at', { ascending: true });
+
+ if (sinceTimestamp) {
+ query = query.gt('updated_at', sinceTimestamp);
+ }
+
+ const { data, error } = await query;
+
+ if (error) throw new Error(`Failed to pull tasks: ${error.message}`);
+ return data ?? [];
+}
+
+/** Pull IDs of tasks soft-deleted since a given timestamp */
+export async function pullDeletedTaskIds(
+ userId: string,
+ sinceTimestamp: string | null
+): Promise<string[]> {
+ const supabase = getSupabaseClient();
+
+ let query = supabase
+ .from('encrypted_tasks')
+ .select('id')
+ .eq('user_id', userId)
+ .not('deleted_at', 'is', null);
+
+ if (sinceTimestamp) {
+ query = query.gt('updated_at', sinceTimestamp);
+ }
+
+ const { data, error } = await query;
+
+ if (error) throw new Error(`Failed to pull deleted task IDs: ${error.message}`);
+ return (data ?? []).map(row => row.id);
+}
+
+/** Get or update the user's encryption salt from their profile */
+export async function getEncryptionSalt(userId: string): Promise<string | null> {
+ const supabase = getSupabaseClient();
+
+ const { data, error } = await supabase
+ .from('profiles')
+ .select('encryption_salt')
+ .eq('id', userId)
+ .single();
+
+ if (error) throw new Error(`Failed to get encryption salt: ${error.message}`);
+ return data?.encryption_salt ?? null;
+}
+
+/** Store the user's encryption salt in their profile */
+export async function setEncryptionSalt(userId: string, salt: string): Promise<void> {
+ const supabase = getSupabaseClient();
+
+ const { error } = await supabase
+ .from('profiles')
+ .update({ encryption_salt: salt })
+ .eq('id', userId);
+
+ if (error) throw new Error(`Failed to set encryption salt: ${error.message}`);
+}
+
+/** Register or update a device */
+export async function upsertDevice(
+ deviceId: string,
+ userId: string,
+ deviceName: string
+): Promise<void> {
+ const supabase = getSupabaseClient();
+
+ const { error } = await supabase
+ .from('devices')
+ .upsert({
+ id: deviceId,
+ user_id: userId,
+ device_name: deviceName,
+ last_seen_at: new Date().toISOString(),
+ }, {
+ onConflict: 'id,user_id',
+ });
+
+ if (error) throw new Error(`Failed to upsert device: ${error.message}`);
+}
+
+/** List all devices for a user */
+export async function listDevices(userId: string): Promise<DeviceRow[]> {
+ const supabase = getSupabaseClient();
+
+ const { data, error } = await supabase
+ .from('devices')
+ .select('*')
+ .eq('user_id', userId)
+ .order('last_seen_at', { ascending: false });
+
+ if (error) throw new Error(`Failed to list devices: ${error.message}`);
+ return data ?? [];
+}
+
+/** Remove a device */
+export async function removeDevice(deviceId: string, userId: string): Promise<void> {
+ const supabase = getSupabaseClient();
+
+ const { error } = await supabase
+ .from('devices')
+ .delete()
+ .eq('id', deviceId)
+ .eq('user_id', userId);
+
+ if (error) throw new Error(`Failed to remove device: ${error.message}`);
+}
+
+/** Update sync metadata for the current device */
+export async function updateSyncMetadataRemote(
+ userId: string,
+ deviceId: string,
+ status: 'idle' | 'syncing' | 'error'
+): Promise<void> {
+ const supabase = getSupabaseClient();
+
+ const { error } = await supabase
+ .from('sync_metadata')
+ .upsert({
+ user_id: userId,
+ device_id: deviceId,
+ last_sync_at: new Date().toISOString(),
+ sync_status: status,
+ }, {
+ onConflict: 'user_id,device_id',
+ });
+
+ if (error) throw new Error(`Failed to update sync metadata: ${error.message}`);
+}
diff --git a/lib/sync/token-manager.ts b/lib/sync/token-manager.ts
deleted file mode 100644
index 63408834..00000000
--- a/lib/sync/token-manager.ts
+++ /dev/null
@@ -1,175 +0,0 @@
-/**
- * Token Manager - handles authentication token lifecycle and automatic refresh
- * Prevents sync failures due to token expiration
- */
-
-import { getDb } from '@/lib/db';
-import { getApiClient } from './api-client';
-import { createLogger } from '@/lib/logger';
-import type { SyncConfig } from './types';
-import { normalizeTokenExpiration } from './utils';
-import { TOKEN_CONFIG } from '@/lib/constants/sync';
-
-const logger = createLogger('SYNC_TOKEN');
-
-export class TokenManager {
- /**
- * Check if token needs refresh (within 5 minutes of expiry)
- */
-  async needsRefresh(): Promise<boolean> {
- const config = await this.getSyncConfig();
-
- if (!config || !config.enabled || !config.tokenExpiresAt) {
- return false;
- }
-
- const timeUntilExpiry = await this.getTimeUntilExpiry();
-
- // Need refresh if token expires within threshold or already expired
- return timeUntilExpiry <= TOKEN_CONFIG.REFRESH_THRESHOLD_MS;
- }
-
- /**
- * Ensure token is valid, refresh if needed
- * Returns true if token is valid or was successfully refreshed
- */
-  async ensureValidToken(): Promise<boolean> {
- const config = await this.getSyncConfig();
-
- if (!config || !config.enabled) {
- throw new Error('Sync not configured');
- }
-
- if (!config.token || !config.tokenExpiresAt) {
- throw new Error('No authentication token available');
- }
-
- // Check if refresh is needed
- const needsRefresh = await this.needsRefresh();
-
- if (!needsRefresh) {
- logger.debug('Token is valid, no refresh needed');
- return true;
- }
-
- logger.info('Token needs refresh, attempting refresh');
-
- // Attempt to refresh token
- try {
- const api = getApiClient(config.serverUrl);
- api.setToken(config.token);
-
- const response = await api.refreshToken();
-
- // Update stored token and expiration
- await this.updateTokenInConfig(response.token, response.expiresAt);
-
- logger.info('Token refreshed successfully', {
- expiresAt: new Date(response.expiresAt).toISOString(),
- });
-
- return true;
- } catch (error) {
- const refreshError = error instanceof Error ? error : new Error('Token refresh failed');
- logger.error('Token refresh failed', refreshError);
- return false;
- }
- }
-
- /**
- * Handle 401 Unauthorized errors with automatic token refresh and retry
- * Returns true if token was refreshed successfully
- */
-  async handleUnauthorized(): Promise<boolean> {
- logger.info('Handling 401 Unauthorized error');
-
- const config = await this.getSyncConfig();
-
- if (!config || !config.enabled || !config.token) {
- logger.error('Cannot refresh token: sync not configured');
- return false;
- }
-
- try {
- const api = getApiClient(config.serverUrl);
- api.setToken(config.token);
-
- const response = await api.refreshToken();
-
- // Update stored token and expiration
- await this.updateTokenInConfig(response.token, response.expiresAt);
-
- logger.info('Token refreshed after 401 error', {
- expiresAt: new Date(response.expiresAt).toISOString(),
- });
-
- return true;
- } catch (error) {
- const refreshError = error instanceof Error ? error : new Error('Token refresh failed after 401');
- logger.error('Token refresh failed after 401', refreshError);
- return false;
- }
- }
-
- /**
- * Get time until token expires (in milliseconds)
- * Returns negative value if already expired
- */
-  async getTimeUntilExpiry(): Promise<number> {
- const config = await this.getSyncConfig();
-
- if (!config || !config.tokenExpiresAt) {
- return -1;
- }
-
- return config.tokenExpiresAt - Date.now();
- }
-
- /**
- * Get sync configuration from IndexedDB
- */
-  private async getSyncConfig(): Promise<SyncConfig | null> {
- const db = getDb();
- const config = await db.syncMetadata.get('sync_config');
- return config as SyncConfig | null;
- }
-
- /**
- * Update token and expiration in sync config
- */
-  private async updateTokenInConfig(token: string, expiresAt: number): Promise<void> {
- const db = getDb();
- const config = await this.getSyncConfig();
-
- if (!config) {
- throw new Error('Sync config not found');
- }
-
- // Normalize token expiration to milliseconds (handles both seconds and milliseconds)
- const tokenExpiresAt = normalizeTokenExpiration(expiresAt);
-
- await db.syncMetadata.put({
- ...config,
- token,
- tokenExpiresAt,
- key: 'sync_config',
- });
-
- // Update token in API client
- const api = getApiClient(config.serverUrl);
- api.setToken(token);
- }
-}
-
-// Singleton instance
-let tokenManagerInstance: TokenManager | null = null;
-
-/**
- * Get or create token manager instance
- */
-export function getTokenManager(): TokenManager {
- if (!tokenManagerInstance) {
- tokenManagerInstance = new TokenManager();
- }
- return tokenManagerInstance;
-}
diff --git a/lib/sync/types.ts b/lib/sync/types.ts
index c01ae20e..e174659b 100644
--- a/lib/sync/types.ts
+++ b/lib/sync/types.ts
@@ -1,14 +1,9 @@
/**
- * Sync-specific types
+ * Sync-specific types (Supabase backend)
*/
import type { TaskRecord } from '@/lib/types';
-// Vector Clock for causality tracking
-export interface VectorClock {
- [deviceId: string]: number;
-}
-
// Sync configuration stored in IndexedDB
export interface SyncConfig {
key: 'sync_config';
@@ -17,12 +12,8 @@ export interface SyncConfig {
deviceId: string;
deviceName: string;
email: string | null;
- token: string | null;
- tokenExpiresAt: number | null;
lastSyncAt: number | null;
- vectorClock: VectorClock;
conflictStrategy: 'last_write_wins' | 'manual';
- serverUrl: string;
provider?: string | null;
// Retry tracking fields
consecutiveFailures: number;
@@ -51,7 +42,6 @@ export interface SyncQueueItem {
timestamp: number;
retryCount: number;
payload: TaskRecord | null;
- vectorClock: VectorClock;
consolidatedFrom?: string[]; // IDs of operations merged into this one
lastAttemptAt?: number; // Timestamp of last sync attempt
}
@@ -72,14 +62,17 @@ export interface EncryptionConfig {
};
}
-// Encrypted task blob for transmission
-export interface EncryptedTaskBlob {
+// Encrypted task row as stored in Supabase
+export interface EncryptedTaskRow {
id: string;
- encryptedBlob: string;
+ user_id: string;
+ encrypted_blob: string;
nonce: string;
version: number;
- vectorClock: VectorClock;
- updatedAt: number;
+ deleted_at: string | null;
+ created_at: string;
+ updated_at: string;
+ last_modified_device: string | null;
checksum: string;
}
@@ -88,8 +81,8 @@ export interface ConflictInfo {
taskId: string;
local: TaskRecord;
remote: TaskRecord;
- localClock: VectorClock;
- remoteClock: VectorClock;
+ localUpdatedAt: number;
+ remoteUpdatedAt: number;
}
// Sync result
@@ -103,59 +96,18 @@ export interface SyncResult {
error?: string;
}
-// API request/response types
-
-export interface PushRequest {
- deviceId: string;
- operations: SyncOperation[];
- clientVectorClock: VectorClock;
-}
-
-export interface SyncOperation {
- type: 'create' | 'update' | 'delete';
- taskId: string;
- encryptedBlob?: string;
- nonce?: string;
- vectorClock: VectorClock;
- checksum?: string;
-}
-
-export interface PushResponse {
- accepted: string[];
- rejected: RejectedOperation[];
- conflicts: ConflictInfo[];
- serverVectorClock: VectorClock;
-}
-
-export interface RejectedOperation {
- taskId: string;
- reason: 'version_mismatch' | 'conflict' | 'validation_error' | 'quota_exceeded';
- details: string;
-}
-
-export interface PullRequest {
- deviceId: string;
- lastVectorClock: VectorClock;
- sinceTimestamp?: number;
- limit?: number;
- cursor?: string;
-}
-
-export interface PullResponse {
- tasks: EncryptedTaskBlob[];
- deletedTaskIds: string[];
- serverVectorClock: VectorClock;
- conflicts: ConflictInfo[];
- hasMore: boolean;
- nextCursor?: string;
-}
-
-export interface SyncStatusResponse {
+// Supabase sync status (replaces SyncStatusResponse)
+export interface SyncStatusInfo {
lastSyncAt: number | null;
pendingPushCount: number;
- pendingPullCount: number;
- conflictCount: number;
deviceCount: number;
- storageUsed: number;
- storageQuota: number;
+}
+
+// Device row from Supabase
+export interface DeviceRow {
+ id: string;
+ user_id: string;
+ device_name: string;
+ last_seen_at: string;
+ created_at: string;
}
diff --git a/lib/sync/vector-clock.ts b/lib/sync/vector-clock.ts
deleted file mode 100644
index 491d0fba..00000000
--- a/lib/sync/vector-clock.ts
+++ /dev/null
@@ -1,112 +0,0 @@
-/**
- * Vector clock implementation for conflict detection
- * Tracks causality between distributed operations
- *
- * WHY VECTOR CLOCKS?
- * In distributed systems without a central clock, we need to determine the order
- * of events across multiple devices. Vector clocks solve this by tracking a counter
- * for each device (node) in the system.
- *
- * HOW IT WORKS:
- * - Each device maintains a vector (map) of counters: { deviceA: 3, deviceB: 5 }
- * - When a device makes a change, it increments its own counter
- * - When comparing two clocks, we can determine:
- * 1. If one happened-before the other (all counters ≤ and at least one <)
- * 2. If they're concurrent/conflicting (some counters higher, some lower)
- * 3. If they're identical (all counters equal)
- *
- * EXAMPLE:
- * Device A: { A: 3, B: 1 } and Device B: { A: 2, B: 5 }
- * → Neither "happened-before" the other → CONCURRENT (conflict!)
- *
- * This allows us to detect true conflicts vs. safe sequential updates in
- * a multi-device environment without requiring synchronized clocks.
- */
-
-import type { VectorClock } from './types';
-
-/**
- * Compare two vector clocks to determine causality
- *
- * Determines the relationship between two events in a distributed system:
- * - 'a_before_b': All of A's counters ≤ B's counters (safe to use B)
- * - 'b_before_a': All of B's counters ≤ A's counters (safe to use A)
- * - 'concurrent': Mixed counters (CONFLICT - needs resolution)
- * - 'identical': Exact same clocks (same event)
- */
-export function compareVectorClocks(
- a: VectorClock,
- b: VectorClock
-): 'concurrent' | 'a_before_b' | 'b_before_a' | 'identical' {
- const allDevices = new Set([...Object.keys(a), ...Object.keys(b)]);
-
- let aGreater = false;
- let bGreater = false;
-
- for (const device of allDevices) {
- const aVal = a[device] || 0;
- const bVal = b[device] || 0;
-
- if (aVal > bVal) aGreater = true;
- if (bVal > aVal) bGreater = true;
- }
-
- if (!aGreater && !bGreater) return 'identical';
- if (aGreater && !bGreater) return 'a_before_b';
- if (bGreater && !aGreater) return 'b_before_a';
- return 'concurrent'; // Conflict!
-}
-
-/**
- * Merge two vector clocks (take maximum for each device)
- */
-export function mergeVectorClocks(a: VectorClock, b: VectorClock): VectorClock {
- const result: VectorClock = { ...a };
-
- for (const [device, timestamp] of Object.entries(b)) {
- result[device] = Math.max(result[device] || 0, timestamp);
- }
-
- return result;
-}
-
-/**
- * Increment vector clock for a device
- */
-export function incrementVectorClock(
- clock: VectorClock,
- deviceId: string
-): VectorClock {
- return {
- ...clock,
- [deviceId]: (clock[deviceId] || 0) + 1,
- };
-}
-
-/**
- * Check if clock A happened before clock B
- */
-export function happensBefore(a: VectorClock, b: VectorClock): boolean {
- return compareVectorClocks(a, b) === 'a_before_b';
-}
-
-/**
- * Check if two clocks are concurrent (conflict)
- */
-export function areConcurrent(a: VectorClock, b: VectorClock): boolean {
- return compareVectorClocks(a, b) === 'concurrent';
-}
-
-/**
- * Create initial vector clock for a device
- */
-export function createVectorClock(deviceId: string): VectorClock {
- return { [deviceId]: 1 };
-}
-
-/**
- * Clone a vector clock
- */
-export function cloneVectorClock(clock: VectorClock): VectorClock {
- return { ...clock };
-}
diff --git a/lib/tasks/crud/create.ts b/lib/tasks/crud/create.ts
index fa505def..591ac406 100644
--- a/lib/tasks/crud/create.ts
+++ b/lib/tasks/crud/create.ts
@@ -6,7 +6,6 @@ import { taskDraftSchema } from "@/lib/schema";
import type { TaskDraft, TaskRecord } from "@/lib/types";
import { isoNow } from "@/lib/utils";
import {
- createNewVectorClock,
enqueueSyncOperation,
getSyncContext,
} from "./helpers";
@@ -19,9 +18,9 @@ const logger = createLogger("TASK_CRUD");
export async function createTask(input: TaskDraft): Promise {
try {
const validated = taskDraftSchema.parse(input);
- const { syncConfig, deviceId } = await getSyncContext();
+ const { syncConfig } = await getSyncContext();
- const record = buildTaskRecord(validated, deviceId);
+ const record = buildTaskRecord(validated);
const db = getDb();
await db.tasks.add(record);
@@ -32,7 +31,6 @@ export async function createTask(input: TaskDraft): Promise {
"create",
record.id,
record,
- record.vectorClock || {},
syncConfig?.enabled ?? false
);
@@ -57,9 +55,8 @@ export async function createTask(input: TaskDraft): Promise {
/**
* Build a complete TaskRecord from validated draft data
*/
-function buildTaskRecord(validated: TaskDraft, deviceId: string): TaskRecord {
+function buildTaskRecord(validated: TaskDraft): TaskRecord {
const now = isoNow();
- const vectorClock = createNewVectorClock(deviceId);
return {
...validated,
@@ -74,6 +71,5 @@ function buildTaskRecord(validated: TaskDraft, deviceId: string): TaskRecord {
dependencies: validated.dependencies ?? [],
notificationEnabled: validated.notificationEnabled ?? true,
notificationSent: false,
- vectorClock,
};
}
diff --git a/lib/tasks/crud/delete.ts b/lib/tasks/crud/delete.ts
index b54c964c..3cd35e70 100644
--- a/lib/tasks/crud/delete.ts
+++ b/lib/tasks/crud/delete.ts
@@ -1,7 +1,7 @@
import { getDb } from "@/lib/db";
import { createLogger } from "@/lib/logger";
import { removeDependencyReferences } from "@/lib/tasks/dependencies";
-import { enqueueSyncOperation, getSyncContext, updateVectorClock } from "./helpers";
+import { enqueueSyncOperation, getSyncContext } from "./helpers";
const logger = createLogger("TASK_CRUD");
@@ -12,8 +12,6 @@ export async function deleteTask(id: string): Promise {
try {
const db = getDb();
- // Read task BEFORE deleting to preserve vector clock
- // This is critical for conflict detection on the server
const task = await db.tasks.get(id);
if (!task) {
@@ -22,7 +20,6 @@ export async function deleteTask(id: string): Promise {
return;
}
- const vectorClock = task.vectorClock || {};
const taskTitle = task.title;
await removeDependencyReferences(id);
@@ -31,11 +28,10 @@ export async function deleteTask(id: string): Promise {
logger.info("Task deleted", { taskId: id, title: taskTitle });
// Enqueue sync operation if sync is enabled
- const { syncConfig, deviceId } = await getSyncContext();
+ const { syncConfig } = await getSyncContext();
if (syncConfig?.enabled) {
- const deleteClock = updateVectorClock(vectorClock, deviceId);
- await enqueueSyncOperation("delete", id, null, deleteClock, true);
+ await enqueueSyncOperation("delete", id, null, true);
logger.debug("Task deletion queued for sync", { taskId: id });
}
} catch (error) {
diff --git a/lib/tasks/crud/duplicate.ts b/lib/tasks/crud/duplicate.ts
index b84ac81a..14a70fe8 100644
--- a/lib/tasks/crud/duplicate.ts
+++ b/lib/tasks/crud/duplicate.ts
@@ -4,7 +4,6 @@ import { createLogger } from "@/lib/logger";
import type { TaskRecord } from "@/lib/types";
import { isoNow } from "@/lib/utils";
import {
- createNewVectorClock,
enqueueSyncOperation,
getSyncContext,
} from "./helpers";
@@ -23,8 +22,8 @@ export async function duplicateTask(id: string): Promise {
throw new Error(`Task with id ${id} not found`);
}
- const { syncConfig, deviceId } = await getSyncContext();
- const duplicate = buildDuplicateRecord(original, deviceId);
+ const { syncConfig } = await getSyncContext();
+ const duplicate = buildDuplicateRecord(original);
await db.tasks.add(duplicate);
@@ -32,7 +31,6 @@ export async function duplicateTask(id: string): Promise {
"create",
duplicate.id,
duplicate,
- duplicate.vectorClock || {},
syncConfig?.enabled ?? true // Default to true for backward compatibility
);
@@ -51,9 +49,8 @@ export async function duplicateTask(id: string): Promise {
/**
* Build a duplicate task record with fresh metadata
*/
-function buildDuplicateRecord(original: TaskRecord, deviceId: string): TaskRecord {
+function buildDuplicateRecord(original: TaskRecord): TaskRecord {
const now = isoNow();
- const vectorClock = createNewVectorClock(deviceId);
return {
...original,
@@ -67,6 +64,5 @@ function buildDuplicateRecord(original: TaskRecord, deviceId: string): TaskRecor
lastNotificationAt: undefined,
snoozedUntil: undefined,
archivedAt: undefined,
- vectorClock,
};
}
diff --git a/lib/tasks/crud/helpers.ts b/lib/tasks/crud/helpers.ts
index 4a1a4159..e2b375cf 100644
--- a/lib/tasks/crud/helpers.ts
+++ b/lib/tasks/crud/helpers.ts
@@ -1,8 +1,6 @@
import { getBackgroundSyncManager } from "@/lib/sync/background-sync";
import { getSyncConfig } from "@/lib/sync/config";
import { getSyncQueue } from "@/lib/sync/queue";
-import { incrementVectorClock } from "@/lib/sync/vector-clock";
-import type { VectorClock } from "@/lib/sync/types";
/**
* Schedule debounced background sync after task change
@@ -35,29 +33,11 @@ export async function enqueueSyncOperation(
operation: "create" | "update" | "delete",
taskId: string,
data: import("@/lib/types").TaskRecord | null,
- vectorClock: VectorClock,
syncEnabled: boolean
): Promise {
if (!syncEnabled) return;
const queue = getSyncQueue();
- await queue.enqueue(operation, taskId, data, vectorClock);
+ await queue.enqueue(operation, taskId, data);
scheduleSyncAfterChange();
}
-
-/**
- * Create a new vector clock for a new task
- */
-export function createNewVectorClock(deviceId: string): VectorClock {
- return incrementVectorClock({}, deviceId);
-}
-
-/**
- * Increment an existing vector clock
- */
-export function updateVectorClock(
- currentClock: VectorClock,
- deviceId: string
-): VectorClock {
- return incrementVectorClock(currentClock, deviceId);
-}
diff --git a/lib/tasks/crud/move.ts b/lib/tasks/crud/move.ts
index ffc2eeb6..71010885 100644
--- a/lib/tasks/crud/move.ts
+++ b/lib/tasks/crud/move.ts
@@ -3,7 +3,7 @@ import { createLogger } from "@/lib/logger";
import { parseQuadrantFlags } from "@/lib/quadrants";
import type { QuadrantId, TaskRecord } from "@/lib/types";
import { isoNow } from "@/lib/utils";
-import { enqueueSyncOperation, getSyncContext, updateVectorClock } from "./helpers";
+import { enqueueSyncOperation, getSyncContext } from "./helpers";
const logger = createLogger("TASK_CRUD");
@@ -25,8 +25,7 @@ export async function moveTaskToQuadrant(
}
const { urgent, important } = parseQuadrantFlags(targetQuadrant);
- const { syncConfig, deviceId } = await getSyncContext();
- const newClock = updateVectorClock(existing.vectorClock || {}, deviceId);
+ const { syncConfig } = await getSyncContext();
const nextRecord: TaskRecord = {
...existing,
@@ -34,7 +33,6 @@ export async function moveTaskToQuadrant(
important,
quadrant: targetQuadrant,
updatedAt: isoNow(),
- vectorClock: newClock,
};
await db.tasks.put(nextRecord);
@@ -50,7 +48,6 @@ export async function moveTaskToQuadrant(
"update",
id,
nextRecord,
- nextRecord.vectorClock || {},
syncConfig?.enabled ?? false
);
diff --git a/lib/tasks/crud/snooze.ts b/lib/tasks/crud/snooze.ts
index 9a3e7a71..ab53a7ca 100644
--- a/lib/tasks/crud/snooze.ts
+++ b/lib/tasks/crud/snooze.ts
@@ -2,7 +2,7 @@ import { getDb } from "@/lib/db";
import { createLogger } from "@/lib/logger";
import type { TaskRecord } from "@/lib/types";
import { isoNow } from "@/lib/utils";
-import { enqueueSyncOperation, getSyncContext, updateVectorClock } from "./helpers";
+import { enqueueSyncOperation, getSyncContext } from "./helpers";
import { TIME_TRACKING } from "@/lib/constants";
const logger = createLogger("TASK_CRUD");
@@ -34,8 +34,7 @@ export async function snoozeTask(
throw new Error(`Task ${id} not found`);
}
- const { syncConfig, deviceId } = await getSyncContext();
- const newClock = updateVectorClock(existing.vectorClock || {}, deviceId);
+ const { syncConfig } = await getSyncContext();
// Calculate snooze end time (or clear if minutes is 0)
const snoozedUntil = minutes > 0
@@ -46,7 +45,6 @@ export async function snoozeTask(
...existing,
snoozedUntil,
updatedAt: isoNow(),
- vectorClock: newClock,
};
await db.tasks.put(nextRecord);
@@ -61,7 +59,6 @@ export async function snoozeTask(
"update",
id,
nextRecord,
- nextRecord.vectorClock || {},
syncConfig?.enabled ?? false
);
diff --git a/lib/tasks/crud/time-tracking.ts b/lib/tasks/crud/time-tracking.ts
index e8817836..313df456 100644
--- a/lib/tasks/crud/time-tracking.ts
+++ b/lib/tasks/crud/time-tracking.ts
@@ -3,7 +3,7 @@ import { getDb } from "@/lib/db";
import { createLogger } from "@/lib/logger";
import type { TaskRecord, TimeEntry } from "@/lib/types";
import { isoNow } from "@/lib/utils";
-import { enqueueSyncOperation, getSyncContext, updateVectorClock } from "./helpers";
+import { enqueueSyncOperation, getSyncContext } from "./helpers";
import { TIME_TRACKING } from "@/lib/constants";
const logger = createLogger("TIME_TRACKING");
@@ -39,8 +39,7 @@ export async function startTimeTracking(taskId: string): Promise {
throw new Error("Task already has a running timer");
}
- const { syncConfig, deviceId } = await getSyncContext();
- const newClock = updateVectorClock(existing.vectorClock || {}, deviceId);
+ const { syncConfig } = await getSyncContext();
const newEntry: TimeEntry = {
id: nanoid(8),
@@ -53,7 +52,6 @@ export async function startTimeTracking(taskId: string): Promise {
...existing,
timeEntries: updatedEntries,
updatedAt: isoNow(),
- vectorClock: newClock,
};
await db.tasks.put(nextRecord);
@@ -64,7 +62,6 @@ export async function startTimeTracking(taskId: string): Promise {
"update",
taskId,
nextRecord,
- nextRecord.vectorClock || {},
syncConfig?.enabled ?? false
);
@@ -91,8 +88,7 @@ export async function stopTimeTracking(
throw new Error("No running timer found for this task");
}
- const { syncConfig, deviceId } = await getSyncContext();
- const newClock = updateVectorClock(existing.vectorClock || {}, deviceId);
+ const { syncConfig } = await getSyncContext();
const updatedEntries = [...(existing.timeEntries || [])];
updatedEntries[runningEntryIndex] = {
@@ -108,7 +104,6 @@ export async function stopTimeTracking(
timeEntries: updatedEntries,
timeSpent,
updatedAt: isoNow(),
- vectorClock: newClock,
};
await db.tasks.put(nextRecord);
@@ -123,7 +118,6 @@ export async function stopTimeTracking(
"update",
taskId,
nextRecord,
- nextRecord.vectorClock || {},
syncConfig?.enabled ?? false
);
@@ -152,8 +146,7 @@ export async function deleteTimeEntry(
throw new Error(`Time entry ${entryId} not found`);
}
- const { syncConfig, deviceId } = await getSyncContext();
- const newClock = updateVectorClock(existing.vectorClock || {}, deviceId);
+ const { syncConfig } = await getSyncContext();
const timeSpent = calculateTimeSpent(updatedEntries);
@@ -162,7 +155,6 @@ export async function deleteTimeEntry(
timeEntries: updatedEntries,
timeSpent,
updatedAt: isoNow(),
- vectorClock: newClock,
};
await db.tasks.put(nextRecord);
@@ -173,7 +165,6 @@ export async function deleteTimeEntry(
"update",
taskId,
nextRecord,
- nextRecord.vectorClock || {},
syncConfig?.enabled ?? false
);
diff --git a/lib/tasks/crud/toggle.ts b/lib/tasks/crud/toggle.ts
index 7c52f9d2..d65db5d9 100644
--- a/lib/tasks/crud/toggle.ts
+++ b/lib/tasks/crud/toggle.ts
@@ -4,10 +4,8 @@ import { createLogger } from "@/lib/logger";
import type { TaskRecord } from "@/lib/types";
import { isoNow } from "@/lib/utils";
import {
- createNewVectorClock,
enqueueSyncOperation,
getSyncContext,
- updateVectorClock,
} from "./helpers";
const logger = createLogger("TASK_CRUD");
@@ -28,16 +26,15 @@ export async function toggleCompleted(
throw new Error(`Task ${id} not found`);
}
- const { syncConfig, deviceId } = await getSyncContext();
+ const { syncConfig } = await getSyncContext();
// Handle recurring task instance creation
if (completed && existing.recurrence !== "none") {
- await createAndQueueRecurringInstance(existing, deviceId, syncConfig?.enabled ?? false);
+ await createAndQueueRecurringInstance(existing, syncConfig?.enabled ?? false);
}
// Update the original task
- const newClock = updateVectorClock(existing.vectorClock || {}, deviceId);
- const nextRecord = buildCompletedRecord(existing, completed, newClock);
+ const nextRecord = buildCompletedRecord(existing, completed);
await db.tasks.put(nextRecord);
@@ -51,7 +48,6 @@ export async function toggleCompleted(
"update",
id,
nextRecord,
- nextRecord.vectorClock || {},
syncConfig?.enabled ?? false
);
@@ -78,7 +74,6 @@ export async function toggleCompleted(
function buildCompletedRecord(
existing: TaskRecord,
completed: boolean,
- newClock: Record
): TaskRecord {
const now = isoNow();
return {
@@ -86,7 +81,6 @@ function buildCompletedRecord(
completed,
completedAt: completed ? now : undefined,
updatedAt: now,
- vectorClock: newClock,
};
}
@@ -95,10 +89,9 @@ function buildCompletedRecord(
*/
async function createAndQueueRecurringInstance(
existing: TaskRecord,
- deviceId: string,
syncEnabled: boolean
): Promise {
- const newInstance = buildRecurringInstance(existing, deviceId);
+ const newInstance = buildRecurringInstance(existing);
const db = getDb();
await db.tasks.add(newInstance);
@@ -112,7 +105,6 @@ async function createAndQueueRecurringInstance(
"create",
newInstance.id,
newInstance,
- newInstance.vectorClock || {},
syncEnabled
);
@@ -126,10 +118,9 @@ async function createAndQueueRecurringInstance(
/**
* Build a new recurring task instance based on completed task
*/
-function buildRecurringInstance(existing: TaskRecord, deviceId: string): TaskRecord {
+function buildRecurringInstance(existing: TaskRecord): TaskRecord {
const now = isoNow();
const nextDueDate = calculateNextDueDate(existing.dueDate, existing.recurrence);
- const vectorClock = createNewVectorClock(deviceId);
return {
...existing,
@@ -139,7 +130,6 @@ function buildRecurringInstance(existing: TaskRecord, deviceId: string): TaskRec
createdAt: now,
updatedAt: now,
parentTaskId: existing.parentTaskId ?? existing.id,
- vectorClock,
subtasks: existing.subtasks.map((subtask) => ({ ...subtask, completed: false })),
notificationSent: false,
lastNotificationAt: undefined,
diff --git a/lib/tasks/crud/update.ts b/lib/tasks/crud/update.ts
index af488516..cc4f6ca9 100644
--- a/lib/tasks/crud/update.ts
+++ b/lib/tasks/crud/update.ts
@@ -4,7 +4,7 @@ import { resolveQuadrantId } from "@/lib/quadrants";
import { taskDraftSchema } from "@/lib/schema";
import type { TaskDraft, TaskRecord } from "@/lib/types";
import { isoNow } from "@/lib/utils";
-import { enqueueSyncOperation, getSyncContext, updateVectorClock } from "./helpers";
+import { enqueueSyncOperation, getSyncContext } from "./helpers";
const logger = createLogger("TASK_CRUD");
@@ -27,10 +27,9 @@ export async function updateTask(
const nextDraft = mergeTaskUpdates(existing, updates);
const validated = taskDraftSchema.parse(nextDraft);
- const { syncConfig, deviceId } = await getSyncContext();
- const newClock = updateVectorClock(existing.vectorClock || {}, deviceId);
+ const { syncConfig } = await getSyncContext();
- const nextRecord = buildUpdatedRecord(existing, validated, updates, newClock);
+ const nextRecord = buildUpdatedRecord(existing, validated, updates);
await db.tasks.put(nextRecord);
@@ -40,7 +39,6 @@ export async function updateTask(
"update",
id,
nextRecord,
- nextRecord.vectorClock || {},
syncConfig?.enabled ?? false
);
@@ -93,7 +91,6 @@ function buildUpdatedRecord(
existing: TaskRecord,
validated: TaskDraft,
updates: Partial,
- newClock: Record
): TaskRecord {
// Check if due date or notification settings changed
const dueDateChanged =
@@ -115,7 +112,6 @@ function buildUpdatedRecord(
...validated,
quadrant: resolveQuadrantId(validated.urgent, validated.important),
updatedAt: isoNow(),
- vectorClock: newClock,
...notificationReset,
};
}
diff --git a/lib/tasks/dependencies.ts b/lib/tasks/dependencies.ts
index 49062b5e..3b0751af 100644
--- a/lib/tasks/dependencies.ts
+++ b/lib/tasks/dependencies.ts
@@ -2,7 +2,6 @@ import { getDb } from "@/lib/db";
import type { TaskRecord } from "@/lib/types";
import { isoNow } from "@/lib/utils";
import { getSyncQueue } from "@/lib/sync/queue";
-import { incrementVectorClock } from "@/lib/sync/vector-clock";
import { getSyncConfig } from "@/lib/sync/config";
/**
@@ -20,25 +19,19 @@ export async function addDependency(taskId: string, dependencyId: string): Promi
return existing;
}
- // Increment vector clock for sync
- const syncConfig = await getSyncConfig();
- const deviceId = syncConfig?.deviceId || 'local';
- const currentClock = existing.vectorClock || {};
- const newClock = incrementVectorClock(currentClock, deviceId);
-
const nextRecord: TaskRecord = {
...existing,
dependencies: [...existing.dependencies, dependencyId],
updatedAt: isoNow(),
- vectorClock: newClock
};
await db.tasks.put(nextRecord);
// Enqueue sync operation if sync is enabled
+ const syncConfig = await getSyncConfig();
if (syncConfig?.enabled) {
const queue = getSyncQueue();
- await queue.enqueue('update', taskId, nextRecord, nextRecord.vectorClock || {});
+ await queue.enqueue('update', taskId, nextRecord);
}
return nextRecord;
@@ -54,25 +47,19 @@ export async function removeDependency(taskId: string, dependencyId: string): Pr
throw new Error(`Task ${taskId} not found`);
}
- // Increment vector clock for sync
- const syncConfig = await getSyncConfig();
- const deviceId = syncConfig?.deviceId || 'local';
- const currentClock = existing.vectorClock || {};
- const newClock = incrementVectorClock(currentClock, deviceId);
-
const nextRecord: TaskRecord = {
...existing,
dependencies: existing.dependencies.filter(depId => depId !== dependencyId),
updatedAt: isoNow(),
- vectorClock: newClock
};
await db.tasks.put(nextRecord);
// Enqueue sync operation if sync is enabled
+ const syncConfig = await getSyncConfig();
if (syncConfig?.enabled) {
const queue = getSyncQueue();
- await queue.enqueue('update', taskId, nextRecord, nextRecord.vectorClock || {});
+ await queue.enqueue('update', taskId, nextRecord);
}
return nextRecord;
diff --git a/lib/tasks/subtasks.ts b/lib/tasks/subtasks.ts
index 31814f0c..a2a01814 100644
--- a/lib/tasks/subtasks.ts
+++ b/lib/tasks/subtasks.ts
@@ -3,7 +3,6 @@ import { generateId } from "@/lib/id-generator";
import type { TaskRecord } from "@/lib/types";
import { isoNow } from "@/lib/utils";
import { getSyncQueue } from "@/lib/sync/queue";
-import { incrementVectorClock } from "@/lib/sync/vector-clock";
import { getSyncConfig } from "@/lib/sync/config";
/**
@@ -20,25 +19,19 @@ export async function toggleSubtask(taskId: string, subtaskId: string, completed
st.id === subtaskId ? { ...st, completed } : st
);
- // Increment vector clock for sync
- const syncConfig = await getSyncConfig();
- const deviceId = syncConfig?.deviceId || 'local';
- const currentClock = existing.vectorClock || {};
- const newClock = incrementVectorClock(currentClock, deviceId);
-
const nextRecord: TaskRecord = {
...existing,
subtasks: updatedSubtasks,
updatedAt: isoNow(),
- vectorClock: newClock
};
await db.tasks.put(nextRecord);
// Enqueue sync operation if sync is enabled
+ const syncConfig = await getSyncConfig();
if (syncConfig?.enabled) {
const queue = getSyncQueue();
- await queue.enqueue('update', taskId, nextRecord, nextRecord.vectorClock || {});
+ await queue.enqueue('update', taskId, nextRecord);
}
return nextRecord;
@@ -60,25 +53,19 @@ export async function addSubtask(taskId: string, title: string): Promise st.id !== subtaskId),
updatedAt: isoNow(),
- vectorClock: newClock
};
await db.tasks.put(nextRecord);
// Enqueue sync operation if sync is enabled
+ const syncConfig = await getSyncConfig();
if (syncConfig?.enabled) {
const queue = getSyncQueue();
- await queue.enqueue('update', taskId, nextRecord, nextRecord.vectorClock || {});
+ await queue.enqueue('update', taskId, nextRecord);
}
return nextRecord;
diff --git a/lib/types.ts b/lib/types.ts
index 31167035..6fed21a7 100644
--- a/lib/types.ts
+++ b/lib/types.ts
@@ -40,8 +40,6 @@ export interface TaskRecord {
notificationSent: boolean;
lastNotificationAt?: string;
snoozedUntil?: string;
- // Sync fields
- vectorClock?: { [deviceId: string]: number }; // For distributed sync conflict detection
// Archive field
archivedAt?: string; // Timestamp when task was archived
// Time tracking fields
diff --git a/package.json b/package.json
index 219e9871..332e882a 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
{
"name": "gsd-taskmanager",
- "version": "6.8.8",
+ "version": "7.0.0",
"private": true,
"scripts": {
"dev": "next dev",
@@ -31,6 +31,7 @@
"@radix-ui/react-switch": "^1.2.6",
"@radix-ui/react-tooltip": "^1.2.8",
"@radix-ui/react-visually-hidden": "^1.2.4",
+ "@supabase/supabase-js": "^2.98.0",
"beautiful-mermaid": "^0.1.3",
"clsx": "^2.1.1",
"cmdk": "^1.1.1",
@@ -70,8 +71,7 @@
"tailwindcss": "^3.4.19",
"typescript": "^5.9.3",
"vite": "^7.3.1",
- "vitest": "^4.0.18",
- "wrangler": "^4.65.0"
+ "vitest": "^4.0.18"
},
"workspaces": [
"packages/*"
@@ -81,7 +81,6 @@
"baseline-browser-mapping": "^2.9.11",
"vite": "^7.3.0",
"esbuild": ">=0.25.0",
- "hono": ">=4.11.7",
"@isaacs/brace-expansion": ">=5.0.1",
"qs": ">=6.14.2"
},
diff --git a/packages/mcp-server/.env.example b/packages/mcp-server/.env.example
index 151ab6b6..aade9b9f 100644
--- a/packages/mcp-server/.env.example
+++ b/packages/mcp-server/.env.example
@@ -1,14 +1,20 @@
# GSD MCP Server Configuration Example
# Copy this to .env and fill in your values
-# Base URL of your GSD Worker API
-# Development: http://localhost:8787
-# Staging: https://sync-staging.gsd.vinny.dev
-# Production: https://sync.gsd.vinny.dev
-GSD_API_URL=https://sync.gsd.vinny.dev
-
-# JWT token from OAuth authentication
-# Get this from:
-# 1. Browser DevTools → Application → Local Storage → gsd_auth_token
-# 2. OAuth callback response (token field)
-GSD_AUTH_TOKEN=your-jwt-token-here
+# Supabase project URL
+# Find this in: Supabase Dashboard → Settings → API → URL
+GSD_SUPABASE_URL=https://your-project.supabase.co
+
+# Supabase service role key (NOT the anon key)
+# Find this in: Supabase Dashboard → Settings → API → service_role key
+# This key bypasses RLS and should never be exposed to clients
+GSD_SUPABASE_SERVICE_KEY=your-service-role-key-here
+
+# Email address used to sign into GSD app
+# Used to look up user_id in the profiles table
+GSD_USER_EMAIL=your-email@example.com
+
+# Optional: Encryption passphrase for decrypting task content
+# Must match the passphrase set in the GSD app (Settings → Sync)
+# Without this, only metadata-level tools will work
+# GSD_ENCRYPTION_PASSPHRASE=your-passphrase-here
diff --git a/packages/mcp-server/README.md b/packages/mcp-server/README.md
index 5bdc8d2c..687098cd 100644
--- a/packages/mcp-server/README.md
+++ b/packages/mcp-server/README.md
@@ -22,8 +22,9 @@ The wizard will guide you through configuration and test your connection. Once c
"command": "npx",
"args": ["-y", "gsd-mcp-server"],
"env": {
- "GSD_API_URL": "https://gsd.vinny.dev",
- "GSD_AUTH_TOKEN": "your-jwt-token-here",
+ "GSD_SUPABASE_URL": "https://your-project.supabase.co",
+ "GSD_SUPABASE_SERVICE_KEY": "your-service-role-key-here",
+ "GSD_USER_EMAIL": "your-email@example.com",
"GSD_ENCRYPTION_PASSPHRASE": "your-passphrase-here"
}
}
@@ -37,7 +38,7 @@ See [Installation](#installation) section below for detailed setup instructions.
**Reliability & Performance** (v0.6.0) 🆕 🔥
- ✅ **Retry Logic** - Automatic exponential backoff for transient failures (500, 502, 503, 504, 429)
-- ✅ **Token Monitoring** - Proactive warnings for expiring tokens (healthy/warning/critical/expired)
+- ✅ **Connection Monitoring** - Proactive warnings for Supabase connectivity issues
- ✅ **Caching** - In-memory TTL cache (30s) with automatic invalidation on writes
- ✅ **Dry-Run Mode** - Preview all write operations before committing
- ✅ **Dependency Validation** - Circular dependency detection using BFS algorithm
@@ -88,7 +89,7 @@ See [Installation](#installation) section below for detailed setup instructions.
## Prerequisites
1. **GSD Task Manager** with sync enabled
-2. **OAuth Authentication** completed (Google or Apple)
+2. **Supabase Authentication** completed (Google or Apple OAuth)
3. **Node.js** 18+ installed
4. **Claude Desktop** or another MCP-compatible client
@@ -111,15 +112,15 @@ npx gsd-mcp-server
```
**Setup Wizard Features:**
-- ✅ Tests API connectivity before configuration
-- ✅ Validates authentication token
+- ✅ Tests Supabase connectivity before configuration
+- ✅ Validates Supabase service role key
- ✅ Tests encryption passphrase (if provided)
- ✅ Generates ready-to-use Claude Desktop config
- ✅ Provides platform-specific config file paths
**Validation Tool Features:**
- ✅ Checks environment variables
-- ✅ Tests API connectivity and authentication
+- ✅ Tests Supabase connectivity and authentication
- ✅ Verifies encryption setup
- ✅ Validates device registration
- ✅ Provides actionable error messages
@@ -143,20 +144,15 @@ For development or if you want to modify the code:
## Setup
-### 1. Get Your Auth Token
+### 1. Get Your Supabase Credentials
-You'll need a JWT token from your GSD sync setup. Two options:
+You'll need a Supabase service role key and project URL from your Supabase project:
-**Option A: From Browser DevTools**
-1. Open GSD Task Manager in your browser
-2. Complete OAuth sign-in
-3. Open DevTools → Application → Local Storage
-4. Find `gsd_auth_token` and copy the value
-
-**Option B: From OAuth Callback** (Advanced)
-1. Trigger OAuth flow
-2. Intercept the callback response
-3. Extract the `token` field from the JSON response
+1. Go to your Supabase project dashboard
+2. Navigate to **Settings → API**
+3. Copy the **Project URL** (use as `GSD_SUPABASE_URL`)
+4. Copy the **service_role** secret key (use as `GSD_SUPABASE_SERVICE_KEY`)
+5. Use the email address associated with your GSD account for `GSD_USER_EMAIL`
### 2. Configure Claude Desktop
@@ -174,8 +170,9 @@ Add the MCP server to your Claude Desktop config:
"command": "npx",
"args": ["-y", "gsd-mcp-server"],
"env": {
- "GSD_API_URL": "https://gsd.vinny.dev",
- "GSD_AUTH_TOKEN": "your-jwt-token-here",
+ "GSD_SUPABASE_URL": "https://your-project.supabase.co",
+ "GSD_SUPABASE_SERVICE_KEY": "your-service-role-key-here",
+ "GSD_USER_EMAIL": "your-email@example.com",
"GSD_ENCRYPTION_PASSPHRASE": "your-passphrase-here"
}
}
@@ -194,8 +191,9 @@ Add the MCP server to your Claude Desktop config:
"/absolute/path/to/gsd-taskmanager/packages/mcp-server/dist/index.js"
],
"env": {
- "GSD_API_URL": "https://gsd.vinny.dev",
- "GSD_AUTH_TOKEN": "your-jwt-token-here",
+ "GSD_SUPABASE_URL": "https://your-project.supabase.co",
+ "GSD_SUPABASE_SERVICE_KEY": "your-service-role-key-here",
+ "GSD_USER_EMAIL": "your-email@example.com",
"GSD_ENCRYPTION_PASSPHRASE": "your-passphrase-here"
}
}
@@ -204,10 +202,10 @@ Add the MCP server to your Claude Desktop config:
```
**Configuration Notes**:
-- Replace `your-jwt-token-here` with your actual token from Step 1
+- Replace `your-project.supabase.co` with your actual Supabase project URL from Step 1
+- Replace `your-service-role-key-here` with your Supabase service role key
+- Replace `your-email@example.com` with the email address for your GSD account
- Replace `your-passphrase-here` with your sync encryption passphrase
-- `GSD_API_URL`: Use `https://gsd.vinny.dev` for production (or your custom Worker URL)
-- Token expires every 7 days - you'll need to update it periodically
- **Optional**: Add `GSD_ENCRYPTION_PASSPHRASE` to enable decrypted task access (v0.2.0)
- Without it: Only metadata tools work (sync status, devices, stats)
- With it: Full task content access (list, search, read tasks)
@@ -288,7 +286,7 @@ Once configured, you can ask Claude questions like:
- "Complete all tasks tagged #quick-wins"
- "Change due date of task #def456 to next Monday"
-Claude will use the MCP tools to fetch real-time data from your Worker API and can now modify your tasks!
+Claude will use the MCP tools to fetch real-time data from your Supabase backend and can now modify your tasks!
## Available Tools
@@ -380,8 +378,7 @@ List all decrypted tasks with optional filtering. **Requires `GSD_ENCRYPTION_PAS
"recurrence": "none",
"dependencies": [],
"createdAt": "2024-12-26T00:00:00.000Z",
- "updatedAt": "2024-12-27T00:00:00.000Z",
- "vectorClock": {}
+ "updatedAt": "2024-12-27T00:00:00.000Z"
}
]
```
@@ -495,14 +492,14 @@ Validate MCP server configuration and diagnose issues.
{
"checks": [
{
- "name": "API Connectivity",
+ "name": "Supabase Connectivity",
"status": "success",
- "details": "Connected to https://gsd.vinny.dev"
+ "details": "Connected to https://your-project.supabase.co"
},
{
"name": "Authentication",
"status": "success",
- "details": "Token valid (3 devices registered)"
+ "details": "Service key valid (3 devices registered)"
},
{
"name": "Encryption",
@@ -531,31 +528,6 @@ Get comprehensive help documentation including available tools, usage examples,
- "Show me analytics examples"
- "How do I troubleshoot authentication issues?"
-### `get_token_status` (v0.6.0)
-Check authentication token status including expiration date, days remaining, and warnings.
-
-**Returns**:
-```json
-{
- "status": "warning",
- "expiresAt": "2025-01-15T12:00:00.000Z",
- "daysRemaining": 5,
- "message": "Token expires in 5 days. Consider re-authenticating soon.",
- "needsReauth": false
-}
-```
-
-**Status Levels**:
-- `healthy` - More than 7 days until expiration
-- `warning` - 2-7 days until expiration
-- `critical` - Less than 2 days until expiration
-- `expired` - Token has expired
-
-**Example Usage:**
-- "Check my token status"
-- "Is my authentication about to expire?"
-- "Do I need to re-authenticate?"
-
### `get_cache_stats` (v0.6.0)
Get task cache statistics including hit rate, cache size, and TTL configuration.
@@ -739,11 +711,11 @@ Delete all completed tasks from last year
**Security Model**:
- 🔒 **End-to-end encryption maintained**: Tasks encrypted in database, decrypted locally
-- 🔒 **Zero-knowledge server**: Worker cannot decrypt your tasks
+- 🔒 **Zero-knowledge server**: Supabase cannot decrypt your tasks
- 🔒 **Passphrase stays local**: Never sent to server, stored only in Claude Desktop config
- 🔒 **Opt-in decryption**: Decryption disabled by default, requires explicit passphrase
- ✍️ **Write operations** (v0.4.0): Full task management with encryption
-- 🔐 **JWT authentication**: Uses existing OAuth tokens with 7-day expiry
+- 🔐 **Supabase Auth**: Uses Supabase authentication with OAuth (Google/Apple)
- 🛡️ **Safety limits**: Bulk operations limited to 50 tasks, clear validation
**See `DECRYPTION.md` for detailed security documentation.**
@@ -751,18 +723,18 @@ Delete all completed tasks from last year
## Troubleshooting
### "Configuration error: Required environment variables"
-- Check that `GSD_API_URL` and `GSD_AUTH_TOKEN` are set in your Claude Desktop config
+- Check that `GSD_SUPABASE_URL`, `GSD_SUPABASE_SERVICE_KEY`, and `GSD_USER_EMAIL` are set in your Claude Desktop config
- Ensure there are no typos in the environment variable names
### "API request failed: 401 Unauthorized"
-- Your JWT token has expired - get a new token from the OAuth flow
-- Update the `GSD_AUTH_TOKEN` in your config
+- Your Supabase service role key may be incorrect — copy it again from **Supabase → Settings → API**
+- Update the `GSD_SUPABASE_SERVICE_KEY` in your config
- Restart Claude Desktop
### "API request failed: 404 Not Found"
-- Check that `GSD_API_URL` is correct
-- Ensure your Worker is deployed and accessible
-- Try accessing the URL in your browser: `{GSD_API_URL}/health`
+- Check that `GSD_SUPABASE_URL` is correct (format: `https://your-project.supabase.co`)
+- Ensure your Supabase project is active and accessible
+- Verify the project URL in your Supabase dashboard under **Settings → API**
### "Cannot find module" error
- **If using npx**: Ensure you have internet connection (npx needs to download the package)
@@ -777,9 +749,10 @@ Delete all completed tasks from last year
- Restart Claude Desktop after adding the passphrase
### "Failed to fetch encryption salt" (v0.2.0)
-- The Worker endpoint for encryption salt is not accessible
-- Ensure Worker is deployed with v0.2.0+ (includes GET `/api/auth/encryption-salt`)
-- Check your JWT token is valid and not expired
+- The encryption salt could not be retrieved from the Supabase `profiles` table
+- Ensure you are using the **service_role** key (not the anon key)
+- Verify the user identified by `GSD_USER_EMAIL` has a profile entry with a valid encryption salt
+- Confirm the `profiles` table has an `encryption_salt` column (the service role key bypasses RLS, so no policy changes are needed)
### "Decryption failed - passphrase is incorrect" (v0.2.0)
- The provided passphrase doesn't match the one used to encrypt tasks
@@ -802,8 +775,9 @@ npm run build
**Testing Locally** (without Claude Desktop):
```bash
-export GSD_API_URL="https://gsd.vinny.dev"
-export GSD_AUTH_TOKEN="your-jwt-token"
+export GSD_SUPABASE_URL="https://your-project.supabase.co"
+export GSD_SUPABASE_SERVICE_KEY="your-service-role-key-here"
+export GSD_USER_EMAIL="your-email@example.com"
export GSD_ENCRYPTION_PASSPHRASE="your-passphrase"
npm start
```
@@ -851,9 +825,9 @@ packages/mcp-server/
│ │ │ ├── write-handlers.ts
│ │ │ ├── analytics-handlers.ts
│ │ │ └── system-handlers.ts
-│ │ └── schemas/ # MCP tool schemas (20 tools)
+│ │ └── schemas/ # MCP tool schemas (19 tools)
│ │ ├── index.ts
-│ │ ├── read-tools.ts # 7 read tools
+│ │ ├── read-tools.ts # 6 read tools
│ │ ├── write-tools.ts # 5 write tools
│ │ ├── analytics-tools.ts # 5 analytics tools
│ │ └── system-tools.ts # 3 system tools
@@ -886,15 +860,15 @@ Claude Desktop
↓ MCP Protocol (stdio)
GSD MCP Server
├─ Metadata queries (v0.1.0)
- │ ↓ HTTPS + JWT
- │ GSD Worker API
- │ ↓ D1 Queries
+ │ ↓ HTTPS + Service Role Key
+ │ Supabase backend
+ │ ↓ Postgres Queries
│ Metadata (counts, status)
│
└─ Decryption queries (v0.2.0)
- ↓ HTTPS + JWT
- GSD Worker API
- ↓ D1 Queries
+ ↓ HTTPS + Service Role Key
+ Supabase backend
+ ↓ Postgres Queries
Encrypted Task Blobs
↓ Local decryption (AES-256-GCM)
Decrypted Tasks → Claude
@@ -922,18 +896,17 @@ MIT - Same as GSD Task Manager
- 🚀 **Caching** - In-memory TTL cache (30s) with auto-invalidation on writes
- 🔍 **Dry-Run Mode** - Preview all write operations before committing
- 🔗 **Dependency Validation** - Circular dependency detection using BFS
-- 📊 **20 total MCP tools** (7 read + 5 write + 5 analytics + 3 system)
+- 📊 **19 total MCP tools** (6 read + 5 write + 5 analytics + 3 system)
- ✅ **70 passing tests** - Comprehensive schema and integration coverage
**New Tools (v0.6.0)**:
-- `get_token_status` - Check JWT token health and expiration
- `get_cache_stats` - Monitor cache performance and hit rates
**Bug Fixes (v0.4.1-v0.4.7)**:
-- v0.4.1: Fixed Worker API payload structure
-- v0.4.2: Fixed JWT token schema (sub, deviceId)
+- v0.4.1: Fixed Supabase API payload structure
+- v0.4.2: Fixed Supabase Auth user schema
- v0.4.3: Added SHA-256 checksum calculation
-- v0.4.4: Added Worker rejection array checking
+- v0.4.4: Added Supabase rejection array checking
- v0.4.5: Fixed field names (quadrant, timestamps)
- v0.4.6: Fixed type mismatches (dueDate, subtasks.title)
- v0.4.7: Fixed MCP tool input schemas
diff --git a/packages/mcp-server/package.json b/packages/mcp-server/package.json
index 62c55be5..a9505d06 100644
--- a/packages/mcp-server/package.json
+++ b/packages/mcp-server/package.json
@@ -78,6 +78,7 @@
},
"dependencies": {
"@modelcontextprotocol/sdk": "^1.26.0",
+ "@supabase/supabase-js": "^2.98.0",
"zod": "^4.3.5"
},
"devDependencies": {
diff --git a/packages/mcp-server/src/__tests__/tools/schemas.test.ts b/packages/mcp-server/src/__tests__/tools/schemas.test.ts
index 3f1af7f2..b57414e0 100644
--- a/packages/mcp-server/src/__tests__/tools/schemas.test.ts
+++ b/packages/mcp-server/src/__tests__/tools/schemas.test.ts
@@ -11,7 +11,6 @@ import {
listTasksTool,
getTaskTool,
searchTasksTool,
- getTokenStatusTool,
getProductivityMetricsTool,
getTagAnalyticsTool,
createTaskTool,
@@ -26,14 +25,14 @@ import {
describe('Tool Schemas', () => {
describe('Schema Count Validation', () => {
- it('should have exactly 20 tools total', () => {
- // 7 read + 5 write + 5 analytics + 3 system
- expect(allTools).toHaveLength(20);
+ it('should have exactly 19 tools total', () => {
+ // 6 read + 5 write + 5 analytics + 3 system
+ expect(allTools).toHaveLength(19);
});
- it('should have 7 read tools', () => {
- // get_sync_status, list_devices, get_task_stats, list_tasks, get_task, search_tasks, get_token_status
- expect(readTools).toHaveLength(7);
+ it('should have 6 read tools', () => {
+ // get_sync_status, list_devices, get_task_stats, list_tasks, get_task, search_tasks
+ expect(readTools).toHaveLength(6);
});
it('should have 5 write tools', () => {
@@ -120,11 +119,6 @@ describe('Tool Schemas', () => {
expect(searchTasksTool.inputSchema.required).toContain('query');
expect(searchTasksTool.inputSchema.properties).toHaveProperty('query');
});
-
- it('get_token_status should have no required parameters', () => {
- expect(getTokenStatusTool.name).toBe('get_token_status');
- expect(getTokenStatusTool.inputSchema.required).toHaveLength(0);
- });
});
describe('Analytics Tools', () => {
diff --git a/packages/mcp-server/src/api/client.ts b/packages/mcp-server/src/api/client.ts
index 83e193e7..6780bbdf 100644
--- a/packages/mcp-server/src/api/client.ts
+++ b/packages/mcp-server/src/api/client.ts
@@ -1,160 +1,62 @@
-import { z } from 'zod';
-import type { GsdConfig } from '../types.js';
-import { fetchWithRetry, DEFAULT_RETRY_CONFIG, type RetryConfig } from './retry.js';
-
/**
- * Make authenticated API request to GSD Worker
- * Handles all HTTP errors with detailed user-friendly messages
- * Includes automatic retry with exponential backoff for transient failures
+ * Supabase client for MCP server
+ * Uses service role key to bypass RLS, filters by user_id
*/
-export async function apiRequest<T>(
- config: GsdConfig,
- endpoint: string,
- schema: z.ZodType<T>,
- retryConfig: RetryConfig = DEFAULT_RETRY_CONFIG
-): Promise<T> {
- const url = `${config.apiBaseUrl}${endpoint}`;
- const response = await fetchWithErrorHandling(url, config, retryConfig);
- await validateResponseStatus(response, endpoint, config);
+import { createClient, type SupabaseClient } from '@supabase/supabase-js';
+import type { GsdConfig } from '../types.js';
+import { createMcpLogger } from '../utils/logger.js';
- const data = await response.json();
- return schema.parse(data);
-}
+const logger = createMcpLogger('SUPABASE_CLIENT');
-/**
- * Fetch URL with network error handling and automatic retry
- */
-async function fetchWithErrorHandling(
- url: string,
- config: GsdConfig,
- retryConfig: RetryConfig
-): Promise<Response> {
- try {
- return await fetchWithRetry(
- () =>
- fetch(url, {
- method: 'GET',
- headers: {
- Authorization: `Bearer ${config.authToken}`,
- 'Content-Type': 'application/json',
- },
- }),
- retryConfig
- );
- } catch (error) {
- throw new Error(
- `❌ Failed to connect to ${config.apiBaseUrl}\n\n` +
- `Network error: ${error instanceof Error ? error.message : 'Unknown error'}\n\n` +
- `Please check:\n` +
- ` 1. Your internet connection\n` +
- ` 2. GSD_API_URL is correct (${config.apiBaseUrl})\n` +
- ` 3. The Worker is deployed and accessible\n\n` +
- `Retried ${retryConfig.maxRetries} times before giving up.\n\n` +
- `Run: npx gsd-mcp-server --validate`
- );
- }
-}
+let supabaseInstance: SupabaseClient | null = null;
+let cachedUserId: string | null = null;
/**
- * Validate HTTP response status and throw detailed errors
+ * Get or create Supabase client (singleton)
*/
-async function validateResponseStatus(
- response: Response,
- endpoint: string,
- config: GsdConfig
-): Promise<void> {
- if (response.ok) return;
-
- const errorText = await response.text();
-
- if (response.status === 401) {
- throw createAuthError(config);
- }
-
- if (response.status === 404) {
- throw createNotFoundError(endpoint, config);
- }
-
- if (response.status === 403) {
- throw createForbiddenError();
- }
-
- if (response.status >= 500) {
- throw createServerError(response, errorText);
+export function getSupabaseClient(config: GsdConfig): SupabaseClient {
+ if (!supabaseInstance) {
+ supabaseInstance = createClient(config.supabaseUrl, config.serviceKey);
}
-
- throw createGenericError(response, errorText);
+ return supabaseInstance;
}
/**
- * Create 401 Unauthorized error message
+ * Resolve user_id from email address via profiles table
+ * Cached after first lookup
*/
-function createAuthError(config: GsdConfig): Error {
- return new Error(
- `❌ Authentication failed (401 Unauthorized)\n\n` +
- `Your auth token has expired or is invalid.\n\n` +
- `To fix:\n` +
- ` 1. Visit ${config.apiBaseUrl}\n` +
- ` 2. Complete OAuth login\n` +
- ` 3. Copy new token from DevTools → Application → Local Storage → gsd_auth_token\n` +
- ` 4. Update GSD_AUTH_TOKEN in Claude Desktop config\n` +
- ` 5. Restart Claude Desktop\n\n` +
- `Run: npx gsd-mcp-server --setup`
- );
-}
+export async function resolveUserId(config: GsdConfig): Promise<string> {
+ if (cachedUserId) return cachedUserId;
-/**
- * Create 404 Not Found error message
- */
-function createNotFoundError(endpoint: string, config: GsdConfig): Error {
- return new Error(
- `❌ Endpoint not found (404 Not Found)\n\n` +
- `The API endpoint ${endpoint} does not exist.\n\n` +
- `Please check:\n` +
- ` 1. GSD_API_URL is correct (${config.apiBaseUrl})\n` +
- ` 2. Your Worker is deployed with the latest version\n` +
- ` 3. You're using a compatible MCP server version\n\n` +
- `Run: npx gsd-mcp-server --validate`
- );
-}
+ const supabase = getSupabaseClient(config);
+ const { data, error } = await supabase
+ .from('profiles')
+ .select('id')
+ .eq('email', config.userEmail)
+ .single();
-/**
- * Create 403 Forbidden error message
- */
-function createForbiddenError(): Error {
- return new Error(
- `❌ Access forbidden (403 Forbidden)\n\n` +
- `You don't have permission to access this resource.\n\n` +
- `This could mean:\n` +
- ` 1. Your token is for a different user/account\n` +
- ` 2. The resource has been revoked\n` +
- ` 3. CORS or access policy restrictions\n\n` +
- `Try logging in again: npx gsd-mcp-server --setup`
- );
-}
+ if (error || !data) {
+ throw new Error(
+ `❌ User not found: ${config.userEmail}\n\n` +
+ `No profile found for this email address.\n\n` +
+ `Please check:\n` +
+ ` 1. GSD_USER_EMAIL is correct\n` +
+ ` 2. You have signed into the GSD app at least once\n` +
+ ` 3. GSD_SUPABASE_URL points to the correct project\n\n` +
+ `Run: npx gsd-mcp-server --validate`
+ );
+ }
-/**
- * Create 5xx Server Error message
- */
-function createServerError(response: Response, errorText: string): Error {
- return new Error(
- `❌ Server error (${response.status} ${response.statusText})\n\n` +
- `The GSD Worker encountered an internal error.\n\n` +
- `Error details: ${errorText}\n\n` +
- `Please try again in a few moments. If the issue persists, check:\n` +
- ` - Worker logs in Cloudflare dashboard\n` +
- ` - GitHub issues: https://github.com/vscarpenter/gsd-taskmanager/issues`
- );
+ cachedUserId = data.id;
+ logger.debug('Resolved user ID', { email: config.userEmail });
+ return data.id;
}
/**
- * Create generic API error message
+ * Clear cached state (for testing or config changes)
*/
-function createGenericError(response: Response, errorText: string): Error {
- return new Error(
- `❌ API request failed (${response.status} ${response.statusText})\n\n` +
- `Error details: ${errorText}\n\n` +
- `Run: npx gsd-mcp-server --validate`
- );
+export function clearClientCache(): void {
+ supabaseInstance = null;
+ cachedUserId = null;
}
diff --git a/packages/mcp-server/src/cli/index.ts b/packages/mcp-server/src/cli/index.ts
index c88898f6..80257a90 100644
--- a/packages/mcp-server/src/cli/index.ts
+++ b/packages/mcp-server/src/cli/index.ts
@@ -58,9 +58,10 @@ EXAMPLES:
CONFIGURATION:
Environment variables (set in Claude Desktop config):
- GSD_API_URL - Worker API URL (e.g., https://gsd.vinny.dev)
- GSD_AUTH_TOKEN - JWT token from OAuth authentication
- GSD_ENCRYPTION_PASSPHRASE - Optional: encryption passphrase for task decryption
+ GSD_SUPABASE_URL - Supabase project URL
+ GSD_SUPABASE_SERVICE_KEY - Supabase service role key
+ GSD_USER_EMAIL - Your GSD account email
+ GSD_ENCRYPTION_PASSPHRASE - Optional: encryption passphrase for task decryption
Claude Desktop config location:
macOS: ~/Library/Application Support/Claude/claude_desktop_config.json
@@ -70,7 +71,7 @@ DOCUMENTATION:
Full docs: https://github.com/vscarpenter/gsd-taskmanager/tree/main/packages/mcp-server
Issues: https://github.com/vscarpenter/gsd-taskmanager/issues
-VERSION: 0.6.0
+VERSION: 0.7.0
`);
}
diff --git a/packages/mcp-server/src/cli/setup-wizard.ts b/packages/mcp-server/src/cli/setup-wizard.ts
index fcf1413c..c0cbd1c5 100644
--- a/packages/mcp-server/src/cli/setup-wizard.ts
+++ b/packages/mcp-server/src/cli/setup-wizard.ts
@@ -1,78 +1,41 @@
/**
* Interactive setup wizard for MCP server configuration
- * Guides users through API URL, auth token, and encryption setup
+ * Guides users through Supabase URL, service key, email, and encryption setup
*/
import type { GsdConfig } from '../tools.js';
import { getSyncStatus, listTasks } from '../tools.js';
import { prompt, promptPassword, getClaudeConfigPath } from './index.js';
-/** Default production GSD Worker URL used as prompt default value */
-const DEFAULT_GSD_API_URL = 'https://gsd.vinny.dev';
-
-/**
- * Test API connectivity
- */
-async function validateConnectivity(apiUrl: string): Promise<boolean> {
- process.stdout.write('Testing connectivity... ');
- try {
- const response = await fetch(`${apiUrl}/health`);
- if (response.ok) {
- console.log('✓ Success!');
- return true;
- } else {
- console.log(`⚠ Warning: Got status ${response.status}`);
- return false;
- }
- } catch {
- console.log('✗ Failed to connect');
- console.log('Continuing anyway - you may need to check your URL later.\n');
- return false;
- }
-}
-
/**
- * Configure and validate authentication token
+ * Test Supabase connectivity by querying sync status
*/
-async function configureAuthentication(apiUrl: string): Promise<string> {
- console.log('Step 2/5: Authentication Token');
- console.log('Visit', apiUrl, 'and complete OAuth login');
- console.log('Copy the token from: DevTools → Application → Local Storage → gsd_auth_token');
- const authToken = await promptPassword('Paste token');
-
- if (!authToken) {
- console.log('✗ Token is required. Exiting setup.');
- process.exit(1);
- }
-
- // Validate token
- process.stdout.write('Validating token... ');
+async function validateConnectivity(
+ supabaseUrl: string,
+ serviceKey: string,
+ userEmail: string
+): Promise<boolean> {
+ process.stdout.write('Testing Supabase connectivity... ');
try {
- const config: GsdConfig = { apiBaseUrl: apiUrl, authToken };
+ const config: GsdConfig = { supabaseUrl, serviceKey, userEmail };
const status = await getSyncStatus(config);
console.log('✓ Success!');
- console.log(` Device count: ${status.deviceCount}`);
- console.log();
- return authToken;
+ console.log(` Devices: ${status.deviceCount}`);
+ return true;
} catch (error) {
- console.log('✗ Token validation failed');
+ console.log('✗ Failed to connect');
console.log('Error:', error instanceof Error ? error.message : 'Unknown error');
- console.log('\nPlease check your token and try again.');
- process.exit(1);
+ console.log('Please check your Supabase URL, service key, and user email.\n');
+ return false;
}
}
/**
* Test encryption passphrase by attempting to decrypt tasks
*/
-async function testDecryption(
- apiUrl: string,
- authToken: string,
- passphrase: string
-): Promise<boolean> {
+async function testDecryption(config: GsdConfig): Promise<boolean> {
process.stdout.write('Testing decryption... ');
try {
- const config: GsdConfig = { apiBaseUrl: apiUrl, authToken, encryptionPassphrase: passphrase };
const tasks = await listTasks(config);
console.log(`✓ Success! (Found ${tasks.length} tasks)`);
return true;
@@ -87,11 +50,8 @@ async function testDecryption(
/**
* Configure and test encryption passphrase
*/
-async function configureEncryption(
- apiUrl: string,
- authToken: string
-): Promise<string | undefined> {
- console.log('Step 3/5: Encryption (Optional)');
+async function configureEncryption(config: GsdConfig): Promise<string | undefined> {
+ console.log('Step 4/5: Encryption (Optional)');
const enableEncryption = await prompt(
'Enable task decryption? This allows Claude to read task content. [y/N]',
'N'
@@ -106,19 +66,15 @@ async function configureEncryption(
return undefined;
}
- const success = await testDecryption(apiUrl, authToken, encryptionPassphrase);
+ const success = await testDecryption({ ...config, encryptionPassphrase });
return success ? encryptionPassphrase : undefined;
}
/**
* Display generated configuration JSON
*/
-function displayConfiguration(
- apiUrl: string,
- authToken: string,
- encryptionPassphrase?: string
-): void {
- console.log('Step 4/5: Generated Configuration');
+function displayConfiguration(config: GsdConfig): void {
+ console.log('Step 5/5: Generated Configuration');
console.log(`Add this to ${getClaudeConfigPath()}:\n`);
const configJson = {
@@ -127,9 +83,12 @@ function displayConfiguration(
command: 'npx',
args: ['-y', 'gsd-mcp-server'],
env: {
- GSD_API_URL: apiUrl,
- GSD_AUTH_TOKEN: authToken,
- ...(encryptionPassphrase ? { GSD_ENCRYPTION_PASSPHRASE: encryptionPassphrase } : {}),
+ GSD_SUPABASE_URL: config.supabaseUrl,
+ GSD_SUPABASE_SERVICE_KEY: config.serviceKey,
+ GSD_USER_EMAIL: config.userEmail,
+ ...(config.encryptionPassphrase
+ ? { GSD_ENCRYPTION_PASSPHRASE: config.encryptionPassphrase }
+ : {}),
},
},
},
@@ -143,8 +102,8 @@ function displayConfiguration(
* Display next steps for user
*/
function displayNextSteps(): void {
- console.log('Step 5/5: Next Steps');
- console.log('1. Copy the config above');
+ console.log('Next Steps:');
+ console.log(`1. Copy the config above`);
console.log(`2. Open ${getClaudeConfigPath()}`);
console.log('3. Add the configuration to the "mcpServers" section');
console.log('4. Restart Claude Desktop');
@@ -164,23 +123,49 @@ Welcome! This wizard will help you configure the MCP server for Claude Desktop.
`);
try {
- // Step 1: API URL
- console.log('Step 1/5: API URL');
- const apiUrl = await prompt('Enter your GSD Worker URL', DEFAULT_GSD_API_URL);
- await validateConnectivity(apiUrl);
+ // Step 1: Supabase URL
+ console.log('Step 1/5: Supabase Project URL');
+ const supabaseUrl = await prompt('Enter your Supabase project URL');
+ if (!supabaseUrl) {
+ console.log('✗ Supabase URL is required. Exiting setup.');
+ process.exit(1);
+ }
console.log();
- // Step 2: Auth Token
- const authToken = await configureAuthentication(apiUrl);
+ // Step 2: Service Key
+ console.log('Step 2/5: Supabase Service Role Key');
+ console.log('Find this in: Supabase Dashboard → Settings → API → service_role key');
+ const serviceKey = await promptPassword('Paste service role key');
+ if (!serviceKey) {
+ console.log('✗ Service role key is required. Exiting setup.');
+ process.exit(1);
+ }
+ console.log();
+
+ // Step 3: User Email
+ console.log('Step 3/5: User Email');
+ const userEmail = await prompt('Enter the email you use to sign into GSD');
+ if (!userEmail) {
+ console.log('✗ User email is required. Exiting setup.');
+ process.exit(1);
+ }
- // Step 3: Encryption
- const encryptionPassphrase = await configureEncryption(apiUrl, authToken);
+ // Test connectivity
+ const connected = await validateConnectivity(supabaseUrl, serviceKey, userEmail);
+ if (!connected) {
+ process.exit(1);
+ }
console.log();
- // Step 4: Display Config
- displayConfiguration(apiUrl, authToken, encryptionPassphrase);
+ const config: GsdConfig = { supabaseUrl, serviceKey, userEmail };
+
+ // Step 4: Encryption
+ const encryptionPassphrase = await configureEncryption(config);
+ config.encryptionPassphrase = encryptionPassphrase;
+ console.log();
- // Step 5: Next Steps
+ // Step 5: Display Config
+ displayConfiguration(config);
displayNextSteps();
} catch (error) {
console.error('\n✗ Setup failed:', error instanceof Error ? error.message : 'Unknown error');
diff --git a/packages/mcp-server/src/cli/validation.ts b/packages/mcp-server/src/cli/validation.ts
index 6548d271..41f9ec0a 100644
--- a/packages/mcp-server/src/cli/validation.ts
+++ b/packages/mcp-server/src/cli/validation.ts
@@ -1,10 +1,11 @@
/**
* Configuration validation utilities
- * Tests environment variables, API connectivity, auth, encryption, and device access
+ * Tests environment variables, Supabase connectivity, user resolution, encryption, and device access
*/
import type { GsdConfig, SyncStatus } from '../tools.js';
import { getSyncStatus, listDevices, listTasks } from '../tools.js';
+import { resolveUserId } from '../api/client.js';
/**
* Validation check result
@@ -19,52 +20,27 @@ export interface ValidationCheck {
* Check required environment variables
*/
function validateEnvironmentVariables(): {
- apiUrl: string;
- authToken: string;
+ supabaseUrl: string;
+ serviceKey: string;
+ userEmail: string;
encryptionPassphrase?: string;
} {
- const apiUrl = process.env.GSD_API_URL;
- const authToken = process.env.GSD_AUTH_TOKEN;
+ const supabaseUrl = process.env.GSD_SUPABASE_URL;
+ const serviceKey = process.env.GSD_SUPABASE_SERVICE_KEY;
+ const userEmail = process.env.GSD_USER_EMAIL;
const encryptionPassphrase = process.env.GSD_ENCRYPTION_PASSPHRASE;
- if (!apiUrl || !authToken) {
+ if (!supabaseUrl || !serviceKey || !userEmail) {
console.log('✗ Configuration Error\n');
console.log('Missing required environment variables:');
- if (!apiUrl) console.log(' - GSD_API_URL');
- if (!authToken) console.log(' - GSD_AUTH_TOKEN');
+ if (!supabaseUrl) console.log(' - GSD_SUPABASE_URL');
+ if (!serviceKey) console.log(' - GSD_SUPABASE_SERVICE_KEY');
+ if (!userEmail) console.log(' - GSD_USER_EMAIL');
console.log('\nRun setup wizard: npx gsd-mcp-server --setup');
process.exit(1);
}
- return { apiUrl, authToken, encryptionPassphrase };
-}
-
-/**
- * Test API connectivity
- */
-async function validateApiConnection(apiUrl: string): Promise<ValidationCheck> {
- try {
- const response = await fetch(`${apiUrl}/health`);
- if (response.ok) {
- return {
- name: 'API Connectivity',
- status: '✓',
- details: `Connected to ${apiUrl}`,
- };
- } else {
- return {
- name: 'API Connectivity',
- status: '⚠',
- details: `Connected but got status ${response.status}`,
- };
- }
- } catch {
- return {
- name: 'API Connectivity',
- status: '✗',
- details: `Failed to connect to ${apiUrl}`,
- };
- }
+ return { supabaseUrl, serviceKey, userEmail, encryptionPassphrase };
}
/**
@@ -82,24 +58,41 @@ function createSyncStatusCheck(status: SyncStatus): ValidationCheck {
}
/**
- * Validate authentication token and check sync status
+ * Validate Supabase connectivity and user resolution
*/
-async function validateAuthentication(config: GsdConfig): Promise<ValidationCheck[]> {
+async function validateSupabaseConnection(config: GsdConfig): Promise<ValidationCheck[]> {
const checks: ValidationCheck[] = [];
+ // Test connectivity via sync status query
try {
const status = await getSyncStatus(config);
checks.push({
- name: 'Authentication',
+ name: 'Supabase Connectivity',
status: '✓',
- details: `Token valid (${status.deviceCount} devices registered)`,
+ details: `Connected to ${config.supabaseUrl} (${status.deviceCount} devices)`,
});
checks.push(createSyncStatusCheck(status));
} catch (error) {
checks.push({
- name: 'Authentication',
+ name: 'Supabase Connectivity',
status: '✗',
- details: error instanceof Error ? error.message : 'Token validation failed',
+ details: error instanceof Error ? error.message : 'Connection failed',
+ });
+ }
+
+ // Test user resolution
+ try {
+ const userId = await resolveUserId(config);
+ checks.push({
+ name: 'User Resolution',
+ status: '✓',
+ details: `User found: ${config.userEmail} (${userId.slice(0, 8)}...)`,
+ });
+ } catch (error) {
+ checks.push({
+ name: 'User Resolution',
+ status: '✗',
+ details: error instanceof Error ? error.message : 'User lookup failed',
});
}
export async function runValidation(): Promise<void> {
const checks: ValidationCheck[] = [];
// Step 1: Environment variables
- const { apiUrl, authToken, encryptionPassphrase } = validateEnvironmentVariables();
+ const { supabaseUrl, serviceKey, userEmail, encryptionPassphrase } = validateEnvironmentVariables();
checks.push({
name: 'Environment Variables',
status: '✓',
- details: `GSD_API_URL and GSD_AUTH_TOKEN are set${encryptionPassphrase ? ' (with passphrase)' : ''}`,
+ details: `GSD_SUPABASE_URL, GSD_SUPABASE_SERVICE_KEY, and GSD_USER_EMAIL are set${encryptionPassphrase ? ' (with passphrase)' : ''}`,
});
- const config: GsdConfig = { apiBaseUrl: apiUrl, authToken, encryptionPassphrase };
-
- // Step 2: API connectivity
- const connectivityCheck = await validateApiConnection(apiUrl);
- checks.push(connectivityCheck);
+ const config: GsdConfig = { supabaseUrl, serviceKey, userEmail, encryptionPassphrase };
- // Step 3: Authentication & sync status
- const authChecks = await validateAuthentication(config);
- checks.push(...authChecks);
+ // Step 2: Supabase connectivity & user resolution
+ const connectionChecks = await validateSupabaseConnection(config);
+ checks.push(...connectionChecks);
- // Step 4: Encryption
+ // Step 3: Encryption
const encryptionCheck = await validateEncryption(config, !!encryptionPassphrase);
checks.push(encryptionCheck);
- // Step 5: Device access
+ // Step 4: Device access
const deviceCheck = await validateDeviceAccess(config);
checks.push(deviceCheck);
diff --git a/packages/mcp-server/src/crypto.ts b/packages/mcp-server/src/crypto.ts
index 4bd2a4e1..706decae 100644
--- a/packages/mcp-server/src/crypto.ts
+++ b/packages/mcp-server/src/crypto.ts
@@ -111,11 +111,11 @@ export class CryptoManager {
const plaintextBuffer = await webcrypto.subtle.decrypt(
{
name: ALGORITHM,
- iv: nonceBuffer,
+ iv: nonceBuffer as Uint8Array,
tagLength: TAG_LENGTH,
},
this.encryptionKey,
- ciphertextBuffer
+ ciphertextBuffer as Uint8Array
);
const decoder = new TextDecoder();
diff --git a/packages/mcp-server/src/encryption/manager.ts b/packages/mcp-server/src/encryption/manager.ts
index b216885e..de908234 100644
--- a/packages/mcp-server/src/encryption/manager.ts
+++ b/packages/mcp-server/src/encryption/manager.ts
@@ -1,16 +1,17 @@
import { getCryptoManager } from '../crypto.js';
+import { getSupabaseClient, resolveUserId } from '../api/client.js';
import type { GsdConfig } from '../types.js';
/**
* Initialize encryption with user's passphrase
- * Fetches salt from server and derives encryption key
+ * Fetches salt from Supabase profiles table and derives encryption key
*/
export async function initializeEncryption(config: GsdConfig): Promise<void> {
validateEncryptionConfig(config);
const cryptoManager = getCryptoManager();
if (cryptoManager.isInitialized()) {
- return; // Already initialized
+ return;
}
const encryptionSalt = await fetchEncryptionSalt(config);
@@ -35,74 +36,39 @@ function validateEncryptionConfig(config: GsdConfig): void {
}
/**
- * Fetch user's encryption salt from server
+ * Fetch user's encryption salt from Supabase profiles table
*/
async function fetchEncryptionSalt(config: GsdConfig): Promise<string> {
- const response = await fetchSaltEndpoint(config);
- validateSaltResponse(response);
+ const userId = await resolveUserId(config);
+ const supabase = getSupabaseClient(config);
- const data = (await response.json()) as { encryptionSalt: string };
- validateSaltData(data, config);
+ const { data, error } = await supabase
+ .from('profiles')
+ .select('encryption_salt')
+ .eq('id', userId)
+ .single();
- return data.encryptionSalt;
-}
-
-/**
- * Make HTTP request to encryption salt endpoint
- */
-async function fetchSaltEndpoint(config: GsdConfig): Promise<Response> {
- try {
- return await fetch(`${config.apiBaseUrl}/api/auth/encryption-salt`, {
- method: 'GET',
- headers: {
- Authorization: `Bearer ${config.authToken}`,
- 'Content-Type': 'application/json',
- },
- });
- } catch (error) {
+ if (error) {
throw new Error(
`❌ Failed to fetch encryption salt\n\n` +
- `Network error: ${error instanceof Error ? error.message : 'Unknown error'}\n\n` +
- `Run: npx gsd-mcp-server --validate`
- );
- }
-}
-
-/**
- * Validate salt endpoint HTTP response
- */
-function validateSaltResponse(response: Response): void {
- if (!response.ok) {
- if (response.status === 401) {
- throw new Error(
- `❌ Authentication failed while fetching encryption salt\n\n` +
- `Your token has expired. Run: npx gsd-mcp-server --setup`
- );
- }
- throw new Error(
- `❌ Failed to fetch encryption salt (${response.status})\n\n` +
- `The Worker API endpoint may not support encryption.\n` +
- `Ensure you're using Worker v0.2.0+\n\n` +
+ `Database error: ${error.message}\n\n` +
`Run: npx gsd-mcp-server --validate`
);
}
-}
-/**
- * Validate salt data from response
- */
-function validateSaltData(data: { encryptionSalt: string }, config: GsdConfig): void {
- if (!data.encryptionSalt) {
+ if (!data?.encryption_salt) {
throw new Error(
`❌ Encryption not set up for this account\n\n` +
`Please set up encryption in the GSD app first:\n` +
- ` 1. Visit ${config.apiBaseUrl}\n` +
+ ` 1. Open the GSD app\n` +
` 2. Go to Settings → Sync\n` +
` 3. Set an encryption passphrase\n` +
` 4. Complete initial sync\n\n` +
`Then run: npx gsd-mcp-server --setup`
);
}
+
+ return data.encryption_salt;
}
/**
diff --git a/packages/mcp-server/src/jwt.ts b/packages/mcp-server/src/jwt.ts
deleted file mode 100644
index 8bbcf7ee..00000000
--- a/packages/mcp-server/src/jwt.ts
+++ /dev/null
@@ -1,100 +0,0 @@
-/**
- * JWT utilities for parsing device ID and token metadata
- */
-
-import { z } from 'zod';
-
-/**
- * JWT payload schema (matches Worker's jwt.ts structure)
- */
-const jwtPayloadSchema = z.object({
- sub: z.string(), // User ID (subject)
- email: z.string(),
- deviceId: z.string(), // Device ID (camelCase)
- jti: z.string(), // JWT ID
- iat: z.number(), // Issued at
- exp: z.number(), // Expiration
-});
-
-export type JWTPayload = z.infer<typeof jwtPayloadSchema>;
-
-/**
- * Parse JWT token and extract payload
- * Does NOT validate signature (server validates on API calls)
- */
-export function parseJWT(token: string): JWTPayload {
- try {
- // JWT format: header.payload.signature
- const parts = token.split('.');
- if (parts.length !== 3) {
- throw new Error('Invalid JWT format');
- }
-
- // Decode base64url payload
- const payload = parts[1];
- const decoded = Buffer.from(payload, 'base64url').toString('utf-8');
- const parsed = JSON.parse(decoded);
-
- return jwtPayloadSchema.parse(parsed);
- } catch (error) {
- throw new Error(
- `Failed to parse JWT: ${error instanceof Error ? error.message : 'Invalid token format'}`
- );
- }
-}
-
-/**
- * Extract device ID from JWT token
- */
-export function getDeviceIdFromToken(token: string): string {
- const payload = parseJWT(token);
- return payload.deviceId;
-}
-
-/**
- * Extract user ID from JWT token
- */
-export function getUserIdFromToken(token: string): string {
- const payload = parseJWT(token);
- return payload.sub;
-}
-
-/**
- * Check if JWT token is expired
- */
-export function isTokenExpired(token: string): boolean {
- try {
- const payload = parseJWT(token);
- const now = Math.floor(Date.now() / 1000);
- return payload.exp < now;
- } catch {
- return true; // Treat invalid tokens as expired
- }
-}
-
-/**
- * Get days until token expires
- * Returns negative number if already expired
- */
-export function getDaysUntilExpiration(token: string): number {
- try {
- const payload = parseJWT(token);
- const now = Math.floor(Date.now() / 1000);
- const secondsRemaining = payload.exp - now;
- return Math.floor(secondsRemaining / (60 * 60 * 24));
- } catch {
- return -1;
- }
-}
-
-/**
- * Get token expiration as Date object
- */
-export function getTokenExpiration(token: string): Date | null {
- try {
- const payload = parseJWT(token);
- return new Date(payload.exp * 1000);
- } catch {
- return null;
- }
-}
diff --git a/packages/mcp-server/src/server/config.ts b/packages/mcp-server/src/server/config.ts
index ae78729a..27b77610 100644
--- a/packages/mcp-server/src/server/config.ts
+++ b/packages/mcp-server/src/server/config.ts
@@ -1,17 +1,18 @@
import { z } from 'zod';
-import type { GsdConfig } from '../tools.js';
+import type { GsdConfig } from '../types.js';
import { createMcpLogger } from '../utils/logger.js';
const logger = createMcpLogger('CONFIG');
/**
- * Configuration schema for GSD MCP Server
+ * Configuration schema for GSD MCP Server (Supabase backend)
* Validates environment variables and ensures required fields are present
*/
export const configSchema = z.object({
- apiBaseUrl: z.string().url(),
- authToken: z.string().min(1),
- encryptionPassphrase: z.string().optional(), // Optional: for decrypting tasks
+ supabaseUrl: z.string().url(),
+ serviceKey: z.string().min(1),
+ userEmail: z.string().email(),
+ encryptionPassphrase: z.string().optional(),
});
export type ConfigSchema = z.infer<typeof configSchema>;
@@ -23,13 +24,14 @@ export type ConfigSchema = z.infer<typeof configSchema>;
export function loadConfig(): GsdConfig {
try {
return configSchema.parse({
- apiBaseUrl: process.env.GSD_API_URL,
- authToken: process.env.GSD_AUTH_TOKEN,
+ supabaseUrl: process.env.GSD_SUPABASE_URL,
+ serviceKey: process.env.GSD_SUPABASE_SERVICE_KEY,
+ userEmail: process.env.GSD_USER_EMAIL,
encryptionPassphrase: process.env.GSD_ENCRYPTION_PASSPHRASE,
});
} catch (error) {
logger.error('Configuration error', error instanceof Error ? error : new Error(String(error)));
- logger.info('Required environment variables: GSD_API_URL, GSD_AUTH_TOKEN | Optional: GSD_ENCRYPTION_PASSPHRASE');
+ logger.info('Required environment variables: GSD_SUPABASE_URL, GSD_SUPABASE_SERVICE_KEY, GSD_USER_EMAIL | Optional: GSD_ENCRYPTION_PASSPHRASE');
logger.info('Run setup wizard with: npx gsd-mcp-server --setup');
throw error;
}
@@ -37,14 +39,12 @@ export function loadConfig(): GsdConfig {
/**
* Check if configuration is valid without throwing
- * @returns {boolean} True if config is valid, false otherwise
*/
export function isConfigValid(): boolean {
try {
loadConfig();
return true;
} catch {
- // loadConfig throws on invalid config - return false without propagating
return false;
}
}
@@ -53,14 +53,16 @@ export function isConfigValid(): boolean {
* Get configuration status for diagnostics
*/
export function getConfigStatus(): {
- hasApiUrl: boolean;
- hasAuthToken: boolean;
+ hasSupabaseUrl: boolean;
+ hasServiceKey: boolean;
+ hasUserEmail: boolean;
hasEncryptionPassphrase: boolean;
isValid: boolean;
} {
return {
- hasApiUrl: !!process.env.GSD_API_URL,
- hasAuthToken: !!process.env.GSD_AUTH_TOKEN,
+ hasSupabaseUrl: !!process.env.GSD_SUPABASE_URL,
+ hasServiceKey: !!process.env.GSD_SUPABASE_SERVICE_KEY,
+ hasUserEmail: !!process.env.GSD_USER_EMAIL,
hasEncryptionPassphrase: !!process.env.GSD_ENCRYPTION_PASSPHRASE,
isValid: isConfigValid(),
};
diff --git a/packages/mcp-server/src/tools.ts b/packages/mcp-server/src/tools.ts
index 585edf3c..aa62f0b4 100644
--- a/packages/mcp-server/src/tools.ts
+++ b/packages/mcp-server/src/tools.ts
@@ -3,7 +3,7 @@
*
* This file maintains backward compatibility after modularization.
* All functionality has been split into focused modules:
- * - api/client.ts - HTTP requests and error handling
+ * - api/client.ts - Supabase client and user resolution
* - encryption/manager.ts - Encryption initialization
* - tools/*.ts - Individual tool implementations
* - types.ts - Shared type definitions
@@ -19,7 +19,6 @@ export type {
EncryptedTaskBlob,
DecryptedTask,
TaskFilters,
- PullTasksResponse,
} from './types.js';
export {
@@ -33,7 +32,7 @@ export {
export { MAX_TASKS_PER_PULL } from './constants.js';
// Re-export API client
-export { apiRequest } from './api/client.js';
+export { getSupabaseClient, resolveUserId } from './api/client.js';
// Re-export encryption utilities
export { initializeEncryption } from './encryption/manager.js';
diff --git a/packages/mcp-server/src/tools/devices.ts b/packages/mcp-server/src/tools/devices.ts
index 043cda39..05128771 100644
--- a/packages/mcp-server/src/tools/devices.ts
+++ b/packages/mcp-server/src/tools/devices.ts
@@ -1,12 +1,33 @@
-import { z } from 'zod';
-import { apiRequest } from '../api/client.js';
-import { deviceSchema } from '../types.js';
+import { getSupabaseClient, resolveUserId } from '../api/client.js';
import type { GsdConfig, Device } from '../types.js';
/**
- * List all registered devices for the authenticated user
+ * List all registered devices for the user
* Does not require encryption (metadata only)
*/
export async function listDevices(config: GsdConfig): Promise<Device[]> {
- return apiRequest(config, '/api/devices', z.array(deviceSchema));
+ const userId = await resolveUserId(config);
+ const supabase = getSupabaseClient(config);
+
+ const { data, error } = await supabase
+ .from('devices')
+ .select('device_id, device_name, last_seen_at, is_active')
+ .eq('user_id', userId)
+ .order('last_seen_at', { ascending: false });
+
+ if (error) {
+ throw new Error(
+ `❌ Failed to fetch devices\n\n` +
+ `Database error: ${error.message}\n\n` +
+ `Run: npx gsd-mcp-server --validate`
+ );
+ }
+
+ return (data ?? []).map((row) => ({
+ id: row.device_id,
+ name: row.device_name,
+ lastSeenAt: new Date(row.last_seen_at).getTime(),
+ isActive: row.is_active ?? true,
+ isCurrent: false, // MCP server is not a device
+ }));
}
diff --git a/packages/mcp-server/src/tools/handlers/index.ts b/packages/mcp-server/src/tools/handlers/index.ts
index 47b39e10..9a417ddb 100644
--- a/packages/mcp-server/src/tools/handlers/index.ts
+++ b/packages/mcp-server/src/tools/handlers/index.ts
@@ -11,7 +11,6 @@ import {
handleListTasks,
handleGetTask,
handleSearchTasks,
- handleGetTokenStatus,
} from './read-handlers.js';
import {
handleGetProductivityMetrics,
@@ -80,9 +79,6 @@ export async function handleToolCall(
case 'search_tasks':
return await handleSearchTasks(config, typedArgs);
- case 'get_token_status':
- return await handleGetTokenStatus(config);
-
// Analytics tools
case 'get_productivity_metrics':
return await handleGetProductivityMetrics(config);
diff --git a/packages/mcp-server/src/tools/handlers/read-handlers.ts b/packages/mcp-server/src/tools/handlers/read-handlers.ts
index 4717637c..1539b258 100644
--- a/packages/mcp-server/src/tools/handlers/read-handlers.ts
+++ b/packages/mcp-server/src/tools/handlers/read-handlers.ts
@@ -7,123 +7,19 @@ import {
searchTasks,
type GsdConfig,
} from '../../tools.js';
-import {
- isTokenExpired,
- getDaysUntilExpiration,
- getTokenExpiration,
- parseJWT,
-} from '../../jwt.js';
-import { createMcpLogger } from '../../utils/logger.js';
import type { McpToolResponse } from './types.js';
-const logger = createMcpLogger('READ_HANDLERS');
-
/**
* Read-only tool handlers for accessing task data and metadata
*/
-/**
- * Get token status with expiration warnings
- */
-function getTokenStatus(token: string) {
- const expired = isTokenExpired(token);
- const daysRemaining = getDaysUntilExpiration(token);
- const expirationDate = getTokenExpiration(token);
-
- let status: 'expired' | 'critical' | 'warning' | 'healthy';
- let message: string;
-
- if (expired) {
- status = 'expired';
- message = '❌ Token has expired. Please re-authenticate.';
- } else if (daysRemaining <= 1) {
- status = 'critical';
- message = `⚠️ Token expires in ${daysRemaining <= 0 ? 'less than a day' : '1 day'}! Re-authenticate soon.`;
- } else if (daysRemaining <= 3) {
- status = 'warning';
- message = `⚠️ Token expires in ${daysRemaining} days. Consider re-authenticating.`;
- } else {
- status = 'healthy';
- message = `✓ Token valid for ${daysRemaining} more days.`;
- }
-
- return {
- status,
- expired,
- daysRemaining,
- expiresAt: expirationDate?.toISOString() || null,
- message,
- };
-}
-
export async function handleGetSyncStatus(config: GsdConfig): Promise<McpToolResponse> {
const status = await getSyncStatus(config);
- const tokenStatus = getTokenStatus(config.authToken);
-
- // Include token status in response
- const enrichedStatus = {
- ...status,
- tokenStatus: {
- status: tokenStatus.status,
- daysRemaining: tokenStatus.daysRemaining,
- expiresAt: tokenStatus.expiresAt,
- message: tokenStatus.message,
- },
- };
-
- return {
- content: [
- {
- type: 'text' as const,
- text: JSON.stringify(enrichedStatus, null, 2),
- },
- ],
- };
-}
-
-/**
- * Handle get_token_status tool
- * Provides detailed token information and expiration warnings
- */
-export async function handleGetTokenStatus(config: GsdConfig): Promise<McpToolResponse> {
- const tokenStatus = getTokenStatus(config.authToken);
-
- let payload;
- try {
- payload = parseJWT(config.authToken);
- } catch (error) {
- // JWT parse failed - token may be malformed; display with null details
- logger.debug('Failed to parse JWT payload for display', { error: String(error) });
- payload = null;
- }
-
- const result = {
- ...tokenStatus,
- details: payload
- ? {
- userId: payload.sub,
- email: payload.email,
- deviceId: payload.deviceId,
- issuedAt: new Date(payload.iat * 1000).toISOString(),
- }
- : null,
- instructions:
- tokenStatus.status !== 'healthy'
- ? [
- '1. Visit https://gsd.vinny.dev and log in',
- '2. Open DevTools → Application → Local Storage',
- '3. Copy the gsd_auth_token value',
- '4. Update GSD_AUTH_TOKEN in Claude Desktop config',
- '5. Restart Claude Desktop',
- ]
- : null,
- };
-
return {
content: [
{
type: 'text' as const,
- text: JSON.stringify(result, null, 2),
+ text: JSON.stringify(status, null, 2),
},
],
};
diff --git a/packages/mcp-server/src/tools/handlers/system-handlers.ts b/packages/mcp-server/src/tools/handlers/system-handlers.ts
index 7a042931..c6b273c6 100644
--- a/packages/mcp-server/src/tools/handlers/system-handlers.ts
+++ b/packages/mcp-server/src/tools/handlers/system-handlers.ts
@@ -13,43 +13,36 @@ export async function handleValidateConfig(config: GsdConfig): Promise<McpToolResponse> {
const checks: Array<{ name: string; status: string; details: string }> = [];
- // Check API connectivity
+ // Check Supabase connectivity
try {
- const response = await fetch(`${config.apiBaseUrl}/health`);
- if (response.ok) {
- checks.push({
- name: 'API Connectivity',
- status: 'success',
- details: `Connected to ${config.apiBaseUrl}`,
- });
- } else {
- checks.push({
- name: 'API Connectivity',
- status: 'warning',
- details: `Connected but got status ${response.status}`,
- });
- }
+ const status = await getSyncStatus(config);
+ checks.push({
+ name: 'Supabase Connectivity',
+ status: 'success',
+ details: `Connected to ${config.supabaseUrl} (${status.deviceCount} devices)`,
+ });
} catch (error) {
checks.push({
- name: 'API Connectivity',
+ name: 'Supabase Connectivity',
status: 'error',
details: `Failed to connect: ${error instanceof Error ? error.message : 'Unknown error'}`,
});
}
- // Check authentication
+ // Check user resolution
try {
- const status = await getSyncStatus(config);
+ const { resolveUserId } = await import('../../api/client.js');
+ const userId = await resolveUserId(config);
checks.push({
- name: 'Authentication',
+ name: 'User Resolution',
status: 'success',
- details: `Token valid (${status.deviceCount} devices registered)`,
+ details: `User found: ${config.userEmail} (${userId.slice(0, 8)}...)`,
});
} catch (error) {
checks.push({
- name: 'Authentication',
+ name: 'User Resolution',
status: 'error',
- details: error instanceof Error ? error.message : 'Token validation failed',
+ details: error instanceof Error ? error.message : 'User lookup failed',
});
}
@@ -95,7 +88,7 @@ export async function handleValidateConfig(config: GsdConfig): Promise<McpToolResponse> {
- const payload = JSON.stringify({
- deviceId,
- lastVectorClock: {}, // Empty clock to get all tasks
- sinceTimestamp: 1, // Start from epoch + 1ms to get all tasks
- limit: MAX_TASKS_PER_PULL,
- });
-
- let response: Response;
- try {
- response = await fetchWithRetry(
- () =>
- fetch(`${config.apiBaseUrl}/api/sync/pull`, {
- method: 'POST',
- headers: {
- Authorization: `Bearer ${config.authToken}`,
- 'Content-Type': 'application/json',
- },
- body: payload,
- }),
- DEFAULT_RETRY_CONFIG
- );
- } catch (error) {
+ config: GsdConfig
+): Promise<Array<{ id: string; encryptedBlob: string; nonce: string; updatedAt: string }>> {
+ const userId = await resolveUserId(config);
+ const supabase = getSupabaseClient(config);
+
+ const { data, error } = await supabase
+ .from('encrypted_tasks')
+ .select('id, encrypted_blob, nonce, updated_at')
+ .eq('user_id', userId)
+ .is('deleted_at', null)
+ .order('updated_at', { ascending: false })
+ .limit(MAX_TASKS_PER_PULL);
+
+ if (error) {
throw new Error(
`❌ Failed to fetch tasks\n\n` +
- `Network error: ${error instanceof Error ? error.message : 'Unknown error'}\n\n` +
- `Retried ${DEFAULT_RETRY_CONFIG.maxRetries} times.\n` +
+ `Database error: ${error.message}\n\n` +
`Run: npx gsd-mcp-server --validate`
);
}
- if (!response.ok) {
- throw new Error(`Failed to fetch tasks: ${response.status}`);
- }
-
- const data = (await response.json()) as PullTasksResponse;
- return data.tasks;
+ return (data ?? []).map((row) => ({
+ id: row.id,
+ encryptedBlob: row.encrypted_blob,
+ nonce: row.nonce,
+ updatedAt: row.updated_at,
+ }));
}
/**
* Decrypt multiple tasks in batch
*/
async function decryptTaskBatch(
- encryptedTasks: PullTasksResponse['tasks'],
+ encryptedTasks: Array<{ id: string; encryptedBlob: string; nonce: string }>,
config: GsdConfig
): Promise<DecryptedTask[]> {
- // Initialize encryption once before the loop, not per-task
await initializeEncryption(config);
const cryptoManager = getCryptoManager();
@@ -135,7 +103,6 @@ async function decryptTaskBatch(
decryptedTasks.push(JSON.parse(decryptedJson) as DecryptedTask);
} catch (error) {
logger.error(`Failed to decrypt task ${encryptedTask.id}`, error instanceof Error ? error : new Error(String(error)));
- // Skip tasks that fail to decrypt
}
}
diff --git a/packages/mcp-server/src/tools/schemas/index.ts b/packages/mcp-server/src/tools/schemas/index.ts
index d2304a1b..d4570804 100644
--- a/packages/mcp-server/src/tools/schemas/index.ts
+++ b/packages/mcp-server/src/tools/schemas/index.ts
@@ -16,10 +16,10 @@ export * from './analytics-tools.js';
export * from './system-tools.js';
/**
- * All MCP tool schemas (20 total)
+ * All MCP tool schemas (19 total)
*/
export const allTools: Tool[] = [
- ...readTools, // 7 tools (includes get_token_status)
+ ...readTools, // 6 tools
...analyticsTools, // 5 tools
...writeTools, // 5 tools
...systemTools, // 3 tools (includes get_cache_stats)
diff --git a/packages/mcp-server/src/tools/schemas/read-tools.ts b/packages/mcp-server/src/tools/schemas/read-tools.ts
index fd5fd9be..b246b646 100644
--- a/packages/mcp-server/src/tools/schemas/read-tools.ts
+++ b/packages/mcp-server/src/tools/schemas/read-tools.ts
@@ -102,17 +102,6 @@ export const searchTasksTool: Tool = {
},
};
-export const getTokenStatusTool: Tool = {
- name: 'get_token_status',
- description:
- 'Check authentication token status including expiration date, days remaining, and warnings. Use this to proactively check if re-authentication is needed. Returns status (healthy/warning/critical/expired), expiration details, and re-authentication instructions if needed.',
- inputSchema: {
- type: 'object',
- properties: {},
- required: [],
- },
-};
-
export const readTools: Tool[] = [
getSyncStatusTool,
listDevicesTool,
@@ -120,5 +109,4 @@ export const readTools: Tool[] = [
listTasksTool,
getTaskTool,
searchTasksTool,
- getTokenStatusTool,
];
diff --git a/packages/mcp-server/src/tools/sync-status.ts b/packages/mcp-server/src/tools/sync-status.ts
index e8856098..b190d991 100644
--- a/packages/mcp-server/src/tools/sync-status.ts
+++ b/packages/mcp-server/src/tools/sync-status.ts
@@ -1,65 +1,119 @@
-import { apiRequest } from '../api/client.js';
-import { syncStatusSchema, statsResponseSchema } from '../types.js';
+import { getSupabaseClient, resolveUserId } from '../api/client.js';
import type { GsdConfig, SyncStatus, TaskStats } from '../types.js';
import { createMcpLogger } from '../utils/logger.js';
const logger = createMcpLogger('SYNC_STATUS');
/**
- * Get sync status including storage, device count, and conflict information
+ * Get sync status by querying Supabase tables directly
* Does not require encryption (metadata only)
*/
export async function getSyncStatus(config: GsdConfig): Promise<SyncStatus> {
- return apiRequest(config, '/api/sync/status', syncStatusSchema);
+ const userId = await resolveUserId(config);
+ const supabase = getSupabaseClient(config);
+
+ // Count devices
+ const { count: deviceCount } = await supabase
+ .from('devices')
+ .select('*', { count: 'exact', head: true })
+ .eq('user_id', userId);
+
+ // Count active tasks
+ const { count: activeCount } = await supabase
+ .from('encrypted_tasks')
+ .select('*', { count: 'exact', head: true })
+ .eq('user_id', userId)
+ .is('deleted_at', null);
+
+ // Get last sync timestamp from sync_metadata
+ const { data: syncMeta } = await supabase
+ .from('sync_metadata')
+ .select('last_sync_at')
+ .eq('user_id', userId)
+ .order('last_sync_at', { ascending: false })
+ .limit(1)
+ .single();
+
+ const lastSyncAt = syncMeta?.last_sync_at
+ ? new Date(syncMeta.last_sync_at).getTime()
+ : null;
+
+ return {
+ lastSyncAt,
+ pendingPushCount: 0, // MCP server doesn't track push/pull queues
+ pendingPullCount: 0,
+ conflictCount: 0,
+ deviceCount: deviceCount ?? 0,
+ storageUsed: activeCount ?? 0,
+ storageQuota: -1, // No quota with Supabase
+ };
}
/**
- * Get task statistics using the dedicated /api/stats endpoint
- * Returns metadata without decrypting tasks (more efficient than getDetailedTaskStats)
+ * Get task statistics by querying Supabase tables directly
+ * More efficient than decrypting — just counts and timestamps
*/
export async function getTaskStats(config: GsdConfig): Promise<TaskStats> {
+ const userId = await resolveUserId(config);
+ const supabase = getSupabaseClient(config);
+
try {
- // Try new /api/stats endpoint for better metadata
- const response = await fetch(`${config.apiBaseUrl}/api/stats`, {
- method: 'GET',
- headers: {
- Authorization: `Bearer ${config.authToken}`,
- 'Content-Type': 'application/json',
- },
- });
+ // Count active tasks
+ const { count: activeCount } = await supabase
+ .from('encrypted_tasks')
+ .select('*', { count: 'exact', head: true })
+ .eq('user_id', userId)
+ .is('deleted_at', null);
- if (response.ok) {
- const json: unknown = await response.json();
- const data = statsResponseSchema.parse(json);
- return {
- totalTasks: data.metadata.totalCount,
- activeTasks: data.metadata.activeCount,
- deletedTasks: data.metadata.deletedCount,
- lastUpdated: data.metadata.newestTaskDate,
- oldestTask: data.metadata.oldestTaskDate,
- newestTask: data.metadata.newestTaskDate,
- };
- }
+ // Count deleted tasks
+ const { count: deletedCount } = await supabase
+ .from('encrypted_tasks')
+ .select('*', { count: 'exact', head: true })
+ .eq('user_id', userId)
+ .not('deleted_at', 'is', null);
+
+ // Get oldest task
+ const { data: oldest } = await supabase
+ .from('encrypted_tasks')
+ .select('created_at')
+ .eq('user_id', userId)
+ .is('deleted_at', null)
+ .order('created_at', { ascending: true })
+ .limit(1)
+ .single();
+
+ // Get newest task
+ const { data: newest } = await supabase
+ .from('encrypted_tasks')
+ .select('updated_at')
+ .eq('user_id', userId)
+ .is('deleted_at', null)
+ .order('updated_at', { ascending: false })
+ .limit(1)
+ .single();
+
+ const total = (activeCount ?? 0) + (deletedCount ?? 0);
+
+ return {
+ totalTasks: total,
+ activeTasks: activeCount ?? 0,
+ deletedTasks: deletedCount ?? 0,
+ lastUpdated: newest?.updated_at ? new Date(newest.updated_at).getTime() : null,
+ oldestTask: oldest?.created_at ? new Date(oldest.created_at).getTime() : null,
+ newestTask: newest?.updated_at ? new Date(newest.updated_at).getTime() : null,
+ };
} catch (error) {
- // Only fall back for network errors; re-throw auth failures
- if (error instanceof TypeError) {
- logger.warn('Network error fetching /api/stats, falling back to /api/sync/status');
- } else {
- logger.warn('Failed to fetch from /api/stats, falling back to /api/sync/status', {
- error: error instanceof Error ? error.message : String(error),
- });
- }
- }
+ logger.warn('Failed to fetch task stats', {
+ error: error instanceof Error ? error.message : String(error),
+ });
- // Fallback: use the status endpoint — only sync metadata is available,
- // task counts cannot be derived from pending sync counts
- const status = await getSyncStatus(config);
- return {
- totalTasks: null,
- activeTasks: null,
- deletedTasks: null,
- lastUpdated: status.lastSyncAt,
- oldestTask: null,
- newestTask: null,
- };
+ return {
+ totalTasks: null,
+ activeTasks: null,
+ deletedTasks: null,
+ lastUpdated: null,
+ oldestTask: null,
+ newestTask: null,
+ };
+ }
}
diff --git a/packages/mcp-server/src/tools/task-stats.ts b/packages/mcp-server/src/tools/task-stats.ts
index c44b4fb3..2a04e757 100644
--- a/packages/mcp-server/src/tools/task-stats.ts
+++ b/packages/mcp-server/src/tools/task-stats.ts
@@ -1,40 +1,27 @@
-import { apiRequest } from '../api/client.js';
-import { initializeEncryption } from '../encryption/manager.js';
-import { getCryptoManager } from '../crypto.js';
-import { statsResponseSchema } from '../types.js';
-import type { GsdConfig, StatsResponse, DecryptedTask } from '../types.js';
-import { createMcpLogger } from '../utils/logger.js';
-
-const logger = createMcpLogger('TASK_STATS');
-
+import { listTasks } from './list-tasks.js';
+import { getTaskStats } from './sync-status.js';
+import type { GsdConfig, DecryptedTask } from '../types.js';
/**
* Detailed task statistics derived from decrypted tasks
*/
export interface DetailedTaskStats {
- // Overall counts
totalTasks: number;
activeTasks: number;
deletedTasks: number;
completedTasks: number;
incompleteTasks: number;
-
- // Quadrant distribution
quadrantCounts: {
'urgent-important': number;
'not-urgent-important': number;
'urgent-not-important': number;
'not-urgent-not-important': number;
};
-
- // Tag statistics
tagStats: Array<{
tag: string;
count: number;
completedCount: number;
completionRate: number;
}>;
-
- // Temporal metadata
oldestTaskDate: number | null;
newestTaskDate: number | null;
lastUpdated: number | null;
@@ -48,51 +35,13 @@ export interface DetailedTaskStats {
export async function getDetailedTaskStats(
config: GsdConfig
): Promise<DetailedTaskStats> {
- // Fetch encrypted tasks and metadata from new /api/stats endpoint
- const statsResponse = await apiRequest(
- config,
- '/api/stats',
- statsResponseSchema
- );
-
- // Decrypt tasks
- const decryptedTasks = await decryptTasks(statsResponse.tasks, config);
-
- // Calculate detailed statistics
- return calculateDetailedStats(decryptedTasks, statsResponse.metadata);
-}
-
-/**
- * Decrypt all encrypted task blobs
- */
-async function decryptTasks(
- encryptedTasks: StatsResponse['tasks'],
- config: GsdConfig
-): Promise<DecryptedTask[]> {
- await initializeEncryption(config);
- const cryptoManager = getCryptoManager();
- const decryptedTasks: DecryptedTask[] = [];
-
- for (const encryptedTask of encryptedTasks) {
- try {
- // Skip deleted tasks
- if (encryptedTask.deletedAt) {
- continue;
- }
+ // Fetch metadata counts from Supabase (no decryption needed)
+ const metadata = await getTaskStats(config);
- const decryptedJson = await cryptoManager.decrypt(
- encryptedTask.encryptedBlob,
- encryptedTask.nonce
- );
- const task = JSON.parse(decryptedJson) as DecryptedTask;
- decryptedTasks.push(task);
- } catch (error) {
- logger.error(`Failed to decrypt task ${encryptedTask.id}`, error instanceof Error ? error : new Error(String(error)));
- // Skip tasks that fail to decrypt
- }
- }
+ // Fetch and decrypt all active tasks for detailed stats
+ const decryptedTasks = await listTasks(config);
- return decryptedTasks;
+ return calculateDetailedStats(decryptedTasks, metadata);
}
/**
@@ -100,16 +49,18 @@ async function decryptTasks(
*/
function calculateDetailedStats(
tasks: DecryptedTask[],
- metadata: StatsResponse['metadata']
+ metadata: {
+ totalTasks: number | null;
+ activeTasks: number | null;
+ deletedTasks: number | null;
+ lastUpdated: number | null;
+ oldestTask: number | null;
+ newestTask: number | null;
+ }
): DetailedTaskStats {
- // Overall counts
- const totalTasks = metadata.totalCount;
- const activeTasks = metadata.activeCount;
- const deletedTasks = metadata.deletedCount;
const completedTasks = tasks.filter((t) => t.completed).length;
const incompleteTasks = tasks.filter((t) => !t.completed).length;
- // Quadrant distribution
const quadrantCounts = {
'urgent-important': 0,
'not-urgent-important': 0,
@@ -124,11 +75,38 @@ function calculateDetailedStats(
}
}
- // Tag statistics
- const tagMap = new Map<
- string,
- { count: number; completedCount: number }
- >();
+ const tagStats = calculateTagStats(tasks);
+
+ // Use reduce instead of Math.max(...spread) to avoid stack overflow on large arrays
+ const lastUpdated = tasks.length > 0
+ ? tasks.reduce((max, t) => {
+ const ts = new Date(t.updatedAt).getTime();
+ return ts > max ? ts : max;
+ }, -Infinity)
+ : null;
+
+ return {
+ totalTasks: metadata.totalTasks ?? 0,
+ activeTasks: metadata.activeTasks ?? 0,
+ deletedTasks: metadata.deletedTasks ?? 0,
+ completedTasks,
+ incompleteTasks,
+ quadrantCounts,
+ tagStats,
+ oldestTaskDate: metadata.oldestTask,
+ newestTaskDate: metadata.newestTask,
+ lastUpdated,
+ storageUsed: metadata.activeTasks ?? 0,
+ };
+}
+
+/**
+ * Calculate per-tag statistics from decrypted tasks
+ */
+function calculateTagStats(
+ tasks: DecryptedTask[]
+): DetailedTaskStats['tagStats'] {
+ const tagMap = new Map<string, { count: number; completedCount: number }>();
for (const task of tasks) {
for (const tag of task.tags || []) {
@@ -141,7 +119,7 @@ function calculateDetailedStats(
}
}
- const tagStats = Array.from(tagMap.entries())
+ return Array.from(tagMap.entries())
.map(([tag, stats]) => ({
tag,
count: stats.count,
@@ -151,27 +129,5 @@ function calculateDetailedStats(
? Math.round((stats.completedCount / stats.count) * 100)
: 0,
}))
- .sort((a, b) => b.count - a.count); // Sort by count descending
-
- // Temporal metadata — use reduce instead of Math.max(...spread) to avoid stack overflow on large arrays
- const lastUpdated = tasks.length > 0
- ? tasks.reduce((max, t) => {
- const ts = new Date(t.updatedAt).getTime();
- return ts > max ? ts : max;
- }, -Infinity)
- : null;
-
- return {
- totalTasks,
- activeTasks,
- deletedTasks,
- completedTasks,
- incompleteTasks,
- quadrantCounts,
- tagStats,
- oldestTaskDate: metadata.oldestTaskDate,
- newestTaskDate: metadata.newestTaskDate,
- lastUpdated,
- storageUsed: metadata.storageUsed,
- };
+ .sort((a, b) => b.count - a.count);
}
diff --git a/packages/mcp-server/src/types.ts b/packages/mcp-server/src/types.ts
index d69edffa..fc203fd7 100644
--- a/packages/mcp-server/src/types.ts
+++ b/packages/mcp-server/src/types.ts
@@ -2,12 +2,29 @@ import { z } from 'zod';
// Configuration
export interface GsdConfig {
- apiBaseUrl: string;
- authToken: string;
+ supabaseUrl: string;
+ serviceKey: string;
+ userEmail: string;
encryptionPassphrase?: string; // Optional: for decrypting tasks
}
-// Response schemas based on worker types
+// Supabase encrypted_tasks row schema
+export const encryptedTaskRowSchema = z.object({
+ id: z.string(),
+ user_id: z.string(),
+ encrypted_blob: z.string(),
+ nonce: z.string(),
+ version: z.number(),
+ deleted_at: z.string().nullable(),
+ created_at: z.string(),
+ updated_at: z.string(),
+ last_modified_device: z.string().nullable(),
+ checksum: z.string(),
+});
+
+export type EncryptedTaskRow = z.infer<typeof encryptedTaskRowSchema>;
+
+// Response schemas for tool output
export const syncStatusSchema = z.object({
lastSyncAt: z.number().nullable(),
pendingPushCount: z.number(),
@@ -35,42 +52,13 @@ export const taskStatsSchema = z.object({
newestTask: z.number().nullable(),
});
-export const statsResponseSchema = z.object({
- tasks: z.array(
- z.object({
- id: z.string(),
- encryptedBlob: z.string(),
- nonce: z.string(),
- createdAt: z.number(),
- updatedAt: z.number(),
- deletedAt: z.number().nullable(),
- })
- ),
- metadata: z.object({
- totalCount: z.number(),
- activeCount: z.number(),
- deletedCount: z.number(),
- oldestTaskDate: z.number().nullable(),
- newestTaskDate: z.number().nullable(),
- storageUsed: z.number(),
- }),
-});
-
export type SyncStatus = z.infer<typeof syncStatusSchema>;
export type Device = z.infer<typeof deviceSchema>;
export type TaskStats = z.infer<typeof taskStatsSchema>;
-export type StatsResponse = z.infer<typeof statsResponseSchema>;
-// Encrypted task blob from API
-export const encryptedTaskBlobSchema = z.object({
- id: z.string(),
- encrypted_blob: z.string(),
- nonce: z.string(),
- updated_at: z.number(),
- created_at: z.number(),
-});
-
-export type EncryptedTaskBlob = z.infer<typeof encryptedTaskBlobSchema>;
+// Legacy alias for backward compatibility in re-exports
+export const encryptedTaskBlobSchema = encryptedTaskRowSchema;
+export type EncryptedTaskBlob = EncryptedTaskRow;
// Decrypted task structure (matches GSD TaskRecord from frontend)
export interface DecryptedTask {
@@ -79,31 +67,20 @@ export interface DecryptedTask {
description: string;
urgent: boolean;
important: boolean;
- quadrant: string; // Frontend uses 'quadrant', not 'quadrantId'
+ quadrant: string;
completed: boolean;
- completedAt?: string; // ISO datetime when task was completed
- dueDate?: string; // ISO datetime string, optional (NOT null)
+ completedAt?: string;
+ dueDate?: string;
tags: string[];
subtasks: Array<{
id: string;
- title: string; // Frontend uses 'title', not 'text'
+ title: string;
completed: boolean;
}>;
recurrence: 'none' | 'daily' | 'weekly' | 'monthly';
dependencies: string[];
- createdAt: string; // Frontend expects ISO datetime string
- updatedAt: string; // Frontend expects ISO datetime string
- vectorClock?: Record<string, number>; // For sync conflict resolution
-}
-
-// API response types
-export interface PullTasksResponse {
- tasks: Array<{
- id: string;
- encryptedBlob: string;
- nonce: string;
- updatedAt: number;
- }>;
+ createdAt: string;
+ updatedAt: string;
}
// Task filters
diff --git a/packages/mcp-server/src/write-ops/bulk-operations.ts b/packages/mcp-server/src/write-ops/bulk-operations.ts
index af4b9b0b..56d30cef 100644
--- a/packages/mcp-server/src/write-ops/bulk-operations.ts
+++ b/packages/mcp-server/src/write-ops/bulk-operations.ts
@@ -5,7 +5,6 @@
import type { GsdConfig, DecryptedTask } from '../tools.js';
import type { BulkOperation, SyncOperation } from './types.js';
-import { getCryptoManager } from '../crypto.js';
import { listTasks } from '../tools.js';
import { deriveQuadrant, ensureEncryption, pushToSync } from './helpers.js';
@@ -27,7 +26,7 @@ export async function bulkUpdateTasks(
// Safety check: limit bulk operations
if (taskIds.length > maxTasks) {
throw new Error(
- `❌ Bulk operation limit exceeded\n\n` +
+ `Bulk operation limit exceeded\n\n` +
`Requested: ${taskIds.length} tasks\n` +
`Maximum: ${maxTasks} tasks\n\n` +
`Please reduce the number of tasks or split into multiple operations.`
@@ -49,83 +48,12 @@ export async function bulkUpdateTasks(
const errors: string[] = [];
const operations: SyncOperation[] = [];
- const cryptoManager = getCryptoManager();
const now = new Date().toISOString();
for (const task of tasksToUpdate) {
try {
- let updatedTask: DecryptedTask;
-
- switch (operation.type) {
- case 'complete':
- updatedTask = { ...task, completed: operation.completed, updatedAt: now };
- // Set/clear completedAt
- if (operation.completed && !task.completed) {
- updatedTask.completedAt = now;
- } else if (!operation.completed) {
- delete updatedTask.completedAt;
- }
- break;
-
- case 'move_quadrant':
- updatedTask = {
- ...task,
- urgent: operation.urgent,
- important: operation.important,
- quadrant: deriveQuadrant(operation.urgent, operation.important),
- updatedAt: now,
- };
- break;
-
- case 'add_tags': {
- const newTags = [...new Set([...task.tags, ...operation.tags])];
- updatedTask = { ...task, tags: newTags, updatedAt: now };
- break;
- }
-
- case 'remove_tags': {
- const tagsToRemove = new Set(operation.tags);
- const filteredTags = task.tags.filter((tag) => !tagsToRemove.has(tag));
- updatedTask = { ...task, tags: filteredTags, updatedAt: now };
- break;
- }
-
- case 'set_due_date':
- updatedTask = { ...task, updatedAt: now };
- // Set or clear dueDate
- if (operation.dueDate) {
- updatedTask.dueDate = operation.dueDate;
- } else {
- delete updatedTask.dueDate;
- }
- break;
-
- case 'delete':
- // Delete operation
- operations.push({
- type: 'delete',
- taskId: task.id,
- vectorClock: {}, // Simplified: let server manage
- });
- continue;
-
- default:
- throw new Error(`Unknown operation type: ${(operation as { type: string }).type}`);
- }
-
- // Encrypt updated task and calculate checksum
- const taskJson = JSON.stringify(updatedTask);
- const { ciphertext, nonce } = await cryptoManager.encrypt(taskJson);
- const checksum = await cryptoManager.hash(taskJson);
-
- operations.push({
- type: 'update',
- taskId: task.id,
- encryptedBlob: ciphertext,
- nonce,
- vectorClock: {}, // Simplified: let server manage
- checksum,
- });
+ const syncOp = applyBulkOperation(task, operation, now);
+ operations.push(syncOp);
} catch (error) {
errors.push(
`Task ${task.id}: ${error instanceof Error ? error.message : 'Unknown error'}`
@@ -141,13 +69,13 @@ export async function bulkUpdateTasks(
return { updated: updateCount, deleted: deleteCount, errors, dryRun: true };
}
- // Push all updates at once
+ // Push all updates at once (pushToSync handles encryption)
if (operations.length > 0) {
try {
await pushToSync(config, operations);
} catch (error) {
throw new Error(
- `❌ Bulk update failed\n\n` +
+ `Bulk update failed\n\n` +
`Error: ${error instanceof Error ? error.message : 'Unknown error'}\n\n` +
`None of the ${operations.length} tasks were updated.`
);
@@ -161,3 +89,64 @@ export async function bulkUpdateTasks(
dryRun: false,
};
}
+
+/**
+ * Apply a bulk operation to a single task and return the sync operation
+ */
+function applyBulkOperation(
+ task: DecryptedTask,
+ operation: BulkOperation,
+ now: string
+): SyncOperation {
+ switch (operation.type) {
+ case 'complete': {
+ const updatedTask: DecryptedTask = { ...task, completed: operation.completed, updatedAt: now };
+ if (operation.completed && !task.completed) {
+ updatedTask.completedAt = now;
+ } else if (!operation.completed) {
+ delete updatedTask.completedAt;
+ }
+ return { type: 'update', taskId: task.id, data: updatedTask };
+ }
+
+ case 'move_quadrant':
+ return {
+ type: 'update',
+ taskId: task.id,
+ data: {
+ ...task,
+ urgent: operation.urgent,
+ important: operation.important,
+ quadrant: deriveQuadrant(operation.urgent, operation.important),
+ updatedAt: now,
+ },
+ };
+
+ case 'add_tags': {
+ const newTags = [...new Set([...task.tags, ...operation.tags])];
+ return { type: 'update', taskId: task.id, data: { ...task, tags: newTags, updatedAt: now } };
+ }
+
+ case 'remove_tags': {
+ const tagsToRemove = new Set(operation.tags);
+ const filteredTags = task.tags.filter((tag) => !tagsToRemove.has(tag));
+ return { type: 'update', taskId: task.id, data: { ...task, tags: filteredTags, updatedAt: now } };
+ }
+
+ case 'set_due_date': {
+ const updatedTask: DecryptedTask = { ...task, updatedAt: now };
+ if (operation.dueDate) {
+ updatedTask.dueDate = operation.dueDate;
+ } else {
+ delete updatedTask.dueDate;
+ }
+ return { type: 'update', taskId: task.id, data: updatedTask };
+ }
+
+ case 'delete':
+ return { type: 'delete', taskId: task.id };
+
+ default:
+ throw new Error(`Unknown operation type: ${(operation as { type: string }).type}`);
+ }
+}
diff --git a/packages/mcp-server/src/write-ops/helpers.ts b/packages/mcp-server/src/write-ops/helpers.ts
index d3624495..e28771c8 100644
--- a/packages/mcp-server/src/write-ops/helpers.ts
+++ b/packages/mcp-server/src/write-ops/helpers.ts
@@ -3,20 +3,18 @@
* Includes ID generation, quadrant logic, encryption setup, and sync push
*/
-import type { GsdConfig } from '../tools.js';
+import type { GsdConfig } from '../types.js';
import type { SyncOperation } from './types.js';
import { getCryptoManager } from '../crypto.js';
-import { getDeviceIdFromToken } from '../jwt.js';
-import { fetchWithRetry, DEFAULT_RETRY_CONFIG } from '../api/retry.js';
+import { getSupabaseClient, resolveUserId } from '../api/client.js';
+import { initializeEncryption } from '../encryption/manager.js';
import { getTaskCache } from '../cache.js';
/**
* Generate unique ID for new tasks
*/
export function generateTaskId(): string {
- // Use crypto.randomUUID() for secure random IDs
const uuid = crypto.randomUUID();
- // Remove hyphens to match frontend format
return uuid.replace(/-/g, '');
}
@@ -32,7 +30,6 @@ export function deriveQuadrant(urgent: boolean, important: boolean): string {
/**
* Initialize encryption for write operations
- * Includes retry logic for fetching encryption salt
*/
export async function ensureEncryption(config: GsdConfig): Promise<void> {
if (!config.encryptionPassphrase) {
@@ -44,99 +41,80 @@ export async function ensureEncryption(config: GsdConfig): Promise {
);
}
- const cryptoManager = getCryptoManager();
- if (!cryptoManager.isInitialized()) {
- // Fetch salt with retry logic
- const response = await fetchWithRetry(
- () =>
- fetch(`${config.apiBaseUrl}/api/auth/encryption-salt`, {
- method: 'GET',
- headers: {
- Authorization: `Bearer ${config.authToken}`,
- 'Content-Type': 'application/json',
- },
- }),
- DEFAULT_RETRY_CONFIG
- );
-
- if (!response.ok) {
- throw new Error(`Failed to fetch encryption salt: ${response.status}`);
- }
-
- const data = (await response.json()) as { encryptionSalt: string };
- if (!data.encryptionSalt) {
- throw new Error('Encryption not set up for this account');
- }
-
- await cryptoManager.deriveKey(config.encryptionPassphrase, data.encryptionSalt);
- }
+ await initializeEncryption(config);
}
/**
- * Push encrypted task data to sync API
- * Includes retry logic for transient failures
+ * Push encrypted task data to Supabase
*/
export async function pushToSync(
config: GsdConfig,
operations: SyncOperation[]
): Promise<void> {
- const deviceId = getDeviceIdFromToken(config.authToken);
- const payload = JSON.stringify({
- deviceId,
- operations,
- clientVectorClock: {}, // Simplified: let server handle vector clock
- });
+ const userId = await resolveUserId(config);
+ const supabase = getSupabaseClient(config);
+ const cryptoManager = getCryptoManager();
- const response = await fetchWithRetry(
- () =>
- fetch(`${config.apiBaseUrl}/api/sync/push`, {
- method: 'POST',
- headers: {
- Authorization: `Bearer ${config.authToken}`,
- 'Content-Type': 'application/json',
- },
- body: payload,
- }),
- DEFAULT_RETRY_CONFIG
- );
+ for (const op of operations) {
+ if (op.type === 'delete') {
+ // Soft-delete the task
+ const { error } = await supabase
+ .from('encrypted_tasks')
+ .update({ deleted_at: new Date().toISOString() })
+ .eq('id', op.taskId)
+ .eq('user_id', userId);
- if (!response.ok) {
- const errorText = await response.text();
- throw new Error(
- `❌ Failed to push task changes (${response.status})\n\n` +
- `Error: ${errorText}\n\n` +
- `Your changes were not saved to the server.\n` +
- `Retried ${DEFAULT_RETRY_CONFIG.maxRetries} times before giving up.`
- );
- }
+ if (error) {
+ throw new Error(
+ `❌ Failed to delete task ${op.taskId}\n\n` +
+ `Error: ${error.message}`
+ );
+ }
+ continue;
+ }
- // Check response for rejected operations and conflicts
- const result = (await response.json()) as {
- accepted?: string[];
- rejected?: Array<{ taskId: string; reason: string; details: string }>;
- conflicts?: Array<unknown>;
- serverVectorClock?: Record<string, number>;
- };
+ // Encrypt task data
+ const taskJson = JSON.stringify(op.data);
+ const { ciphertext, nonce } = await cryptoManager.encrypt(taskJson);
+ const checksum = await computeChecksum(taskJson);
- // Check for rejected operations
- if (result.rejected && result.rejected.length > 0) {
- const rejectionDetails = result.rejected
- .map((r) => ` - Task ${r.taskId}: ${r.reason} - ${r.details}`)
- .join('\n');
- throw new Error(
- `❌ Worker rejected ${result.rejected.length} operation(s)\n\n` +
- `${rejectionDetails}\n\n` +
- `Your changes were not saved to the server.`
- );
- }
+ // Upsert encrypted task
+ const { error } = await supabase
+ .from('encrypted_tasks')
+ .upsert({
+ id: op.taskId,
+ user_id: userId,
+ encrypted_blob: ciphertext,
+ nonce,
+ version: 1,
+ deleted_at: null,
+ updated_at: new Date().toISOString(),
+ last_modified_device: 'mcp-server',
+ checksum,
+ }, {
+ onConflict: 'id,user_id',
+ });
- // Check for conflicts
- if (result.conflicts && result.conflicts.length > 0) {
- console.warn(`⚠️ Warning: ${result.conflicts.length} conflict(s) detected`);
- console.warn('Last-write-wins strategy applied - your changes took precedence');
+ if (error) {
+ throw new Error(
+ `❌ Failed to push task ${op.taskId}\n\n` +
+ `Error: ${error.message}\n\n` +
+ `Your changes were not saved to the server.`
+ );
+ }
}
// Invalidate cache after successful write
const cache = getTaskCache();
cache.invalidate();
}
+
+/**
+ * Compute simple checksum for integrity verification
+ */
+async function computeChecksum(data: string): Promise<string> {
+ const encoder = new TextEncoder();
+ const hashBuffer = await globalThis.crypto.subtle.digest('SHA-256', encoder.encode(data));
+ const hashArray = Array.from(new Uint8Array(hashBuffer));
+ return hashArray.map(b => b.toString(16).padStart(2, '0')).join('').slice(0, 16);
+}
diff --git a/packages/mcp-server/src/write-ops/task-operations.ts b/packages/mcp-server/src/write-ops/task-operations.ts
index 07187c60..6a3a7cb9 100644
--- a/packages/mcp-server/src/write-ops/task-operations.ts
+++ b/packages/mcp-server/src/write-ops/task-operations.ts
@@ -5,7 +5,6 @@
import type { GsdConfig, DecryptedTask } from '../tools.js';
import type { CreateTaskInput, UpdateTaskInput } from './types.js';
-import { getCryptoManager } from '../crypto.js';
import { listTasks } from '../tools.js';
import { generateTaskId, deriveQuadrant, ensureEncryption, pushToSync } from './helpers.js';
import {
@@ -77,14 +76,13 @@ export async function createTask(
important: input.important,
quadrant,
completed: false,
- ...(input.dueDate && { dueDate: input.dueDate }), // Only include if set
+ ...(input.dueDate && { dueDate: input.dueDate }),
tags: input.tags || [],
subtasks: subtasksWithIds,
recurrence: input.recurrence || 'none',
dependencies: input.dependencies || [],
createdAt: now,
updatedAt: now,
- vectorClock: {}, // Initialize with empty vector clock
};
// If dry-run, return without saving
@@ -96,22 +94,9 @@ export async function createTask(
};
}
- // Encrypt task and calculate checksum
- const cryptoManager = getCryptoManager();
- const taskJson = JSON.stringify(newTask);
- const { ciphertext, nonce } = await cryptoManager.encrypt(taskJson);
- const checksum = await cryptoManager.hash(taskJson);
-
- // Push to sync
+ // Push to Supabase (pushToSync handles encryption)
await pushToSync(config, [
- {
- type: 'create',
- taskId,
- encryptedBlob: ciphertext,
- nonce,
- vectorClock: {}, // Simplified: let server manage
- checksum,
- },
+ { type: 'create', taskId, data: newTask },
]);
return {
@@ -149,7 +134,7 @@ export async function updateTask(
const currentTask = tasks.find((t) => t.id === input.id);
if (!currentTask) {
- throw new Error(`❌ Task not found: ${input.id}\n\nThe task may have been deleted.`);
+ throw new Error(`Task not found: ${input.id}\n\nThe task may have been deleted.`);
}
// Validate dependencies if changing
@@ -214,7 +199,7 @@ export async function updateTask(
if (input.dueDate) {
updatedTask.dueDate = input.dueDate;
} else {
- delete updatedTask.dueDate; // Remove field if clearing
+ delete updatedTask.dueDate;
}
}
@@ -222,7 +207,7 @@ export async function updateTask(
if (input.completed === true && !currentTask.completed) {
updatedTask.completedAt = new Date().toISOString();
} else if (input.completed === false) {
- delete updatedTask.completedAt; // Clear when uncompleting
+ delete updatedTask.completedAt;
}
// Recalculate quadrant if urgent/important changed
@@ -243,22 +228,9 @@ export async function updateTask(
};
}
- // Encrypt task and calculate checksum
- const cryptoManager = getCryptoManager();
- const taskJson = JSON.stringify(updatedTask);
- const { ciphertext, nonce } = await cryptoManager.encrypt(taskJson);
- const checksum = await cryptoManager.hash(taskJson);
-
- // Push to sync
+ // Push to Supabase (pushToSync handles encryption)
await pushToSync(config, [
- {
- type: 'update',
- taskId: updatedTask.id,
- encryptedBlob: ciphertext,
- nonce,
- vectorClock: {}, // Simplified: let server manage
- checksum,
- },
+ { type: 'update', taskId: updatedTask.id, data: updatedTask },
]);
return {
@@ -313,7 +285,7 @@ export async function deleteTask(
const task = tasks.find((t) => t.id === taskId);
if (!task) {
- throw new Error(`❌ Task not found: ${taskId}\n\nThe task may have already been deleted.`);
+ throw new Error(`Task not found: ${taskId}\n\nThe task may have already been deleted.`);
}
// Check for tasks that depend on this one
@@ -330,13 +302,9 @@ export async function deleteTask(
};
}
- // Push deletion
+ // Push deletion to Supabase
await pushToSync(config, [
- {
- type: 'delete',
- taskId,
- vectorClock: {}, // Simplified: let server manage
- },
+ { type: 'delete', taskId },
]);
return {
diff --git a/packages/mcp-server/src/write-ops/types.ts b/packages/mcp-server/src/write-ops/types.ts
index 64167815..229460f3 100644
--- a/packages/mcp-server/src/write-ops/types.ts
+++ b/packages/mcp-server/src/write-ops/types.ts
@@ -2,6 +2,8 @@
* Type definitions for write operations
*/
+import type { DecryptedTask } from '../types.js';
+
/**
* Common options for write operations
*/
@@ -55,13 +57,10 @@ export type BulkOperation =
| { type: 'delete' };
/**
- * Sync operation for push request
+ * Sync operation for pushing to Supabase
+ * For create/update: carries plaintext task data (pushToSync handles encryption)
+ * For delete: only needs taskId
*/
-export interface SyncOperation {
- type: 'create' | 'update' | 'delete';
- taskId: string;
- encryptedBlob?: string;
- nonce?: string;
- vectorClock: Record<string, number>;
- checksum?: string; // SHA-256 hash of plaintext JSON (required for create/update)
-}
+export type SyncOperation =
+ | { type: 'create' | 'update'; taskId: string; data: DecryptedTask }
+ | { type: 'delete'; taskId: string };
diff --git a/packages/mcp-server/test-env-vars.js b/packages/mcp-server/test-env-vars.js
index a66d5c3e..e1492267 100644
--- a/packages/mcp-server/test-env-vars.js
+++ b/packages/mcp-server/test-env-vars.js
@@ -5,23 +5,26 @@ console.log('Environment Variable Test');
console.log('=========================\n');
const vars = {
- GSD_API_URL: process.env.GSD_API_URL,
- GSD_AUTH_TOKEN: process.env.GSD_AUTH_TOKEN,
+ GSD_SUPABASE_URL: process.env.GSD_SUPABASE_URL,
+ GSD_SUPABASE_SERVICE_KEY: process.env.GSD_SUPABASE_SERVICE_KEY,
+ GSD_USER_EMAIL: process.env.GSD_USER_EMAIL,
GSD_ENCRYPTION_PASSPHRASE: process.env.GSD_ENCRYPTION_PASSPHRASE,
};
-console.log('GSD_API_URL:', vars.GSD_API_URL ? '✅ Set' : '❌ Missing');
-console.log('GSD_AUTH_TOKEN:', vars.GSD_AUTH_TOKEN ? '✅ Set' : '❌ Missing');
+console.log('GSD_SUPABASE_URL:', vars.GSD_SUPABASE_URL ? '✅ Set' : '❌ Missing');
+console.log('GSD_SUPABASE_SERVICE_KEY:', vars.GSD_SUPABASE_SERVICE_KEY ? '✅ Set' : '❌ Missing');
+console.log('GSD_USER_EMAIL:', vars.GSD_USER_EMAIL ? '✅ Set' : '❌ Missing');
console.log('GSD_ENCRYPTION_PASSPHRASE:', vars.GSD_ENCRYPTION_PASSPHRASE ? '✅ Set' : '❌ Missing');
console.log('\nValues:');
-console.log('GSD_API_URL:', vars.GSD_API_URL || '(not set)');
-console.log('GSD_AUTH_TOKEN:', vars.GSD_AUTH_TOKEN ? `${vars.GSD_AUTH_TOKEN.substring(0, 20)}...` : '(not set)');
-console.log('GSD_ENCRYPTION_PASSPHRASE:', vars.GSD_ENCRYPTION_PASSPHRASE || '(not set)');
+console.log('GSD_SUPABASE_URL:', vars.GSD_SUPABASE_URL || '(not set)');
+console.log('GSD_SUPABASE_SERVICE_KEY:', vars.GSD_SUPABASE_SERVICE_KEY ? `${vars.GSD_SUPABASE_SERVICE_KEY.substring(0, 20)}...` : '(not set)');
+console.log('GSD_USER_EMAIL:', vars.GSD_USER_EMAIL || '(not set)');
+console.log('GSD_ENCRYPTION_PASSPHRASE:', vars.GSD_ENCRYPTION_PASSPHRASE ? '✅ (set)' : '(not set)');
-if (!vars.GSD_ENCRYPTION_PASSPHRASE) {
- console.log('\n❌ GSD_ENCRYPTION_PASSPHRASE is not set!');
- console.log('\nThis means Claude Desktop is not passing the environment variable.');
+const missing = Object.entries(vars).filter(([, v]) => !v).map(([k]) => k);
+if (missing.length > 0) {
+ console.log(`\n❌ Missing variables: ${missing.join(', ')}`);
console.log('\nPossible fixes:');
console.log('1. Verify claude_desktop_config.json has correct syntax');
console.log('2. Completely quit Claude Desktop (Cmd+Q)');
diff --git a/public/oauth-callback.html b/public/oauth-callback.html
deleted file mode 100644
index 007f912a..00000000
--- a/public/oauth-callback.html
+++ /dev/null
@@ -1,507 +0,0 @@
-
-
-
-
-
- Completing Sign In...
-
-
-
-
-
-
-
Completing sign in...
-
Please wait while we finish setting up your account.
-
-
-
-
-
Sign in successful!
-
You can now close this window.
-
-
-
-
-
-
-
-
Sign in failed
-
-
-
-
-
-
-
-
diff --git a/security-review-prompt.md b/security-review-prompt.md
index c2fe220e..84d025e1 100644
--- a/security-review-prompt.md
+++ b/security-review-prompt.md
@@ -1,6 +1,6 @@
# Next.js 16 TypeScript Web Application Security & Standards Review
-You are conducting a comprehensive security audit and standards compliance review of this Next.js 16 TypeScript web application deployed on Cloudflare Workers, using React 19 and Dexie (IndexedDB).
+You are conducting a comprehensive security audit and standards compliance review of this Next.js 16 TypeScript web application with Supabase backend, using React 19 and Dexie (IndexedDB).
## Setup Instructions
@@ -8,10 +8,10 @@ You are conducting a comprehensive security audit and standards compliance revie
2. Use the `view` tool to examine the project structure, focusing on:
- `/app` directory (App Router structure)
- `/pages/api` or `/app/api` (API routes)
- - Cloudflare Workers files (`wrangler.toml`, worker scripts)
+ - Supabase configuration (`lib/supabase.ts`, `lib/sync/supabase-sync-client.ts`)
- Database schema and Dexie configuration
- Middleware files
-3. Review `package.json`, `package-lock.json`, and `wrangler.toml` for dependencies and configuration
+3. Review `package.json` and `bun.lock` for dependencies and configuration
## Part 1: Security Vulnerability Analysis
@@ -44,21 +44,19 @@ You are conducting a comprehensive security audit and standards compliance revie
- XSS prevention in JSX (avoid dangerouslySetInnerHTML)
- Form action security with proper validation
-### Cloudflare Workers Specific Security
+### Supabase Backend Security
-**Edge Runtime Constraints**
-- Proper handling of limited Node.js API availability
-- Secure use of Cloudflare bindings (KV, R2, D1, Durable Objects)
-- Environment variable security in `wrangler.toml` and deployment
-- Secrets management (use `wrangler secret` not hardcoded values)
-- Request size limits and validation
+**Row Level Security (RLS)**
+- All tables must have RLS policies enabled
+- Verify `auth.uid() = user_id` policies on all data tables
+- Service role key usage restricted to MCP server (never in client)
+- Anon key only used in client-side code
-**Worker Configuration**
-- `wrangler.toml` security settings review
-- Route patterns that might expose unintended endpoints
-- CORS configuration in Workers
-- CSP headers implementation in middleware
-- WAF and rate limiting through Cloudflare dashboard integration
+**Supabase Auth Configuration**
+- OAuth provider settings (Google/Apple) properly configured
+- Redirect URLs restricted to known domains
+- Session management and token refresh handled by SDK
+- No manual JWT handling in application code
### Dexie/IndexedDB Security
@@ -105,7 +103,7 @@ You are conducting a comprehensive security audit and standards compliance revie
- Console.log statements
- Error messages and stack traces
- Network requests (DevTools inspection)
-- Environment variables properly segregated (`.env.local`, Cloudflare secrets)
+- Environment variables properly segregated (`.env.local`, Supabase dashboard secrets)
- API keys and tokens never in client code
- PII handling compliance
- Data retention and cleanup policies
@@ -129,7 +127,7 @@ You are conducting a comprehensive security audit and standards compliance revie
- Next.js version (using latest 16.x patches)
- React 19 version (stable vs RC/beta)
- Dexie version and known vulnerabilities
-- Cloudflare Workers runtime compatibility
+- Supabase SDK version and known vulnerabilities
- Unnecessary packages that expand attack surface
### TypeScript Configuration Security
@@ -166,12 +164,12 @@ Review against `coding-standards.md` requirements:
- Transaction usage
- Error handling in database operations
-### Cloudflare Workers Patterns
-- Request/Response handling patterns
-- Binding usage (KV, R2, etc.)
-- Error handling and logging
-- Performance optimization
-- Cold start considerations
+### Supabase Patterns
+- RLS policy consistency across tables
+- Supabase client usage (anon key vs service role key)
+- Error handling for Supabase SDK calls
+- Realtime subscription security
+- Encryption/decryption patterns
### General Standards
- File and folder naming conventions
@@ -186,7 +184,7 @@ Review against `coding-standards.md` requirements:
### Priority Order
1. **Server Actions and API Routes** - Highest risk for data exposure
-2. **Cloudflare Worker configurations** - Infrastructure security
+2. **Supabase configuration and RLS policies** - Infrastructure security
3. **Authentication/Authorization middleware** - Access control
4. **Client-Server data boundaries** - Data leakage prevention
5. **IndexedDB/Dexie usage** - Client-side data security
@@ -199,7 +197,7 @@ Review against `coding-standards.md` requirements:
- `middleware.ts` or `middleware.js`
- `app/api/**/*.ts` - API routes
- Server Actions files (functions with `'use server'`)
-- `wrangler.toml` and Worker entry points
+- Supabase client configuration and RLS policies
- `next.config.js`
- Dexie database schema files
- Authentication utilities
@@ -246,12 +244,12 @@ Section 3.2 of coding-standards.md (if applicable)
- [ ] Image optimization properly configured
- [ ] Metadata doesn't leak sensitive info
-### Cloudflare Workers Checklist
-- [ ] All secrets use `wrangler secret` not environment vars
-- [ ] CORS properly configured for your domains
-- [ ] Rate limiting implemented on sensitive endpoints
-- [ ] Worker bindings properly typed and secured
-- [ ] Request size validation implemented
+### Supabase Checklist
+- [ ] RLS policies enabled and tested on all tables
+- [ ] Service role key never exposed in client-side code
+- [ ] Anon key used only for client-side operations
+- [ ] Realtime subscriptions filter by user_id
+- [ ] OAuth redirect URLs restricted to known domains
### Dexie/IndexedDB Checklist
- [ ] No sensitive data stored unencrypted
@@ -287,7 +285,7 @@ After review completion:
- Generate `SECURITY_REVIEW.md` with all findings
- Create separate `REMEDIATION_PLAN.md` with prioritized fix schedule
- Suggest specific ESLint rules for Next.js/React/TypeScript security
-- Recommend Cloudflare security features to enable (WAF rules, rate limiting)
+- Recommend Supabase security features to enable (RLS policies, auth settings)
---
diff --git a/tests/data/sync/api-client.test.ts b/tests/data/sync/api-client.test.ts
deleted file mode 100644
index 28c4a8f0..00000000
--- a/tests/data/sync/api-client.test.ts
+++ /dev/null
@@ -1,505 +0,0 @@
-import { describe, it, expect, beforeEach, afterEach, vi, type Mock } from 'vitest';
-import { SyncApiClient, getApiClient, clearApiClient } from '@/lib/sync/api-client';
-import {
- SyncNetworkError,
- SyncAuthError,
- SyncValidationError,
-} from '@/lib/sync/errors';
-
-describe('SyncApiClient', () => {
- let client: SyncApiClient;
- const baseUrl = 'https://test-api.example.com';
- const testToken = 'test-token-123';
-
- beforeEach(() => {
- client = new SyncApiClient(baseUrl);
- // Mock fetch
- global.fetch = vi.fn();
- });
-
- afterEach(() => {
- vi.restoreAllMocks();
- clearApiClient();
- });
-
- describe('Constructor', () => {
- it('should initialize with base URL', () => {
- expect(client).toBeInstanceOf(SyncApiClient);
- });
-
- it('should remove trailing slash from base URL', () => {
- const clientWithSlash = new SyncApiClient('https://api.example.com/');
- expect(clientWithSlash).toBeInstanceOf(SyncApiClient);
- });
- });
-
- describe('Token Management', () => {
- it('should set authentication token', () => {
- client.setToken(testToken);
- // Token is set internally (can't directly test, but verify no errors)
- expect(() => client.setToken(testToken)).not.toThrow();
- });
-
- it('should clear authentication token', () => {
- client.setToken(testToken);
- client.setToken(null);
- expect(() => client.setToken(null)).not.toThrow();
- });
- });
-
- describe('Authentication Endpoints', () => {
- describe('logout()', () => {
- beforeEach(() => {
- client.setToken(testToken);
- });
-
- it('should make POST request to /api/auth/logout with auth', async () => {
- const mockResponse = { success: true };
-
- (global.fetch as unknown as Mock).mockResolvedValueOnce({
- ok: true,
- json: async () => mockResponse,
- });
-
- const result = await client.logout();
-
- expect(result).toEqual(mockResponse);
- expect(global.fetch).toHaveBeenCalledWith(
- `${baseUrl}/api/auth/logout`,
- expect.objectContaining({
- method: 'POST',
- headers: expect.objectContaining({
- Authorization: `Bearer ${testToken}`,
- }),
- })
- );
- });
- });
-
- describe('refreshToken()', () => {
- beforeEach(() => {
- client.setToken(testToken);
- });
-
- it('should make POST request to /api/auth/refresh with auth', async () => {
- const mockResponse = {
- token: 'refreshed-token',
- expiresAt: Date.now() + 86400000,
- };
-
- (global.fetch as unknown as Mock).mockResolvedValueOnce({
- ok: true,
- json: async () => mockResponse,
- });
-
- const result = await client.refreshToken();
-
- expect(result).toEqual(mockResponse);
- expect(global.fetch).toHaveBeenCalledWith(
- `${baseUrl}/api/auth/refresh`,
- expect.objectContaining({
- method: 'POST',
- headers: expect.objectContaining({
- Authorization: `Bearer ${testToken}`,
- }),
- })
- );
- });
- });
- });
-
- describe('Sync Endpoints', () => {
- beforeEach(() => {
- client.setToken(testToken);
- });
-
- describe('push()', () => {
- it('should make POST request to /api/sync/push with auth', async () => {
- const mockResponse = {
- accepted: ['task-1', 'task-2'],
- rejected: [],
- conflicts: [],
- };
-
- (global.fetch as unknown as Mock).mockResolvedValueOnce({
- ok: true,
- json: async () => mockResponse,
- });
-
- const result = await client.push({
- operations: [
- {
- type: 'create',
- taskId: 'task-1',
- encryptedBlob: 'encrypted-data',
- nonce: 'test-nonce',
- vectorClock: {},
- checksum: 'test-checksum',
- },
- ],
- deviceId: 'device-123',
- clientVectorClock: {},
- });
-
- expect(result).toEqual(mockResponse);
- expect(global.fetch).toHaveBeenCalledWith(
- `${baseUrl}/api/sync/push`,
- expect.objectContaining({
- method: 'POST',
- headers: expect.objectContaining({
- Authorization: `Bearer ${testToken}`,
- }),
- })
- );
- });
-
- it('should throw SyncAuthError on 401 error', async () => {
- (global.fetch as unknown as Mock).mockResolvedValueOnce({
- ok: false,
- status: 401,
- statusText: 'Unauthorized',
- json: async () => ({ error: 'Token expired' }),
- });
-
- await expect(
- client.push({
- operations: [],
- deviceId: 'device-123',
- clientVectorClock: {},
- })
- ).rejects.toThrow(SyncAuthError);
- });
- });
-
- describe('pull()', () => {
- it('should make POST request to /api/sync/pull with auth', async () => {
- const mockResponse = {
- tasks: [
- {
- id: 'task-1',
- data: { title: 'Synced Task' },
- vectorClock: {},
- },
- ],
- deletedTaskIds: [],
- syncTimestamp: Date.now(),
- };
-
- (global.fetch as unknown as Mock).mockResolvedValueOnce({
- ok: true,
- json: async () => mockResponse,
- });
-
- const result = await client.pull({
- deviceId: 'device-123',
- lastVectorClock: {},
- });
-
- expect(result).toEqual(mockResponse);
- expect(global.fetch).toHaveBeenCalledWith(
- `${baseUrl}/api/sync/pull`,
- expect.objectContaining({
- method: 'POST',
- headers: expect.objectContaining({
- Authorization: `Bearer ${testToken}`,
- }),
- })
- );
- });
- });
-
- describe('getStatus()', () => {
- it('should make GET request to /api/sync/status with auth', async () => {
- const mockResponse = {
- userId: 'user-123',
- deviceCount: 2,
- lastSyncAt: Date.now(),
- storageUsed: 1024,
- };
-
- (global.fetch as unknown as Mock).mockResolvedValueOnce({
- ok: true,
- json: async () => mockResponse,
- });
-
- const result = await client.getStatus();
-
- expect(result).toEqual(mockResponse);
- expect(global.fetch).toHaveBeenCalledWith(
- `${baseUrl}/api/sync/status`,
- expect.objectContaining({
- method: 'GET',
- headers: expect.objectContaining({
- Authorization: `Bearer ${testToken}`,
- }),
- })
- );
- });
- });
- });
-
- describe('Device Management Endpoints', () => {
- beforeEach(() => {
- client.setToken(testToken);
- });
-
- describe('listDevices()', () => {
- it('should make GET request to /api/devices with auth', async () => {
- const mockResponse = {
- devices: [
- {
- id: 'device-1',
- name: 'Device 1',
- lastSeen: Date.now(),
- },
- {
- id: 'device-2',
- name: 'Device 2',
- lastSeen: Date.now(),
- },
- ],
- };
-
- (global.fetch as unknown as Mock).mockResolvedValueOnce({
- ok: true,
- json: async () => mockResponse,
- });
-
- const result = await client.listDevices();
-
- expect(result).toEqual(mockResponse);
- expect(global.fetch).toHaveBeenCalledWith(
- `${baseUrl}/api/devices`,
- expect.objectContaining({
- method: 'GET',
- headers: expect.objectContaining({
- Authorization: `Bearer ${testToken}`,
- }),
- })
- );
- });
- });
-
- describe('revokeDevice()', () => {
- it('should make DELETE request to /api/devices/:deviceId with auth', async () => {
- const mockResponse = { success: true };
-
- (global.fetch as unknown as Mock).mockResolvedValueOnce({
- ok: true,
- json: async () => mockResponse,
- });
-
- const result = await client.revokeDevice('device-123');
-
- expect(result).toEqual(mockResponse);
- expect(global.fetch).toHaveBeenCalledWith(
- `${baseUrl}/api/devices/device-123`,
- expect.objectContaining({
- method: 'DELETE',
- headers: expect.objectContaining({
- Authorization: `Bearer ${testToken}`,
- }),
- })
- );
- });
- });
- });
-
- describe('Error Handling', () => {
- it('should categorize 401 as SyncAuthError', async () => {
- (global.fetch as unknown as Mock).mockResolvedValueOnce({
- ok: false,
- status: 401,
- statusText: 'Unauthorized',
- json: async () => ({ error: 'Authentication failed' }),
- });
-
- await expect(client.listDevices()).rejects.toThrow(SyncAuthError);
- });
-
- it('should categorize 403 as SyncAuthError', async () => {
- (global.fetch as unknown as Mock).mockResolvedValueOnce({
- ok: false,
- status: 403,
- statusText: 'Forbidden',
- json: async () => ({ error: 'Access denied' }),
- });
-
- await expect(client.listDevices()).rejects.toThrow(SyncAuthError);
- });
-
- it('should categorize 500 as SyncNetworkError', async () => {
- (global.fetch as unknown as Mock).mockResolvedValueOnce({
- ok: false,
- status: 500,
- statusText: 'Internal Server Error',
- json: async () => ({ error: 'Server error' }),
- });
-
- await expect(client.listDevices()).rejects.toThrow(SyncNetworkError);
- });
-
- it('should categorize 503 as SyncNetworkError', async () => {
- (global.fetch as unknown as Mock).mockResolvedValueOnce({
- ok: false,
- status: 503,
- statusText: 'Service Unavailable',
- json: async () => ({ error: 'Service unavailable' }),
- });
-
- await expect(client.listDevices()).rejects.toThrow(SyncNetworkError);
- });
-
- it('should categorize 400 as SyncValidationError', async () => {
- (global.fetch as unknown as Mock).mockResolvedValueOnce({
- ok: false,
- status: 400,
- statusText: 'Bad Request',
- json: async () => ({ error: 'Invalid request' }),
- });
-
- await expect(client.listDevices()).rejects.toThrow(SyncValidationError);
- });
-
- it('should categorize 422 as SyncValidationError', async () => {
- (global.fetch as unknown as Mock).mockResolvedValueOnce({
- ok: false,
- status: 422,
- statusText: 'Unprocessable Entity',
- json: async () => ({ error: 'Validation failed' }),
- });
-
- await expect(client.listDevices()).rejects.toThrow(SyncValidationError);
- });
-
- it('should handle network errors (fetch throws)', async () => {
- (global.fetch as unknown as Mock).mockRejectedValueOnce(new Error('Network error'));
-
- await expect(client.listDevices()).rejects.toThrow(SyncNetworkError);
- });
-
- it('should handle invalid JSON response', async () => {
- (global.fetch as unknown as Mock).mockResolvedValueOnce({
- ok: false,
- status: 500,
- statusText: 'Internal Server Error',
- json: async () => {
- throw new Error('Invalid JSON');
- },
- });
-
- await expect(client.listDevices()).rejects.toThrow(SyncNetworkError);
- });
-
- it('should preserve error context', async () => {
- const errorMessage = 'Custom error message';
- (global.fetch as unknown as Mock).mockResolvedValueOnce({
- ok: false,
- status: 401,
- statusText: 'Unauthorized',
- json: async () => ({ error: errorMessage }),
- });
-
- try {
- await client.listDevices();
- expect.fail('Should have thrown');
- } catch (error) {
- expect(error).toBeInstanceOf(SyncAuthError);
- expect((error as SyncAuthError).message).toContain(errorMessage);
- expect((error as SyncAuthError).statusCode).toBe(401);
- }
- });
- });
-
- describe('Singleton Pattern', () => {
- it('should return same instance from getApiClient', () => {
- const instance1 = getApiClient(baseUrl);
- const instance2 = getApiClient();
-
- expect(instance1).toBe(instance2);
- });
-
- it('should clear singleton instance', () => {
- const instance1 = getApiClient(baseUrl);
- clearApiClient();
-
- const instance2 = getApiClient(baseUrl);
- expect(instance1).not.toBe(instance2);
- });
-
- it('should throw error when creating without server URL on first call', () => {
- clearApiClient();
-
- expect(() => getApiClient()).toThrow('Server URL required');
- });
- });
-
- describe('Request Headers', () => {
- beforeEach(() => {
- client.setToken(testToken);
- });
-
- it('should include Content-Type header', async () => {
- (global.fetch as unknown as Mock).mockResolvedValueOnce({
- ok: true,
- json: async () => ({ success: true }),
- });
-
- await client.push({
- operations: [
- {
- type: 'create',
- taskId: 'task-1',
- encryptedBlob: 'encrypted-data',
- nonce: 'test-nonce',
- vectorClock: {},
- checksum: 'test-checksum',
- },
- ],
- deviceId: 'device-123',
- clientVectorClock: {},
- });
-
- expect(global.fetch).toHaveBeenCalledWith(
- expect.any(String),
- expect.objectContaining({
- headers: expect.objectContaining({
- 'Content-Type': 'application/json',
- }),
- })
- );
- });
-
- it('should include Authorization header when token is set and auth is required', async () => {
- (global.fetch as unknown as Mock).mockResolvedValueOnce({
- ok: true,
- json: async () => ({ success: true }),
- });
-
- await client.logout();
-
- expect(global.fetch).toHaveBeenCalledWith(
- expect.any(String),
- expect.objectContaining({
- headers: expect.objectContaining({
- Authorization: `Bearer ${testToken}`,
- }),
- })
- );
- });
-
- it('should omit Authorization header when token is missing', async () => {
- (global.fetch as unknown as Mock).mockResolvedValueOnce({
- ok: true,
- json: async () => ({ success: true }),
- });
-
- client.setToken(null);
-
- await client.listDevices();
-
- const callArgs = (global.fetch as unknown as Mock).mock.calls[0][1];
- expect(callArgs.headers.Authorization).toBeUndefined();
- });
- });
-});
diff --git a/tests/data/sync/config.test.ts b/tests/data/sync/config.test.ts
index f80a47c6..3bf668fa 100644
--- a/tests/data/sync/config.test.ts
+++ b/tests/data/sync/config.test.ts
@@ -21,14 +21,6 @@ vi.mock('@/lib/sync/crypto', () => ({
getCryptoManager: vi.fn(() => mockCrypto),
}));
-const mockApiClient = {
- setToken: vi.fn(),
-};
-
-vi.mock('@/lib/sync/api-client', () => ({
- getApiClient: vi.fn(() => mockApiClient),
-}));
-
const mockQueue = {
populateFromExistingTasks: vi.fn().mockResolvedValue(5),
};
@@ -37,14 +29,16 @@ vi.mock('@/lib/sync/queue', () => ({
getSyncQueue: vi.fn(() => mockQueue),
}));
-const mockMonitor = {
- isActive: vi.fn().mockReturnValue(false),
- start: vi.fn(),
- stop: vi.fn(),
-};
+vi.mock('@/lib/sync/realtime-listener', () => ({
+ stopRealtimeListener: vi.fn(),
+}));
-vi.mock('@/lib/sync/health-monitor', () => ({
- getHealthMonitor: vi.fn(() => mockMonitor),
+vi.mock('@/lib/supabase', () => ({
+ getSupabaseClient: vi.fn(() => ({
+ auth: {
+ signOut: vi.fn().mockResolvedValue({ error: null }),
+ },
+ })),
}));
describe('Sync Config', () => {
@@ -56,12 +50,8 @@ describe('Sync Config', () => {
deviceId: 'device-123',
deviceName: 'Test Device',
email: null,
- token: null,
- tokenExpiresAt: null,
lastSyncAt: null,
- vectorClock: {},
conflictStrategy: 'last_write_wins',
- serverUrl: 'https://test-api.example.com',
consecutiveFailures: 0,
lastFailureAt: null,
lastFailureReason: null,
@@ -94,30 +84,12 @@ describe('Sync Config', () => {
expect(config?.deviceName).toBe('Test Device');
});
- it('should auto-create config if it does not exist', async () => {
+ it('should return null if config does not exist', async () => {
await db.syncMetadata.clear();
const config = await getSyncConfig();
- // Config should be auto-created by ensureSyncConfigInitialized()
- expect(config).not.toBeNull();
- expect(config?.enabled).toBe(false);
- expect(config?.deviceId).toBeDefined();
- });
-
- it('should migrate legacy config', async () => {
- // This test ensures backward compatibility with old config formats
- const legacyConfig = {
- ...mockSyncConfig,
- // Add any legacy fields that might need migration
- };
-
- await db.syncMetadata.put(legacyConfig);
-
- const config = await getSyncConfig();
-
- expect(config).not.toBeNull();
- expect(config?.key).toBe('sync_config');
+ expect(config).toBeNull();
});
});
@@ -137,26 +109,12 @@ describe('Sync Config', () => {
expect(updated?.deviceId).toBe('device-123'); // Original value preserved
});
- it('should auto-create config and update it', async () => {
+ it('should throw when config does not exist', async () => {
await db.syncMetadata.clear();
- // updateSyncConfig calls getSyncConfig which auto-initializes
- await updateSyncConfig({ enabled: true });
-
- const config = await getSyncConfig();
- expect(config?.enabled).toBe(true);
- });
-
- it('should update vector clock', async () => {
- const newVectorClock = { 'device-123': 5, 'device-456': 3 };
-
- await updateSyncConfig({
- vectorClock: newVectorClock,
- });
-
- const updated = await getSyncConfig();
-
- expect(updated?.vectorClock).toEqual(newVectorClock);
+ await expect(updateSyncConfig({ enabled: true })).rejects.toThrow(
+ 'Sync config not initialized'
+ );
});
it('should update lastSyncAt timestamp', async () => {
@@ -176,20 +134,16 @@ describe('Sync Config', () => {
it('should enable sync with auth credentials', async () => {
const userId = 'user-789';
const email = 'test@example.com';
- const token = 'test-token-123';
- const expiresAt = Date.now() + 86400000;
const salt = 'test-salt';
const password = 'password123';
- await enableSync(userId, email, token, expiresAt, salt, password);
+ await enableSync(userId, email, salt, password);
const config = await getSyncConfig();
expect(config?.enabled).toBe(true);
expect(config?.userId).toBe(userId);
expect(config?.email).toBe(email);
- expect(config?.token).toBe(token);
- expect(config?.tokenExpiresAt).toBe(expiresAt);
});
it('should initialize crypto with password and salt', async () => {
@@ -200,8 +154,6 @@ describe('Sync Config', () => {
await enableSync(
'user-123',
'test@example.com',
- 'token',
- Date.now() + 86400000,
'salt',
'password'
);
@@ -209,21 +161,6 @@ describe('Sync Config', () => {
expect(mockCrypto.deriveKey).toHaveBeenCalledWith('password', 'salt');
});
- it('should set token in API client', async () => {
- const token = 'test-token-456';
-
- await enableSync(
- 'user-123',
- 'test@example.com',
- token,
- Date.now() + 86400000,
- 'salt',
- 'password'
- );
-
- expect(mockApiClient.setToken).toHaveBeenCalledWith(token);
- });
-
it('should queue existing tasks when tasks exist', async () => {
// Add some tasks
await db.tasks.bulkAdd([
@@ -266,8 +203,6 @@ describe('Sync Config', () => {
await enableSync(
'user-123',
'test@example.com',
- 'token',
- Date.now() + 86400000,
'salt',
'password'
);
@@ -275,39 +210,30 @@ describe('Sync Config', () => {
expect(mockQueue.populateFromExistingTasks).toHaveBeenCalled();
});
- it('should start health monitor', async () => {
- const { getHealthMonitor } = await import('@/lib/sync/health-monitor');
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- const mockMonitor = (getHealthMonitor as any)();
-
+ it('should work with existing config', async () => {
await enableSync(
'user-123',
'test@example.com',
- 'token',
- Date.now() + 86400000,
'salt',
'password'
);
- expect(mockMonitor.start).toHaveBeenCalled();
+ const config = await getSyncConfig();
+ expect(config?.enabled).toBe(true);
+ expect(config?.userId).toBe('user-123');
});
- it('should work with auto-created config', async () => {
- await db.syncMetadata.clear();
-
- // enableSync calls getSyncConfig which auto-initializes
+ it('should store provider when provided', async () => {
await enableSync(
'user-123',
'test@example.com',
- 'token',
- Date.now() + 86400000,
'salt',
- 'password'
+ 'password',
+ 'google'
);
const config = await getSyncConfig();
- expect(config?.enabled).toBe(true);
- expect(config?.userId).toBe('user-123');
+ expect(config?.provider).toBe('google');
});
});
@@ -318,8 +244,6 @@ describe('Sync Config', () => {
enabled: true,
userId: 'user-123',
email: 'test@example.com',
- token: 'test-token',
- tokenExpiresAt: Date.now() + 86400000,
});
// Add some queue items
@@ -330,7 +254,6 @@ describe('Sync Config', () => {
timestamp: Date.now(),
retryCount: 0,
payload: null,
- vectorClock: {},
});
});
@@ -342,10 +265,7 @@ describe('Sync Config', () => {
expect(config?.enabled).toBe(false);
expect(config?.userId).toBeNull();
expect(config?.email).toBeNull();
- expect(config?.token).toBeNull();
- expect(config?.tokenExpiresAt).toBeNull();
expect(config?.lastSyncAt).toBeNull();
- expect(config?.vectorClock).toEqual({});
});
it('should clear sync queue', async () => {
@@ -358,17 +278,6 @@ describe('Sync Config', () => {
expect(queueCountAfter).toBe(0);
});
- it('should stop health monitor', async () => {
- const { getHealthMonitor } = await import('@/lib/sync/health-monitor');
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- const mockMonitor = (getHealthMonitor as any)();
- mockMonitor.isActive.mockReturnValue(true);
-
- await disableSync();
-
- expect(mockMonitor.stop).toHaveBeenCalled();
- });
-
it('should clear crypto manager', async () => {
const { getCryptoManager } = await import('@/lib/sync/crypto');
// eslint-disable-next-line @typescript-eslint/no-explicit-any
@@ -426,7 +335,6 @@ describe('Sync Config', () => {
timestamp: Date.now(),
retryCount: 0,
payload: null,
- vectorClock: {},
});
const status = await getSyncStatus();
@@ -436,18 +344,15 @@ describe('Sync Config', () => {
expect(status.lastSyncAt).toBe(1234567890);
expect(status.pendingCount).toBe(1);
expect(status.deviceId).toBe('device-123');
- expect(status.serverUrl).toBe('https://test-api.example.com');
});
it('should return default values when sync not enabled', async () => {
- // Don't clear syncMetadata - just use the default config from beforeEach
const status = await getSyncStatus();
expect(status.enabled).toBe(false);
expect(status.email).toBeNull();
expect(status.lastSyncAt).toBeNull();
- expect(status.deviceId).toBeDefined(); // Auto-created by DB migration
- expect(status.serverUrl).toBeDefined(); // Auto-created by DB migration
+ expect(status.deviceId).toBeDefined();
});
it('should include pending operation count', async () => {
@@ -460,7 +365,6 @@ describe('Sync Config', () => {
timestamp: Date.now(),
retryCount: 0,
payload: null,
- vectorClock: {},
},
{
id: 'queue-2',
@@ -469,7 +373,6 @@ describe('Sync Config', () => {
timestamp: Date.now(),
retryCount: 0,
payload: null,
- vectorClock: {},
},
{
id: 'queue-3',
@@ -478,7 +381,6 @@ describe('Sync Config', () => {
timestamp: Date.now(),
retryCount: 0,
payload: null,
- vectorClock: {},
},
]);
@@ -495,9 +397,7 @@ describe('Sync Config', () => {
enabled: true,
userId: 'user-123',
email: 'test@example.com',
- token: 'test-token',
lastSyncAt: Date.now() - 3600000,
- vectorClock: { 'device-123': 10, 'device-456': 5 },
});
// Add tasks
@@ -527,7 +427,6 @@ describe('Sync Config', () => {
timestamp: Date.now(),
retryCount: 0,
payload: null,
- vectorClock: {},
});
});
@@ -536,8 +435,7 @@ describe('Sync Config', () => {
const config = await getSyncConfig();
- expect(config?.lastSyncAt).toBe(0);
- expect(config?.vectorClock).toEqual({});
+ expect(config?.lastSyncAt).toBeNull();
const taskCount = await db.tasks.count();
expect(taskCount).toBe(0);
@@ -554,7 +452,6 @@ describe('Sync Config', () => {
expect(config?.enabled).toBe(true);
expect(config?.userId).toBe('user-123');
expect(config?.email).toBe('test@example.com');
- expect(config?.token).toBe('test-token');
});
it('should throw error when sync not enabled', async () => {
diff --git a/tests/data/sync/queue.test.ts b/tests/data/sync/queue.test.ts
index bda06ab6..88013c07 100644
--- a/tests/data/sync/queue.test.ts
+++ b/tests/data/sync/queue.test.ts
@@ -35,10 +35,9 @@ describe('SyncQueue', () => {
tags: [],
subtasks: [],
dependencies: [],
- vectorClock: { 'device-1': 1 },
};
- await queue.enqueue('create', 'task-1', task, task.vectorClock);
+ await queue.enqueue('create', 'task-1', task);
const pending = await queue.getPending();
@@ -46,14 +45,13 @@ describe('SyncQueue', () => {
expect(pending[0].taskId).toBe('task-1');
expect(pending[0].operation).toBe('create');
expect(pending[0].payload).toEqual(task);
- expect(pending[0].vectorClock).toEqual({ 'device-1': 1 });
expect(pending[0].retryCount).toBe(0);
});
it('should enqueue multiple operations', async () => {
- await queue.enqueue('create', 'task-1', null, {});
- await queue.enqueue('update', 'task-2', null, {});
- await queue.enqueue('delete', 'task-3', null, {});
+ await queue.enqueue('create', 'task-1', null);
+ await queue.enqueue('update', 'task-2', null);
+ await queue.enqueue('delete', 'task-3', null);
const pending = await queue.getPending();
@@ -65,7 +63,7 @@ describe('SyncQueue', () => {
it('should set timestamp on enqueue', async () => {
const beforeEnqueue = Date.now();
- await queue.enqueue('create', 'task-1', null, {});
+ await queue.enqueue('create', 'task-1', null);
const pending = await queue.getPending();
const afterEnqueue = Date.now();
@@ -75,7 +73,7 @@ describe('SyncQueue', () => {
});
it('should initialize retry count to 0', async () => {
- await queue.enqueue('create', 'task-1', null, {});
+ await queue.enqueue('create', 'task-1', null);
const pending = await queue.getPending();
@@ -92,11 +90,11 @@ describe('SyncQueue', () => {
it('should return all pending operations ordered by timestamp', async () => {
// Add operations with small delays to ensure different timestamps
- await queue.enqueue('delete', 'task-3', null, {});
+ await queue.enqueue('delete', 'task-3', null);
await new Promise(resolve => setTimeout(resolve, 10));
- await queue.enqueue('update', 'task-2', null, {});
+ await queue.enqueue('update', 'task-2', null);
await new Promise(resolve => setTimeout(resolve, 10));
- await queue.enqueue('create', 'task-1', null, {});
+ await queue.enqueue('create', 'task-1', null);
const pending = await queue.getPending();
@@ -122,9 +120,9 @@ describe('SyncQueue', () => {
});
it('should return correct count of pending operations', async () => {
- await queue.enqueue('create', 'task-1', null, {});
- await queue.enqueue('update', 'task-2', null, {});
- await queue.enqueue('delete', 'task-3', null, {});
+ await queue.enqueue('create', 'task-1', null);
+ await queue.enqueue('update', 'task-2', null);
+ await queue.enqueue('delete', 'task-3', null);
const count = await queue.getPendingCount();
@@ -132,8 +130,8 @@ describe('SyncQueue', () => {
});
it('should update count after dequeue', async () => {
- await queue.enqueue('create', 'task-1', null, {});
- await queue.enqueue('update', 'task-2', null, {});
+ await queue.enqueue('create', 'task-1', null);
+ await queue.enqueue('update', 'task-2', null);
const pending = await queue.getPending();
await queue.dequeue(pending[0].id);
@@ -146,7 +144,7 @@ describe('SyncQueue', () => {
describe('dequeue', () => {
it('should remove operation from queue', async () => {
- await queue.enqueue('create', 'task-1', null, {});
+ await queue.enqueue('create', 'task-1', null);
const pending = await queue.getPending();
expect(pending.length).toBe(1);
@@ -158,9 +156,9 @@ describe('SyncQueue', () => {
});
it('should only remove specified operation', async () => {
- await queue.enqueue('create', 'task-1', null, {});
- await queue.enqueue('update', 'task-2', null, {});
- await queue.enqueue('delete', 'task-3', null, {});
+ await queue.enqueue('create', 'task-1', null);
+ await queue.enqueue('update', 'task-2', null);
+ await queue.enqueue('delete', 'task-3', null);
const pending = await queue.getPending();
const task2Operation = pending.find(p => p.taskId === 'task-2');
@@ -179,9 +177,9 @@ describe('SyncQueue', () => {
describe('dequeueBulk', () => {
it('should remove multiple operations at once', async () => {
- await queue.enqueue('create', 'task-1', null, {});
- await queue.enqueue('update', 'task-2', null, {});
- await queue.enqueue('delete', 'task-3', null, {});
+ await queue.enqueue('create', 'task-1', null);
+ await queue.enqueue('update', 'task-2', null);
+ await queue.enqueue('delete', 'task-3', null);
const pending = await queue.getPending();
const task1Op = pending.find(p => p.taskId === 'task-1');
@@ -197,7 +195,7 @@ describe('SyncQueue', () => {
});
it('should handle empty bulk delete', async () => {
- await queue.enqueue('create', 'task-1', null, {});
+ await queue.enqueue('create', 'task-1', null);
await queue.dequeueBulk([]);
@@ -207,8 +205,8 @@ describe('SyncQueue', () => {
});
it('should handle deleting all operations in bulk', async () => {
- await queue.enqueue('create', 'task-1', null, {});
- await queue.enqueue('update', 'task-2', null, {});
+ await queue.enqueue('create', 'task-1', null);
+ await queue.enqueue('update', 'task-2', null);
const pending = await queue.getPending();
await queue.dequeueBulk(pending.map(p => p.id));
@@ -221,7 +219,7 @@ describe('SyncQueue', () => {
describe('incrementRetry', () => {
it('should increment retry count', async () => {
- await queue.enqueue('create', 'task-1', null, {});
+ await queue.enqueue('create', 'task-1', null);
const pending = await queue.getPending();
const itemId = pending[0].id;
@@ -234,7 +232,7 @@ describe('SyncQueue', () => {
});
it('should increment multiple times', async () => {
- await queue.enqueue('create', 'task-1', null, {});
+ await queue.enqueue('create', 'task-1', null);
const pending = await queue.getPending();
const itemId = pending[0].id;
@@ -255,9 +253,9 @@ describe('SyncQueue', () => {
describe('clear', () => {
it('should remove all operations', async () => {
- await queue.enqueue('create', 'task-1', null, {});
- await queue.enqueue('update', 'task-2', null, {});
- await queue.enqueue('delete', 'task-3', null, {});
+ await queue.enqueue('create', 'task-1', null);
+ await queue.enqueue('update', 'task-2', null);
+ await queue.enqueue('delete', 'task-3', null);
await queue.clear();
@@ -277,9 +275,9 @@ describe('SyncQueue', () => {
describe('getForTask', () => {
it('should return operations for specific task', async () => {
- await queue.enqueue('create', 'task-1', null, {});
- await queue.enqueue('update', 'task-1', null, {});
- await queue.enqueue('update', 'task-2', null, {});
+ await queue.enqueue('create', 'task-1', null);
+ await queue.enqueue('update', 'task-1', null);
+ await queue.enqueue('update', 'task-2', null);
const operations = await queue.getForTask('task-1');
@@ -288,7 +286,7 @@ describe('SyncQueue', () => {
});
it('should return empty array when no operations for task', async () => {
- await queue.enqueue('create', 'task-1', null, {});
+ await queue.enqueue('create', 'task-1', null);
const operations = await queue.getForTask('task-2');
@@ -296,9 +294,9 @@ describe('SyncQueue', () => {
});
it('should return all operation types for a task', async () => {
- await queue.enqueue('create', 'task-1', null, {});
- await queue.enqueue('update', 'task-1', null, {});
- await queue.enqueue('delete', 'task-1', null, {});
+ await queue.enqueue('create', 'task-1', null);
+ await queue.enqueue('update', 'task-1', null);
+ await queue.enqueue('delete', 'task-1', null);
const operations = await queue.getForTask('task-1');
@@ -325,7 +323,6 @@ describe('SyncQueue', () => {
tags: [],
subtasks: [],
dependencies: [],
- vectorClock: {},
},
{
id: 'task-2',
@@ -341,7 +338,6 @@ describe('SyncQueue', () => {
tags: [],
subtasks: [],
dependencies: [],
- vectorClock: {},
},
];
@@ -383,13 +379,12 @@ describe('SyncQueue', () => {
tags: [],
subtasks: [],
dependencies: [],
- vectorClock: {},
};
await db.tasks.add(task);
// Manually add task to queue first
- await queue.enqueue('update', 'task-1', task, {});
+ await queue.enqueue('update', 'task-1', task);
// Now try to populate
const count = await queue.populateFromExistingTasks();
@@ -417,7 +412,6 @@ describe('SyncQueue', () => {
tags: [],
subtasks: [],
dependencies: [],
- vectorClock: {},
}));
await db.tasks.bulkAdd(tasks);
@@ -431,32 +425,6 @@ describe('SyncQueue', () => {
expect(pending.length).toBe(100);
});
- it('should use task vector clock when adding to queue', async () => {
- const task: TaskRecord = {
- id: 'task-1',
- title: 'Task 1',
- description: '',
- urgent: true,
- important: true,
- quadrant: 'urgent-important',
- completed: false,
- createdAt: Date.now(),
- updatedAt: Date.now(),
- recurrence: 'none',
- tags: [],
- subtasks: [],
- dependencies: [],
- vectorClock: { 'device-1': 5, 'device-2': 3 },
- };
-
- await db.tasks.add(task);
-
- await queue.populateFromExistingTasks();
-
- const pending = await queue.getPending();
-
- expect(pending[0].vectorClock).toEqual({ 'device-1': 5, 'device-2': 3 });
- });
});
describe('getSyncQueue singleton', () => {
@@ -470,41 +438,18 @@ describe('SyncQueue', () => {
describe('Edge Cases', () => {
it('should handle enqueueing null payload', async () => {
- await queue.enqueue('delete', 'task-1', null, {});
+ await queue.enqueue('delete', 'task-1', null);
const pending = await queue.getPending();
expect(pending[0].payload).toBeNull();
});
- it('should handle empty vector clock', async () => {
- await queue.enqueue('create', 'task-1', null, {});
-
- const pending = await queue.getPending();
-
- expect(pending[0].vectorClock).toEqual({});
- });
-
- it('should handle complex vector clock', async () => {
- const complexClock = {
- 'device-1': 10,
- 'device-2': 5,
- 'device-3': 15,
- 'device-4': 2,
- };
-
- await queue.enqueue('update', 'task-1', null, complexClock);
-
- const pending = await queue.getPending();
-
- expect(pending[0].vectorClock).toEqual(complexClock);
- });
-
it('should handle rapid enqueue operations', async () => {
const promises = [];
for (let i = 0; i < 50; i++) {
- promises.push(queue.enqueue('create', `task-${i}`, null, {}));
+ promises.push(queue.enqueue('create', `task-${i}`, null));
}
await Promise.all(promises);
@@ -517,7 +462,7 @@ describe('SyncQueue', () => {
it('should handle concurrent dequeue operations', async () => {
// Add 10 operations
for (let i = 0; i < 10; i++) {
- await queue.enqueue('create', `task-${i}`, null, {});
+ await queue.enqueue('create', `task-${i}`, null);
}
const pending = await queue.getPending();
@@ -540,7 +485,7 @@ describe('SyncQueue', () => {
const timestamps: number[] = [];
for (let i = 0; i < 5; i++) {
- await queue.enqueue('create', `task-${i}`, null, {});
+ await queue.enqueue('create', `task-${i}`, null);
const pending = await queue.getPending();
timestamps.push(pending[pending.length - 1].timestamp);
}
diff --git a/tests/data/task-dependencies.test.ts b/tests/data/task-dependencies.test.ts
index a2fc2f16..899afa6f 100644
--- a/tests/data/task-dependencies.test.ts
+++ b/tests/data/task-dependencies.test.ts
@@ -26,13 +26,6 @@ vi.mock('@/lib/sync/config', () => ({
})),
}));
-vi.mock('@/lib/sync/vector-clock', () => ({
- incrementVectorClock: vi.fn((clock) => ({
- ...clock,
- 'test-device': (clock['test-device'] || 0) + 1,
- })),
-}));
-
describe('Task Dependency CRUD Operations', () => {
beforeEach(async () => {
const db = getDb();
@@ -94,16 +87,6 @@ describe('Task Dependency CRUD Operations', () => {
);
});
- it('should update vector clock when adding dependency', async () => {
- const taskA = await createTask(createMockTaskDraft({ title: 'Task A' }));
- const taskB = await createTask(createMockTaskDraft({ title: 'Task B' }));
-
- const updated = await addDependency(taskA.id, taskB.id);
-
- expect(updated.vectorClock).toBeDefined();
- expect(updated.vectorClock?.['test-device']).toBeGreaterThan(0);
- });
-
it('should preserve existing task properties when adding dependency', async () => {
const taskA = await createTask(
createMockTaskDraft({
@@ -173,17 +156,6 @@ describe('Task Dependency CRUD Operations', () => {
);
});
- it('should update vector clock when removing dependency', async () => {
- const taskA = await createTask(createMockTaskDraft({ title: 'Task A' }));
- const taskB = await createTask(createMockTaskDraft({ title: 'Task B' }));
-
- await addDependency(taskA.id, taskB.id);
- const updated = await removeDependency(taskA.id, taskB.id);
-
- expect(updated.vectorClock).toBeDefined();
- expect(updated.vectorClock?.['test-device']).toBeGreaterThan(0);
- });
-
it('should update updatedAt timestamp when removing dependency', async () => {
const taskA = await createTask(createMockTaskDraft({ title: 'Task A' }));
const taskB = await createTask(createMockTaskDraft({ title: 'Task B' }));
diff --git a/tests/data/tasks/crud.test.ts b/tests/data/tasks/crud.test.ts
index d8c5195b..96d02699 100644
--- a/tests/data/tasks/crud.test.ts
+++ b/tests/data/tasks/crud.test.ts
@@ -107,8 +107,8 @@ describe('Task CRUD Operations', () => {
describe('listTasks', () => {
it('should return all tasks ordered by creation date (newest first)', async () => {
const tasks: TaskRecord[] = [
- { ...baseDraft, id: '1', quadrant: 'urgent-important', completed: false, createdAt: '2025-01-15T10:00:00Z', updatedAt: '2025-01-15T10:00:00Z', vectorClock: {}, notificationSent: false },
- { ...baseDraft, id: '2', quadrant: 'urgent-important', completed: false, createdAt: '2025-01-14T10:00:00Z', updatedAt: '2025-01-14T10:00:00Z', vectorClock: {}, notificationSent: false },
+ { ...baseDraft, id: '1', quadrant: 'urgent-important', completed: false, createdAt: '2025-01-15T10:00:00Z', updatedAt: '2025-01-15T10:00:00Z', notificationSent: false },
+ { ...baseDraft, id: '2', quadrant: 'urgent-important', completed: false, createdAt: '2025-01-14T10:00:00Z', updatedAt: '2025-01-14T10:00:00Z', notificationSent: false },
];
mockDb.tasks.toArray.mockResolvedValue(tasks);
@@ -162,15 +162,6 @@ describe('Task CRUD Operations', () => {
expect(task4.quadrant).toBe('not-urgent-not-important');
});
- it('should initialize vector clock', async () => {
- mockDb.tasks.add.mockResolvedValue(undefined);
-
- const result = await createTask(baseDraft);
-
- expect(result.vectorClock).toBeDefined();
- expect(result.vectorClock).toHaveProperty('test-device');
- });
-
it('should throw error for invalid task data', async () => {
const invalid = { ...baseDraft, title: '' };
@@ -187,8 +178,7 @@ describe('Task CRUD Operations', () => {
completed: false,
createdAt: '2025-01-15T10:00:00Z',
updatedAt: '2025-01-15T10:00:00Z',
- vectorClock: {},
- notificationSent: false,
+ notificationSent: false,
};
mockDb.tasks.get.mockResolvedValue(existing);
@@ -209,8 +199,7 @@ describe('Task CRUD Operations', () => {
completed: false,
createdAt: '2025-01-15T10:00:00Z',
updatedAt: '2025-01-15T10:00:00Z',
- vectorClock: {},
- notificationSent: false,
+ notificationSent: false,
};
mockDb.tasks.get.mockResolvedValue(existing);
@@ -235,8 +224,7 @@ describe('Task CRUD Operations', () => {
completed: false,
createdAt: '2025-01-15T10:00:00Z',
updatedAt: '2025-01-15T10:00:00Z',
- vectorClock: {},
- notificationSent: true,
+ notificationSent: true,
lastNotificationAt: '2025-01-15T10:00:00Z',
snoozedUntil: '2025-01-15T12:00:00Z',
};
@@ -261,8 +249,7 @@ describe('Task CRUD Operations', () => {
completed: false,
createdAt: '2025-01-15T10:00:00Z',
updatedAt: '2025-01-15T10:00:00Z',
- vectorClock: {},
- notificationSent: false,
+ notificationSent: false,
};
mockDb.tasks.get.mockResolvedValue(existing);
@@ -283,8 +270,7 @@ describe('Task CRUD Operations', () => {
completedAt: '2025-01-15T10:00:00Z',
createdAt: '2025-01-15T10:00:00Z',
updatedAt: '2025-01-15T10:00:00Z',
- vectorClock: {},
- notificationSent: false,
+ notificationSent: false,
};
mockDb.tasks.get.mockResolvedValue(existing);
@@ -306,8 +292,7 @@ describe('Task CRUD Operations', () => {
completed: false,
createdAt: '2025-01-15T10:00:00Z',
updatedAt: '2025-01-15T10:00:00Z',
- vectorClock: {},
- notificationSent: false,
+ notificationSent: false,
};
mockDb.tasks.get.mockResolvedValue(existing);
@@ -334,8 +319,7 @@ describe('Task CRUD Operations', () => {
completed: false,
createdAt: '2025-01-15T10:00:00Z',
updatedAt: '2025-01-15T10:00:00Z',
- vectorClock: {},
- notificationSent: false,
+ notificationSent: false,
};
mockDb.tasks.get.mockResolvedValue(existing);
@@ -370,8 +354,7 @@ describe('Task CRUD Operations', () => {
completed: false,
createdAt: '2025-01-15T10:00:00Z',
updatedAt: '2025-01-15T10:00:00Z',
- vectorClock: {},
- notificationSent: false,
+ notificationSent: false,
};
mockDb.tasks.get.mockResolvedValue(existing);
@@ -395,8 +378,7 @@ describe('Task CRUD Operations', () => {
completed: false,
createdAt: '2025-01-15T10:00:00Z',
updatedAt: '2025-01-15T10:00:00Z',
- vectorClock: {},
- notificationSent: false,
+ notificationSent: false,
};
mockDb.tasks.get.mockResolvedValue(original);
diff --git a/tests/data/tasks/dependencies.test.ts b/tests/data/tasks/dependencies.test.ts
index 0cda8ce1..fedbaa7e 100644
--- a/tests/data/tasks/dependencies.test.ts
+++ b/tests/data/tasks/dependencies.test.ts
@@ -59,7 +59,6 @@ describe('Task Dependency Operations', () => {
dependencies: ['task-2', 'task-3'],
createdAt: '2025-01-15T10:00:00Z',
updatedAt: '2025-01-15T10:00:00Z',
- vectorClock: { 'test-device': 1 },
notifyBefore: 15,
notificationEnabled: true,
notificationSent: false,
@@ -109,16 +108,6 @@ describe('Task Dependency Operations', () => {
expect(mockDb.tasks.put).not.toHaveBeenCalled();
});
- it('should increment vector clock for new dependency', async () => {
- mockDb.tasks.get.mockResolvedValue(baseTask);
- mockDb.tasks.put.mockResolvedValue(undefined);
-
- const result = await addDependency('task-1', 'task-4');
-
- expect(result.vectorClock).toHaveProperty('test-device');
- expect(result.vectorClock['test-device']).toBeGreaterThan(baseTask.vectorClock['test-device']);
- });
-
it('should update updatedAt timestamp', async () => {
mockDb.tasks.get.mockResolvedValue(baseTask);
mockDb.tasks.put.mockResolvedValue(undefined);
@@ -141,8 +130,7 @@ describe('Task Dependency Operations', () => {
expect(getSyncQueue().enqueue).toHaveBeenCalledWith(
'update',
'task-1',
- result,
- result.vectorClock
+ result
);
});
@@ -193,16 +181,6 @@ describe('Task Dependency Operations', () => {
expect(result.dependencies).not.toContain('task-2');
});
- it('should increment vector clock', async () => {
- mockDb.tasks.get.mockResolvedValue(baseTask);
- mockDb.tasks.put.mockResolvedValue(undefined);
-
- const result = await removeDependency('task-1', 'task-2');
-
- expect(result.vectorClock).toHaveProperty('test-device');
- expect(result.vectorClock['test-device']).toBeGreaterThan(baseTask.vectorClock['test-device']);
- });
-
it('should update updatedAt timestamp', async () => {
mockDb.tasks.get.mockResolvedValue(baseTask);
mockDb.tasks.put.mockResolvedValue(undefined);
@@ -225,8 +203,7 @@ describe('Task Dependency Operations', () => {
expect(getSyncQueue().enqueue).toHaveBeenCalledWith(
'update',
'task-1',
- result,
- result.vectorClock
+ result
);
});
diff --git a/tests/data/tasks/import-export.test.ts b/tests/data/tasks/import-export.test.ts
index 5bca9a14..aa97f30b 100644
--- a/tests/data/tasks/import-export.test.ts
+++ b/tests/data/tasks/import-export.test.ts
@@ -33,7 +33,6 @@ describe('Task Import/Export Operations', () => {
dependencies: [],
createdAt: '2025-01-15T10:00:00Z',
updatedAt: '2025-01-15T10:00:00Z',
- vectorClock: { 'device-1': 1 },
notifyBefore: 15,
notificationEnabled: true,
notificationSent: false,
@@ -56,7 +55,6 @@ describe('Task Import/Export Operations', () => {
dependencies: [],
createdAt: '2025-01-14T10:00:00Z',
updatedAt: '2025-01-16T10:00:00Z',
- vectorClock: { 'device-1': 2 },
notifyBefore: 15,
notificationEnabled: true,
notificationSent: false,
diff --git a/tests/data/tasks/subtasks.test.ts b/tests/data/tasks/subtasks.test.ts
index e01b60e3..2ac2bbad 100644
--- a/tests/data/tasks/subtasks.test.ts
+++ b/tests/data/tasks/subtasks.test.ts
@@ -61,7 +61,6 @@ describe('Task Subtask Operations', () => {
dependencies: [],
createdAt: '2025-01-15T10:00:00Z',
updatedAt: '2025-01-15T10:00:00Z',
- vectorClock: { 'test-device': 1 },
notifyBefore: 15,
notificationEnabled: true,
notificationSent: false,
@@ -112,16 +111,6 @@ describe('Task Subtask Operations', () => {
expect(result.subtasks[2].completed).toBe(false); // Unchanged
});
- it('should increment vector clock', async () => {
- mockDb.tasks.get.mockResolvedValue(baseTask);
- mockDb.tasks.put.mockResolvedValue(undefined);
-
- const result = await toggleSubtask('task-1', 'sub-1', true);
-
- expect(result.vectorClock).toHaveProperty('test-device');
- expect(result.vectorClock['test-device']).toBeGreaterThan(baseTask.vectorClock['test-device']);
- });
-
it('should update updatedAt timestamp', async () => {
mockDb.tasks.get.mockResolvedValue(baseTask);
mockDb.tasks.put.mockResolvedValue(undefined);
@@ -144,8 +133,7 @@ describe('Task Subtask Operations', () => {
expect(getSyncQueue().enqueue).toHaveBeenCalledWith(
'update',
'task-1',
- result,
- result.vectorClock
+ result
);
});
@@ -218,16 +206,6 @@ describe('Task Subtask Operations', () => {
expect(result.subtasks[3].completed).toBe(false);
});
- it('should increment vector clock', async () => {
- mockDb.tasks.get.mockResolvedValue(baseTask);
- mockDb.tasks.put.mockResolvedValue(undefined);
-
- const result = await addSubtask('task-1', 'New Subtask');
-
- expect(result.vectorClock).toHaveProperty('test-device');
- expect(result.vectorClock['test-device']).toBeGreaterThan(baseTask.vectorClock['test-device']);
- });
-
it('should update updatedAt timestamp', async () => {
mockDb.tasks.get.mockResolvedValue(baseTask);
mockDb.tasks.put.mockResolvedValue(undefined);
@@ -250,8 +228,7 @@ describe('Task Subtask Operations', () => {
expect(getSyncQueue().enqueue).toHaveBeenCalledWith(
'update',
'task-1',
- result,
- result.vectorClock
+ result
);
});
@@ -303,16 +280,6 @@ describe('Task Subtask Operations', () => {
expect(result.subtasks[1]).toEqual(baseTask.subtasks[2]);
});
- it('should increment vector clock', async () => {
- mockDb.tasks.get.mockResolvedValue(baseTask);
- mockDb.tasks.put.mockResolvedValue(undefined);
-
- const result = await deleteSubtask('task-1', 'sub-2');
-
- expect(result.vectorClock).toHaveProperty('test-device');
- expect(result.vectorClock['test-device']).toBeGreaterThan(baseTask.vectorClock['test-device']);
- });
-
it('should update updatedAt timestamp', async () => {
mockDb.tasks.get.mockResolvedValue(baseTask);
mockDb.tasks.put.mockResolvedValue(undefined);
@@ -335,8 +302,7 @@ describe('Task Subtask Operations', () => {
expect(getSyncQueue().enqueue).toHaveBeenCalledWith(
'update',
'task-1',
- result,
- result.vectorClock
+ result
);
});
diff --git a/tests/data/use-sync.test.ts b/tests/data/use-sync.test.ts
index a8eae254..c39efcc4 100644
--- a/tests/data/use-sync.test.ts
+++ b/tests/data/use-sync.test.ts
@@ -3,17 +3,12 @@ import { renderHook, waitFor, act } from '@testing-library/react';
import { useSync } from '@/lib/hooks/use-sync';
import { getSyncEngine } from '@/lib/sync/engine';
import { getSyncCoordinator } from '@/lib/sync/sync-coordinator';
-import { getHealthMonitor } from '@/lib/sync/health-monitor';
import { getBackgroundSyncManager } from '@/lib/sync/background-sync';
import { getAutoSyncConfig } from '@/lib/sync/config';
-// Type import used for documentation
-// eslint-disable-next-line @typescript-eslint/no-unused-vars
-import type { SyncResult } from '@/lib/sync/types';
// Mock the sync modules
vi.mock('@/lib/sync/engine');
vi.mock('@/lib/sync/sync-coordinator');
-vi.mock('@/lib/sync/health-monitor');
vi.mock('@/lib/sync/background-sync', () => ({
getBackgroundSyncManager: vi.fn(),
}));
@@ -27,8 +22,6 @@ describe('useSync', () => {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
let mockCoordinator: any;
// eslint-disable-next-line @typescript-eslint/no-explicit-any
- let mockHealthMonitor: any;
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
let mockBackgroundSyncManager: any;
const flushAsync = async () => {
@@ -60,19 +53,6 @@ describe('useSync', () => {
};
vi.mocked(getSyncCoordinator).mockReturnValue(mockCoordinator);
- // Setup mock health monitor
- mockHealthMonitor = {
- isActive: vi.fn().mockReturnValue(false),
- start: vi.fn(),
- stop: vi.fn(),
- check: vi.fn().mockResolvedValue({
- healthy: true,
- issues: [],
- timestamp: Date.now(),
- }),
- };
- vi.mocked(getHealthMonitor).mockReturnValue(mockHealthMonitor);
-
mockBackgroundSyncManager = {
isRunning: vi.fn().mockReturnValue(false),
start: vi.fn().mockResolvedValue(undefined),
@@ -115,24 +95,24 @@ describe('useSync', () => {
expect(mockEngine.isEnabled).toHaveBeenCalled();
});
- it('should start health monitor when sync is enabled', async () => {
+ it('should start background sync manager when sync is enabled', async () => {
mockEngine.isEnabled.mockResolvedValue(true);
renderHook(() => useSync());
await flushAsync();
- expect(mockHealthMonitor.start).toHaveBeenCalled();
+ expect(mockBackgroundSyncManager.start).toHaveBeenCalled();
});
- it('should not start health monitor when sync is disabled', async () => {
+ it('should not start background sync when sync is disabled', async () => {
mockEngine.isEnabled.mockResolvedValue(false);
renderHook(() => useSync());
await flushAsync();
- expect(mockHealthMonitor.start).not.toHaveBeenCalled();
+ expect(mockBackgroundSyncManager.start).not.toHaveBeenCalled();
});
});
@@ -328,32 +308,10 @@ describe('useSync', () => {
});
});
- describe('health monitoring', () => {
- it('should start health monitor when sync is enabled', async () => {
- mockEngine.isEnabled.mockResolvedValue(true);
-
- renderHook(() => useSync());
-
- await flushAsync();
-
- expect(mockHealthMonitor.start).toHaveBeenCalled();
- });
-
- it('should not start health monitor when sync is disabled', async () => {
- mockEngine.isEnabled.mockResolvedValue(false);
-
- renderHook(() => useSync());
-
- await flushAsync();
-
- expect(mockHealthMonitor.start).not.toHaveBeenCalled();
- });
- });
-
describe('cleanup', () => {
- it('should stop health monitor on unmount', async () => {
+ it('should stop background sync on unmount', async () => {
mockEngine.isEnabled.mockResolvedValue(true);
- mockHealthMonitor.isActive.mockReturnValue(true);
+ mockBackgroundSyncManager.isRunning.mockReturnValue(true);
const { unmount } = renderHook(() => useSync());
@@ -361,7 +319,7 @@ describe('useSync', () => {
unmount();
- expect(mockHealthMonitor.stop).toHaveBeenCalled();
+ expect(mockBackgroundSyncManager.stop).toHaveBeenCalled();
});
it('should clear intervals on unmount', () => {
diff --git a/tests/data/vector-clock.test.ts b/tests/data/vector-clock.test.ts
deleted file mode 100644
index e4488f65..00000000
--- a/tests/data/vector-clock.test.ts
+++ /dev/null
@@ -1,365 +0,0 @@
-import { describe, it, expect } from 'vitest';
-import {
- compareVectorClocks,
- mergeVectorClocks,
- incrementVectorClock,
- happensBefore,
- areConcurrent,
- createVectorClock,
- cloneVectorClock,
-} from '@/lib/sync/vector-clock';
-import type { VectorClock } from '@/lib/sync/types';
-
-describe('Vector Clock Operations', () => {
- describe('createVectorClock', () => {
- it('should create initial clock with device counter at 1', () => {
- const clock = createVectorClock('device-a');
- expect(clock).toEqual({ 'device-a': 1 });
- });
-
- it('should handle different device IDs', () => {
- const clock1 = createVectorClock('laptop');
- const clock2 = createVectorClock('phone');
-
- expect(clock1).toEqual({ laptop: 1 });
- expect(clock2).toEqual({ phone: 1 });
- });
- });
-
- describe('incrementVectorClock', () => {
- it('should increment existing device counter', () => {
- const clock: VectorClock = { 'device-a': 3, 'device-b': 2 };
- const incremented = incrementVectorClock(clock, 'device-a');
-
- expect(incremented).toEqual({ 'device-a': 4, 'device-b': 2 });
- // Should not mutate original
- expect(clock).toEqual({ 'device-a': 3, 'device-b': 2 });
- });
-
- it('should add new device with counter 1', () => {
- const clock: VectorClock = { 'device-a': 3 };
- const incremented = incrementVectorClock(clock, 'device-b');
-
- expect(incremented).toEqual({ 'device-a': 3, 'device-b': 1 });
- });
-
- it('should handle empty clock', () => {
- const clock: VectorClock = {};
- const incremented = incrementVectorClock(clock, 'device-a');
-
- expect(incremented).toEqual({ 'device-a': 1 });
- });
-
- it('should increment from zero if device exists with 0', () => {
- const clock: VectorClock = { 'device-a': 0 };
- const incremented = incrementVectorClock(clock, 'device-a');
-
- expect(incremented).toEqual({ 'device-a': 1 });
- });
- });
-
- describe('compareVectorClocks', () => {
- it('should return "identical" for same clocks', () => {
- const clockA: VectorClock = { 'device-a': 3, 'device-b': 2 };
- const clockB: VectorClock = { 'device-a': 3, 'device-b': 2 };
-
- expect(compareVectorClocks(clockA, clockB)).toBe('identical');
- });
-
- it('should return "identical" for empty clocks', () => {
- const clockA: VectorClock = {};
- const clockB: VectorClock = {};
-
- expect(compareVectorClocks(clockA, clockB)).toBe('identical');
- });
-
- it('should return "b_before_a" when A happened before B (B has larger counters)', () => {
- const clockA: VectorClock = { 'device-a': 2, 'device-b': 1 };
- const clockB: VectorClock = { 'device-a': 3, 'device-b': 2 };
-
- expect(compareVectorClocks(clockA, clockB)).toBe('b_before_a');
- });
-
- it('should return "a_before_b" when B happened before A (A has larger counters)', () => {
- const clockA: VectorClock = { 'device-a': 3, 'device-b': 2 };
- const clockB: VectorClock = { 'device-a': 2, 'device-b': 1 };
-
- expect(compareVectorClocks(clockA, clockB)).toBe('a_before_b');
- });
-
- it('should return "concurrent" for conflicting clocks', () => {
- // Device A has higher counter on device-a, B has higher on device-b
- const clockA: VectorClock = { 'device-a': 3, 'device-b': 1 };
- const clockB: VectorClock = { 'device-a': 2, 'device-b': 5 };
-
- expect(compareVectorClocks(clockA, clockB)).toBe('concurrent');
- });
-
- it('should handle missing devices (treat as 0)', () => {
- const clockA: VectorClock = { 'device-a': 3 };
- const clockB: VectorClock = { 'device-b': 2 };
-
- // A has device-a:3, B has device-a:0 → A greater
- // B has device-b:2, A has device-b:0 → B greater
- // Therefore concurrent
- expect(compareVectorClocks(clockA, clockB)).toBe('concurrent');
- });
-
- it('should return "b_before_a" when A is subset of B', () => {
- const clockA: VectorClock = { 'device-a': 2 };
- const clockB: VectorClock = { 'device-a': 3, 'device-b': 1 };
-
- expect(compareVectorClocks(clockA, clockB)).toBe('b_before_a');
- });
-
- it('should return "a_before_b" when B is subset of A', () => {
- const clockA: VectorClock = { 'device-a': 3, 'device-b': 1 };
- const clockB: VectorClock = { 'device-a': 2 };
-
- expect(compareVectorClocks(clockA, clockB)).toBe('a_before_b');
- });
-
- it('should handle many devices correctly', () => {
- const clockA: VectorClock = { a: 1, b: 2, c: 3, d: 4 };
- const clockB: VectorClock = { a: 2, b: 3, c: 4, d: 5 };
-
- expect(compareVectorClocks(clockA, clockB)).toBe('b_before_a');
- });
-
- it('should detect concurrent with many devices', () => {
- const clockA: VectorClock = { a: 5, b: 2, c: 3 };
- const clockB: VectorClock = { a: 3, b: 6, c: 2 };
-
- expect(compareVectorClocks(clockA, clockB)).toBe('concurrent');
- });
- });
-
- describe('mergeVectorClocks', () => {
- it('should take maximum for each device', () => {
- const clockA: VectorClock = { 'device-a': 3, 'device-b': 1 };
- const clockB: VectorClock = { 'device-a': 2, 'device-b': 5 };
-
- const merged = mergeVectorClocks(clockA, clockB);
-
- expect(merged).toEqual({ 'device-a': 3, 'device-b': 5 });
- });
-
- it('should include devices only in A', () => {
- const clockA: VectorClock = { 'device-a': 3, 'device-c': 7 };
- const clockB: VectorClock = { 'device-b': 2 };
-
- const merged = mergeVectorClocks(clockA, clockB);
-
- expect(merged).toEqual({ 'device-a': 3, 'device-b': 2, 'device-c': 7 });
- });
-
- it('should include devices only in B', () => {
- const clockA: VectorClock = { 'device-a': 3 };
- const clockB: VectorClock = { 'device-b': 2, 'device-c': 4 };
-
- const merged = mergeVectorClocks(clockA, clockB);
-
- expect(merged).toEqual({ 'device-a': 3, 'device-b': 2, 'device-c': 4 });
- });
-
- it('should not mutate original clocks', () => {
- const clockA: VectorClock = { 'device-a': 3 };
- const clockB: VectorClock = { 'device-b': 2 };
-
- mergeVectorClocks(clockA, clockB);
-
- expect(clockA).toEqual({ 'device-a': 3 });
- expect(clockB).toEqual({ 'device-b': 2 });
- });
-
- it('should handle empty clocks', () => {
- const clockA: VectorClock = {};
- const clockB: VectorClock = { 'device-a': 5 };
-
- const merged = mergeVectorClocks(clockA, clockB);
-
- expect(merged).toEqual({ 'device-a': 5 });
- });
-
- it('should handle both empty clocks', () => {
- const clockA: VectorClock = {};
- const clockB: VectorClock = {};
-
- const merged = mergeVectorClocks(clockA, clockB);
-
- expect(merged).toEqual({});
- });
-
- it('should handle many devices', () => {
- const clockA: VectorClock = { a: 10, b: 5, c: 20 };
- const clockB: VectorClock = { a: 8, b: 15, d: 3 };
-
- const merged = mergeVectorClocks(clockA, clockB);
-
- expect(merged).toEqual({ a: 10, b: 15, c: 20, d: 3 });
- });
- });
-
- describe('happensBefore', () => {
- it('should return true when B happened before A (A has larger counters)', () => {
- const clockA: VectorClock = { 'device-a': 3, 'device-b': 2 };
- const clockB: VectorClock = { 'device-a': 2, 'device-b': 1 };
-
- expect(happensBefore(clockA, clockB)).toBe(true);
- expect(happensBefore(clockB, clockA)).toBe(false);
- });
-
- it('should return false for concurrent clocks', () => {
- const clockA: VectorClock = { 'device-a': 3, 'device-b': 1 };
- const clockB: VectorClock = { 'device-a': 2, 'device-b': 5 };
-
- expect(happensBefore(clockA, clockB)).toBe(false);
- expect(happensBefore(clockB, clockA)).toBe(false);
- });
-
- it('should return false for identical clocks', () => {
- const clockA: VectorClock = { 'device-a': 3, 'device-b': 2 };
- const clockB: VectorClock = { 'device-a': 3, 'device-b': 2 };
-
- expect(happensBefore(clockA, clockB)).toBe(false);
- });
- });
-
- describe('areConcurrent', () => {
- it('should return true for concurrent clocks', () => {
- const clockA: VectorClock = { 'device-a': 3, 'device-b': 1 };
- const clockB: VectorClock = { 'device-a': 2, 'device-b': 5 };
-
- expect(areConcurrent(clockA, clockB)).toBe(true);
- });
-
- it('should return false for sequential clocks (B > A)', () => {
- const clockA: VectorClock = { 'device-a': 2, 'device-b': 1 };
- const clockB: VectorClock = { 'device-a': 3, 'device-b': 2 };
-
- expect(areConcurrent(clockA, clockB)).toBe(false);
- });
-
- it('should return false for sequential clocks (A > B)', () => {
- const clockA: VectorClock = { 'device-a': 3, 'device-b': 2 };
- const clockB: VectorClock = { 'device-a': 2, 'device-b': 1 };
-
- expect(areConcurrent(clockA, clockB)).toBe(false);
- });
-
- it('should return false for identical clocks', () => {
- const clockA: VectorClock = { 'device-a': 3, 'device-b': 2 };
- const clockB: VectorClock = { 'device-a': 3, 'device-b': 2 };
-
- expect(areConcurrent(clockA, clockB)).toBe(false);
- });
- });
-
- describe('cloneVectorClock', () => {
- it('should create a copy of the clock', () => {
- const original: VectorClock = { 'device-a': 3, 'device-b': 2 };
- const cloned = cloneVectorClock(original);
-
- expect(cloned).toEqual(original);
- });
-
- it('should not mutate original when clone is modified', () => {
- const original: VectorClock = { 'device-a': 3, 'device-b': 2 };
- const cloned = cloneVectorClock(original);
-
- cloned['device-c'] = 5;
-
- expect(original).toEqual({ 'device-a': 3, 'device-b': 2 });
- expect(cloned).toEqual({ 'device-a': 3, 'device-b': 2, 'device-c': 5 });
- });
-
- it('should handle empty clock', () => {
- const original: VectorClock = {};
- const cloned = cloneVectorClock(original);
-
- expect(cloned).toEqual({});
- });
- });
-
- describe('Real-world scenarios', () => {
- it('should detect sequential updates from same device', () => {
- let clock = createVectorClock('laptop');
- const v1 = clock; // { laptop: 1 }
-
- clock = incrementVectorClock(clock, 'laptop');
- const v2 = clock; // { laptop: 2 }
-
- clock = incrementVectorClock(clock, 'laptop');
- const v3 = clock; // { laptop: 3 }
-
- expect(compareVectorClocks(v1, v2)).toBe('b_before_a'); // v2 is greater
- expect(compareVectorClocks(v2, v3)).toBe('b_before_a'); // v3 is greater
- expect(compareVectorClocks(v1, v3)).toBe('b_before_a'); // v3 is greater
- });
-
- it('should detect conflict in multi-device scenario', () => {
- // Both devices start from same state
- const base: VectorClock = { laptop: 5, phone: 3 };
-
- // Laptop makes change
- const laptopClock = incrementVectorClock(base, 'laptop');
- // { laptop: 6, phone: 3 }
-
- // Phone makes change (without seeing laptop's change)
- const phoneClock = incrementVectorClock(base, 'phone');
- // { laptop: 5, phone: 4 }
-
- // These are concurrent - conflict!
- expect(areConcurrent(laptopClock, phoneClock)).toBe(true);
- });
-
- it('should properly merge after conflict resolution', () => {
- const laptopClock: VectorClock = { laptop: 6, phone: 3 };
- const phoneClock: VectorClock = { laptop: 5, phone: 4 };
-
- // After resolving conflict, merge clocks
- const resolved = mergeVectorClocks(laptopClock, phoneClock);
- // { laptop: 6, phone: 4 }
-
- // Merged clock has greater or equal values than both
- // resolved={laptop:6,phone:4}, laptopClock={laptop:6,phone:3}, phoneClock={laptop:5,phone:4}
- expect(compareVectorClocks(resolved, laptopClock)).toBe('a_before_b'); // resolved has phone:4 > 3
- expect(compareVectorClocks(resolved, phoneClock)).toBe('a_before_b'); // resolved has laptop:6 > 5
- });
-
- it('should handle three-device conflict', () => {
- const base: VectorClock = { a: 1, b: 1, c: 1 };
-
- const clockA = incrementVectorClock(base, 'a'); // { a: 2, b: 1, c: 1 }
- const clockB = incrementVectorClock(base, 'b'); // { a: 1, b: 2, c: 1 }
- const clockC = incrementVectorClock(base, 'c'); // { a: 1, b: 1, c: 2 }
-
- // All three are concurrent with each other
- expect(areConcurrent(clockA, clockB)).toBe(true);
- expect(areConcurrent(clockB, clockC)).toBe(true);
- expect(areConcurrent(clockA, clockC)).toBe(true);
- });
-
- it('should detect cascade of changes', () => {
- // Device A makes change
- let clock: VectorClock = { a: 1 };
-
- // Device B syncs and makes change
- clock = incrementVectorClock(clock, 'b'); // { a: 1, b: 1 }
-
- // Device C syncs from B and makes change
- clock = incrementVectorClock(clock, 'c'); // { a: 1, b: 1, c: 1 }
-
- // Device A syncs from C and makes change
- clock = incrementVectorClock(clock, 'a'); // { a: 2, b: 1, c: 1 }
-
- const initial: VectorClock = { a: 1 };
- const final = clock;
-
- // Final happened after initial (has some greater values)
- // Note: final={a:2,b:1,c:1}, initial={a:1}
- // compareVectorClocks compares: final.a=2 > initial.a=1, so result is 'a_before_b'
- expect(compareVectorClocks(final, initial)).toBe('a_before_b');
- });
- });
-});
diff --git a/tests/fixtures/example-usage.test.ts b/tests/fixtures/example-usage.test.ts
index fa9d5108..2acba67e 100644
--- a/tests/fixtures/example-usage.test.ts
+++ b/tests/fixtures/example-usage.test.ts
@@ -8,7 +8,6 @@ import {
createMockTask,
createMockTasks,
createMockSyncConfig,
- createMockHealthReport,
createMockFetchResponse,
mockDateNow,
mockConsole,
@@ -77,21 +76,6 @@ describe('Example: Using Sync Fixtures', () => {
expect(config.enabled).toBe(true);
});
- it('should create health report', () => {
- const report = createMockHealthReport({
- healthy: false,
- issues: [{
- type: 'token_expired',
- severity: 'error',
- message: 'Token has expired',
- suggestedAction: 'Sign in again',
- }],
- });
-
- expect(report.healthy).toBe(false);
- expect(report.issues).toHaveLength(1);
- expect(report.issues[0].type).toBe('token_expired');
- });
});
describe('Example: Mocking Time', () => {
@@ -211,20 +195,20 @@ describe('Example: Complex Test Scenario', () => {
it('should handle a complete sync scenario', async () => {
// Create test data
const tasks = createMockTasks(3, { completed: false });
- const config = createMockSyncConfig();
-
+ createMockSyncConfig();
+
// Mock API response
const mockResponse = createMockFetchResponse({
success: true,
synced: tasks.length,
});
vi.mocked(global.fetch).mockResolvedValue(mockResponse);
-
+
// Simulate sync operation
console.log('Starting sync...');
- const response = await fetch(`${config.serverUrl}/sync`);
+ const response = await fetch('/api/sync');
const result = await response.json();
-
+
// Verify
expect(result.success).toBe(true);
expect(result.synced).toBe(3);
diff --git a/tests/fixtures/fixtures.test.ts b/tests/fixtures/fixtures.test.ts
index 38327de9..7f901cbe 100644
--- a/tests/fixtures/fixtures.test.ts
+++ b/tests/fixtures/fixtures.test.ts
@@ -9,9 +9,7 @@ import {
createMockSubtask,
createMockTasks,
createMockSyncConfig,
- createMockVectorClock,
createMockSyncQueueItem,
- createMockHealthReport,
createMockNotificationSettings,
createMockSyncHistoryRecord,
createMockFetchResponse,
@@ -87,24 +85,17 @@ describe('Task Fixtures', () => {
describe('Sync Fixtures', () => {
it('should create a mock sync config', () => {
const config = createMockSyncConfig();
-
+
expect(config.key).toBe('sync_config');
expect(config.enabled).toBe(true);
expect(config.userId).toBe('user-123');
expect(config.deviceId).toBe('device-456');
expect(config.email).toBe('test@example.com');
- expect(config.token).toBe('test-token-abc123');
- });
-
- it('should create a mock vector clock', () => {
- const clock = createMockVectorClock();
-
- expect(clock['device-456']).toBe(1);
});
it('should create a mock sync queue item', () => {
const item = createMockSyncQueueItem();
-
+
expect(item.id).toBe('queue-item-1');
expect(item.taskId).toBe('test-task-1');
expect(item.operation).toBe('create');
@@ -112,32 +103,6 @@ describe('Sync Fixtures', () => {
});
});
-describe('Health Monitor Fixtures', () => {
- it('should create a mock health report', () => {
- const report = createMockHealthReport();
-
- expect(report.healthy).toBe(true);
- expect(report.issues).toEqual([]);
- expect(report.timestamp).toBeDefined();
- });
-
- it('should create a mock health report with issues', () => {
- const report = createMockHealthReport({
- healthy: false,
- issues: [{
- type: 'token_expired',
- severity: 'error',
- message: 'Token expired',
- suggestedAction: 'Sign in again',
- }],
- });
-
- expect(report.healthy).toBe(false);
- expect(report.issues).toHaveLength(1);
- expect(report.issues[0].type).toBe('token_expired');
- });
-});
-
describe('Notification Fixtures', () => {
it('should create mock notification settings', () => {
const settings = createMockNotificationSettings();
diff --git a/tests/fixtures/index.ts b/tests/fixtures/index.ts
index 7ac4165c..0a5b6437 100644
--- a/tests/fixtures/index.ts
+++ b/tests/fixtures/index.ts
@@ -13,15 +13,11 @@ import type {
} from '@/lib/types';
import type {
SyncConfig,
- VectorClock,
SyncQueueItem,
- EncryptedTaskBlob,
+ EncryptedTaskRow,
ConflictInfo,
SyncResult,
- PushResponse,
- PullResponse,
} from '@/lib/sync/types';
-import type { HealthReport, HealthIssue } from '@/lib/sync/health-monitor';
// ============================================================================
// Task Fixtures
@@ -110,12 +106,8 @@ export function createMockSyncConfig(overrides?: Partial): SyncConfi
deviceId: 'device-456',
deviceName: 'Test Device',
email: 'test@example.com',
- token: 'test-token-abc123',
- tokenExpiresAt: Date.now() + 60 * 60 * 1000, // 1 hour from now
lastSyncAt: null,
- vectorClock: {},
conflictStrategy: 'last_write_wins',
- serverUrl: 'http://localhost:8787',
consecutiveFailures: 0,
lastFailureAt: null,
lastFailureReason: null,
@@ -124,16 +116,6 @@ export function createMockSyncConfig(overrides?: Partial): SyncConfi
};
}
-/**
- * Create a mock vector clock
- */
-export function createMockVectorClock(overrides?: VectorClock): VectorClock {
- return {
- 'device-456': 1,
- ...overrides,
- };
-}
-
/**
* Create a mock sync queue item
*/
@@ -145,24 +127,26 @@ export function createMockSyncQueueItem(overrides?: Partial): Syn
timestamp: Date.now(),
retryCount: 0,
payload: createMockTask(),
- vectorClock: createMockVectorClock(),
...overrides,
};
}
/**
- * Create a mock encrypted task blob
+ * Create a mock encrypted task row (Supabase shape)
*/
-export function createMockEncryptedTaskBlob(
- overrides?: Partial
-): EncryptedTaskBlob {
+export function createMockEncryptedTaskRow(
+ overrides?: Partial
+): EncryptedTaskRow {
return {
id: 'test-task-1',
- encryptedBlob: 'encrypted-data-base64',
+ user_id: 'user-123',
+ encrypted_blob: 'encrypted-data-base64',
nonce: 'nonce-base64',
version: 1,
- vectorClock: createMockVectorClock(),
- updatedAt: Date.now(),
+ deleted_at: null,
+ created_at: new Date().toISOString(),
+ updated_at: new Date().toISOString(),
+ last_modified_device: 'device-456',
checksum: 'checksum-abc123',
...overrides,
};
@@ -174,13 +158,13 @@ export function createMockEncryptedTaskBlob(
export function createMockConflictInfo(overrides?: Partial): ConflictInfo {
const localTask = createMockTask({ id: 'conflict-task-1', title: 'Local Version' });
const remoteTask = createMockTask({ id: 'conflict-task-1', title: 'Remote Version' });
-
+
return {
taskId: 'conflict-task-1',
local: localTask,
remote: remoteTask,
- localClock: createMockVectorClock({ 'device-456': 2 }),
- remoteClock: createMockVectorClock({ 'device-789': 2 }),
+ localUpdatedAt: new Date().toISOString(),
+ remoteUpdatedAt: new Date().toISOString(),
...overrides,
};
}
@@ -200,62 +184,6 @@ export function createMockSyncResult(overrides?: Partial): SyncResul
};
}
-/**
- * Create a mock push response
- */
-export function createMockPushResponse(overrides?: Partial): PushResponse {
- return {
- accepted: [],
- rejected: [],
- conflicts: [],
- serverVectorClock: createMockVectorClock(),
- ...overrides,
- };
-}
-
-/**
- * Create a mock pull response
- */
-export function createMockPullResponse(overrides?: Partial): PullResponse {
- return {
- tasks: [],
- deletedTaskIds: [],
- serverVectorClock: createMockVectorClock(),
- conflicts: [],
- hasMore: false,
- ...overrides,
- };
-}
-
-// ============================================================================
-// Health Monitor Fixtures
-// ============================================================================
-
-/**
- * Create a mock health issue
- */
-export function createMockHealthIssue(overrides?: Partial): HealthIssue {
- return {
- type: 'stale_queue',
- severity: 'warning',
- message: 'Test health issue',
- suggestedAction: 'Test action',
- ...overrides,
- };
-}
-
-/**
- * Create a mock health report
- */
-export function createMockHealthReport(overrides?: Partial): HealthReport {
- return {
- healthy: true,
- issues: [],
- timestamp: Date.now(),
- ...overrides,
- };
-}
-
// ============================================================================
// Notification Fixtures
// ============================================================================
@@ -355,7 +283,7 @@ export function createMockErrorResponse(
*/
export function createMockDexieTable() {
const data: T[] = [];
-
+
return {
toArray: vi.fn(async () => [...data]),
// eslint-disable-next-line @typescript-eslint/no-explicit-any
@@ -481,7 +409,7 @@ export function mockConsole() {
export function mockDateNow(timestamp: number) {
const original = Date.now;
Date.now = vi.fn(() => timestamp);
-
+
return {
restore: () => {
Date.now = original;
diff --git a/tests/security/oauth-security.test.ts b/tests/security/oauth-security.test.ts
deleted file mode 100644
index 623c72c1..00000000
--- a/tests/security/oauth-security.test.ts
+++ /dev/null
@@ -1,335 +0,0 @@
-/**
- * OAuth Security Tests
- *
- * Tests multi-layer security validation for OAuth postMessage flows.
- * Covers origin validation, message structure, state validation, and attack scenarios.
- */
-
-import { describe, it, expect } from 'vitest';
-import { isOAuthOriginAllowed } from '@/lib/oauth-config';
-import { validateOAuthMessage } from '@/lib/oauth-schemas';
-
-describe('OAuth Security - Origin Validation', () => {
- it('should allow production origin', () => {
- expect(isOAuthOriginAllowed('https://gsd.vinny.dev')).toBe(true);
- });
-
- it('should allow development origin', () => {
- expect(isOAuthOriginAllowed('https://gsd-dev.vinny.dev')).toBe(true);
- });
-
- it('should allow worker domains', () => {
- expect(isOAuthOriginAllowed('https://gsd-sync-worker.vscarpenter.workers.dev')).toBe(true);
- expect(isOAuthOriginAllowed('https://gsd-sync-worker-production.vscarpenter.workers.dev')).toBe(true);
- expect(isOAuthOriginAllowed('https://gsd-sync-worker-dev.vscarpenter.workers.dev')).toBe(true);
- });
-
- it('should allow localhost with any port', () => {
- expect(isOAuthOriginAllowed('http://localhost:3000')).toBe(true);
- expect(isOAuthOriginAllowed('http://localhost:8080')).toBe(true);
- expect(isOAuthOriginAllowed('http://127.0.0.1:3000')).toBe(true);
- expect(isOAuthOriginAllowed('http://127.0.0.1:8787')).toBe(true);
- });
-
- it('should reject untrusted origins', () => {
- expect(isOAuthOriginAllowed('https://evil.com')).toBe(false);
- expect(isOAuthOriginAllowed('https://gsd-vinny-dev.attacker.com')).toBe(false);
- expect(isOAuthOriginAllowed('http://malicious.localhost')).toBe(false);
- });
-
- it('should reject null or undefined origins', () => {
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- expect(isOAuthOriginAllowed(null as any)).toBe(false);
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- expect(isOAuthOriginAllowed(undefined as any)).toBe(false);
- });
-
- it('should reject empty string origin', () => {
- expect(isOAuthOriginAllowed('')).toBe(false);
- });
-});
-
-describe('OAuth Security - Message Structure Validation', () => {
- it('should validate correct OAuth success message', () => {
- const message = {
- type: 'oauth_success',
- state: 'a'.repeat(32), // Valid 32-char state
- authData: {
- userId: 'user123',
- deviceId: 'device456',
- email: 'user@example.com',
- token: 'valid_jwt_token',
- expiresAt: Date.now() + 3600000,
- requiresEncryptionSetup: false,
- provider: 'google',
- },
- };
-
- const result = validateOAuthMessage(message);
- expect(result.success).toBe(true);
- expect(result.data).toBeDefined();
- expect(result.data?.type).toBe('oauth_success');
- });
-
- it('should validate correct OAuth error message', () => {
- const message = {
- type: 'oauth_error',
- error: 'User denied authorization',
- state: 'a'.repeat(32),
- };
-
- const result = validateOAuthMessage(message);
- expect(result.success).toBe(true);
- expect(result.data).toBeDefined();
- expect(result.data?.type).toBe('oauth_error');
- });
-
- it('should reject message with short state token', () => {
- const message = {
- type: 'oauth_success',
- state: 'short', // Too short (< 32 chars)
- authData: {
- userId: 'user123',
- deviceId: 'device456',
- email: 'user@example.com',
- token: 'valid_jwt_token',
- expiresAt: Date.now() + 3600000,
- requiresEncryptionSetup: false,
- provider: 'google',
- },
- };
-
- const result = validateOAuthMessage(message);
- expect(result.success).toBe(false);
- expect(result.error).toContain('State token too short');
- });
-
- it('should reject message with invalid email', () => {
- const message = {
- type: 'oauth_success',
- state: 'a'.repeat(32),
- authData: {
- userId: 'user123',
- deviceId: 'device456',
- email: 'not-an-email', // Invalid email format
- token: 'valid_jwt_token',
- expiresAt: Date.now() + 3600000,
- requiresEncryptionSetup: false,
- provider: 'google',
- },
- };
-
- const result = validateOAuthMessage(message);
- expect(result.success).toBe(false);
- expect(result.error).toContain('Invalid email format');
- });
-
- it('should reject message with invalid provider', () => {
- const message = {
- type: 'oauth_success',
- state: 'a'.repeat(32),
- authData: {
- userId: 'user123',
- deviceId: 'device456',
- email: 'user@example.com',
- token: 'valid_jwt_token',
- expiresAt: Date.now() + 3600000,
- requiresEncryptionSetup: false,
- provider: 'facebook', // Invalid provider (not google or apple)
- },
- };
-
- const result = validateOAuthMessage(message);
- expect(result.success).toBe(false);
- expect(result.error).toContain('Invalid OAuth provider');
- });
-
- it('should reject message with missing required fields', () => {
- const message = {
- type: 'oauth_success',
- state: 'a'.repeat(32),
- authData: {
- userId: 'user123',
- // Missing deviceId, email, token, etc.
- },
- };
-
- const result = validateOAuthMessage(message);
- expect(result.success).toBe(false);
- });
-
- it('should reject message with invalid type', () => {
- const message = {
- type: 'oauth_hacked', // Invalid type
- state: 'a'.repeat(32),
- };
-
- const result = validateOAuthMessage(message);
- expect(result.success).toBe(false);
- });
-
- it('should reject message with negative expiresAt', () => {
- const message = {
- type: 'oauth_success',
- state: 'a'.repeat(32),
- authData: {
- userId: 'user123',
- deviceId: 'device456',
- email: 'user@example.com',
- token: 'valid_jwt_token',
- expiresAt: -1, // Negative timestamp
- requiresEncryptionSetup: false,
- provider: 'google',
- },
- };
-
- const result = validateOAuthMessage(message);
- expect(result.success).toBe(false);
- expect(result.error).toContain('Invalid expiration timestamp');
- });
-
- it('should reject completely malformed message', () => {
- const messages = [
- null,
- undefined,
- 'not an object',
- 123,
- [],
- { random: 'object' },
- ];
-
- messages.forEach((msg) => {
- const result = validateOAuthMessage(msg);
- expect(result.success).toBe(false);
- });
- });
-});
-
-describe('OAuth Security - Attack Scenarios', () => {
- it('should reject CSRF attack with fake state', () => {
- // Attacker tries to inject a message with a fake state
- const attackMessage = {
- type: 'oauth_success',
- state: 'fake_state_from_attacker_12345678901234567890',
- authData: {
- userId: 'attacker_user',
- deviceId: 'attacker_device',
- email: 'attacker@evil.com',
- token: 'fake_token',
- expiresAt: Date.now() + 3600000,
- requiresEncryptionSetup: false,
- provider: 'google',
- },
- };
-
- const result = validateOAuthMessage(attackMessage);
- // Message structure is valid, but state won't match (checked in component logic)
- expect(result.success).toBe(true);
- // In actual usage, the component would reject due to unknown state
- });
-
- it('should reject XSS payload injection attempts', () => {
- const xssMessage = {
- type: 'oauth_success',
- state: 'a'.repeat(32),
- authData: {
- userId: '',
- deviceId: 'device456',
- email: 'user@example.com',
- token: 'valid_jwt_token',
- expiresAt: Date.now() + 3600000,
- requiresEncryptionSetup: false,
- provider: 'google',
- },
- };
-
- // Message validation passes (strings are allowed)
- // XSS prevention happens in rendering layer
- const result = validateOAuthMessage(xssMessage);
- expect(result.success).toBe(true);
- });
-
- it('should validate encryptionSalt if provided', () => {
- const message = {
- type: 'oauth_success',
- state: 'a'.repeat(32),
- authData: {
- userId: 'user123',
- deviceId: 'device456',
- email: 'user@example.com',
- token: 'valid_jwt_token',
- expiresAt: Date.now() + 3600000,
- requiresEncryptionSetup: false,
- provider: 'google',
- encryptionSalt: 'salt_value_here',
- },
- };
-
- const result = validateOAuthMessage(message);
- expect(result.success).toBe(true);
- expect(result.data?.type === 'oauth_success' && result.data.authData.encryptionSalt).toBe('salt_value_here');
- });
-});
-
-describe('OAuth Security - Edge Cases', () => {
- it('should handle OAuth error without state', () => {
- const message = {
- type: 'oauth_error',
- error: 'Generic OAuth error',
- // No state field
- };
-
- const result = validateOAuthMessage(message);
- expect(result.success).toBe(true);
- expect(result.data?.type).toBe('oauth_error');
- });
-
- it('should reject empty error message', () => {
- const message = {
- type: 'oauth_error',
- error: '', // Empty error message
- };
-
- const result = validateOAuthMessage(message);
- expect(result.success).toBe(false);
- expect(result.error).toContain('Error message is required');
- });
-
- it('should handle very long state tokens', () => {
- const message = {
- type: 'oauth_success',
- state: 'a'.repeat(256), // Very long state
- authData: {
- userId: 'user123',
- deviceId: 'device456',
- email: 'user@example.com',
- token: 'valid_jwt_token',
- expiresAt: Date.now() + 3600000,
- requiresEncryptionSetup: false,
- provider: 'google',
- },
- };
-
- const result = validateOAuthMessage(message);
- expect(result.success).toBe(true); // Long states are ok
- });
-
- it('should validate Apple provider', () => {
- const message = {
- type: 'oauth_success',
- state: 'a'.repeat(32),
- authData: {
- userId: 'user123',
- deviceId: 'device456',
- email: 'user@example.com',
- token: 'valid_jwt_token',
- expiresAt: Date.now() + 3600000,
- requiresEncryptionSetup: false,
- provider: 'apple',
- },
- };
-
- const result = validateOAuthMessage(message);
- expect(result.success).toBe(true);
- });
-});
diff --git a/tests/sync/debug.test.ts b/tests/sync/debug.test.ts
deleted file mode 100644
index e9ce58a3..00000000
--- a/tests/sync/debug.test.ts
+++ /dev/null
@@ -1,393 +0,0 @@
-/**
- * Tests for sync debug utilities
- */
-
-import { describe, it, expect, beforeEach, afterEach, vi } from 'vitest';
-import { getDb } from '@/lib/db';
-import { debugSyncQueue, clearStuckOperations, installSyncDebugTools } from '@/lib/sync/debug';
-import {
- createMockSyncConfig,
- createMockSyncQueueItem,
- createMockTask,
- mockConsole,
-} from '../fixtures';
-
-// Mock dependencies
-vi.mock('@/lib/sync/queue', () => ({
- getSyncQueue: vi.fn(() => mockQueue),
-}));
-
-// Create mock queue instance
-const mockQueue = {
- getPending: vi.fn(async () => []),
- clear: vi.fn(async () => {}),
-};
-
-describe('sync/debug', () => {
- let db: ReturnType;
- let consoleMock: ReturnType;
-
- beforeEach(async () => {
- db = getDb();
- consoleMock = mockConsole();
-
- // Clear database
- await db.delete();
- await db.open();
-
- // Reset all mocks
- vi.clearAllMocks();
- mockQueue.getPending.mockResolvedValue([]);
- mockQueue.clear.mockResolvedValue(undefined);
- });
-
- afterEach(async () => {
- consoleMock.restore();
- await db.delete();
- });
-
- describe('debugSyncQueue', () => {
- it('should log sync queue debug information', async () => {
- // Setup test data
- const config = createMockSyncConfig({
- enabled: true,
- lastSyncAt: Date.now() - 60000,
- consecutiveFailures: 0,
- vectorClock: { 'device-1': 5 },
- });
- await db.syncMetadata.add(config);
-
- const task = createMockTask({ id: 'task-1', title: 'Test Task' });
- await db.tasks.add(task);
-
- const queueItem = createMockSyncQueueItem({
- id: 'op-1',
- taskId: 'task-1',
- operation: 'update',
- timestamp: Date.now(),
- retryCount: 0,
- });
- mockQueue.getPending.mockResolvedValue([queueItem]);
-
- // Execute
- const result = await debugSyncQueue();
-
- // Verify console output
- expect(console.log).toHaveBeenCalledWith('=== SYNC QUEUE DEBUG ===');
- expect(console.log).toHaveBeenCalledWith('Total pending operations: 1');
- expect(console.log).toHaveBeenCalledWith('\nPending operations:');
- expect(console.log).toHaveBeenCalledWith('\n=== SYNC CONFIG ===');
- expect(console.log).toHaveBeenCalledWith('\n=== TASKS ===');
- expect(console.log).toHaveBeenCalledWith('Total tasks: 1');
-
- // Verify return value
- expect(result.pendingOps).toHaveLength(1);
- expect(result.pendingOps[0].id).toBe('op-1');
- expect(result.config).toBeDefined();
- expect(result.config?.enabled).toBe(true);
- expect(result.tasks).toHaveLength(1);
- expect(result.tasks[0].id).toBe('task-1');
- });
-
- it('should log pending operation details', async () => {
- await db.syncMetadata.add(createMockSyncConfig());
-
- const timestamp = Date.now();
- const queueItem = createMockSyncQueueItem({
- id: 'op-1',
- taskId: 'task-1',
- operation: 'create',
- timestamp,
- retryCount: 2,
- consolidatedFrom: ['op-0', 'op-00'],
- });
- mockQueue.getPending.mockResolvedValue([queueItem]);
-
- await debugSyncQueue();
-
- // Verify operation details were logged
- expect(console.log).toHaveBeenCalledWith(
- expect.objectContaining({
- id: 'op-1',
- taskId: 'task-1',
- operation: 'create',
- timestamp: new Date(timestamp).toISOString(),
- retryCount: 2,
- consolidatedFrom: 2,
- hasPayload: true,
- })
- );
- });
-
- it('should detect and warn about duplicate task IDs in queue', async () => {
- await db.syncMetadata.add(createMockSyncConfig());
-
- const queueItems = [
- createMockSyncQueueItem({ id: 'op-1', taskId: 'task-1' }),
- createMockSyncQueueItem({ id: 'op-2', taskId: 'task-2' }),
- createMockSyncQueueItem({ id: 'op-3', taskId: 'task-1' }), // Duplicate
- createMockSyncQueueItem({ id: 'op-4', taskId: 'task-2' }), // Duplicate
- ];
- mockQueue.getPending.mockResolvedValue(queueItems);
-
- await debugSyncQueue();
-
- // Verify warning was logged
- expect(console.warn).toHaveBeenCalledWith(
- '\n⚠️ DUPLICATE TASK IDS IN QUEUE:',
- expect.arrayContaining(['task-1', 'task-2'])
- );
- });
-
- it('should not warn when no duplicate task IDs exist', async () => {
- await db.syncMetadata.add(createMockSyncConfig());
-
- const queueItems = [
- createMockSyncQueueItem({ id: 'op-1', taskId: 'task-1' }),
- createMockSyncQueueItem({ id: 'op-2', taskId: 'task-2' }),
- createMockSyncQueueItem({ id: 'op-3', taskId: 'task-3' }),
- ];
- mockQueue.getPending.mockResolvedValue(queueItems);
-
- await debugSyncQueue();
-
- // Verify no warning was logged
- expect(console.warn).not.toHaveBeenCalled();
- });
-
- it('should handle empty queue', async () => {
- await db.syncMetadata.add(createMockSyncConfig());
- mockQueue.getPending.mockResolvedValue([]);
-
- const result = await debugSyncQueue();
-
- expect(console.log).toHaveBeenCalledWith('Total pending operations: 0');
- expect(result.pendingOps).toHaveLength(0);
- });
-
- it('should log sync config details', async () => {
- const lastSyncAt = Date.now() - 120000;
- const nextRetryAt = Date.now() + 60000;
- const config = createMockSyncConfig({
- enabled: true,
- lastSyncAt,
- consecutiveFailures: 3,
- nextRetryAt,
- vectorClock: { 'device-1': 10, 'device-2': 5 },
- });
- await db.syncMetadata.add(config);
-
- await debugSyncQueue();
-
- // Verify config was logged with formatted dates
- expect(console.log).toHaveBeenCalledWith(
- expect.objectContaining({
- enabled: true,
- lastSyncAt: new Date(lastSyncAt).toISOString(),
- consecutiveFailures: 3,
- nextRetryAt: new Date(nextRetryAt).toISOString(),
- vectorClock: { 'device-1': 10, 'device-2': 5 },
- })
- );
- });
-
- it('should handle missing sync config', async () => {
- // No sync config in database
- mockQueue.getPending.mockResolvedValue([]);
-
- const result = await debugSyncQueue();
-
- // Should log null values for config
- expect(console.log).toHaveBeenCalledWith(
- expect.objectContaining({
- enabled: undefined,
- lastSyncAt: null,
- consecutiveFailures: undefined,
- nextRetryAt: null,
- vectorClock: undefined,
- })
- );
-
- expect(result.config).toBeNull();
- });
-
- it('should count tasks correctly', async () => {
- await db.syncMetadata.add(createMockSyncConfig());
-
- // Add multiple tasks
- await db.tasks.add(createMockTask({ id: 'task-1' }));
- await db.tasks.add(createMockTask({ id: 'task-2' }));
- await db.tasks.add(createMockTask({ id: 'task-3' }));
-
- const result = await debugSyncQueue();
-
- expect(console.log).toHaveBeenCalledWith('Total tasks: 3');
- expect(result.tasks).toHaveLength(3);
- });
-
- it('should handle operations without consolidatedFrom', async () => {
- await db.syncMetadata.add(createMockSyncConfig());
-
- const queueItem = createMockSyncQueueItem({
- id: 'op-1',
- consolidatedFrom: undefined,
- });
- mockQueue.getPending.mockResolvedValue([queueItem]);
-
- await debugSyncQueue();
-
- // Should log 0 for consolidatedFrom when undefined
- expect(console.log).toHaveBeenCalledWith(
- expect.objectContaining({
- consolidatedFrom: 0,
- })
- );
- });
-
- it('should handle operations without payload', async () => {
- await db.syncMetadata.add(createMockSyncConfig());
-
- const queueItem = createMockSyncQueueItem({
- id: 'op-1',
- payload: undefined,
- });
- mockQueue.getPending.mockResolvedValue([queueItem]);
-
- await debugSyncQueue();
-
- // Should log false for hasPayload when undefined
- expect(console.log).toHaveBeenCalledWith(
- expect.objectContaining({
- hasPayload: false,
- })
- );
- });
- });
-
- describe('clearStuckOperations', () => {
- it('should clear queue when user confirms', async () => {
- const queueItems = [
- createMockSyncQueueItem({ id: 'op-1' }),
- createMockSyncQueueItem({ id: 'op-2' }),
- ];
- mockQueue.getPending.mockResolvedValue(queueItems);
-
- // Mock window.confirm to return true
- const confirmSpy = vi.spyOn(window, 'confirm').mockReturnValue(true);
-
- await clearStuckOperations();
-
- // Verify confirmation prompt
- expect(confirmSpy).toHaveBeenCalledWith(
- 'Are you sure you want to clear 2 pending operations? This cannot be undone.'
- );
-
- // Verify queue was cleared
- expect(mockQueue.clear).toHaveBeenCalled();
- expect(console.log).toHaveBeenCalledWith('✓ Queue cleared');
-
- confirmSpy.mockRestore();
- });
-
- it('should not clear queue when user cancels', async () => {
- const queueItems = [
- createMockSyncQueueItem({ id: 'op-1' }),
- ];
- mockQueue.getPending.mockResolvedValue(queueItems);
-
- // Mock window.confirm to return false
- const confirmSpy = vi.spyOn(window, 'confirm').mockReturnValue(false);
-
- await clearStuckOperations();
-
- // Verify confirmation prompt
- expect(confirmSpy).toHaveBeenCalled();
-
- // Verify queue was NOT cleared
- expect(mockQueue.clear).not.toHaveBeenCalled();
- expect(console.log).toHaveBeenCalledWith('Cancelled');
-
- confirmSpy.mockRestore();
- });
-
- it('should handle empty queue', async () => {
- mockQueue.getPending.mockResolvedValue([]);
-
- // Mock window.confirm (should not be called)
- const confirmSpy = vi.spyOn(window, 'confirm');
-
- await clearStuckOperations();
-
- expect(console.log).toHaveBeenCalledWith('Found 0 pending operations');
- expect(console.log).toHaveBeenCalledWith('No operations to clear');
- expect(confirmSpy).not.toHaveBeenCalled();
- expect(mockQueue.clear).not.toHaveBeenCalled();
-
- confirmSpy.mockRestore();
- });
-
- it('should log pending operation count', async () => {
- const queueItems = [
- createMockSyncQueueItem({ id: 'op-1' }),
- createMockSyncQueueItem({ id: 'op-2' }),
- createMockSyncQueueItem({ id: 'op-3' }),
- ];
- mockQueue.getPending.mockResolvedValue(queueItems);
-
- const confirmSpy = vi.spyOn(window, 'confirm').mockReturnValue(false);
-
- await clearStuckOperations();
-
- expect(console.log).toHaveBeenCalledWith('Found 3 pending operations');
-
- confirmSpy.mockRestore();
- });
- });
-
- describe('installSyncDebugTools', () => {
- it('should install debug functions on window object', () => {
- // Clear any existing functions
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- delete (window as any).debugSyncQueue;
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- delete (window as any).clearStuckOperations;
-
- installSyncDebugTools();
-
- // Verify functions are installed
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- expect((window as any).debugSyncQueue).toBe(debugSyncQueue);
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- expect((window as any).clearStuckOperations).toBe(clearStuckOperations);
-
- // Verify installation message
- expect(console.log).toHaveBeenCalledWith(
- '[SYNC DEBUG] Debug tools installed. Available functions:'
- );
- expect(console.log).toHaveBeenCalledWith(' - debugSyncQueue()');
- expect(console.log).toHaveBeenCalledWith(' - clearStuckOperations()');
- });
-
- it('should be callable multiple times without error', () => {
- installSyncDebugTools();
-
- expect(() => installSyncDebugTools()).not.toThrow();
-
- // Functions should still be available
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- expect((window as any).debugSyncQueue).toBe(debugSyncQueue);
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- expect((window as any).clearStuckOperations).toBe(clearStuckOperations);
- });
-
- it('should make debug functions accessible from window', () => {
- installSyncDebugTools();
-
- // Verify we can call the functions from window
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- expect(typeof (window as any).debugSyncQueue).toBe('function');
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- expect(typeof (window as any).clearStuckOperations).toBe('function');
- });
- });
-});
diff --git a/tests/sync/engine-coordinator.test.ts b/tests/sync/engine-coordinator.test.ts
deleted file mode 100644
index 231240da..00000000
--- a/tests/sync/engine-coordinator.test.ts
+++ /dev/null
@@ -1,557 +0,0 @@
-/**
- * Tests for SyncEngine - sync orchestration and state transitions
- */
-
-import { describe, it, expect, beforeEach, afterEach, vi } from 'vitest';
-import { getDb } from '@/lib/db';
-import { SyncEngine } from '@/lib/sync/engine/coordinator';
-import { createMockSyncConfig, mockConsole } from '../fixtures';
-
-// Mock all dependencies
-vi.mock('@/lib/sync/crypto', () => ({
- getCryptoManager: vi.fn(() => mockCrypto),
-}));
-
-vi.mock('@/lib/sync/api-client', () => ({
- getApiClient: vi.fn(() => mockApiClient),
-}));
-
-vi.mock('@/lib/sync/token-manager', () => ({
- getTokenManager: vi.fn(() => mockTokenManager),
-}));
-
-vi.mock('@/lib/sync/retry-manager', () => ({
- getRetryManager: vi.fn(() => mockRetryManager),
-}));
-
-vi.mock('@/lib/sync/queue-optimizer', () => ({
- getQueueOptimizer: vi.fn(() => mockQueueOptimizer),
-}));
-
-vi.mock('@/lib/sync/engine/push-handler', () => ({
- pushLocalChanges: vi.fn(async () => mockPushResult),
-}));
-
-vi.mock('@/lib/sync/engine/pull-handler', () => ({
- pullRemoteChanges: vi.fn(async () => mockPullResult),
-}));
-
-vi.mock('@/lib/sync/engine/conflict-resolver', () => ({
- autoResolveConflicts: vi.fn(async (conflicts) => conflicts.length),
-}));
-
-vi.mock('@/lib/sync/engine/error-handler', () => ({
- handleSyncError: vi.fn(async (error) => ({
- status: 'error',
- error: error.message,
- })),
-}));
-
-vi.mock('@/lib/sync-history', () => ({
- recordSyncSuccess: vi.fn(async () => {}),
-}));
-
-// Import mocked modules
-import { pushLocalChanges } from '@/lib/sync/engine/push-handler';
-import { pullRemoteChanges } from '@/lib/sync/engine/pull-handler';
-import { autoResolveConflicts } from '@/lib/sync/engine/conflict-resolver';
-import { handleSyncError } from '@/lib/sync/engine/error-handler';
-import { recordSyncSuccess } from '@/lib/sync-history';
-
-// Create mock instances
-const mockCrypto = {
- isInitialized: vi.fn(() => true),
- // eslint-disable-next-line @typescript-eslint/no-unused-vars
- encrypt: vi.fn(async (_data: string) => ({
- ciphertext: 'encrypted',
- nonce: 'nonce',
- })),
- // eslint-disable-next-line @typescript-eslint/no-unused-vars
- decrypt: vi.fn(async (_data: string) => 'decrypted'),
- // eslint-disable-next-line @typescript-eslint/no-unused-vars
- hash: vi.fn(async (_data: string) => 'hash'),
-};
-
-const mockApiClient = {
- setToken: vi.fn(),
- push: vi.fn(),
- pull: vi.fn(),
-};
-
-const mockTokenManager = {
- ensureValidToken: vi.fn(async () => true),
- handleUnauthorized: vi.fn(async () => true),
-};
-
-const mockRetryManager = {
- canSyncNow: vi.fn(async () => true),
- getRetryCount: vi.fn(async () => 0),
- recordSuccess: vi.fn(async () => {}),
-};
-
-const mockQueueOptimizer = {
- consolidateAll: vi.fn(async () => 0),
-};
-
-let mockPushResult = {
- accepted: [],
- rejected: [],
- conflicts: [],
- serverVectorClock: {},
-};
-
-let mockPullResult = {
- tasks: [],
- deletedTaskIds: [],
- serverVectorClock: {},
- conflicts: [],
-};
-
-describe('SyncEngine', () => {
- let engine: SyncEngine;
- let db: ReturnType;
- let consoleMock: ReturnType;
-
- beforeEach(async () => {
- engine = new SyncEngine();
- db = getDb();
- consoleMock = mockConsole();
-
- // Clear database
- await db.delete();
- await db.open();
-
- // Reset all mocks
- vi.clearAllMocks();
-
- // Reset mock results
- mockPushResult = {
- accepted: [],
- rejected: [],
- conflicts: [],
- serverVectorClock: {},
- };
-
- mockPullResult = {
- tasks: [],
- deletedTaskIds: [],
- serverVectorClock: {},
- conflicts: [],
- };
-
- // Setup default mock behaviors
- mockCrypto.isInitialized.mockReturnValue(true);
- mockTokenManager.ensureValidToken.mockResolvedValue(true);
- mockRetryManager.canSyncNow.mockResolvedValue(true);
- mockRetryManager.getRetryCount.mockResolvedValue(0);
- mockQueueOptimizer.consolidateAll.mockResolvedValue(0);
-
- vi.mocked(pushLocalChanges).mockResolvedValue(mockPushResult);
- vi.mocked(pullRemoteChanges).mockResolvedValue(mockPullResult);
- vi.mocked(autoResolveConflicts).mockImplementation(async (conflicts) => conflicts.length);
- vi.mocked(recordSyncSuccess).mockResolvedValue();
- });
-
- afterEach(async () => {
- consoleMock.restore();
- await db.delete();
- });
-
- describe('sync orchestration', () => {
- it('should execute full sync flow: push then pull', async () => {
- await db.syncMetadata.add(createMockSyncConfig());
-
- const result = await engine.sync('user');
-
- expect(result.status).toBe('success');
- expect(pushLocalChanges).toHaveBeenCalled();
- expect(pullRemoteChanges).toHaveBeenCalled();
-
- // Verify push was called before pull
- const pushCall = vi.mocked(pushLocalChanges).mock.invocationCallOrder[0];
- const pullCall = vi.mocked(pullRemoteChanges).mock.invocationCallOrder[0];
- expect(pushCall).toBeLessThan(pullCall);
- });
-
- it('should update metadata after successful sync', async () => {
- await db.syncMetadata.add(createMockSyncConfig());
-
- await engine.sync('user');
-
- const config = await db.syncMetadata.get('sync_config');
- expect(config?.lastSyncAt).toBeGreaterThan(0);
- });
-
- it('should record sync success to history', async () => {
- await db.syncMetadata.add(createMockSyncConfig());
-
- await engine.sync('user');
-
- expect(recordSyncSuccess).toHaveBeenCalled();
- });
-
- it('should return success status with counts', async () => {
- await db.syncMetadata.add(createMockSyncConfig());
-
- mockPushResult.accepted = ['task-1', 'task-2'];
- mockPullResult.tasks = [
- { id: 'task-3', encryptedBlob: 'enc', nonce: 'nonce', vectorClock: {}, updatedAt: Date.now() },
- ];
-
- const result = await engine.sync('user');
-
- expect(result.status).toBe('success');
- expect(result.pushedCount).toBe(2);
- expect(result.pulledCount).toBe(1);
- });
- });
-
- describe('state transitions', () => {
- it('should prevent concurrent sync operations', async () => {
- await db.syncMetadata.add(createMockSyncConfig());
-
- // Make sync take some time
- vi.mocked(pushLocalChanges).mockImplementation(async () => {
- await new Promise(resolve => setTimeout(resolve, 100));
- return mockPushResult;
- });
-
- const sync1 = engine.sync('user');
- const sync2 = engine.sync('user');
-
- const [result1, result2] = await Promise.all([sync1, sync2]);
-
- expect(result1.status).toBe('success');
- expect(result2.status).toBe('already_running');
-
- // Verify push was only called once
- expect(pushLocalChanges).toHaveBeenCalledTimes(1);
- });
-
- it('should allow sync after previous sync completes', async () => {
- await db.syncMetadata.add(createMockSyncConfig());
-
- await engine.sync('user');
- await engine.sync('user');
-
- expect(pushLocalChanges).toHaveBeenCalledTimes(2);
- });
-
- it('should check sync enabled before starting', async () => {
- await db.syncMetadata.add(createMockSyncConfig({ enabled: false }));
-
- const result = await engine.sync('user');
-
- expect(result.status).toBe('error');
- expect(result.error).toContain('not configured');
- expect(pushLocalChanges).not.toHaveBeenCalled();
- });
-
- it('should check encryption initialized before sync', async () => {
- await db.syncMetadata.add(createMockSyncConfig());
- mockCrypto.isInitialized.mockReturnValue(false);
-
- const result = await engine.sync('user');
-
- expect(result.status).toBe('error');
- expect(result.error).toContain('Encryption not initialized');
- expect(pushLocalChanges).not.toHaveBeenCalled();
- });
- });
-
- describe('pull-then-push sync flow', () => {
- it('should execute push before pull', async () => {
- await db.syncMetadata.add(createMockSyncConfig());
-
- await engine.sync('user');
-
- const pushOrder = vi.mocked(pushLocalChanges).mock.invocationCallOrder[0];
- const pullOrder = vi.mocked(pullRemoteChanges).mock.invocationCallOrder[0];
-
- expect(pushOrder).toBeLessThan(pullOrder);
- });
-
- it('should pass updated config to pull after push', async () => {
- const config = createMockSyncConfig({ vectorClock: { device1: 1 } });
- await db.syncMetadata.add(config);
-
- // Mock push updating vector clock
- mockPushResult.serverVectorClock = { device1: 2 };
-
- await engine.sync('user');
-
- // Pull should be called with config (it will reload from DB)
- expect(pullRemoteChanges).toHaveBeenCalled();
- });
-
- it('should handle conflicts from pull phase', async () => {
- await db.syncMetadata.add(createMockSyncConfig({ conflictStrategy: 'last_write_wins' }));
-
- mockPullResult.conflicts = [
- {
- taskId: 'task-1',
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- localVersion: { id: 'task-1', title: 'Local', updatedAt: Date.now() - 1000 } as any,
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- remoteVersion: { id: 'task-1', title: 'Remote', updatedAt: Date.now() } as any,
- },
- ];
-
- const result = await engine.sync('user');
-
- expect(autoResolveConflicts).toHaveBeenCalledWith(mockPullResult.conflicts);
- expect(result.conflictsResolved).toBe(1);
- });
-
- it('should not auto-resolve conflicts with manual strategy', async () => {
- await db.syncMetadata.add(createMockSyncConfig({ conflictStrategy: 'manual' }));
-
- mockPullResult.conflicts = [
- {
- taskId: 'task-1',
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- localVersion: { id: 'task-1', title: 'Local', updatedAt: Date.now() - 1000 } as any,
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- remoteVersion: { id: 'task-1', title: 'Remote', updatedAt: Date.now() } as any,
- },
- ];
-
- const result = await engine.sync('user');
-
- expect(autoResolveConflicts).not.toHaveBeenCalled();
- expect(result.status).toBe('conflict');
- expect(result.conflicts).toHaveLength(1);
- });
- });
-
- describe('sync cancellation', () => {
- it('should return already_running when sync is in progress', async () => {
- await db.syncMetadata.add(createMockSyncConfig());
-
- // Make first sync take time
- vi.mocked(pushLocalChanges).mockImplementation(async () => {
- await new Promise(resolve => setTimeout(resolve, 100));
- return mockPushResult;
- });
-
- const sync1 = engine.sync('user');
- const sync2 = engine.sync('user');
-
- // eslint-disable-next-line @typescript-eslint/no-unused-vars
- const [_result1, result2] = await Promise.all([sync1, sync2]);
-
- expect(result2.status).toBe('already_running');
- });
- });
-
- describe('concurrent sync prevention', () => {
- it('should block concurrent auto sync requests', async () => {
- await db.syncMetadata.add(createMockSyncConfig());
-
- vi.mocked(pushLocalChanges).mockImplementation(async () => {
- await new Promise(resolve => setTimeout(resolve, 50));
- return mockPushResult;
- });
-
- const results = await Promise.all([
- engine.sync('auto'),
- engine.sync('auto'),
- engine.sync('auto'),
- ]);
-
- const successCount = results.filter(r => r.status === 'success').length;
- const runningCount = results.filter(r => r.status === 'already_running').length;
-
- expect(successCount).toBe(1);
- expect(runningCount).toBe(2);
- });
-
- it('should block concurrent user sync requests', async () => {
- await db.syncMetadata.add(createMockSyncConfig());
-
- vi.mocked(pushLocalChanges).mockImplementation(async () => {
- await new Promise(resolve => setTimeout(resolve, 50));
- return mockPushResult;
- });
-
- const results = await Promise.all([
- engine.sync('user'),
- engine.sync('user'),
- ]);
-
- expect(results[0].status).toBe('success');
- expect(results[1].status).toBe('already_running');
- });
- });
-
- describe('priority handling', () => {
- it('should bypass backoff for user-triggered sync', async () => {
- await db.syncMetadata.add(createMockSyncConfig());
- mockRetryManager.canSyncNow.mockResolvedValue(false);
-
- const result = await engine.sync('user');
-
- expect(result.status).toBe('success');
- expect(pushLocalChanges).toHaveBeenCalled();
- });
-
- it('should respect backoff for auto sync', async () => {
- await db.syncMetadata.add(createMockSyncConfig());
- mockRetryManager.canSyncNow.mockResolvedValue(false);
-
- const result = await engine.sync('auto');
-
- expect(result.status).toBe('error');
- expect(result.error).toContain('backoff');
- expect(pushLocalChanges).not.toHaveBeenCalled();
- });
- });
-
- describe('token validation', () => {
- it('should ensure valid token before sync', async () => {
- await db.syncMetadata.add(createMockSyncConfig());
-
- await engine.sync('user');
-
- expect(mockTokenManager.ensureValidToken).toHaveBeenCalled();
- });
-
- it('should fail sync if token validation fails', async () => {
- await db.syncMetadata.add(createMockSyncConfig());
- mockTokenManager.ensureValidToken.mockResolvedValue(false);
-
- const result = await engine.sync('user');
-
- expect(result.status).toBe('error');
- expect(result.error).toContain('authentication token');
- expect(pushLocalChanges).not.toHaveBeenCalled();
- });
-
- it('should handle 401 errors with token refresh', async () => {
- const config = createMockSyncConfig({ token: 'old-token' });
- await db.syncMetadata.add(config);
-
- // Mock 401 error on first push
- vi.mocked(pushLocalChanges)
- .mockRejectedValueOnce(new Error('401 Unauthorized'))
- .mockResolvedValueOnce(mockPushResult);
-
- mockTokenManager.handleUnauthorized.mockResolvedValue(true);
-
- // Update token in DB to simulate refresh
- await db.syncMetadata.put({
- ...config,
- token: 'new-token',
- key: 'sync_config',
- });
-
- const result = await engine.sync('user');
-
- expect(mockTokenManager.handleUnauthorized).toHaveBeenCalled();
- expect(pushLocalChanges).toHaveBeenCalledTimes(2);
- expect(result.status).toBe('success');
- });
-
- it('should fail if token refresh fails', async () => {
- await db.syncMetadata.add(createMockSyncConfig());
-
- vi.mocked(pushLocalChanges).mockRejectedValue(new Error('401 Unauthorized'));
- mockTokenManager.handleUnauthorized.mockResolvedValue(false);
-
- const result = await engine.sync('user');
-
- expect(result.status).toBe('error');
- expect(result.error).toContain('Authentication expired');
- });
- });
-
- describe('queue optimization', () => {
- it('should optimize queue before sync', async () => {
- await db.syncMetadata.add(createMockSyncConfig());
-
- await engine.sync('user');
-
- expect(mockQueueOptimizer.consolidateAll).toHaveBeenCalled();
- });
-
- it('should log removed operations count', async () => {
- await db.syncMetadata.add(createMockSyncConfig());
- mockQueueOptimizer.consolidateAll.mockResolvedValue(5);
-
- await engine.sync('user');
-
- expect(mockQueueOptimizer.consolidateAll).toHaveBeenCalled();
- });
- });
-
- describe('error handling', () => {
- it('should delegate error handling to error handler', async () => {
- await db.syncMetadata.add(createMockSyncConfig());
-
- const testError = new Error('Test error');
- vi.mocked(pushLocalChanges).mockRejectedValue(testError);
-
- // eslint-disable-next-line @typescript-eslint/no-unused-vars
- const _result = await engine.sync('user');
-
- expect(handleSyncError).toHaveBeenCalledWith(
- testError,
- null,
- null,
- mockRetryManager,
- mockTokenManager,
- expect.any(String),
- 'user',
- expect.any(Number)
- );
- });
-
- it('should handle errors during pull phase', async () => {
- await db.syncMetadata.add(createMockSyncConfig());
-
- const testError = new Error('Pull failed');
- vi.mocked(pullRemoteChanges).mockRejectedValue(testError);
-
- // eslint-disable-next-line @typescript-eslint/no-unused-vars
- const _result = await engine.sync('user');
-
- expect(handleSyncError).toHaveBeenCalled();
- });
-
- it('should not fail sync if history recording fails', async () => {
- await db.syncMetadata.add(createMockSyncConfig());
- vi.mocked(recordSyncSuccess).mockRejectedValue(new Error('History error'));
-
- const result = await engine.sync('user');
-
- expect(result.status).toBe('success');
- });
- });
-
- describe('helper methods', () => {
- it('should check if sync is enabled', async () => {
- await db.syncMetadata.add(createMockSyncConfig({ enabled: true }));
-
- const enabled = await engine.isEnabled();
-
- expect(enabled).toBe(true);
- });
-
- it('should return false when sync not configured', async () => {
- const enabled = await engine.isEnabled();
-
- expect(enabled).toBe(false);
- });
-
- it('should get sync status', async () => {
- await db.syncMetadata.add(createMockSyncConfig());
-
- const status = await engine.getStatus();
-
- expect(status).toHaveProperty('enabled');
- expect(status).toHaveProperty('lastSyncAt');
- expect(status).toHaveProperty('pendingCount');
- expect(status).toHaveProperty('isRunning');
- });
- });
-});
diff --git a/tests/sync/error-handler.test.ts b/tests/sync/error-handler.test.ts
deleted file mode 100644
index e84434af..00000000
--- a/tests/sync/error-handler.test.ts
+++ /dev/null
@@ -1,621 +0,0 @@
-/**
- * Tests for sync engine error handler - error recovery strategies
- */
-
-import { describe, it, expect, beforeEach, afterEach, vi } from 'vitest';
-import { handleSyncError } from '@/lib/sync/engine/error-handler';
-import { RetryManager } from '@/lib/sync/retry-manager';
-import { TokenManager } from '@/lib/sync/token-manager';
-import * as syncHistory from '@/lib/sync-history';
-
-// Mock dependencies
-vi.mock('@/lib/sync-history');
-vi.mock('@/lib/logger', () => ({
- createLogger: () => ({
- error: vi.fn(),
- warn: vi.fn(),
- info: vi.fn(),
- debug: vi.fn(),
- }),
-}));
-
-// Mock getSyncQueue
-vi.mock('@/lib/sync/queue', () => {
- const mockQueue = {
- getPendingCount: vi.fn().mockResolvedValue(5),
- };
- return {
- getSyncQueue: vi.fn(() => mockQueue),
- SyncQueue: vi.fn(),
- };
-});
-
-describe('error-handler', () => {
- let mockRetryManager: RetryManager;
- let mockTokenManager: TokenManager;
- const deviceId = 'test-device-123';
- const triggeredBy = 'user' as const;
- const syncStartTime = Date.now() - 5000; // 5 seconds ago
-
- beforeEach(() => {
- // Create mock retry manager
- mockRetryManager = {
- recordFailure: vi.fn(),
- recordSuccess: vi.fn(),
- getRetryCount: vi.fn(),
- shouldRetry: vi.fn(),
- getNextRetryDelay: vi.fn(),
- canSyncNow: vi.fn(),
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- } as any;
-
- // Create mock token manager
- mockTokenManager = {
- handleUnauthorized: vi.fn(),
- ensureValidToken: vi.fn(),
- needsRefresh: vi.fn(),
- getTimeUntilExpiry: vi.fn(),
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- } as any;
-
- // Mock recordSyncError
- vi.mocked(syncHistory.recordSyncError).mockResolvedValue(undefined);
- });
-
- afterEach(() => {
- vi.clearAllMocks();
- });
-
- describe('transient error handling', () => {
- it('should handle transient errors with retry', async () => {
- const error = new Error('Network error');
- const pushResult = { accepted: [] };
- const pullResult = { tasks: [] };
-
- vi.mocked(mockRetryManager.getRetryCount).mockResolvedValue(1);
- vi.mocked(mockRetryManager.shouldRetry).mockResolvedValue(true);
- vi.mocked(mockRetryManager.getNextRetryDelay).mockReturnValue(5000);
-
- const result = await handleSyncError(
- error,
- pushResult,
- pullResult,
- mockRetryManager,
- mockTokenManager,
- deviceId,
- triggeredBy,
- syncStartTime
- );
-
- expect(mockRetryManager.recordFailure).toHaveBeenCalledWith(error);
- expect(result.status).toBe('error');
- expect(result.error).toContain('Network error');
- expect(result.error).toContain('retry automatically');
- });
-
- it('should handle transient errors when max retries exceeded', async () => {
- const error = new Error('500 Internal Server Error');
- const pushResult = { accepted: [] };
- const pullResult = { tasks: [] };
-
- vi.mocked(mockRetryManager.getRetryCount).mockResolvedValue(5);
- vi.mocked(mockRetryManager.shouldRetry).mockResolvedValue(false);
-
- const result = await handleSyncError(
- error,
- pushResult,
- pullResult,
- mockRetryManager,
- mockTokenManager,
- deviceId,
- triggeredBy,
- syncStartTime
- );
-
- expect(mockRetryManager.recordFailure).toHaveBeenCalledWith(error);
- expect(result.status).toBe('error');
- expect(result.error).toContain('failed after multiple retries');
- });
-
- it('should record sync error to history for transient errors', async () => {
- const error = new Error('Timeout');
- const pushResult = { accepted: [] };
- const pullResult = { tasks: [] };
-
- vi.mocked(mockRetryManager.getRetryCount).mockResolvedValue(1);
- vi.mocked(mockRetryManager.shouldRetry).mockResolvedValue(true);
- vi.mocked(mockRetryManager.getNextRetryDelay).mockReturnValue(5000);
-
- await handleSyncError(
- error,
- pushResult,
- pullResult,
- mockRetryManager,
- mockTokenManager,
- deviceId,
- triggeredBy,
- syncStartTime
- );
-
- expect(syncHistory.recordSyncError).toHaveBeenCalledWith(
- 'Timeout',
- deviceId,
- triggeredBy,
- expect.any(Number)
- );
- });
- });
-
- describe('authentication error handling', () => {
- it('should handle auth errors with successful token refresh', async () => {
- const error = new Error('401 Unauthorized');
- const pushResult = { accepted: [] };
- const pullResult = { tasks: [] };
-
- vi.mocked(mockTokenManager.handleUnauthorized).mockResolvedValue(true);
-
- const result = await handleSyncError(
- error,
- pushResult,
- pullResult,
- mockRetryManager,
- mockTokenManager,
- deviceId,
- triggeredBy,
- syncStartTime
- );
-
- expect(mockTokenManager.handleUnauthorized).toHaveBeenCalled();
- expect(result.status).toBe('error');
- expect(result.error).toContain('Authentication refreshed');
- expect(result.error).toContain('try syncing again');
- });
-
- it('should handle auth errors with failed token refresh', async () => {
- const error = new Error('403 Forbidden');
- const pushResult = { accepted: [] };
- const pullResult = { tasks: [] };
-
- vi.mocked(mockTokenManager.handleUnauthorized).mockResolvedValue(false);
-
- const result = await handleSyncError(
- error,
- pushResult,
- pullResult,
- mockRetryManager,
- mockTokenManager,
- deviceId,
- triggeredBy,
- syncStartTime
- );
-
- expect(mockTokenManager.handleUnauthorized).toHaveBeenCalled();
- expect(result.status).toBe('error');
- expect(result.error).toContain('Authentication expired');
- expect(result.error).toContain('sign in again');
- });
-
- it('should not record failure for auth errors', async () => {
- const error = new Error('Token expired');
- const pushResult = { accepted: [] };
- const pullResult = { tasks: [] };
-
- vi.mocked(mockTokenManager.handleUnauthorized).mockResolvedValue(true);
-
- await handleSyncError(
- error,
- pushResult,
- pullResult,
- mockRetryManager,
- mockTokenManager,
- deviceId,
- triggeredBy,
- syncStartTime
- );
-
- expect(mockRetryManager.recordFailure).not.toHaveBeenCalled();
- });
- });
-
- describe('permanent error handling', () => {
- it('should handle permanent errors without retry', async () => {
- const error = new Error('400 Bad Request');
- const pushResult = { accepted: [] };
- const pullResult = { tasks: [] };
-
- const result = await handleSyncError(
- error,
- pushResult,
- pullResult,
- mockRetryManager,
- mockTokenManager,
- deviceId,
- triggeredBy,
- syncStartTime
- );
-
- expect(mockRetryManager.recordFailure).not.toHaveBeenCalled();
- expect(result.status).toBe('error');
- expect(result.error).toContain('Sync error');
- expect(result.error).toContain('400 Bad Request');
- });
-
- it('should handle validation errors as permanent', async () => {
- const error = new Error('Validation failed');
- const pushResult = { accepted: [] };
- const pullResult = { tasks: [] };
-
- const result = await handleSyncError(
- error,
- pushResult,
- pullResult,
- mockRetryManager,
- mockTokenManager,
- deviceId,
- triggeredBy,
- syncStartTime
- );
-
- expect(mockRetryManager.recordFailure).not.toHaveBeenCalled();
- expect(result.status).toBe('error');
- expect(result.error).toContain('Validation failed');
- });
-
- it('should handle encryption errors as permanent', async () => {
- const error = new Error('Decryption failed');
- const pushResult = { accepted: [] };
- const pullResult = { tasks: [] };
-
- const result = await handleSyncError(
- error,
- pushResult,
- pullResult,
- mockRetryManager,
- mockTokenManager,
- deviceId,
- triggeredBy,
- syncStartTime
- );
-
- expect(mockRetryManager.recordFailure).not.toHaveBeenCalled();
- expect(result.status).toBe('error');
- expect(result.error).toContain('Decryption failed');
- });
- });
-
- describe('exponential backoff', () => {
- it('should use exponential backoff for consecutive failures', async () => {
- const error = new Error('Network error');
- const pushResult = { accepted: [] };
- const pullResult = { tasks: [] };
-
- // First failure - 5s delay
- vi.mocked(mockRetryManager.getRetryCount).mockResolvedValue(1);
- vi.mocked(mockRetryManager.shouldRetry).mockResolvedValue(true);
- vi.mocked(mockRetryManager.getNextRetryDelay).mockReturnValue(5000);
-
- let result = await handleSyncError(
- error,
- pushResult,
- pullResult,
- mockRetryManager,
- mockTokenManager,
- deviceId,
- triggeredBy,
- syncStartTime
- );
-
- expect(result.error).toContain('5s');
-
- // Second failure - 10s delay
- vi.mocked(mockRetryManager.getRetryCount).mockResolvedValue(2);
- vi.mocked(mockRetryManager.getNextRetryDelay).mockReturnValue(10000);
-
- result = await handleSyncError(
- error,
- pushResult,
- pullResult,
- mockRetryManager,
- mockTokenManager,
- deviceId,
- triggeredBy,
- syncStartTime
- );
-
- expect(result.error).toContain('10s');
-
- // Third failure - 30s delay
- vi.mocked(mockRetryManager.getRetryCount).mockResolvedValue(3);
- vi.mocked(mockRetryManager.getNextRetryDelay).mockReturnValue(30000);
-
- result = await handleSyncError(
- error,
- pushResult,
- pullResult,
- mockRetryManager,
- mockTokenManager,
- deviceId,
- triggeredBy,
- syncStartTime
- );
-
- expect(result.error).toContain('30s');
- });
- });
-
- describe('error categorization integration', () => {
- it('should categorize network errors as transient', async () => {
- const errors = [
- new Error('Network error'),
- new Error('Fetch failed'),
- new Error('Connection refused'),
- new Error('ETIMEDOUT'),
- ];
-
- for (const error of errors) {
- vi.mocked(mockRetryManager.getRetryCount).mockResolvedValue(1);
- vi.mocked(mockRetryManager.shouldRetry).mockResolvedValue(true);
- vi.mocked(mockRetryManager.getNextRetryDelay).mockReturnValue(5000);
-
- const result = await handleSyncError(
- error,
- { accepted: [] },
- { tasks: [] },
- mockRetryManager,
- mockTokenManager,
- deviceId,
- triggeredBy,
- syncStartTime
- );
-
- expect(mockRetryManager.recordFailure).toHaveBeenCalledWith(error);
- expect(result.error).toContain('retry');
- }
- });
-
- it('should categorize server errors as transient', async () => {
- const errors = [
- new Error('500 Internal Server Error'),
- new Error('502 Bad Gateway'),
- new Error('503 Service Unavailable'),
- new Error('504 Gateway Timeout'),
- ];
-
- for (const error of errors) {
- vi.clearAllMocks();
- vi.mocked(mockRetryManager.getRetryCount).mockResolvedValue(1);
- vi.mocked(mockRetryManager.shouldRetry).mockResolvedValue(true);
- vi.mocked(mockRetryManager.getNextRetryDelay).mockReturnValue(5000);
-
- const result = await handleSyncError(
- error,
- { accepted: [] },
- { tasks: [] },
- mockRetryManager,
- mockTokenManager,
- deviceId,
- triggeredBy,
- syncStartTime
- );
-
- expect(mockRetryManager.recordFailure).toHaveBeenCalledWith(error);
- expect(result.error).toContain('retry');
- }
- });
-
- it('should categorize auth errors correctly', async () => {
- const errors = [
- new Error('401 Unauthorized'),
- new Error('403 Forbidden'),
- new Error('Token expired'),
- ];
-
- for (const error of errors) {
- vi.clearAllMocks();
- vi.mocked(mockTokenManager.handleUnauthorized).mockResolvedValue(true);
-
- // eslint-disable-next-line @typescript-eslint/no-unused-vars
- const _result = await handleSyncError(
- error,
- { accepted: [] },
- { tasks: [] },
- mockRetryManager,
- mockTokenManager,
- deviceId,
- triggeredBy,
- syncStartTime
- );
-
- expect(mockTokenManager.handleUnauthorized).toHaveBeenCalled();
- expect(mockRetryManager.recordFailure).not.toHaveBeenCalled();
- }
- });
-
- it('should categorize client errors as permanent', async () => {
- const errors = [
- new Error('400 Bad Request'),
- new Error('404 Not Found'),
- new Error('409 Conflict'),
- new Error('422 Unprocessable Entity'),
- ];
-
- for (const error of errors) {
- vi.clearAllMocks();
-
- const result = await handleSyncError(
- error,
- { accepted: [] },
- { tasks: [] },
- mockRetryManager,
- mockTokenManager,
- deviceId,
- triggeredBy,
- syncStartTime
- );
-
- expect(mockRetryManager.recordFailure).not.toHaveBeenCalled();
- expect(mockTokenManager.handleUnauthorized).not.toHaveBeenCalled();
- expect(result.error).toContain(error.message);
- }
- });
- });
-
- describe('fatal error handling', () => {
- it('should handle non-Error objects', async () => {
- const error = 'String error';
- const pushResult = { accepted: [] };
- const pullResult = { tasks: [] };
-
- vi.mocked(mockRetryManager.getRetryCount).mockResolvedValue(1);
- vi.mocked(mockRetryManager.shouldRetry).mockResolvedValue(true);
- vi.mocked(mockRetryManager.getNextRetryDelay).mockReturnValue(5000);
-
- const result = await handleSyncError(
- error,
- pushResult,
- pullResult,
- mockRetryManager,
- mockTokenManager,
- deviceId,
- triggeredBy,
- syncStartTime
- );
-
- expect(result.status).toBe('error');
- expect(mockRetryManager.recordFailure).toHaveBeenCalled();
- });
-
- it('should handle uncategorized errors as transient', async () => {
- const error = new Error('Unknown error type');
- const pushResult = { accepted: [] };
- const pullResult = { tasks: [] };
-
- vi.mocked(mockRetryManager.getRetryCount).mockResolvedValue(1);
- vi.mocked(mockRetryManager.shouldRetry).mockResolvedValue(true);
- vi.mocked(mockRetryManager.getNextRetryDelay).mockReturnValue(5000);
-
- const result = await handleSyncError(
- error,
- pushResult,
- pullResult,
- mockRetryManager,
- mockTokenManager,
- deviceId,
- triggeredBy,
- syncStartTime
- );
-
- expect(mockRetryManager.recordFailure).toHaveBeenCalledWith(error);
- expect(result.status).toBe('error');
- });
-
- it('should continue if history recording fails', async () => {
- const error = new Error('Network error');
- const pushResult = { accepted: [] };
- const pullResult = { tasks: [] };
-
- vi.mocked(mockRetryManager.getRetryCount).mockResolvedValue(1);
- vi.mocked(mockRetryManager.shouldRetry).mockResolvedValue(true);
- vi.mocked(mockRetryManager.getNextRetryDelay).mockReturnValue(5000);
- vi.mocked(syncHistory.recordSyncError).mockRejectedValue(new Error('History write failed'));
-
- const result = await handleSyncError(
- error,
- pushResult,
- pullResult,
- mockRetryManager,
- mockTokenManager,
- deviceId,
- triggeredBy,
- syncStartTime
- );
-
- // Should still return a result even if history recording fails
- expect(result.status).toBe('error');
- expect(result.error).toContain('retry');
- });
- });
-
- describe('context logging', () => {
- it('should include push/pull counts in error context', async () => {
- const error = new Error('Network error');
- const pushResult = { accepted: [{ id: '1' }, { id: '2' }] };
- const pullResult = { tasks: [{ id: '3' }] };
-
- vi.mocked(mockRetryManager.getRetryCount).mockResolvedValue(1);
- vi.mocked(mockRetryManager.shouldRetry).mockResolvedValue(true);
- vi.mocked(mockRetryManager.getNextRetryDelay).mockReturnValue(5000);
-
- await handleSyncError(
- error,
- pushResult,
- pullResult,
- mockRetryManager,
- mockTokenManager,
- deviceId,
- triggeredBy,
- syncStartTime
- );
-
- // Verify that the function completed successfully with context
- expect(mockRetryManager.recordFailure).toHaveBeenCalled();
- });
-
- it('should handle missing push/pull results', async () => {
- const error = new Error('Network error');
- const pushResult = null;
- const pullResult = null;
-
- vi.mocked(mockRetryManager.getRetryCount).mockResolvedValue(1);
- vi.mocked(mockRetryManager.shouldRetry).mockResolvedValue(true);
- vi.mocked(mockRetryManager.getNextRetryDelay).mockReturnValue(5000);
-
- const result = await handleSyncError(
- error,
- pushResult,
- pullResult,
- mockRetryManager,
- mockTokenManager,
- deviceId,
- triggeredBy,
- syncStartTime
- );
-
- expect(result.status).toBe('error');
- });
- });
-
- describe('sync duration tracking', () => {
- it('should calculate and record sync duration', async () => {
- const error = new Error('Network error');
- const pushResult = { accepted: [] };
- const pullResult = { tasks: [] };
- const startTime = Date.now() - 3000; // 3 seconds ago
-
- vi.mocked(mockRetryManager.getRetryCount).mockResolvedValue(1);
- vi.mocked(mockRetryManager.shouldRetry).mockResolvedValue(true);
- vi.mocked(mockRetryManager.getNextRetryDelay).mockReturnValue(5000);
-
- await handleSyncError(
- error,
- pushResult,
- pullResult,
- mockRetryManager,
- mockTokenManager,
- deviceId,
- triggeredBy,
- startTime
- );
-
- expect(syncHistory.recordSyncError).toHaveBeenCalledWith(
- error.message,
- deviceId,
- triggeredBy,
- expect.any(Number)
- );
-
- const duration = vi.mocked(syncHistory.recordSyncError).mock.calls[0][3];
- expect(duration).toBeGreaterThan(0);
- });
- });
-});
diff --git a/tests/sync/health-monitor.test.ts b/tests/sync/health-monitor.test.ts
deleted file mode 100644
index d6319a62..00000000
--- a/tests/sync/health-monitor.test.ts
+++ /dev/null
@@ -1,435 +0,0 @@
-/**
- * Tests for HealthMonitor - periodic health checks and issue detection
- */
-
-import { describe, it, expect, beforeEach, afterEach, vi } from 'vitest';
-import { getDb } from '@/lib/db';
-import { HealthMonitor, getHealthMonitor } from '@/lib/sync/health-monitor';
-import {
- createMockSyncConfig,
- createMockSyncQueueItem,
- mockDateNow,
- mockConsole,
-} from '../fixtures';
-
-// Mock dependencies
-vi.mock('@/lib/sync/queue', () => ({
- getSyncQueue: vi.fn(() => mockQueue),
-}));
-
-vi.mock('@/lib/sync/token-manager', () => ({
- getTokenManager: vi.fn(() => mockTokenManager),
-}));
-
-vi.mock('@/lib/sync/api-client', () => ({
- getApiClient: vi.fn(() => mockApiClient),
-}));
-
-// Create mock instances
-const mockQueue = {
- getPending: vi.fn(async () => []),
-};
-
-const mockTokenManager = {
- getTimeUntilExpiry: vi.fn(async () => 60 * 60 * 1000), // 1 hour
- needsRefresh: vi.fn(async () => false),
- ensureValidToken: vi.fn(async () => true),
-};
-
-const mockApiClient = {
- setToken: vi.fn(),
- getStatus: vi.fn(async () => ({ status: 'ok' })),
-};
-
-describe('HealthMonitor', () => {
- let monitor: HealthMonitor;
- let db: ReturnType;
- let consoleMock: ReturnType;
- let dateMock: ReturnType | null = null;
-
- beforeEach(async () => {
- // Create a new monitor instance for each test
- monitor = new HealthMonitor();
- db = getDb();
- consoleMock = mockConsole();
-
- // Clear database
- await db.delete();
- await db.open();
-
- // Reset all mocks
- vi.clearAllMocks();
- mockQueue.getPending.mockResolvedValue([]);
- mockTokenManager.getTimeUntilExpiry.mockResolvedValue(60 * 60 * 1000);
- mockTokenManager.needsRefresh.mockResolvedValue(false);
- mockTokenManager.ensureValidToken.mockResolvedValue(true);
- mockApiClient.getStatus.mockResolvedValue({ status: 'ok' });
- });
-
- afterEach(async () => {
- // Stop monitor if running
- monitor.stop();
-
- // Restore console
- consoleMock.restore();
-
- // Restore date mock if used
- if (dateMock) {
- dateMock.restore();
- dateMock = null;
- }
-
- await db.delete();
- });
-
- describe('start and stop', () => {
- it('should start health monitor and run initial check', async () => {
- // Setup sync config
- await db.syncMetadata.add(createMockSyncConfig());
-
- monitor.start();
-
- expect(monitor.isActive()).toBe(true);
-
- // Wait for initial check to complete
- await new Promise(resolve => setTimeout(resolve, 50));
-
- // Verify initial check was performed
- expect(mockQueue.getPending).toHaveBeenCalled();
- });
-
- it('should not start if already running', async () => {
- await db.syncMetadata.add(createMockSyncConfig());
-
- monitor.start();
- const firstActive = monitor.isActive();
-
- monitor.start(); // Try to start again
- const secondActive = monitor.isActive();
-
- expect(firstActive).toBe(true);
- expect(secondActive).toBe(true);
- });
-
- it('should stop health monitor and clear interval', async () => {
- await db.syncMetadata.add(createMockSyncConfig());
-
- monitor.start();
- expect(monitor.isActive()).toBe(true);
-
- monitor.stop();
- expect(monitor.isActive()).toBe(false);
- });
-
- it('should not error when stopping if not running', () => {
- expect(monitor.isActive()).toBe(false);
-
- expect(() => monitor.stop()).not.toThrow();
-
- expect(monitor.isActive()).toBe(false);
- });
- });
-
- describe('check - health status calculation', () => {
- it('should return healthy status when no issues found', async () => {
- await db.syncMetadata.add(createMockSyncConfig());
-
- const report = await monitor.check();
-
- expect(report.healthy).toBe(true);
- expect(report.issues).toHaveLength(0);
- expect(report.timestamp).toBeGreaterThan(0);
- });
-
- it('should return unhealthy status when issues found', async () => {
- await db.syncMetadata.add(createMockSyncConfig());
-
- // Mock expired token
- mockTokenManager.getTimeUntilExpiry.mockResolvedValue(-1000);
-
- const report = await monitor.check();
-
- expect(report.healthy).toBe(false);
- expect(report.issues.length).toBeGreaterThan(0);
- });
-
- it('should skip check when sync not enabled', async () => {
- await db.syncMetadata.add(createMockSyncConfig({ enabled: false }));
-
- const report = await monitor.check();
-
- expect(report.healthy).toBe(true);
- expect(report.issues).toHaveLength(0);
-
- // Verify checks were not performed
- expect(mockQueue.getPending).not.toHaveBeenCalled();
- });
-
- it('should skip check when sync config not found', async () => {
- // No sync config in database
-
- const report = await monitor.check();
-
- expect(report.healthy).toBe(true);
- expect(report.issues).toHaveLength(0);
- });
-
- it('should handle check errors gracefully', async () => {
- await db.syncMetadata.add(createMockSyncConfig());
-
- // Mock error in queue check
- mockQueue.getPending.mockRejectedValue(new Error('Database error'));
-
- const report = await monitor.check();
-
- expect(report.healthy).toBe(false);
- expect(report.issues).toHaveLength(1);
- expect(report.issues[0].type).toBe('server_unreachable');
- expect(report.issues[0].severity).toBe('error');
- });
- });
-
- describe('stale operations detection', () => {
- it('should detect stale queue operations older than 1 hour', async () => {
- const now = Date.now();
- dateMock = mockDateNow(now);
-
- await db.syncMetadata.add(createMockSyncConfig());
-
- // Create stale operation (2 hours old)
- const staleOp = createMockSyncQueueItem({
- timestamp: now - 2 * 60 * 60 * 1000,
- });
-
- mockQueue.getPending.mockResolvedValue([staleOp]);
-
- const report = await monitor.check();
-
- expect(report.healthy).toBe(false);
- expect(report.issues).toHaveLength(1);
- expect(report.issues[0].type).toBe('stale_queue');
- expect(report.issues[0].severity).toBe('warning');
- expect(report.issues[0].message).toContain('1 pending operations');
- });
-
- it('should not flag recent operations as stale', async () => {
- const now = Date.now();
- dateMock = mockDateNow(now);
-
- await db.syncMetadata.add(createMockSyncConfig());
-
- // Create recent operation (30 minutes old)
- const recentOp = createMockSyncQueueItem({
- timestamp: now - 30 * 60 * 1000,
- });
-
- mockQueue.getPending.mockResolvedValue([recentOp]);
-
- const report = await monitor.check();
-
- expect(report.healthy).toBe(true);
- expect(report.issues).toHaveLength(0);
- });
-
- it('should count multiple stale operations', async () => {
- const now = Date.now();
- dateMock = mockDateNow(now);
-
- await db.syncMetadata.add(createMockSyncConfig());
-
- // Create multiple stale operations
- const staleOps = [
- createMockSyncQueueItem({ id: 'op-1', timestamp: now - 2 * 60 * 60 * 1000 }),
- createMockSyncQueueItem({ id: 'op-2', timestamp: now - 3 * 60 * 60 * 1000 }),
- createMockSyncQueueItem({ id: 'op-3', timestamp: now - 1.5 * 60 * 60 * 1000 }),
- ];
-
- mockQueue.getPending.mockResolvedValue(staleOps);
-
- const report = await monitor.check();
-
- expect(report.healthy).toBe(false);
- expect(report.issues[0].message).toContain('3 pending operations');
- });
-
- it('should return null issue when no pending operations', async () => {
- await db.syncMetadata.add(createMockSyncConfig());
-
- mockQueue.getPending.mockResolvedValue([]);
-
- const report = await monitor.check();
-
- expect(report.healthy).toBe(true);
- expect(report.issues).toHaveLength(0);
- });
- });
-
- describe('token expiration detection', () => {
- it('should detect expired token', async () => {
- await db.syncMetadata.add(createMockSyncConfig());
-
- // Mock expired token (negative time until expiry)
- mockTokenManager.getTimeUntilExpiry.mockResolvedValue(-1000);
-
- const report = await monitor.check();
-
- expect(report.healthy).toBe(false);
- expect(report.issues).toHaveLength(1);
- expect(report.issues[0].type).toBe('token_expired');
- expect(report.issues[0].severity).toBe('error');
- expect(report.issues[0].message).toContain('expired');
- });
-
- it('should attempt automatic token refresh when needed', async () => {
- await db.syncMetadata.add(createMockSyncConfig());
-
- // Mock token needs refresh
- mockTokenManager.needsRefresh.mockResolvedValue(true);
- mockTokenManager.ensureValidToken.mockResolvedValue(true);
-
- const report = await monitor.check();
-
- expect(mockTokenManager.ensureValidToken).toHaveBeenCalled();
- expect(report.healthy).toBe(true);
- });
-
- it('should report warning when token refresh fails', async () => {
- await db.syncMetadata.add(createMockSyncConfig());
-
- // Mock token needs refresh but refresh fails
- mockTokenManager.needsRefresh.mockResolvedValue(true);
- mockTokenManager.ensureValidToken.mockResolvedValue(false);
-
- const report = await monitor.check();
-
- expect(report.healthy).toBe(false);
- expect(report.issues).toHaveLength(1);
- expect(report.issues[0].type).toBe('token_expired');
- expect(report.issues[0].severity).toBe('warning');
- expect(report.issues[0].message).toContain('expiring soon');
- });
-
- it('should not report issue when token is valid', async () => {
- await db.syncMetadata.add(createMockSyncConfig());
-
- // Mock valid token (1 hour until expiry)
- mockTokenManager.getTimeUntilExpiry.mockResolvedValue(60 * 60 * 1000);
- mockTokenManager.needsRefresh.mockResolvedValue(false);
-
- const report = await monitor.check();
-
- expect(report.healthy).toBe(true);
- expect(report.issues).toHaveLength(0);
- });
- });
-
- describe('server connectivity check', () => {
- it('should detect server unreachable', async () => {
- await db.syncMetadata.add(createMockSyncConfig());
-
- // Mock API error
- mockApiClient.getStatus.mockRejectedValue(new Error('Network error'));
-
- const report = await monitor.check();
-
- expect(report.healthy).toBe(false);
- expect(report.issues).toHaveLength(1);
- expect(report.issues[0].type).toBe('server_unreachable');
- expect(report.issues[0].severity).toBe('error');
- expect(report.issues[0].message).toContain('Network error');
- });
-
- it('should pass when server is reachable', async () => {
- await db.syncMetadata.add(createMockSyncConfig());
-
- mockApiClient.getStatus.mockResolvedValue({ status: 'ok' });
-
- const report = await monitor.check();
-
- expect(mockApiClient.setToken).toHaveBeenCalled();
- expect(mockApiClient.getStatus).toHaveBeenCalled();
- expect(report.healthy).toBe(true);
- });
-
- it('should set token before checking connectivity', async () => {
- const config = createMockSyncConfig({ token: 'test-token-123' });
- await db.syncMetadata.add(config);
-
- await monitor.check();
-
- expect(mockApiClient.setToken).toHaveBeenCalledWith('test-token-123');
- });
- });
-
- describe('multiple issues detection', () => {
- it('should detect multiple issues in single check', async () => {
- const now = Date.now();
- dateMock = mockDateNow(now);
-
- await db.syncMetadata.add(createMockSyncConfig());
-
- // Setup multiple issues
- const staleOp = createMockSyncQueueItem({
- timestamp: now - 2 * 60 * 60 * 1000,
- });
- mockQueue.getPending.mockResolvedValue([staleOp]);
- mockTokenManager.getTimeUntilExpiry.mockResolvedValue(-1000);
- mockApiClient.getStatus.mockRejectedValue(new Error('Network error'));
-
- const report = await monitor.check();
-
- expect(report.healthy).toBe(false);
- expect(report.issues.length).toBeGreaterThanOrEqual(2);
-
- const issueTypes = report.issues.map(i => i.type);
- expect(issueTypes).toContain('stale_queue');
- expect(issueTypes).toContain('token_expired');
- });
- });
-
- describe('periodic health checks', () => {
- it('should schedule periodic checks when started', async () => {
- await db.syncMetadata.add(createMockSyncConfig());
-
- monitor.start();
-
- // Verify monitor is active
- expect(monitor.isActive()).toBe(true);
-
- // Wait for initial check to complete
- await new Promise(resolve => setTimeout(resolve, 100));
-
- // Verify initial check was performed
- expect(mockQueue.getPending).toHaveBeenCalled();
- });
-
- it('should clear interval when stopped', async () => {
- await db.syncMetadata.add(createMockSyncConfig());
-
- monitor.start();
- expect(monitor.isActive()).toBe(true);
-
- const callsBeforeStop = mockQueue.getPending.mock.calls.length;
-
- monitor.stop();
- expect(monitor.isActive()).toBe(false);
-
- // Wait a bit to ensure no more calls are made
- await new Promise(resolve => setTimeout(resolve, 100));
-
- const callsAfterStop = mockQueue.getPending.mock.calls.length;
-
- // Should not have made additional calls after stop (or at most 1 if timing)
- expect(callsAfterStop - callsBeforeStop).toBeLessThanOrEqual(1);
- });
- });
-
- describe('getHealthMonitor singleton', () => {
- it('should return same instance on multiple calls', () => {
- const instance1 = getHealthMonitor();
- const instance2 = getHealthMonitor();
-
- expect(instance1).toBe(instance2);
- });
- });
-});
diff --git a/tests/sync/metadata-manager.test.ts b/tests/sync/metadata-manager.test.ts
deleted file mode 100644
index 5ad3e21f..00000000
--- a/tests/sync/metadata-manager.test.ts
+++ /dev/null
@@ -1,363 +0,0 @@
-/**
- * Tests for metadata manager - sync configuration and metadata updates
- */
-
-import { describe, it, expect, beforeEach, afterEach } from 'vitest';
-import { getDb } from '@/lib/db';
-import {
- updateSyncMetadata,
- getSyncConfig,
- updateConfig,
- isEnabled,
- getStatus,
- queueExistingTasks,
-} from '@/lib/sync/engine/metadata-manager';
-import { getSyncQueue } from '@/lib/sync/queue';
-import { createMockSyncConfig, createMockTask, mockConsole } from '../fixtures';
-
-describe('MetadataManager', () => {
- let db: ReturnType;
- let consoleMock: ReturnType;
-
- beforeEach(async () => {
- db = getDb();
- consoleMock = mockConsole();
-
- // Clear database
- await db.delete();
- await db.open();
- });
-
- afterEach(async () => {
- consoleMock.restore();
- await db.delete();
- });
-
- describe('updateSyncMetadata', () => {
- it('should update last sync timestamp', async () => {
- const config = createMockSyncConfig({ lastSyncAt: null });
- await db.syncMetadata.add(config);
-
- const syncStartTime = Date.now();
- const serverClock = { 'device-456': 2, 'device-789': 1 };
-
- await updateSyncMetadata(config, serverClock, syncStartTime);
-
- const updated = await db.syncMetadata.get('sync_config');
- expect(updated?.lastSyncAt).toBe(syncStartTime);
- });
-
- it('should merge vector clocks', async () => {
- const config = createMockSyncConfig({
- vectorClock: { 'device-456': 1, 'device-111': 3 },
- });
- await db.syncMetadata.add(config);
-
- const syncStartTime = Date.now();
- const serverClock = { 'device-456': 2, 'device-789': 1 };
-
- await updateSyncMetadata(config, serverClock, syncStartTime);
-
- const updated = await db.syncMetadata.get('sync_config');
- expect(updated?.vectorClock).toEqual({
- 'device-456': 2, // Server clock wins (higher)
- 'device-789': 1, // From server
- 'device-111': 3, // Preserved from local
- });
- });
-
- it('should use sync start time to prevent race conditions', async () => {
- const config = createMockSyncConfig({ lastSyncAt: 1000 });
- await db.syncMetadata.add(config);
-
- const syncStartTime = 5000;
- const serverClock = { 'device-456': 1 };
-
- await updateSyncMetadata(config, serverClock, syncStartTime);
-
- const updated = await db.syncMetadata.get('sync_config');
- expect(updated?.lastSyncAt).toBe(5000);
- });
-
- it('should preserve other config fields', async () => {
- const config = createMockSyncConfig({
- userId: 'user-123',
- email: 'test@example.com',
- deviceName: 'Test Device',
- enabled: true,
- });
- await db.syncMetadata.add(config);
-
- const syncStartTime = Date.now();
- const serverClock = { 'device-456': 1 };
-
- await updateSyncMetadata(config, serverClock, syncStartTime);
-
- const updated = await db.syncMetadata.get('sync_config');
- expect(updated?.userId).toBe('user-123');
- expect(updated?.email).toBe('test@example.com');
- expect(updated?.deviceName).toBe('Test Device');
- expect(updated?.enabled).toBe(true);
- });
- });
-
- describe('getSyncConfig', () => {
- it('should return sync config when it exists', async () => {
- const config = createMockSyncConfig();
- await db.syncMetadata.add(config);
-
- const result = await getSyncConfig();
-
- expect(result).toBeDefined();
- expect(result?.key).toBe('sync_config');
- expect(result?.deviceId).toBe(config.deviceId);
- });
-
- it('should return null when config does not exist', async () => {
- const result = await getSyncConfig();
-
- expect(result).toBeUndefined();
- });
- });
-
- describe('updateConfig', () => {
- it('should update specific config fields', async () => {
- const config = createMockSyncConfig({ enabled: false });
- await db.syncMetadata.add(config);
-
- await updateConfig({ enabled: true });
-
- const updated = await db.syncMetadata.get('sync_config');
- expect(updated?.enabled).toBe(true);
- });
-
- it('should update multiple fields at once', async () => {
- const config = createMockSyncConfig({
- enabled: false,
- deviceName: 'Old Name',
- });
- await db.syncMetadata.add(config);
-
- await updateConfig({
- enabled: true,
- deviceName: 'New Name',
- email: 'new@example.com',
- });
-
- const updated = await db.syncMetadata.get('sync_config');
- expect(updated?.enabled).toBe(true);
- expect(updated?.deviceName).toBe('New Name');
- expect(updated?.email).toBe('new@example.com');
- });
-
- it('should preserve unchanged fields', async () => {
- const config = createMockSyncConfig({
- userId: 'user-123',
- deviceId: 'device-456',
- token: 'token-abc',
- });
- await db.syncMetadata.add(config);
-
- await updateConfig({ enabled: true });
-
- const updated = await db.syncMetadata.get('sync_config');
- expect(updated?.userId).toBe('user-123');
- expect(updated?.deviceId).toBe('device-456');
- expect(updated?.token).toBe('token-abc');
- });
-
- it('should throw error when config not initialized', async () => {
- await expect(updateConfig({ enabled: true })).rejects.toThrow(
- 'Sync config not initialized'
- );
- });
- });
-
- describe('isEnabled', () => {
- it('should return true when sync is enabled', async () => {
- await db.syncMetadata.add(createMockSyncConfig({ enabled: true }));
-
- const result = await isEnabled();
-
- expect(result).toBe(true);
- });
-
- it('should return false when sync is disabled', async () => {
- await db.syncMetadata.add(createMockSyncConfig({ enabled: false }));
-
- const result = await isEnabled();
-
- expect(result).toBe(false);
- });
-
- it('should return false when config does not exist', async () => {
- const result = await isEnabled();
-
- expect(result).toBe(false);
- });
- });
-
- describe('getStatus', () => {
- it('should return status with all fields when config exists', async () => {
- const config = createMockSyncConfig({
- enabled: true,
- lastSyncAt: 12345,
- });
- await db.syncMetadata.add(config);
-
- const status = await getStatus(false);
-
- expect(status.enabled).toBe(true);
- expect(status.lastSyncAt).toBe(12345);
- expect(status.pendingCount).toBe(0);
- expect(status.isRunning).toBe(false);
- });
-
- it('should return isRunning true when sync is active', async () => {
- await db.syncMetadata.add(createMockSyncConfig());
-
- const status = await getStatus(true);
-
- expect(status.isRunning).toBe(true);
- });
-
- it('should include pending queue count', async () => {
- await db.syncMetadata.add(createMockSyncConfig());
-
- const queue = getSyncQueue();
- await queue.enqueue('create', 'task-1', createMockTask(), {});
- await queue.enqueue('update', 'task-2', createMockTask({ id: 'task-2' }), {});
-
- const status = await getStatus(false);
-
- expect(status.pendingCount).toBe(2);
- });
-
- it('should return default values when config does not exist', async () => {
- const status = await getStatus(false);
-
- expect(status.enabled).toBe(false);
- expect(status.lastSyncAt).toBeNull();
- expect(status.pendingCount).toBe(0);
- expect(status.isRunning).toBe(false);
- });
- });
-
- describe('queueExistingTasks', () => {
- it('should queue all existing tasks', async () => {
- await db.syncMetadata.add(createMockSyncConfig({ enabled: true }));
-
- // Add tasks to database
- await db.tasks.add(createMockTask({ id: 'task-1', title: 'Task 1' }));
- await db.tasks.add(createMockTask({ id: 'task-2', title: 'Task 2' }));
- await db.tasks.add(createMockTask({ id: 'task-3', title: 'Task 3' }));
-
- const queuedCount = await queueExistingTasks();
-
- expect(queuedCount).toBe(3);
-
- const queue = getSyncQueue();
- const pending = await queue.getPending();
- expect(pending).toHaveLength(3);
-
- const taskIds = pending.map(p => p.taskId).sort();
- expect(taskIds).toEqual(['task-1', 'task-2', 'task-3']);
- });
-
- it('should skip tasks already in queue', async () => {
- await db.syncMetadata.add(createMockSyncConfig({ enabled: true }));
-
- // Add tasks to database
- await db.tasks.add(createMockTask({ id: 'task-1' }));
- await db.tasks.add(createMockTask({ id: 'task-2' }));
-
- // Pre-queue one task
- const queue = getSyncQueue();
- await queue.enqueue('create', 'task-1', createMockTask({ id: 'task-1' }), {});
-
- const queuedCount = await queueExistingTasks();
-
- expect(queuedCount).toBe(1); // Only task-2 should be queued
-
- const pending = await queue.getPending();
- expect(pending).toHaveLength(2); // task-1 (pre-existing) + task-2 (newly queued)
- });
-
- it('should return 0 when sync is not enabled', async () => {
- await db.syncMetadata.add(createMockSyncConfig({ enabled: false }));
-
- await db.tasks.add(createMockTask({ id: 'task-1' }));
-
- const queuedCount = await queueExistingTasks();
-
- expect(queuedCount).toBe(0);
-
- const queue = getSyncQueue();
- const pending = await queue.getPending();
- expect(pending).toHaveLength(0);
- });
-
- it('should return 0 when config does not exist', async () => {
- await db.tasks.add(createMockTask({ id: 'task-1' }));
-
- const queuedCount = await queueExistingTasks();
-
- expect(queuedCount).toBe(0);
- });
-
- it('should handle empty task list', async () => {
- await db.syncMetadata.add(createMockSyncConfig({ enabled: true }));
-
- const queuedCount = await queueExistingTasks();
-
- expect(queuedCount).toBe(0);
- });
-
- it('should queue tasks with create operation', async () => {
- await db.syncMetadata.add(createMockSyncConfig({ enabled: true }));
-
- await db.tasks.add(createMockTask({ id: 'task-1' }));
-
- await queueExistingTasks();
-
- const queue = getSyncQueue();
- const pending = await queue.getPending();
-
- expect(pending[0].operation).toBe('create');
- expect(pending[0].taskId).toBe('task-1');
- });
-
- it('should preserve task vector clocks when queueing', async () => {
- await db.syncMetadata.add(createMockSyncConfig({ enabled: true }));
-
- const taskWithClock = createMockTask({
- id: 'task-1',
- vectorClock: { 'device-456': 5 },
- });
- await db.tasks.add(taskWithClock);
-
- await queueExistingTasks();
-
- const queue = getSyncQueue();
- const pending = await queue.getPending();
-
- expect(pending[0].vectorClock).toEqual({ 'device-456': 5 });
- });
-
- it('should handle tasks without vector clocks', async () => {
- await db.syncMetadata.add(createMockSyncConfig({ enabled: true }));
-
- const taskWithoutClock = createMockTask({ id: 'task-1' });
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- delete (taskWithoutClock as any).vectorClock;
- await db.tasks.add(taskWithoutClock);
-
- await queueExistingTasks();
-
- const queue = getSyncQueue();
- const pending = await queue.getPending();
-
- expect(pending[0].vectorClock).toEqual({});
- });
- });
-});
diff --git a/tests/sync/oauth-handshake.test.ts b/tests/sync/oauth-handshake.test.ts
deleted file mode 100644
index 35f4c252..00000000
--- a/tests/sync/oauth-handshake.test.ts
+++ /dev/null
@@ -1,694 +0,0 @@
-/**
- * Tests for OAuth handshake functionality
- * Tests authorization flow, token exchange, state validation, and error handling
- */
-
-import { describe, it, expect, beforeEach, afterEach, vi } from 'vitest';
-import {
- subscribeToOAuthHandshake,
- announceOAuthState,
- retryOAuthHandshake,
- type OAuthAuthData,
-} from '@/lib/sync/oauth-handshake';
-import { createMockFetchResponse, createMockErrorResponse, mockConsole } from '../fixtures';
-
-// Mock environment config
-vi.mock('@/lib/env-config', () => ({
- ENV_CONFIG: {
- apiBaseUrl: 'http://localhost:8787',
- oauthCallbackUrl: 'http://localhost:3000/auth/callback',
- isDevelopment: true,
- isProduction: false,
- isStaging: false,
- environment: 'development',
- },
-}));
-
-// Mock sonner toast
-vi.mock('sonner', () => ({
- toast: {
- error: vi.fn(),
- success: vi.fn(),
- },
-}));
-
-describe('OAuth Handshake', () => {
- let consoleMock: ReturnType;
- let originalFetch: typeof global.fetch;
- let originalSessionStorage: Storage;
- let originalLocalStorage: Storage;
- let mockSessionStorage: Map;
- let mockLocalStorage: Map;
-
- beforeEach(() => {
- // Mock console to suppress logs
- consoleMock = mockConsole();
-
- // Save original fetch
- originalFetch = global.fetch;
-
- // Mock sessionStorage
- mockSessionStorage = new Map();
- originalSessionStorage = window.sessionStorage;
- Object.defineProperty(window, 'sessionStorage', {
- value: {
- getItem: vi.fn((key: string) => mockSessionStorage.get(key) ?? null),
- setItem: vi.fn((key: string, value: string) => mockSessionStorage.set(key, value)),
- removeItem: vi.fn((key: string) => mockSessionStorage.delete(key)),
- clear: vi.fn(() => mockSessionStorage.clear()),
- },
- writable: true,
- });
-
- // Mock localStorage
- mockLocalStorage = new Map();
- originalLocalStorage = window.localStorage;
- Object.defineProperty(window, 'localStorage', {
- value: {
- getItem: vi.fn((key: string) => mockLocalStorage.get(key) ?? null),
- setItem: vi.fn((key: string, value: string) => mockLocalStorage.set(key, value)),
- removeItem: vi.fn((key: string) => mockLocalStorage.delete(key)),
- clear: vi.fn(() => mockLocalStorage.clear()),
- },
- writable: true,
- });
-
- // Reset fetch mock
- global.fetch = vi.fn();
- });
-
- afterEach(() => {
- // Restore console
- consoleMock.restore();
-
- // Restore fetch
- global.fetch = originalFetch;
-
- // Restore storage
- Object.defineProperty(window, 'sessionStorage', {
- value: originalSessionStorage,
- writable: true,
- });
- Object.defineProperty(window, 'localStorage', {
- value: originalLocalStorage,
- writable: true,
- });
-
- // Clear all mocks
- vi.clearAllMocks();
- });
-
- describe('subscribeToOAuthHandshake', () => {
- it('should register listener and return unsubscribe function', () => {
- const listener = vi.fn();
- const unsubscribe = subscribeToOAuthHandshake(listener);
-
- expect(typeof unsubscribe).toBe('function');
- expect(listener).not.toHaveBeenCalled();
-
- // Cleanup
- unsubscribe();
- });
-
- it('should call listener when OAuth event occurs', async () => {
- const listener = vi.fn();
- const unsubscribe = subscribeToOAuthHandshake(listener);
-
- const mockAuthData: OAuthAuthData = {
- userId: 'user-123',
- deviceId: 'device-456',
- email: 'test@example.com',
- token: 'test-token',
- expiresAt: Date.now() + 3600000,
- provider: 'google',
- };
-
- // Mock successful fetch response
- vi.mocked(global.fetch).mockResolvedValue(
- createMockFetchResponse({
- status: 'success',
- authData: mockAuthData,
- })
- );
-
- // Announce OAuth state
- await announceOAuthState('test-state-123', true);
-
- // Wait for async operations
- await new Promise(resolve => setTimeout(resolve, 50));
-
- expect(listener).toHaveBeenCalledWith(
- expect.objectContaining({
- status: 'success',
- state: 'test-state-123',
- authData: mockAuthData,
- })
- );
-
- unsubscribe();
- });
-
- it('should remove listener when unsubscribe is called', async () => {
- const listener = vi.fn();
- const unsubscribe = subscribeToOAuthHandshake(listener);
-
- // Unsubscribe immediately
- unsubscribe();
-
- const mockAuthData: OAuthAuthData = {
- userId: 'user-123',
- deviceId: 'device-456',
- email: 'test@example.com',
- token: 'test-token',
- expiresAt: Date.now() + 3600000,
- provider: 'google',
- };
-
- vi.mocked(global.fetch).mockResolvedValue(
- createMockFetchResponse({
- status: 'success',
- authData: mockAuthData,
- })
- );
-
- await announceOAuthState('test-state-456', true);
- await new Promise(resolve => setTimeout(resolve, 50));
-
- // Listener should not be called after unsubscribe
- expect(listener).not.toHaveBeenCalled();
- });
- });
-
- describe('announceOAuthState - successful authentication', () => {
- it('should fetch OAuth result and notify listeners on success', async () => {
- const listener = vi.fn();
- const unsubscribe = subscribeToOAuthHandshake(listener);
-
- const mockAuthData: OAuthAuthData = {
- userId: 'user-789',
- deviceId: 'device-abc',
- email: 'success@example.com',
- token: 'success-token',
- expiresAt: Date.now() + 3600000,
- provider: 'github',
- };
-
- vi.mocked(global.fetch).mockResolvedValue(
- createMockFetchResponse({
- status: 'success',
- authData: mockAuthData,
- })
- );
-
- await announceOAuthState('success-state', true);
- await new Promise(resolve => setTimeout(resolve, 50));
-
- expect(global.fetch).toHaveBeenCalledWith(
- expect.stringContaining('/api/auth/oauth/result?state=success-state'),
- expect.objectContaining({
- method: 'GET',
- headers: { Accept: 'application/json' },
- credentials: 'include',
- })
- );
-
- expect(listener).toHaveBeenCalledWith({
- status: 'success',
- state: 'success-state',
- authData: mockAuthData,
- });
-
- unsubscribe();
- });
-
- it('should store result in storage', async () => {
- const mockAuthData: OAuthAuthData = {
- userId: 'user-123',
- deviceId: 'device-456',
- email: 'test@example.com',
- token: 'test-token',
- expiresAt: Date.now() + 3600000,
- provider: 'google',
- };
-
- vi.mocked(global.fetch).mockResolvedValue(
- createMockFetchResponse({
- status: 'success',
- authData: mockAuthData,
- })
- );
-
- await announceOAuthState('storage-test-state', true);
- await new Promise(resolve => setTimeout(resolve, 50));
-
- // Verify result was stored (implementation uses try-catch so may not always succeed)
- // The key behavior is that fetch was called and result was processed
- expect(global.fetch).toHaveBeenCalled();
- });
- });
-
- describe('announceOAuthState - error handling', () => {
- it('should handle OAuth error response', async () => {
- const listener = vi.fn();
- const unsubscribe = subscribeToOAuthHandshake(listener);
-
- vi.mocked(global.fetch).mockResolvedValue(
- createMockFetchResponse({
- status: 'error',
- error: 'Invalid authorization code',
- })
- );
-
- await announceOAuthState('error-state', false, 'Invalid authorization code');
- await new Promise(resolve => setTimeout(resolve, 50));
-
- expect(listener).toHaveBeenCalledWith({
- status: 'error',
- state: 'error-state',
- error: 'Invalid authorization code',
- });
-
- unsubscribe();
- });
-
- it('should handle network errors during fetch', async () => {
- const listener = vi.fn();
- const unsubscribe = subscribeToOAuthHandshake(listener);
-
- vi.mocked(global.fetch).mockRejectedValue(new Error('Network error'));
-
- await announceOAuthState('network-error-state', true);
- await new Promise(resolve => setTimeout(resolve, 50));
-
- expect(listener).toHaveBeenCalledWith({
- status: 'error',
- state: 'network-error-state',
- error: 'Network error',
- });
-
- unsubscribe();
- });
-
- it('should handle 410 Gone response (expired result)', async () => {
- const listener = vi.fn();
- const unsubscribe = subscribeToOAuthHandshake(listener);
-
- vi.mocked(global.fetch).mockResolvedValue(
- createMockErrorResponse(410, 'Gone')
- );
-
- await announceOAuthState('expired-state', true);
- await new Promise(resolve => setTimeout(resolve, 50));
-
- expect(listener).toHaveBeenCalledWith({
- status: 'error',
- state: 'expired-state',
- error: 'OAuth result expired. Please try again.',
- });
-
- unsubscribe();
- });
-
- it('should handle 401 Unauthorized response', async () => {
- const listener = vi.fn();
- const unsubscribe = subscribeToOAuthHandshake(listener);
-
- vi.mocked(global.fetch).mockResolvedValue(
- createMockErrorResponse(401, 'Unauthorized')
- );
-
- await announceOAuthState('unauthorized-state', true);
- await new Promise(resolve => setTimeout(resolve, 50));
-
- expect(listener).toHaveBeenCalledWith({
- status: 'error',
- state: 'unauthorized-state',
- error: 'Failed to complete OAuth.',
- });
-
- unsubscribe();
- });
-
- it('should handle malformed JSON response', async () => {
- const listener = vi.fn();
- const unsubscribe = subscribeToOAuthHandshake(listener);
-
- const malformedResponse = {
- ok: true,
- status: 200,
- json: async () => {
- throw new Error('Invalid JSON');
- },
- } as Response;
-
- vi.mocked(global.fetch).mockResolvedValue(malformedResponse);
-
- await announceOAuthState('malformed-state', true);
- await new Promise(resolve => setTimeout(resolve, 50));
-
- expect(listener).toHaveBeenCalledWith({
- status: 'error',
- state: 'malformed-state',
- error: 'OAuth failed. Please try again.',
- });
-
- unsubscribe();
- });
- });
-
- describe('state parameter validation (CSRF protection)', () => {
- it('should include state parameter in fetch request', async () => {
- const testState = 'csrf-protection-state-abc123';
-
- vi.mocked(global.fetch).mockResolvedValue(
- createMockFetchResponse({
- status: 'success',
- authData: {
- userId: 'user-123',
- deviceId: 'device-456',
- email: 'test@example.com',
- token: 'test-token',
- expiresAt: Date.now() + 3600000,
- provider: 'google',
- },
- })
- );
-
- await announceOAuthState(testState, true);
- await new Promise(resolve => setTimeout(resolve, 50));
-
- expect(global.fetch).toHaveBeenCalledWith(
- expect.stringContaining(`state=${encodeURIComponent(testState)}`),
- expect.any(Object)
- );
- });
-
- it('should properly encode state parameter in URL', async () => {
- const stateWithSpecialChars = 'state-with-special+chars=&?';
-
- vi.mocked(global.fetch).mockResolvedValue(
- createMockFetchResponse({
- status: 'success',
- authData: {
- userId: 'user-123',
- deviceId: 'device-456',
- email: 'test@example.com',
- token: 'test-token',
- expiresAt: Date.now() + 3600000,
- provider: 'google',
- },
- })
- );
-
- await announceOAuthState(stateWithSpecialChars, true);
- await new Promise(resolve => setTimeout(resolve, 50));
-
- // Verify state is properly URL encoded
- expect(global.fetch).toHaveBeenCalledWith(
- expect.stringContaining(encodeURIComponent(stateWithSpecialChars)),
- expect.any(Object)
- );
- });
- });
-
- describe('retryOAuthHandshake', () => {
- it('should allow retry of previously processed state', async () => {
- const listener = vi.fn();
- const unsubscribe = subscribeToOAuthHandshake(listener);
-
- const retryState = 'retry-state-456';
-
- vi.mocked(global.fetch).mockResolvedValue(
- createMockFetchResponse({
- status: 'success',
- authData: {
- userId: 'user-123',
- deviceId: 'device-456',
- email: 'test@example.com',
- token: 'test-token',
- expiresAt: Date.now() + 3600000,
- provider: 'google',
- },
- })
- );
-
- // First attempt
- await announceOAuthState(retryState, true);
- await new Promise(resolve => setTimeout(resolve, 50));
-
- expect(listener).toHaveBeenCalledTimes(1);
- listener.mockClear();
-
- // Retry
- await retryOAuthHandshake(retryState);
- await new Promise(resolve => setTimeout(resolve, 50));
-
- // Should call listener again after retry
- expect(listener).toHaveBeenCalledTimes(1);
-
- unsubscribe();
- });
- });
-
- describe('encryption setup detection', () => {
- it('should detect when encryption setup is required', async () => {
- const listener = vi.fn();
- const unsubscribe = subscribeToOAuthHandshake(listener);
-
- const mockAuthData: OAuthAuthData = {
- userId: 'user-123',
- deviceId: 'device-456',
- email: 'test@example.com',
- token: 'test-token',
- expiresAt: Date.now() + 3600000,
- provider: 'google',
- requiresEncryptionSetup: true,
- };
-
- vi.mocked(global.fetch).mockResolvedValue(
- createMockFetchResponse({
- status: 'success',
- authData: mockAuthData,
- })
- );
-
- await announceOAuthState('encryption-setup-state', true);
- await new Promise(resolve => setTimeout(resolve, 50));
-
- expect(listener).toHaveBeenCalledWith(
- expect.objectContaining({
- status: 'success',
- authData: expect.objectContaining({
- requiresEncryptionSetup: true,
- }),
- })
- );
-
- unsubscribe();
- });
-
- it('should include encryption salt when provided', async () => {
- const listener = vi.fn();
- const unsubscribe = subscribeToOAuthHandshake(listener);
-
- const mockAuthData: OAuthAuthData = {
- userId: 'user-123',
- deviceId: 'device-456',
- email: 'test@example.com',
- token: 'test-token',
- expiresAt: Date.now() + 3600000,
- provider: 'google',
- encryptionSalt: 'base64-encoded-salt',
- };
-
- vi.mocked(global.fetch).mockResolvedValue(
- createMockFetchResponse({
- status: 'success',
- authData: mockAuthData,
- })
- );
-
- await announceOAuthState('encryption-salt-state', true);
- await new Promise(resolve => setTimeout(resolve, 50));
-
- expect(listener).toHaveBeenCalledWith(
- expect.objectContaining({
- status: 'success',
- authData: expect.objectContaining({
- encryptionSalt: 'base64-encoded-salt',
- }),
- })
- );
-
- unsubscribe();
- });
- });
-
- describe('OAuth provider support', () => {
- it('should handle Google OAuth provider', async () => {
- const listener = vi.fn();
- const unsubscribe = subscribeToOAuthHandshake(listener);
-
- vi.mocked(global.fetch).mockResolvedValue(
- createMockFetchResponse({
- status: 'success',
- authData: {
- userId: 'google-user-123',
- deviceId: 'device-456',
- email: 'google@example.com',
- token: 'google-token',
- expiresAt: Date.now() + 3600000,
- provider: 'google',
- },
- })
- );
-
- await announceOAuthState('google-state', true);
- await new Promise(resolve => setTimeout(resolve, 50));
-
- expect(listener).toHaveBeenCalledWith(
- expect.objectContaining({
- authData: expect.objectContaining({
- provider: 'google',
- }),
- })
- );
-
- unsubscribe();
- });
-
- it('should handle GitHub OAuth provider', async () => {
- const listener = vi.fn();
- const unsubscribe = subscribeToOAuthHandshake(listener);
-
- vi.mocked(global.fetch).mockResolvedValue(
- createMockFetchResponse({
- status: 'success',
- authData: {
- userId: 'github-user-456',
- deviceId: 'device-789',
- email: 'github@example.com',
- token: 'github-token',
- expiresAt: Date.now() + 3600000,
- provider: 'github',
- },
- })
- );
-
- await announceOAuthState('github-state', true);
- await new Promise(resolve => setTimeout(resolve, 50));
-
- expect(listener).toHaveBeenCalledWith(
- expect.objectContaining({
- authData: expect.objectContaining({
- provider: 'github',
- }),
- })
- );
-
- unsubscribe();
- });
- });
-
- describe('cross-tab communication', () => {
- it('should fetch and process OAuth result for cross-tab scenarios', async () => {
- const listener = vi.fn();
- const unsubscribe = subscribeToOAuthHandshake(listener);
-
- vi.mocked(global.fetch).mockResolvedValue(
- createMockFetchResponse({
- status: 'success',
- authData: {
- userId: 'user-123',
- deviceId: 'device-456',
- email: 'test@example.com',
- token: 'test-token',
- expiresAt: Date.now() + 3600000,
- provider: 'google',
- },
- })
- );
-
- await announceOAuthState('cross-tab-state', true);
- await new Promise(resolve => setTimeout(resolve, 50));
-
- // Verify the OAuth result was fetched and listener was notified
- expect(global.fetch).toHaveBeenCalled();
- expect(listener).toHaveBeenCalledWith(
- expect.objectContaining({
- status: 'success',
- state: 'cross-tab-state',
- })
- );
-
- unsubscribe();
- });
- });
-
- describe('error message handling', () => {
- it('should use custom error message from response', async () => {
- const listener = vi.fn();
- const unsubscribe = subscribeToOAuthHandshake(listener);
-
- vi.mocked(global.fetch).mockResolvedValue(
- createMockFetchResponse({
- status: 'error',
- error: 'Custom error message from server',
- })
- );
-
- await announceOAuthState('custom-error-state', true);
- await new Promise(resolve => setTimeout(resolve, 50));
-
- expect(listener).toHaveBeenCalledWith({
- status: 'error',
- state: 'custom-error-state',
- error: 'Custom error message from server',
- });
-
- unsubscribe();
- });
-
- it('should use message field as fallback for error', async () => {
- const listener = vi.fn();
- const unsubscribe = subscribeToOAuthHandshake(listener);
-
- vi.mocked(global.fetch).mockResolvedValue(
- createMockFetchResponse({
- message: 'Error message in message field',
- })
- );
-
- await announceOAuthState('message-field-state', true);
- await new Promise(resolve => setTimeout(resolve, 50));
-
- expect(listener).toHaveBeenCalledWith({
- status: 'error',
- state: 'message-field-state',
- error: 'Error message in message field',
- });
-
- unsubscribe();
- });
-
- it('should use default error message when none provided', async () => {
- const listener = vi.fn();
- const unsubscribe = subscribeToOAuthHandshake(listener);
-
- vi.mocked(global.fetch).mockResolvedValue(
- createMockFetchResponse({})
- );
-
- await announceOAuthState('default-error-state', true);
- await new Promise(resolve => setTimeout(resolve, 50));
-
- expect(listener).toHaveBeenCalledWith({
- status: 'error',
- state: 'default-error-state',
- error: 'OAuth failed. Please try again.',
- });
-
- unsubscribe();
- });
- });
-});
diff --git a/tests/sync/pull-handler.test.ts b/tests/sync/pull-handler.test.ts
deleted file mode 100644
index 0954cb7e..00000000
--- a/tests/sync/pull-handler.test.ts
+++ /dev/null
@@ -1,627 +0,0 @@
-/**
- * Tests for pull-handler - remote-to-local sync operations
- */
-
-import { describe, it, expect, beforeEach, afterEach, vi } from 'vitest';
-import { getDb } from '@/lib/db';
-import { pullRemoteChanges } from '@/lib/sync/engine/pull-handler';
-import {
- createMockSyncConfig,
- createMockTask,
- createMockEncryptedTaskBlob,
- createMockPullResponse,
- mockConsole,
-} from '../fixtures';
-import type { PullContext } from '@/lib/sync/engine/pull-handler';
-import type { CryptoManager } from '@/lib/sync/crypto';
-import type { SyncApiClient } from '@/lib/sync/api-client';
-
-describe('pullRemoteChanges', () => {
- let db: ReturnType;
- let consoleMock: ReturnType;
- let mockCrypto: CryptoManager;
- let mockApi: SyncApiClient;
- let context: PullContext;
-
- beforeEach(async () => {
- db = getDb();
- consoleMock = mockConsole();
-
- // Clear database
- await db.delete();
- await db.open();
-
- // Create mock crypto manager
- mockCrypto = {
- isInitialized: vi.fn(() => true),
- // eslint-disable-next-line @typescript-eslint/no-unused-vars
- encrypt: vi.fn(async (_data: string) => ({
- ciphertext: 'encrypted',
- nonce: 'nonce',
- })),
- // eslint-disable-next-line @typescript-eslint/no-unused-vars
- decrypt: vi.fn(async (_ciphertext: string, _nonce: string) => {
- // Return a valid task JSON
- const task = createMockTask({ id: 'decrypted-task' });
- return JSON.stringify(task);
- }),
- // eslint-disable-next-line @typescript-eslint/no-unused-vars
- hash: vi.fn(async (_data: string) => 'hash'),
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- } as any;
-
- // Create mock API client
- mockApi = {
- setToken: vi.fn(),
- push: vi.fn(),
- pull: vi.fn(async () => createMockPullResponse()),
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- } as any;
-
- context = {
- crypto: mockCrypto,
- api: mockApi,
- };
- });
-
- afterEach(async () => {
- consoleMock.restore();
- await db.delete();
- vi.clearAllMocks();
- });
-
- describe('remote-to-local sync flow', () => {
- it('should pull tasks from server and save to local database', async () => {
- const config = createMockSyncConfig();
- const remoteTask = createMockTask({ id: 'remote-task-1', title: 'Remote Task' });
-
- vi.mocked(mockApi.pull).mockResolvedValue(
- createMockPullResponse({
- tasks: [
- createMockEncryptedTaskBlob({
- id: 'remote-task-1',
- updatedAt: Date.now(),
- }),
- ],
- })
- );
-
- vi.mocked(mockCrypto.decrypt).mockResolvedValue(JSON.stringify(remoteTask));
-
- const result = await pullRemoteChanges(config, context);
-
- expect(result.tasks).toHaveLength(1);
-
- const savedTask = await db.tasks.get('remote-task-1');
- expect(savedTask).toBeDefined();
- expect(savedTask?.title).toBe('Remote Task');
- });
-
- it('should decrypt encrypted task blobs', async () => {
- const config = createMockSyncConfig();
- const encryptedBlob = createMockEncryptedTaskBlob({
- id: 'encrypted-task',
- encryptedBlob: 'encrypted-data',
- nonce: 'test-nonce',
- });
-
- vi.mocked(mockApi.pull).mockResolvedValue(
- createMockPullResponse({ tasks: [encryptedBlob] })
- );
-
- await pullRemoteChanges(config, context);
-
- expect(mockCrypto.decrypt).toHaveBeenCalledWith('encrypted-data', 'test-nonce');
- });
-
- it('should merge vector clocks when saving tasks', async () => {
- const config = createMockSyncConfig({ vectorClock: { 'device-1': 1 } });
- const remoteTask = createMockTask({ id: 'task-1' });
-
- vi.mocked(mockApi.pull).mockResolvedValue(
- createMockPullResponse({
- tasks: [
- createMockEncryptedTaskBlob({
- id: 'task-1',
- vectorClock: { 'device-2': 2 },
- }),
- ],
- })
- );
-
- vi.mocked(mockCrypto.decrypt).mockResolvedValue(JSON.stringify(remoteTask));
-
- await pullRemoteChanges(config, context);
-
- const savedTask = await db.tasks.get('task-1');
- expect(savedTask?.vectorClock).toEqual({ 'device-2': 2 });
- });
-
- it('should handle multiple tasks in single pull', async () => {
- const config = createMockSyncConfig();
- const task1 = createMockTask({ id: 'task-1', title: 'Task 1' });
- const task2 = createMockTask({ id: 'task-2', title: 'Task 2' });
- const task3 = createMockTask({ id: 'task-3', title: 'Task 3' });
-
- vi.mocked(mockApi.pull).mockResolvedValue(
- createMockPullResponse({
- tasks: [
- createMockEncryptedTaskBlob({ id: 'task-1' }),
- createMockEncryptedTaskBlob({ id: 'task-2' }),
- createMockEncryptedTaskBlob({ id: 'task-3' }),
- ],
- })
- );
-
- vi.mocked(mockCrypto.decrypt)
- .mockResolvedValueOnce(JSON.stringify(task1))
- .mockResolvedValueOnce(JSON.stringify(task2))
- .mockResolvedValueOnce(JSON.stringify(task3));
-
- const result = await pullRemoteChanges(config, context);
-
- expect(result.tasks).toHaveLength(3);
- expect(await db.tasks.count()).toBe(3);
- });
- });
-
- describe('incremental pull with timestamps', () => {
- it('should send lastSyncAt timestamp to API', async () => {
- const lastSync = Date.now() - 60000; // 1 minute ago
- const config = createMockSyncConfig({ lastSyncAt: lastSync });
-
- await pullRemoteChanges(config, context);
-
- expect(mockApi.pull).toHaveBeenCalledWith(
- expect.objectContaining({
- sinceTimestamp: lastSync,
- })
- );
- });
-
- it('should send vector clock to API', async () => {
- const config = createMockSyncConfig({
- vectorClock: { 'device-1': 5, 'device-2': 3 },
- });
-
- await pullRemoteChanges(config, context);
-
- expect(mockApi.pull).toHaveBeenCalledWith(
- expect.objectContaining({
- lastVectorClock: { 'device-1': 5, 'device-2': 3 },
- })
- );
- });
-
- it('should omit sinceTimestamp when lastSyncAt is null', async () => {
- const config = createMockSyncConfig({ lastSyncAt: null });
-
- await pullRemoteChanges(config, context);
-
- expect(mockApi.pull).toHaveBeenCalledWith(
- expect.objectContaining({
- sinceTimestamp: undefined,
- })
- );
- });
-
- it('should use limit parameter for pagination', async () => {
- const config = createMockSyncConfig();
-
- await pullRemoteChanges(config, context);
-
- expect(mockApi.pull).toHaveBeenCalledWith(
- expect.objectContaining({
- limit: 50,
- })
- );
- });
- });
-
- describe('conflict detection during pull', () => {
- it('should apply remote version when remote is newer', async () => {
- const config = createMockSyncConfig();
- const now = Date.now();
-
- // Create local task (older)
- const localTask = createMockTask({
- id: 'conflict-task',
- title: 'Local Version',
- updatedAt: new Date(now - 5000).toISOString(),
- });
- await db.tasks.add(localTask);
-
- // Remote task (newer)
- const remoteTask = createMockTask({
- id: 'conflict-task',
- title: 'Remote Version',
- updatedAt: new Date(now).toISOString(),
- });
-
- vi.mocked(mockApi.pull).mockResolvedValue(
- createMockPullResponse({
- tasks: [
- createMockEncryptedTaskBlob({
- id: 'conflict-task',
- updatedAt: now,
- }),
- ],
- })
- );
-
- vi.mocked(mockCrypto.decrypt).mockResolvedValue(JSON.stringify(remoteTask));
-
- await pullRemoteChanges(config, context);
-
- const savedTask = await db.tasks.get('conflict-task');
- expect(savedTask?.title).toBe('Remote Version');
- });
-
- it('should keep local version when local is newer', async () => {
- const config = createMockSyncConfig();
- const now = Date.now();
-
- // Create local task (newer)
- const localTask = createMockTask({
- id: 'conflict-task',
- title: 'Local Version',
- updatedAt: new Date(now).toISOString(),
- });
- await db.tasks.add(localTask);
-
- // Remote task (older)
- const remoteTask = createMockTask({
- id: 'conflict-task',
- title: 'Remote Version',
- updatedAt: new Date(now - 5000).toISOString(),
- });
-
- vi.mocked(mockApi.pull).mockResolvedValue(
- createMockPullResponse({
- tasks: [
- createMockEncryptedTaskBlob({
- id: 'conflict-task',
- updatedAt: now - 5000,
- }),
- ],
- })
- );
-
- vi.mocked(mockCrypto.decrypt).mockResolvedValue(JSON.stringify(remoteTask));
-
- await pullRemoteChanges(config, context);
-
- const savedTask = await db.tasks.get('conflict-task');
- expect(savedTask?.title).toBe('Local Version');
- });
-
- it('should apply remote version when timestamps are equal', async () => {
- const config = createMockSyncConfig();
- const now = Date.now();
- const timestamp = new Date(now).toISOString();
-
- // Create local task
- const localTask = createMockTask({
- id: 'conflict-task',
- title: 'Local Version',
- updatedAt: timestamp,
- });
- await db.tasks.add(localTask);
-
- // Remote task with same timestamp
- const remoteTask = createMockTask({
- id: 'conflict-task',
- title: 'Remote Version',
- updatedAt: timestamp,
- });
-
- vi.mocked(mockApi.pull).mockResolvedValue(
- createMockPullResponse({
- tasks: [
- createMockEncryptedTaskBlob({
- id: 'conflict-task',
- updatedAt: now,
- }),
- ],
- })
- );
-
- vi.mocked(mockCrypto.decrypt).mockResolvedValue(JSON.stringify(remoteTask));
-
- await pullRemoteChanges(config, context);
-
- const savedTask = await db.tasks.get('conflict-task');
- expect(savedTask?.title).toBe('Remote Version');
- });
-
- it('should create new task when no local version exists', async () => {
- const config = createMockSyncConfig();
- const remoteTask = createMockTask({
- id: 'new-task',
- title: 'New Remote Task',
- });
-
- vi.mocked(mockApi.pull).mockResolvedValue(
- createMockPullResponse({
- tasks: [createMockEncryptedTaskBlob({ id: 'new-task' })],
- })
- );
-
- vi.mocked(mockCrypto.decrypt).mockResolvedValue(JSON.stringify(remoteTask));
-
- await pullRemoteChanges(config, context);
-
- const savedTask = await db.tasks.get('new-task');
- expect(savedTask).toBeDefined();
- expect(savedTask?.title).toBe('New Remote Task');
- });
- });
-
- describe('local database updates', () => {
- it('should update existing tasks', async () => {
- const config = createMockSyncConfig();
- const now = Date.now();
-
- // Create existing task
- await db.tasks.add(
- createMockTask({
- id: 'existing-task',
- title: 'Old Title',
- completed: false,
- updatedAt: new Date(now - 10000).toISOString(),
- })
- );
-
- // Remote update
- const updatedTask = createMockTask({
- id: 'existing-task',
- title: 'New Title',
- completed: true,
- updatedAt: new Date(now).toISOString(),
- });
-
- vi.mocked(mockApi.pull).mockResolvedValue(
- createMockPullResponse({
- tasks: [
- createMockEncryptedTaskBlob({
- id: 'existing-task',
- updatedAt: now,
- }),
- ],
- })
- );
-
- vi.mocked(mockCrypto.decrypt).mockResolvedValue(JSON.stringify(updatedTask));
-
- await pullRemoteChanges(config, context);
-
- const savedTask = await db.tasks.get('existing-task');
- expect(savedTask?.title).toBe('New Title');
- expect(savedTask?.completed).toBe(true);
- });
-
- it('should handle task deletions', async () => {
- const config = createMockSyncConfig();
-
- // Create tasks to be deleted
- await db.tasks.add(createMockTask({ id: 'task-1' }));
- await db.tasks.add(createMockTask({ id: 'task-2' }));
- await db.tasks.add(createMockTask({ id: 'task-3' }));
-
- vi.mocked(mockApi.pull).mockResolvedValue(
- createMockPullResponse({
- deletedTaskIds: ['task-1', 'task-3'],
- })
- );
-
- await pullRemoteChanges(config, context);
-
- expect(await db.tasks.get('task-1')).toBeUndefined();
- expect(await db.tasks.get('task-2')).toBeDefined();
- expect(await db.tasks.get('task-3')).toBeUndefined();
- });
-
- it('should handle bulk deletions', async () => {
- const config = createMockSyncConfig();
-
- // Create many tasks
- for (let i = 1; i <= 10; i++) {
- await db.tasks.add(createMockTask({ id: `task-${i}` }));
- }
-
- const deletedIds = ['task-1', 'task-3', 'task-5', 'task-7', 'task-9'];
-
- vi.mocked(mockApi.pull).mockResolvedValue(
- createMockPullResponse({ deletedTaskIds: deletedIds })
- );
-
- await pullRemoteChanges(config, context);
-
- expect(await db.tasks.count()).toBe(5);
-
- for (const id of deletedIds) {
- expect(await db.tasks.get(id)).toBeUndefined();
- }
- });
-
- it('should preserve local vector clock when merging', async () => {
- const config = createMockSyncConfig();
-
- // Create local task with vector clock
- await db.tasks.add(
- createMockTask({
- id: 'task-1',
- vectorClock: { 'device-1': 3 },
- updatedAt: new Date(Date.now() - 10000).toISOString(),
- })
- );
-
- const remoteTask = createMockTask({
- id: 'task-1',
- updatedAt: new Date().toISOString(),
- });
-
- vi.mocked(mockApi.pull).mockResolvedValue(
- createMockPullResponse({
- tasks: [
- createMockEncryptedTaskBlob({
- id: 'task-1',
- vectorClock: { 'device-2': 5 },
- updatedAt: Date.now(),
- }),
- ],
- })
- );
-
- vi.mocked(mockCrypto.decrypt).mockResolvedValue(JSON.stringify(remoteTask));
-
- await pullRemoteChanges(config, context);
-
- const savedTask = await db.tasks.get('task-1');
- // Vector clock should be merged (contains both device clocks)
- expect(savedTask?.vectorClock).toEqual({ 'device-1': 3, 'device-2': 5 });
- });
- });
-
- describe('error handling', () => {
- it('should throw error when API pull fails', async () => {
- const config = createMockSyncConfig();
- const apiError = new Error('Network error');
-
- vi.mocked(mockApi.pull).mockRejectedValue(apiError);
-
- await expect(pullRemoteChanges(config, context)).rejects.toThrow('Network error');
- });
-
- it('should continue processing other tasks when one fails decryption', async () => {
- const config = createMockSyncConfig();
- const task2 = createMockTask({ id: 'task-2', title: 'Task 2' });
- const task3 = createMockTask({ id: 'task-3', title: 'Task 3' });
-
- vi.mocked(mockApi.pull).mockResolvedValue(
- createMockPullResponse({
- tasks: [
- createMockEncryptedTaskBlob({ id: 'task-1' }),
- createMockEncryptedTaskBlob({ id: 'task-2' }),
- createMockEncryptedTaskBlob({ id: 'task-3' }),
- ],
- })
- );
-
- vi.mocked(mockCrypto.decrypt)
- .mockRejectedValueOnce(new Error('Decryption failed'))
- .mockResolvedValueOnce(JSON.stringify(task2))
- .mockResolvedValueOnce(JSON.stringify(task3));
-
- const result = await pullRemoteChanges(config, context);
-
- // Should still process remaining tasks
- expect(result.tasks).toHaveLength(3);
- expect(await db.tasks.get('task-1')).toBeUndefined();
- expect(await db.tasks.get('task-2')).toBeDefined();
- expect(await db.tasks.get('task-3')).toBeDefined();
- });
-
- it('should continue processing when task validation fails', async () => {
- const config = createMockSyncConfig();
- const validTask = createMockTask({ id: 'valid-task' });
-
- vi.mocked(mockApi.pull).mockResolvedValue(
- createMockPullResponse({
- tasks: [
- createMockEncryptedTaskBlob({ id: 'invalid-task' }),
- createMockEncryptedTaskBlob({ id: 'valid-task' }),
- ],
- })
- );
-
- vi.mocked(mockCrypto.decrypt)
- .mockResolvedValueOnce('{ "invalid": "json" }') // Invalid task schema
- .mockResolvedValueOnce(JSON.stringify(validTask));
-
- const result = await pullRemoteChanges(config, context);
-
- expect(result.tasks).toHaveLength(2);
- expect(await db.tasks.get('invalid-task')).toBeUndefined();
- expect(await db.tasks.get('valid-task')).toBeDefined();
- });
-
- it('should handle empty pull response', async () => {
- const config = createMockSyncConfig();
-
- vi.mocked(mockApi.pull).mockResolvedValue(
- createMockPullResponse({
- tasks: [],
- deletedTaskIds: [],
- })
- );
-
- const result = await pullRemoteChanges(config, context);
-
- expect(result.tasks).toHaveLength(0);
- expect(result.deletedTaskIds).toHaveLength(0);
- });
- });
-
- describe('return value', () => {
- it('should return pull result with task count', async () => {
- const config = createMockSyncConfig();
-
- vi.mocked(mockApi.pull).mockResolvedValue(
- createMockPullResponse({
- tasks: [
- createMockEncryptedTaskBlob({ id: 'task-1' }),
- createMockEncryptedTaskBlob({ id: 'task-2' }),
- ],
- })
- );
-
- const result = await pullRemoteChanges(config, context);
-
- expect(result.tasks).toHaveLength(2);
- });
-
- it('should return deleted task IDs', async () => {
- const config = createMockSyncConfig();
-
- vi.mocked(mockApi.pull).mockResolvedValue(
- createMockPullResponse({
- deletedTaskIds: ['task-1', 'task-2', 'task-3'],
- })
- );
-
- const result = await pullRemoteChanges(config, context);
-
- expect(result.deletedTaskIds).toEqual(['task-1', 'task-2', 'task-3']);
- });
-
- it('should return server vector clock', async () => {
- const config = createMockSyncConfig();
- const serverClock = { 'device-1': 10, 'device-2': 5 };
-
- vi.mocked(mockApi.pull).mockResolvedValue(
- createMockPullResponse({
- serverVectorClock: serverClock,
- })
- );
-
- const result = await pullRemoteChanges(config, context);
-
- expect(result.serverVectorClock).toEqual(serverClock);
- });
-
- it('should return conflicts array', async () => {
- const config = createMockSyncConfig();
-
- vi.mocked(mockApi.pull).mockResolvedValue(
- createMockPullResponse({
- conflicts: [],
- })
- );
-
- const result = await pullRemoteChanges(config, context);
-
- expect(result.conflicts).toEqual([]);
- });
- });
-});
diff --git a/tests/sync/push-handler.test.ts b/tests/sync/push-handler.test.ts
deleted file mode 100644
index be3705e3..00000000
--- a/tests/sync/push-handler.test.ts
+++ /dev/null
@@ -1,673 +0,0 @@
-/**
- * Tests for push-handler - local-to-remote sync operations
- */
-
-import { describe, it, expect, beforeEach, afterEach, vi } from 'vitest';
-import { getDb } from '@/lib/db';
-import { pushLocalChanges } from '@/lib/sync/engine/push-handler';
-import { getSyncQueue } from '@/lib/sync/queue';
-import {
- createMockSyncConfig,
- createMockTask,
- createMockPushResponse,
- mockConsole,
-} from '../fixtures';
-import type { PushContext } from '@/lib/sync/engine/push-handler';
-import type { CryptoManager } from '@/lib/sync/crypto';
-import type { SyncApiClient } from '@/lib/sync/api-client';
-
-describe('pushLocalChanges', () => {
- let db: ReturnType;
- let consoleMock: ReturnType;
- let mockCrypto: CryptoManager;
- let mockApi: SyncApiClient;
- let context: PushContext;
- let queue: ReturnType;
-
- beforeEach(async () => {
- db = getDb();
- consoleMock = mockConsole();
- queue = getSyncQueue();
-
- // Clear database
- await db.delete();
- await db.open();
-
- // Create mock crypto manager
- mockCrypto = {
- isInitialized: vi.fn(() => true),
- encrypt: vi.fn(async () => ({
- ciphertext: 'encrypted-data',
- nonce: 'test-nonce',
- })),
- decrypt: vi.fn(async () => JSON.stringify(createMockTask())),
- hash: vi.fn(async () => 'checksum-abc123'),
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- } as any;
-
- // Create mock API client
- mockApi = {
- setToken: vi.fn(),
- push: vi.fn(async () => createMockPushResponse()),
- pull: vi.fn(),
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- } as any;
-
- context = {
- crypto: mockCrypto,
- api: mockApi,
- };
- });
-
- afterEach(async () => {
- consoleMock.restore();
- await db.delete();
- vi.clearAllMocks();
- });
-
- describe('local-to-remote sync flow', () => {
- it('should push pending operations to server', async () => {
- const config = createMockSyncConfig();
- const task = createMockTask({ id: 'task-1', title: 'Test Task' });
-
- // Add operation to queue
- await queue.enqueue('create', 'task-1', task, {});
-
- vi.mocked(mockApi.push).mockResolvedValue(
- createMockPushResponse({ accepted: ['task-1'] })
- );
-
- const result = await pushLocalChanges(config, context);
-
- expect(mockApi.push).toHaveBeenCalledWith(
- expect.objectContaining({
- deviceId: config.deviceId,
- operations: expect.arrayContaining([
- expect.objectContaining({
- type: 'create',
- taskId: 'task-1',
- }),
- ]),
- })
- );
- expect(result.accepted).toEqual(['task-1']);
- });
-
- it('should encrypt task payloads before pushing', async () => {
- const config = createMockSyncConfig();
- const task = createMockTask({ id: 'task-1' });
-
- await queue.enqueue('create', 'task-1', task, {});
-
- vi.mocked(mockApi.push).mockResolvedValue(
- createMockPushResponse({ accepted: ['task-1'] })
- );
-
- await pushLocalChanges(config, context);
-
- expect(mockCrypto.encrypt).toHaveBeenCalledWith(JSON.stringify(task));
- expect(mockCrypto.hash).toHaveBeenCalledWith(JSON.stringify(task));
- });
-
- it('should include encrypted blob and nonce in operations', async () => {
- const config = createMockSyncConfig();
- const task = createMockTask({ id: 'task-1' });
-
- await queue.enqueue('create', 'task-1', task, {});
-
- vi.mocked(mockCrypto.encrypt).mockResolvedValue({
- ciphertext: 'encrypted-blob',
- nonce: 'nonce-123',
- });
-
- vi.mocked(mockCrypto.hash).mockResolvedValue('checksum-xyz');
-
- vi.mocked(mockApi.push).mockResolvedValue(
- createMockPushResponse({ accepted: ['task-1'] })
- );
-
- await pushLocalChanges(config, context);
-
- expect(mockApi.push).toHaveBeenCalledWith(
- expect.objectContaining({
- operations: expect.arrayContaining([
- expect.objectContaining({
- encryptedBlob: 'encrypted-blob',
- nonce: 'nonce-123',
- checksum: 'checksum-xyz',
- }),
- ]),
- })
- );
- });
-
- it('should send vector clocks with operations', async () => {
- const config = createMockSyncConfig({ vectorClock: { 'device-456': 5 } });
- const task = createMockTask({ id: 'task-1' });
- const taskClock = { 'device-456': 3 };
-
- await queue.enqueue('create', 'task-1', task, taskClock);
-
- vi.mocked(mockApi.push).mockResolvedValue(
- createMockPushResponse({ accepted: ['task-1'] })
- );
-
- await pushLocalChanges(config, context);
-
- expect(mockApi.push).toHaveBeenCalledWith(
- expect.objectContaining({
- clientVectorClock: { 'device-456': 5 },
- operations: expect.arrayContaining([
- expect.objectContaining({
- vectorClock: taskClock,
- }),
- ]),
- })
- );
- });
-
- it('should handle delete operations without payload', async () => {
- const config = createMockSyncConfig();
-
- await queue.enqueue('delete', 'task-1', null, { 'device-456': 2 });
-
- vi.mocked(mockApi.push).mockResolvedValue(
- createMockPushResponse({ accepted: ['task-1'] })
- );
-
- await pushLocalChanges(config, context);
-
- expect(mockApi.push).toHaveBeenCalledWith(
- expect.objectContaining({
- operations: expect.arrayContaining([
- expect.objectContaining({
- type: 'delete',
- taskId: 'task-1',
- vectorClock: { 'device-456': 2 },
- }),
- ]),
- })
- );
-
- // Should not encrypt for delete operations
- expect(mockCrypto.encrypt).not.toHaveBeenCalled();
- });
-
- it('should return early when no pending operations', async () => {
- const config = createMockSyncConfig();
-
- const result = await pushLocalChanges(config, context);
-
- expect(mockApi.push).not.toHaveBeenCalled();
- expect(result.accepted).toEqual([]);
- expect(result.rejected).toEqual([]);
- });
- });
-
- describe('change detection and batching', () => {
- it('should batch multiple operations in single push', async () => {
- const config = createMockSyncConfig();
- const task1 = createMockTask({ id: 'task-1' });
- const task2 = createMockTask({ id: 'task-2' });
- // eslint-disable-next-line @typescript-eslint/no-unused-vars
- const _task3 = createMockTask({ id: 'task-3' });
-
- await queue.enqueue('create', 'task-1', task1, {});
- await queue.enqueue('update', 'task-2', task2, {});
- await queue.enqueue('delete', 'task-3', null, {});
-
- vi.mocked(mockApi.push).mockResolvedValue(
- createMockPushResponse({ accepted: ['task-1', 'task-2', 'task-3'] })
- );
-
- await pushLocalChanges(config, context);
-
- expect(mockApi.push).toHaveBeenCalledTimes(1);
- expect(mockApi.push).toHaveBeenCalledWith(
- expect.objectContaining({
- operations: expect.arrayContaining([
- expect.objectContaining({ type: 'create', taskId: 'task-1' }),
- expect.objectContaining({ type: 'update', taskId: 'task-2' }),
- expect.objectContaining({ type: 'delete', taskId: 'task-3' }),
- ]),
- })
- );
- });
-
- it('should handle multiple operations for same task', async () => {
- const config = createMockSyncConfig();
- const task = createMockTask({ id: 'task-1' });
-
- // Queue multiple operations for same task
- await queue.enqueue('create', 'task-1', task, {});
- await queue.enqueue('update', 'task-1', { ...task, title: 'Updated' }, {});
-
- vi.mocked(mockApi.push).mockResolvedValue(
- createMockPushResponse({ accepted: ['task-1'] })
- );
-
- await pushLocalChanges(config, context);
-
- // Both operations should be sent
- const pushCall = vi.mocked(mockApi.push).mock.calls[0][0];
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- const task1Ops = pushCall.operations.filter((op: any) => op.taskId === 'task-1');
- expect(task1Ops).toHaveLength(2);
- });
-
- it('should remove all queue items for accepted tasks', async () => {
- const config = createMockSyncConfig();
- const task = createMockTask({ id: 'task-1' });
-
- // Queue multiple operations for same task
- await queue.enqueue('create', 'task-1', task, {});
- await queue.enqueue('update', 'task-1', { ...task, title: 'Updated' }, {});
-
- vi.mocked(mockApi.push).mockResolvedValue(
- createMockPushResponse({ accepted: ['task-1'] })
- );
-
- await pushLocalChanges(config, context);
-
- // All operations for task-1 should be removed
- const remaining = await queue.getForTask('task-1');
- expect(remaining).toHaveLength(0);
- });
- });
-
- describe('push retry on failure', () => {
- it('should increment retry count for rejected operations', async () => {
- const config = createMockSyncConfig();
- const task = createMockTask({ id: 'task-1' });
-
- await queue.enqueue('create', 'task-1', task, {});
-
- const queueItems = await queue.getPending();
- const queueId = queueItems[0].id;
-
- vi.mocked(mockApi.push).mockResolvedValue(
- createMockPushResponse({
- rejected: [
- {
- taskId: 'task-1',
- reason: 'validation_error',
- details: 'Invalid task data',
- },
- ],
- })
- );
-
- await pushLocalChanges(config, context);
-
- const updatedItem = await db.syncQueue.get(queueId);
- expect(updatedItem?.retryCount).toBe(1);
- });
-
- it('should keep rejected operations in queue', async () => {
- const config = createMockSyncConfig();
- const task = createMockTask({ id: 'task-1' });
-
- await queue.enqueue('create', 'task-1', task, {});
-
- vi.mocked(mockApi.push).mockResolvedValue(
- createMockPushResponse({
- rejected: [
- {
- taskId: 'task-1',
- reason: 'version_mismatch',
- details: 'Version conflict',
- },
- ],
- })
- );
-
- await pushLocalChanges(config, context);
-
- const remaining = await queue.getPending();
- expect(remaining).toHaveLength(1);
- expect(remaining[0].taskId).toBe('task-1');
- });
-
- it('should throw error when push API call fails', async () => {
- const config = createMockSyncConfig();
- const task = createMockTask({ id: 'task-1' });
-
- await queue.enqueue('create', 'task-1', task, {});
-
- const networkError = new Error('Network error');
- vi.mocked(mockApi.push).mockRejectedValue(networkError);
-
- await expect(pushLocalChanges(config, context)).rejects.toThrow('Network error');
- });
-
- it('should not remove operations when push fails', async () => {
- const config = createMockSyncConfig();
- const task = createMockTask({ id: 'task-1' });
-
- await queue.enqueue('create', 'task-1', task, {});
-
- vi.mocked(mockApi.push).mockRejectedValue(new Error('Server error'));
-
- await expect(pushLocalChanges(config, context)).rejects.toThrow();
-
- const remaining = await queue.getPending();
- expect(remaining).toHaveLength(1);
- });
- });
-
- describe('conflict handling', () => {
- it('should remove conflicted operations from queue', async () => {
- const config = createMockSyncConfig();
- const task = createMockTask({ id: 'task-1' });
-
- await queue.enqueue('update', 'task-1', task, {});
-
- vi.mocked(mockApi.push).mockResolvedValue(
- createMockPushResponse({
- conflicts: [
- {
- taskId: 'task-1',
- local: task,
- remote: createMockTask({ id: 'task-1', title: 'Remote Version' }),
- localClock: { 'device-456': 2 },
- remoteClock: { 'device-789': 3 },
- },
- ],
- })
- );
-
- await pushLocalChanges(config, context);
-
- const remaining = await queue.getForTask('task-1');
- expect(remaining).toHaveLength(0);
- });
-
- it('should handle multiple conflicts', async () => {
- const config = createMockSyncConfig();
- const task1 = createMockTask({ id: 'task-1' });
- const task2 = createMockTask({ id: 'task-2' });
-
- await queue.enqueue('update', 'task-1', task1, {});
- await queue.enqueue('update', 'task-2', task2, {});
-
- vi.mocked(mockApi.push).mockResolvedValue(
- createMockPushResponse({
- conflicts: [
- {
- taskId: 'task-1',
- local: task1,
- remote: createMockTask({ id: 'task-1' }),
- localClock: {},
- remoteClock: {},
- },
- {
- taskId: 'task-2',
- local: task2,
- remote: createMockTask({ id: 'task-2' }),
- localClock: {},
- remoteClock: {},
- },
- ],
- })
- );
-
- await pushLocalChanges(config, context);
-
- expect(await queue.getForTask('task-1')).toHaveLength(0);
- expect(await queue.getForTask('task-2')).toHaveLength(0);
- });
-
- it('should remove all queue items for conflicted tasks', async () => {
- const config = createMockSyncConfig();
- const task = createMockTask({ id: 'task-1' });
-
- // Queue multiple operations for same task
- await queue.enqueue('create', 'task-1', task, {});
- await queue.enqueue('update', 'task-1', { ...task, title: 'Updated' }, {});
-
- vi.mocked(mockApi.push).mockResolvedValue(
- createMockPushResponse({
- conflicts: [
- {
- taskId: 'task-1',
- local: task,
- remote: createMockTask({ id: 'task-1' }),
- localClock: {},
- remoteClock: {},
- },
- ],
- })
- );
-
- await pushLocalChanges(config, context);
-
- const remaining = await queue.getForTask('task-1');
- expect(remaining).toHaveLength(0);
- });
- });
-
- describe('optimistic updates', () => {
- it('should remove accepted operations from queue immediately', async () => {
- const config = createMockSyncConfig();
- const task = createMockTask({ id: 'task-1' });
-
- await queue.enqueue('create', 'task-1', task, {});
-
- vi.mocked(mockApi.push).mockResolvedValue(
- createMockPushResponse({ accepted: ['task-1'] })
- );
-
- await pushLocalChanges(config, context);
-
- const remaining = await queue.getPending();
- expect(remaining).toHaveLength(0);
- });
-
- it('should handle partial acceptance', async () => {
- const config = createMockSyncConfig();
- const task1 = createMockTask({ id: 'task-1' });
- const task2 = createMockTask({ id: 'task-2' });
- const task3 = createMockTask({ id: 'task-3' });
-
- await queue.enqueue('create', 'task-1', task1, {});
- await queue.enqueue('create', 'task-2', task2, {});
- await queue.enqueue('create', 'task-3', task3, {});
-
- vi.mocked(mockApi.push).mockResolvedValue(
- createMockPushResponse({
- accepted: ['task-1', 'task-3'],
- rejected: [
- {
- taskId: 'task-2',
- reason: 'validation_error',
- details: 'Invalid data',
- },
- ],
- })
- );
-
- await pushLocalChanges(config, context);
-
- const remaining = await queue.getPending();
- expect(remaining).toHaveLength(1);
- expect(remaining[0].taskId).toBe('task-2');
- });
-
- it('should verify queue cleanup after acceptance', async () => {
- const config = createMockSyncConfig();
- const task = createMockTask({ id: 'task-1' });
-
- await queue.enqueue('create', 'task-1', task, {});
-
- vi.mocked(mockApi.push).mockResolvedValue(
- createMockPushResponse({ accepted: ['task-1'] })
- );
-
- const initialCount = await queue.getPendingCount();
- expect(initialCount).toBe(1);
-
- await pushLocalChanges(config, context);
-
- const finalCount = await queue.getPendingCount();
- expect(finalCount).toBe(0);
- });
- });
-
- describe('error handling', () => {
- it('should continue processing when encryption fails for one task', async () => {
- const config = createMockSyncConfig();
- const task1 = createMockTask({ id: 'task-1' });
- const task2 = createMockTask({ id: 'task-2' });
-
- await queue.enqueue('create', 'task-1', task1, {});
- await queue.enqueue('create', 'task-2', task2, {});
-
- vi.mocked(mockCrypto.encrypt)
- .mockRejectedValueOnce(new Error('Encryption failed'))
- .mockResolvedValueOnce({ ciphertext: 'encrypted', nonce: 'nonce' });
-
- vi.mocked(mockApi.push).mockResolvedValue(
- createMockPushResponse({ accepted: ['task-2'] })
- );
-
- await pushLocalChanges(config, context);
-
- // Should still push exactly one task (the one that encrypted successfully)
- const pushArgs = vi.mocked(mockApi.push).mock.calls[0]?.[0];
- expect(pushArgs).toEqual(
- expect.objectContaining({
- operations: expect.arrayContaining([
- expect.objectContaining({
- taskId: expect.stringMatching(/^task-[12]$/),
- }),
- ]),
- })
- );
- expect(pushArgs?.operations).toHaveLength(1);
- });
-
- it('should handle empty accepted array', async () => {
- const config = createMockSyncConfig();
- const task = createMockTask({ id: 'task-1' });
-
- await queue.enqueue('create', 'task-1', task, {});
-
- vi.mocked(mockApi.push).mockResolvedValue(
- createMockPushResponse({ accepted: [] })
- );
-
- await pushLocalChanges(config, context);
-
- // Task should remain in queue
- const remaining = await queue.getPending();
- expect(remaining).toHaveLength(1);
- });
-
- it('should handle server accepting unknown task ID', async () => {
- const config = createMockSyncConfig();
- const task = createMockTask({ id: 'task-1' });
-
- await queue.enqueue('create', 'task-1', task, {});
-
- // Server accepts a task we didn't send
- vi.mocked(mockApi.push).mockResolvedValue(
- createMockPushResponse({ accepted: ['task-1', 'unknown-task'] })
- );
-
- // Should not throw error
- await expect(pushLocalChanges(config, context)).resolves.toBeDefined();
- });
- });
-
- describe('return value', () => {
- it('should return push response with accepted tasks', async () => {
- const config = createMockSyncConfig();
- const task = createMockTask({ id: 'task-1' });
-
- await queue.enqueue('create', 'task-1', task, {});
-
- vi.mocked(mockApi.push).mockResolvedValue(
- createMockPushResponse({ accepted: ['task-1'] })
- );
-
- const result = await pushLocalChanges(config, context);
-
- expect(result.accepted).toEqual(['task-1']);
- });
-
- it('should return rejected operations', async () => {
- const config = createMockSyncConfig();
- const task = createMockTask({ id: 'task-1' });
-
- await queue.enqueue('create', 'task-1', task, {});
-
- const rejection = {
- taskId: 'task-1',
- reason: 'validation_error' as const,
- details: 'Invalid data',
- };
-
- vi.mocked(mockApi.push).mockResolvedValue(
- createMockPushResponse({ rejected: [rejection] })
- );
-
- const result = await pushLocalChanges(config, context);
-
- expect(result.rejected).toEqual([rejection]);
- });
-
- it('should return conflicts', async () => {
- const config = createMockSyncConfig();
- const task = createMockTask({ id: 'task-1' });
-
- await queue.enqueue('update', 'task-1', task, {});
-
- const conflict = {
- taskId: 'task-1',
- local: task,
- remote: createMockTask({ id: 'task-1' }),
- localClock: {},
- remoteClock: {},
- };
-
- vi.mocked(mockApi.push).mockResolvedValue(
- createMockPushResponse({ conflicts: [conflict] })
- );
-
- const result = await pushLocalChanges(config, context);
-
- expect(result.conflicts).toEqual([conflict]);
- });
-
- it('should return server vector clock', async () => {
- const config = createMockSyncConfig();
- const task = createMockTask({ id: 'task-1' });
-
- await queue.enqueue('create', 'task-1', task, {});
-
- const serverClock = { 'device-456': 10, 'device-789': 5 };
-
- vi.mocked(mockApi.push).mockResolvedValue(
- createMockPushResponse({
- accepted: ['task-1'],
- serverVectorClock: serverClock,
- })
- );
-
- const result = await pushLocalChanges(config, context);
-
- expect(result.serverVectorClock).toEqual(serverClock);
- });
-
- it('should return empty result when no operations', async () => {
- const config = createMockSyncConfig();
-
- const result = await pushLocalChanges(config, context);
-
- expect(result.accepted).toEqual([]);
- expect(result.rejected).toEqual([]);
- expect(result.conflicts).toEqual([]);
- expect(result.serverVectorClock).toEqual({});
- });
- });
-});
diff --git a/tests/sync/queue-optimizer.test.ts b/tests/sync/queue-optimizer.test.ts
index 5083b962..bcb16371 100644
--- a/tests/sync/queue-optimizer.test.ts
+++ b/tests/sync/queue-optimizer.test.ts
@@ -46,15 +46,14 @@ describe('QueueOptimizer', () => {
updatedAt: new Date().toISOString(),
notificationEnabled: true,
notificationSent: false,
- vectorClock: { device1: 1 },
};
// Add multiple update operations (with small delays to ensure different timestamps)
- await queue.enqueue('update', taskId, { ...basePayload, title: 'Update 1' }, { device1: 1 });
+ await queue.enqueue('update', taskId, { ...basePayload, title: 'Update 1' });
await new Promise(resolve => setTimeout(resolve, 10));
- await queue.enqueue('update', taskId, { ...basePayload, title: 'Update 2' }, { device1: 2 });
+ await queue.enqueue('update', taskId, { ...basePayload, title: 'Update 2' });
await new Promise(resolve => setTimeout(resolve, 10));
- await queue.enqueue('update', taskId, { ...basePayload, title: 'Update 3' }, { device1: 3 });
+ await queue.enqueue('update', taskId, { ...basePayload, title: 'Update 3' });
let pending = await queue.getPending();
expect(pending.length).toBe(3);
@@ -65,7 +64,6 @@ describe('QueueOptimizer', () => {
pending = await queue.getPending();
expect(pending.length).toBe(1);
expect(pending[0].payload?.title).toBe('Update 3'); // Latest payload
- expect(pending[0].vectorClock).toEqual({ device1: 3 }); // Latest vector clock
});
});
@@ -88,17 +86,16 @@ describe('QueueOptimizer', () => {
updatedAt: new Date().toISOString(),
notificationEnabled: true,
notificationSent: false,
- vectorClock: { device1: 1 },
};
// Add create and update operations (with small delays to ensure different timestamps)
- await queue.enqueue('create', taskId, basePayload, { device1: 1 });
+ await queue.enqueue('create', taskId, basePayload);
await new Promise(resolve => setTimeout(resolve, 10));
- await queue.enqueue('update', taskId, { ...basePayload, title: 'Updated' }, { device1: 2 });
+ await queue.enqueue('update', taskId, { ...basePayload, title: 'Updated' });
await new Promise(resolve => setTimeout(resolve, 10));
// Add delete operation
- await queue.enqueue('delete', taskId, null, { device1: 3 });
+ await queue.enqueue('delete', taskId, null);
let pending = await queue.getPending();
expect(pending.length).toBe(3);
@@ -131,17 +128,16 @@ describe('QueueOptimizer', () => {
updatedAt: new Date().toISOString(),
notificationEnabled: true,
notificationSent: false,
- vectorClock: { device1: 1 },
};
// Add create operation
- await queue.enqueue('create', taskId, basePayload, { device1: 1 });
+ await queue.enqueue('create', taskId, basePayload);
await new Promise(resolve => setTimeout(resolve, 10));
// Add update operations (with small delays to ensure different timestamps)
- await queue.enqueue('update', taskId, { ...basePayload, title: 'Update 1' }, { device1: 2 });
+ await queue.enqueue('update', taskId, { ...basePayload, title: 'Update 1' });
await new Promise(resolve => setTimeout(resolve, 10));
- await queue.enqueue('update', taskId, { ...basePayload, title: 'Final' }, { device1: 3 });
+ await queue.enqueue('update', taskId, { ...basePayload, title: 'Final' });
let pending = await queue.getPending();
expect(pending.length).toBe(3);
@@ -153,12 +149,11 @@ describe('QueueOptimizer', () => {
expect(pending.length).toBe(1);
expect(pending[0].operation).toBe('create');
expect(pending[0].payload?.title).toBe('Final'); // Latest payload
- expect(pending[0].vectorClock).toEqual({ device1: 3 }); // Merged vector clock
});
});
- describe('preserving latest vector clock', () => {
- it('should merge vector clocks from all consolidated operations', async () => {
+ describe('consolidating duplicate updates', () => {
+ it('should consolidate all operations into single operation', async () => {
const taskId = 'task1';
const basePayload: TaskRecord = {
id: taskId,
@@ -176,23 +171,20 @@ describe('QueueOptimizer', () => {
updatedAt: new Date().toISOString(),
notificationEnabled: true,
notificationSent: false,
- vectorClock: { device1: 1 },
};
- // Add operations with different vector clocks (with small delays to ensure different timestamps)
- await queue.enqueue('update', taskId, basePayload, { device1: 1, device2: 0 });
+ // Add operations with small delays to ensure different timestamps
+ await queue.enqueue('update', taskId, basePayload);
await new Promise(resolve => setTimeout(resolve, 10));
- await queue.enqueue('update', taskId, basePayload, { device1: 1, device2: 1 });
+ await queue.enqueue('update', taskId, basePayload);
await new Promise(resolve => setTimeout(resolve, 10));
- await queue.enqueue('update', taskId, basePayload, { device1: 2, device2: 1 });
+ await queue.enqueue('update', taskId, basePayload);
// Consolidate
await optimizer.consolidateTask(taskId);
const pending = await queue.getPending();
expect(pending.length).toBe(1);
- // Should have the latest vector clock from the last operation
- expect(pending[0].vectorClock).toEqual({ device1: 2, device2: 1 });
});
});
@@ -214,17 +206,16 @@ describe('QueueOptimizer', () => {
updatedAt: new Date().toISOString(),
notificationEnabled: true,
notificationSent: false,
- vectorClock: { device1: 1 },
};
// Add operations for multiple tasks (with small delays to ensure different timestamps)
- await queue.enqueue('update', 'task1', { ...basePayload, id: 'task1' }, { device1: 1 });
+ await queue.enqueue('update', 'task1', { ...basePayload, id: 'task1' });
await new Promise(resolve => setTimeout(resolve, 10));
- await queue.enqueue('update', 'task1', { ...basePayload, id: 'task1' }, { device1: 2 });
+ await queue.enqueue('update', 'task1', { ...basePayload, id: 'task1' });
await new Promise(resolve => setTimeout(resolve, 10));
- await queue.enqueue('update', 'task2', { ...basePayload, id: 'task2' }, { device1: 1 });
+ await queue.enqueue('update', 'task2', { ...basePayload, id: 'task2' });
await new Promise(resolve => setTimeout(resolve, 10));
- await queue.enqueue('update', 'task2', { ...basePayload, id: 'task2' }, { device1: 2 });
+ await queue.enqueue('update', 'task2', { ...basePayload, id: 'task2' });
let pending = await queue.getPending();
expect(pending.length).toBe(4);
diff --git a/tests/sync/sync-conflict-resolution.test.ts b/tests/sync/sync-conflict-resolution.test.ts
deleted file mode 100644
index de90b2fa..00000000
--- a/tests/sync/sync-conflict-resolution.test.ts
+++ /dev/null
@@ -1,470 +0,0 @@
-/**
- * Tests for conflict resolution - Last-Write-Wins strategy and conflict detection
- */
-
-import { describe, it, expect, beforeEach, afterEach, vi } from 'vitest';
-import { getDb } from '@/lib/db';
-import { autoResolveConflicts } from '@/lib/sync/engine/conflict-resolver';
-import { compareVectorClocks, mergeVectorClocks } from '@/lib/sync/vector-clock';
-import {
- createMockTask,
- createMockVectorClock,
- mockConsole,
-} from '../fixtures';
-import type { ConflictInfo } from '@/lib/sync/types';
-
-describe('Conflict Resolution', () => {
- let db: ReturnType;
- let consoleMock: ReturnType;
-
- beforeEach(async () => {
- db = getDb();
- consoleMock = mockConsole();
-
- // Clear database
- await db.delete();
- await db.open();
- });
-
- afterEach(async () => {
- consoleMock.restore();
- await db.delete();
- });
-
- describe('autoResolveConflicts - Last-Write-Wins strategy', () => {
- it('should resolve conflict by choosing remote when remote is newer', async () => {
- const now = Date.now();
- const localTask = createMockTask({
- id: 'task-1',
- title: 'Local Version',
- updatedAt: new Date(now - 1000).toISOString(), // 1 second older
- });
- const remoteTask = createMockTask({
- id: 'task-1',
- title: 'Remote Version',
- updatedAt: new Date(now).toISOString(), // newer
- });
-
- const conflict: ConflictInfo = {
- taskId: 'task-1',
- local: localTask,
- remote: remoteTask,
- localClock: createMockVectorClock({ 'device-1': 1 }),
- remoteClock: createMockVectorClock({ 'device-2': 1 }),
- };
-
- const resolved = await autoResolveConflicts([conflict]);
-
- expect(resolved).toBe(1);
-
- // Verify remote version was saved
- const savedTask = await db.tasks.get('task-1');
- expect(savedTask).toBeDefined();
- expect(savedTask?.title).toBe('Remote Version');
- });
-
- it('should resolve conflict by choosing local when local is newer', async () => {
- const now = Date.now();
- const localTask = createMockTask({
- id: 'task-2',
- title: 'Local Version',
- updatedAt: new Date(now).toISOString(), // newer
- });
- const remoteTask = createMockTask({
- id: 'task-2',
- title: 'Remote Version',
- updatedAt: new Date(now - 2000).toISOString(), // 2 seconds older
- });
-
- const conflict: ConflictInfo = {
- taskId: 'task-2',
- local: localTask,
- remote: remoteTask,
- localClock: createMockVectorClock({ 'device-1': 2 }),
- remoteClock: createMockVectorClock({ 'device-2': 1 }),
- };
-
- const resolved = await autoResolveConflicts([conflict]);
-
- expect(resolved).toBe(1);
-
- // Verify local version was saved
- const savedTask = await db.tasks.get('task-2');
- expect(savedTask).toBeDefined();
- expect(savedTask?.title).toBe('Local Version');
- });
-
- it('should merge vector clocks when resolving conflict', async () => {
- const now = Date.now();
- const localTask = createMockTask({
- id: 'task-3',
- updatedAt: new Date(now).toISOString(),
- });
- const remoteTask = createMockTask({
- id: 'task-3',
- updatedAt: new Date(now - 1000).toISOString(),
- });
-
- const localClock = { 'device-1': 5, 'device-2': 2 };
- const remoteClock = { 'device-2': 3, 'device-3': 1 };
-
- const conflict: ConflictInfo = {
- taskId: 'task-3',
- local: localTask,
- remote: remoteTask,
- localClock,
- remoteClock,
- };
-
- await autoResolveConflicts([conflict]);
-
- const savedTask = await db.tasks.get('task-3');
- expect(savedTask?.vectorClock).toEqual({
- 'device-1': 5,
- 'device-2': 3, // max of 2 and 3
- 'device-3': 1,
- });
- });
-
- it('should resolve multiple conflicts in batch', async () => {
- const now = Date.now();
- const conflicts: ConflictInfo[] = [
- {
- taskId: 'task-a',
- local: createMockTask({ id: 'task-a', title: 'Local A', updatedAt: new Date(now).toISOString() }),
- remote: createMockTask({ id: 'task-a', title: 'Remote A', updatedAt: new Date(now - 1000).toISOString() }),
- localClock: createMockVectorClock({ 'device-1': 1 }),
- remoteClock: createMockVectorClock({ 'device-2': 1 }),
- },
- {
- taskId: 'task-b',
- local: createMockTask({ id: 'task-b', title: 'Local B', updatedAt: new Date(now - 2000).toISOString() }),
- remote: createMockTask({ id: 'task-b', title: 'Remote B', updatedAt: new Date(now).toISOString() }),
- localClock: createMockVectorClock({ 'device-1': 1 }),
- remoteClock: createMockVectorClock({ 'device-2': 1 }),
- },
- {
- taskId: 'task-c',
- local: createMockTask({ id: 'task-c', title: 'Local C', updatedAt: new Date(now).toISOString() }),
- remote: createMockTask({ id: 'task-c', title: 'Remote C', updatedAt: new Date(now - 500).toISOString() }),
- localClock: createMockVectorClock({ 'device-1': 1 }),
- remoteClock: createMockVectorClock({ 'device-2': 1 }),
- },
- ];
-
- const resolved = await autoResolveConflicts(conflicts);
-
- expect(resolved).toBe(3);
-
- // Verify each resolution
- const taskA = await db.tasks.get('task-a');
- expect(taskA?.title).toBe('Local A'); // local was newer
-
- const taskB = await db.tasks.get('task-b');
- expect(taskB?.title).toBe('Remote B'); // remote was newer
-
- const taskC = await db.tasks.get('task-c');
- expect(taskC?.title).toBe('Local C'); // local was newer
- });
-
- it('should handle conflicts with identical timestamps by choosing remote', async () => {
- const now = Date.now();
- const timestamp = new Date(now).toISOString();
-
- const localTask = createMockTask({
- id: 'task-4',
- title: 'Local Version',
- updatedAt: timestamp,
- });
- const remoteTask = createMockTask({
- id: 'task-4',
- title: 'Remote Version',
- updatedAt: timestamp,
- });
-
- const conflict: ConflictInfo = {
- taskId: 'task-4',
- local: localTask,
- remote: remoteTask,
- localClock: createMockVectorClock({ 'device-1': 1 }),
- remoteClock: createMockVectorClock({ 'device-2': 1 }),
- };
-
- await autoResolveConflicts([conflict]);
-
- const savedTask = await db.tasks.get('task-4');
- // When timestamps are equal, remote wins (remoteTime > localTime is false, so local wins)
- // Actually, when equal, neither is greater, so local wins
- expect(savedTask?.title).toBe('Local Version');
- });
- });
-
- describe('conflict detection logic', () => {
- it('should skip conflicts with missing local data', async () => {
- const conflict: ConflictInfo = {
- taskId: 'task-5',
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- local: null as any, // missing local
- remote: createMockTask({ id: 'task-5' }),
- localClock: createMockVectorClock(),
- remoteClock: createMockVectorClock(),
- };
-
- const resolved = await autoResolveConflicts([conflict]);
-
- expect(resolved).toBe(0);
-
- // Verify nothing was saved
- const savedTask = await db.tasks.get('task-5');
- expect(savedTask).toBeUndefined();
- });
-
- it('should skip conflicts with missing remote data', async () => {
- const conflict: ConflictInfo = {
- taskId: 'task-6',
- local: createMockTask({ id: 'task-6' }),
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- remote: null as any, // missing remote
- localClock: createMockVectorClock(),
- remoteClock: createMockVectorClock(),
- };
-
- const resolved = await autoResolveConflicts([conflict]);
-
- expect(resolved).toBe(0);
-
- // Verify nothing was saved
- const savedTask = await db.tasks.get('task-6');
- expect(savedTask).toBeUndefined();
- });
-
- it('should handle database errors gracefully', async () => {
- const now = Date.now();
- const conflict: ConflictInfo = {
- taskId: 'task-7',
- local: createMockTask({ id: 'task-7', updatedAt: new Date(now).toISOString() }),
- remote: createMockTask({ id: 'task-7', updatedAt: new Date(now - 1000).toISOString() }),
- localClock: createMockVectorClock(),
- remoteClock: createMockVectorClock(),
- };
-
- // Mock database error
- const originalPut = db.tasks.put;
- db.tasks.put = vi.fn().mockRejectedValue(new Error('Database error'));
-
- const resolved = await autoResolveConflicts([conflict]);
-
- expect(resolved).toBe(0);
-
- // Restore original method
- db.tasks.put = originalPut;
- });
-
- it('should continue resolving after individual failure', async () => {
- const now = Date.now();
- const conflicts: ConflictInfo[] = [
- {
- taskId: 'task-8',
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- local: null as any, // will fail
- remote: createMockTask({ id: 'task-8' }),
- localClock: createMockVectorClock(),
- remoteClock: createMockVectorClock(),
- },
- {
- taskId: 'task-9',
- local: createMockTask({ id: 'task-9', updatedAt: new Date(now).toISOString() }),
- remote: createMockTask({ id: 'task-9', updatedAt: new Date(now - 1000).toISOString() }),
- localClock: createMockVectorClock(),
- remoteClock: createMockVectorClock(),
- },
- ];
-
- const resolved = await autoResolveConflicts(conflicts);
-
- expect(resolved).toBe(1); // Only second conflict resolved
-
- const task9 = await db.tasks.get('task-9');
- expect(task9).toBeDefined();
- });
- });
-
- describe('vector clock comparison', () => {
- it('should detect concurrent modifications (conflict)', () => {
- const clockA = { 'device-1': 2, 'device-2': 1 };
- const clockB = { 'device-1': 1, 'device-2': 2 };
-
- const result = compareVectorClocks(clockA, clockB);
-
- expect(result).toBe('concurrent');
- });
-
- it('should detect when A happened before B', () => {
- const clockA = { 'device-1': 1, 'device-2': 1 };
- const clockB = { 'device-1': 2, 'device-2': 2 };
-
- const result = compareVectorClocks(clockA, clockB);
-
- expect(result).toBe('b_before_a'); // B is greater, so A happened before B
- });
-
- it('should detect when B happened before A', () => {
- const clockA = { 'device-1': 3, 'device-2': 2 };
- const clockB = { 'device-1': 1, 'device-2': 1 };
-
- const result = compareVectorClocks(clockA, clockB);
-
- expect(result).toBe('a_before_b'); // A is greater, so B happened before A
- });
-
- it('should detect identical clocks', () => {
- const clockA = { 'device-1': 2, 'device-2': 3 };
- const clockB = { 'device-1': 2, 'device-2': 3 };
-
- const result = compareVectorClocks(clockA, clockB);
-
- expect(result).toBe('identical');
- });
-
- it('should handle clocks with different devices', () => {
- const clockA = { 'device-1': 2 };
- const clockB = { 'device-2': 2 };
-
- const result = compareVectorClocks(clockA, clockB);
-
- expect(result).toBe('concurrent');
- });
-
- it('should handle empty clocks', () => {
- const clockA = {};
- const clockB = {};
-
- const result = compareVectorClocks(clockA, clockB);
-
- expect(result).toBe('identical');
- });
- });
-
- describe('vector clock merging', () => {
- it('should merge clocks by taking maximum for each device', () => {
- const clockA = { 'device-1': 5, 'device-2': 2, 'device-3': 1 };
- const clockB = { 'device-1': 3, 'device-2': 4, 'device-4': 2 };
-
- const merged = mergeVectorClocks(clockA, clockB);
-
- expect(merged).toEqual({
- 'device-1': 5, // max(5, 3)
- 'device-2': 4, // max(2, 4)
- 'device-3': 1, // only in A
- 'device-4': 2, // only in B
- });
- });
-
- it('should handle merging with empty clock', () => {
- const clockA = { 'device-1': 3, 'device-2': 2 };
- const clockB = {};
-
- const merged = mergeVectorClocks(clockA, clockB);
-
- expect(merged).toEqual(clockA);
- });
-
- it('should handle merging empty clock with non-empty', () => {
- const clockA = {};
- const clockB = { 'device-1': 3, 'device-2': 2 };
-
- const merged = mergeVectorClocks(clockA, clockB);
-
- expect(merged).toEqual(clockB);
- });
-
- it('should not mutate original clocks', () => {
- const clockA = { 'device-1': 2 };
- const clockB = { 'device-2': 3 };
-
- const originalA = { ...clockA };
- const originalB = { ...clockB };
-
- mergeVectorClocks(clockA, clockB);
-
- expect(clockA).toEqual(originalA);
- expect(clockB).toEqual(originalB);
- });
- });
-
- describe('conflict resolution with task data', () => {
- it('should preserve all task fields from winner', async () => {
- const now = Date.now();
- const remoteTask = createMockTask({
- id: 'task-10',
- title: 'Remote Title',
- description: 'Remote Description',
- urgent: true,
- important: false,
- completed: true,
- tags: ['remote', 'tag'],
- updatedAt: new Date(now).toISOString(),
- });
- const localTask = createMockTask({
- id: 'task-10',
- title: 'Local Title',
- description: 'Local Description',
- urgent: false,
- important: true,
- completed: false,
- tags: ['local'],
- updatedAt: new Date(now - 1000).toISOString(),
- });
-
- const conflict: ConflictInfo = {
- taskId: 'task-10',
- local: localTask,
- remote: remoteTask,
- localClock: createMockVectorClock(),
- remoteClock: createMockVectorClock(),
- };
-
- await autoResolveConflicts([conflict]);
-
- const savedTask = await db.tasks.get('task-10');
- expect(savedTask?.title).toBe('Remote Title');
- expect(savedTask?.description).toBe('Remote Description');
- expect(savedTask?.urgent).toBe(true);
- expect(savedTask?.important).toBe(false);
- expect(savedTask?.completed).toBe(true);
- expect(savedTask?.tags).toEqual(['remote', 'tag']);
- });
-
- it('should handle conflicts with subtasks', async () => {
- const now = Date.now();
- const remoteTask = createMockTask({
- id: 'task-11',
- subtasks: [
- { id: 'sub-1', title: 'Remote Subtask', completed: false },
- ],
- updatedAt: new Date(now).toISOString(),
- });
- const localTask = createMockTask({
- id: 'task-11',
- subtasks: [
- { id: 'sub-2', title: 'Local Subtask', completed: true },
- ],
- updatedAt: new Date(now - 1000).toISOString(),
- });
-
- const conflict: ConflictInfo = {
- taskId: 'task-11',
- local: localTask,
- remote: remoteTask,
- localClock: createMockVectorClock(),
- remoteClock: createMockVectorClock(),
- };
-
- await autoResolveConflicts([conflict]);
-
- const savedTask = await db.tasks.get('task-11');
- expect(savedTask?.subtasks).toHaveLength(1);
- expect(savedTask?.subtasks[0].title).toBe('Remote Subtask');
- });
- });
-});
diff --git a/tests/sync/token-manager.test.ts b/tests/sync/token-manager.test.ts
deleted file mode 100644
index 6bbac88a..00000000
--- a/tests/sync/token-manager.test.ts
+++ /dev/null
@@ -1,698 +0,0 @@
-/**
- * Tests for TokenManager - automatic token refresh functionality
- */
-
-import { describe, it, expect, beforeEach, afterEach, vi } from 'vitest';
-import { getDb } from '@/lib/db';
-import { TokenManager, getTokenManager } from '@/lib/sync/token-manager';
-import { getApiClient } from '@/lib/sync/api-client';
-import type { SyncConfig } from '@/lib/sync/types';
-
-// Mock the API client
-vi.mock('@/lib/sync/api-client', () => {
- const mockApiClient = {
- setToken: vi.fn(),
- refreshToken: vi.fn(),
- };
-
- return {
- getApiClient: vi.fn(() => mockApiClient),
- };
-});
-
-describe('TokenManager', () => {
- let tokenManager: TokenManager;
- let db: ReturnType;
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- let mockApi: any;
-
- beforeEach(async () => {
- tokenManager = getTokenManager();
- db = getDb();
- mockApi = getApiClient();
-
- // Clear database
- await db.syncMetadata.clear();
- await db.tasks.clear();
-
- // Reset mocks
- vi.clearAllMocks();
- });
-
- afterEach(async () => {
- await db.syncMetadata.clear();
- await db.tasks.clear();
- });
-
- describe('needsRefresh()', () => {
- it('should return false when sync is not configured', async () => {
- const needsRefresh = await tokenManager.needsRefresh();
- expect(needsRefresh).toBe(false);
- });
-
- it('should return false when sync is disabled', async () => {
- await db.syncMetadata.put({
- key: 'sync_config',
- enabled: false,
- userId: 'user1',
- deviceId: 'device1',
- deviceName: 'Test Device',
- email: 'test@example.com',
- token: 'test-token',
- tokenExpiresAt: Date.now() + 10 * 60 * 1000, // 10 minutes from now
- lastSyncAt: null,
- vectorClock: {},
- conflictStrategy: 'last_write_wins',
- serverUrl: 'http://localhost:8787',
- consecutiveFailures: 0,
- lastFailureAt: null,
- lastFailureReason: null,
- nextRetryAt: null,
- });
-
- const needsRefresh = await tokenManager.needsRefresh();
- expect(needsRefresh).toBe(false);
- });
-
- it('should return false when token expires in more than 5 minutes', async () => {
- await db.syncMetadata.put({
- key: 'sync_config',
- enabled: true,
- userId: 'user1',
- deviceId: 'device1',
- deviceName: 'Test Device',
- email: 'test@example.com',
- token: 'test-token',
- tokenExpiresAt: Date.now() + 10 * 60 * 1000, // 10 minutes from now
- lastSyncAt: null,
- vectorClock: {},
- conflictStrategy: 'last_write_wins',
- serverUrl: 'http://localhost:8787',
- consecutiveFailures: 0,
- lastFailureAt: null,
- lastFailureReason: null,
- nextRetryAt: null,
- });
-
- const needsRefresh = await tokenManager.needsRefresh();
- expect(needsRefresh).toBe(false);
- });
-
- it('should return true when token expires within 5 minutes', async () => {
- await db.syncMetadata.put({
- key: 'sync_config',
- enabled: true,
- userId: 'user1',
- deviceId: 'device1',
- deviceName: 'Test Device',
- email: 'test@example.com',
- token: 'test-token',
- tokenExpiresAt: Date.now() + 4 * 60 * 1000, // 4 minutes from now
- lastSyncAt: null,
- vectorClock: {},
- conflictStrategy: 'last_write_wins',
- serverUrl: 'http://localhost:8787',
- consecutiveFailures: 0,
- lastFailureAt: null,
- lastFailureReason: null,
- nextRetryAt: null,
- });
-
- const needsRefresh = await tokenManager.needsRefresh();
- expect(needsRefresh).toBe(true);
- });
-
- it('should return true when token is already expired', async () => {
- await db.syncMetadata.put({
- key: 'sync_config',
- enabled: true,
- userId: 'user1',
- deviceId: 'device1',
- deviceName: 'Test Device',
- email: 'test@example.com',
- token: 'test-token',
- tokenExpiresAt: Date.now() - 1000, // Expired 1 second ago
- lastSyncAt: null,
- vectorClock: {},
- conflictStrategy: 'last_write_wins',
- serverUrl: 'http://localhost:8787',
- consecutiveFailures: 0,
- lastFailureAt: null,
- lastFailureReason: null,
- nextRetryAt: null,
- });
-
- const needsRefresh = await tokenManager.needsRefresh();
- expect(needsRefresh).toBe(true);
- });
- });
-
- describe('ensureValidToken()', () => {
- it('should throw error when sync is not configured', async () => {
- await expect(tokenManager.ensureValidToken()).rejects.toThrow('Sync not configured');
- });
-
- it('should throw error when no token is available', async () => {
- await db.syncMetadata.put({
- key: 'sync_config',
- enabled: true,
- userId: 'user1',
- deviceId: 'device1',
- deviceName: 'Test Device',
- email: 'test@example.com',
- token: null,
- tokenExpiresAt: null,
- lastSyncAt: null,
- vectorClock: {},
- conflictStrategy: 'last_write_wins',
- serverUrl: 'http://localhost:8787',
- consecutiveFailures: 0,
- lastFailureAt: null,
- lastFailureReason: null,
- nextRetryAt: null,
- });
-
- await expect(tokenManager.ensureValidToken()).rejects.toThrow('No authentication token available');
- });
-
- it('should return true without refresh when token is valid', async () => {
- await db.syncMetadata.put({
- key: 'sync_config',
- enabled: true,
- userId: 'user1',
- deviceId: 'device1',
- deviceName: 'Test Device',
- email: 'test@example.com',
- token: 'valid-token',
- tokenExpiresAt: Date.now() + 10 * 60 * 1000, // 10 minutes from now
- lastSyncAt: null,
- vectorClock: {},
- conflictStrategy: 'last_write_wins',
- serverUrl: 'http://localhost:8787',
- consecutiveFailures: 0,
- lastFailureAt: null,
- lastFailureReason: null,
- nextRetryAt: null,
- });
-
- const result = await tokenManager.ensureValidToken();
-
- expect(result).toBe(true);
- expect(mockApi.refreshToken).not.toHaveBeenCalled();
- });
-
- it('should refresh token when it expires within 5 minutes', async () => {
- const newToken = 'refreshed-token';
- const newExpiresAt = Date.now() + 60 * 60 * 1000; // 1 hour from now
-
- await db.syncMetadata.put({
- key: 'sync_config',
- enabled: true,
- userId: 'user1',
- deviceId: 'device1',
- deviceName: 'Test Device',
- email: 'test@example.com',
- token: 'old-token',
- tokenExpiresAt: Date.now() + 4 * 60 * 1000, // 4 minutes from now
- lastSyncAt: null,
- vectorClock: {},
- conflictStrategy: 'last_write_wins',
- serverUrl: 'http://localhost:8787',
- consecutiveFailures: 0,
- lastFailureAt: null,
- lastFailureReason: null,
- nextRetryAt: null,
- });
-
- mockApi.refreshToken.mockResolvedValue({
- token: newToken,
- expiresAt: newExpiresAt,
- });
-
- const result = await tokenManager.ensureValidToken();
-
- expect(result).toBe(true);
- expect(mockApi.setToken).toHaveBeenCalledWith('old-token');
- expect(mockApi.refreshToken).toHaveBeenCalled();
- expect(mockApi.setToken).toHaveBeenCalledWith(newToken);
-
- // Verify token was updated in database
- const config = await db.syncMetadata.get('sync_config') as SyncConfig;
- expect(config.token).toBe(newToken);
- expect(config.tokenExpiresAt).toBe(newExpiresAt);
- });
-
- it('should return false when token refresh fails', async () => {
- await db.syncMetadata.put({
- key: 'sync_config',
- enabled: true,
- userId: 'user1',
- deviceId: 'device1',
- deviceName: 'Test Device',
- email: 'test@example.com',
- token: 'old-token',
- tokenExpiresAt: Date.now() + 2 * 60 * 1000, // 2 minutes from now
- lastSyncAt: null,
- vectorClock: {},
- conflictStrategy: 'last_write_wins',
- serverUrl: 'http://localhost:8787',
- consecutiveFailures: 0,
- lastFailureAt: null,
- lastFailureReason: null,
- nextRetryAt: null,
- });
-
- mockApi.refreshToken.mockRejectedValue(new Error('Refresh failed'));
-
- const result = await tokenManager.ensureValidToken();
-
- expect(result).toBe(false);
- expect(mockApi.refreshToken).toHaveBeenCalled();
- });
- });
-
- describe('handleUnauthorized()', () => {
- it('should return false when sync is not configured', async () => {
- const result = await tokenManager.handleUnauthorized();
- expect(result).toBe(false);
- });
-
- it('should attempt token refresh on 401 error', async () => {
- const newToken = 'refreshed-token-after-401';
- const newExpiresAt = Date.now() + 60 * 60 * 1000;
-
- await db.syncMetadata.put({
- key: 'sync_config',
- enabled: true,
- userId: 'user1',
- deviceId: 'device1',
- deviceName: 'Test Device',
- email: 'test@example.com',
- token: 'expired-token',
- tokenExpiresAt: Date.now() - 1000, // Already expired
- lastSyncAt: null,
- vectorClock: {},
- conflictStrategy: 'last_write_wins',
- serverUrl: 'http://localhost:8787',
- consecutiveFailures: 0,
- lastFailureAt: null,
- lastFailureReason: null,
- nextRetryAt: null,
- });
-
- mockApi.refreshToken.mockResolvedValue({
- token: newToken,
- expiresAt: newExpiresAt,
- });
-
- const result = await tokenManager.handleUnauthorized();
-
- expect(result).toBe(true);
- expect(mockApi.refreshToken).toHaveBeenCalled();
-
- // Verify token was updated
- const config = await db.syncMetadata.get('sync_config') as SyncConfig;
- expect(config.token).toBe(newToken);
- expect(config.tokenExpiresAt).toBe(newExpiresAt);
- });
-
- it('should return false when refresh fails after 401', async () => {
- await db.syncMetadata.put({
- key: 'sync_config',
- enabled: true,
- userId: 'user1',
- deviceId: 'device1',
- deviceName: 'Test Device',
- email: 'test@example.com',
- token: 'expired-token',
- tokenExpiresAt: Date.now() - 1000,
- lastSyncAt: null,
- vectorClock: {},
- conflictStrategy: 'last_write_wins',
- serverUrl: 'http://localhost:8787',
- consecutiveFailures: 0,
- lastFailureAt: null,
- lastFailureReason: null,
- nextRetryAt: null,
- });
-
- mockApi.refreshToken.mockRejectedValue(new Error('Token refresh failed'));
-
- const result = await tokenManager.handleUnauthorized();
-
- expect(result).toBe(false);
- });
- });
-
- describe('getTimeUntilExpiry()', () => {
- it('should return -1 when no token expiration is set', async () => {
- await db.syncMetadata.put({
- key: 'sync_config',
- enabled: true,
- userId: 'user1',
- deviceId: 'device1',
- deviceName: 'Test Device',
- email: 'test@example.com',
- token: 'test-token',
- tokenExpiresAt: null,
- lastSyncAt: null,
- vectorClock: {},
- conflictStrategy: 'last_write_wins',
- serverUrl: 'http://localhost:8787',
- consecutiveFailures: 0,
- lastFailureAt: null,
- lastFailureReason: null,
- nextRetryAt: null,
- });
-
- const timeUntilExpiry = await tokenManager.getTimeUntilExpiry();
- expect(timeUntilExpiry).toBe(-1);
- });
-
- it('should return positive value when token has not expired', async () => {
- const expiresAt = Date.now() + 10 * 60 * 1000; // 10 minutes from now
-
- await db.syncMetadata.put({
- key: 'sync_config',
- enabled: true,
- userId: 'user1',
- deviceId: 'device1',
- deviceName: 'Test Device',
- email: 'test@example.com',
- token: 'test-token',
- tokenExpiresAt: expiresAt,
- lastSyncAt: null,
- vectorClock: {},
- conflictStrategy: 'last_write_wins',
- serverUrl: 'http://localhost:8787',
- consecutiveFailures: 0,
- lastFailureAt: null,
- lastFailureReason: null,
- nextRetryAt: null,
- });
-
- const timeUntilExpiry = await tokenManager.getTimeUntilExpiry();
-
- // Should be approximately 10 minutes (allow 1 second tolerance)
- expect(timeUntilExpiry).toBeGreaterThan(9 * 60 * 1000);
- expect(timeUntilExpiry).toBeLessThanOrEqual(10 * 60 * 1000);
- });
-
- it('should return negative value when token has expired', async () => {
- const expiresAt = Date.now() - 5 * 60 * 1000; // Expired 5 minutes ago
-
- await db.syncMetadata.put({
- key: 'sync_config',
- enabled: true,
- userId: 'user1',
- deviceId: 'device1',
- deviceName: 'Test Device',
- email: 'test@example.com',
- token: 'test-token',
- tokenExpiresAt: expiresAt,
- lastSyncAt: null,
- vectorClock: {},
- conflictStrategy: 'last_write_wins',
- serverUrl: 'http://localhost:8787',
- consecutiveFailures: 0,
- lastFailureAt: null,
- lastFailureReason: null,
- nextRetryAt: null,
- });
-
- const timeUntilExpiry = await tokenManager.getTimeUntilExpiry();
-
- expect(timeUntilExpiry).toBeLessThan(0);
- });
- });
-
- describe('Integration: Token Lifecycle', () => {
- it('should handle complete token refresh cycle', async () => {
- // Initial setup with token expiring soon
- const initialToken = 'initial-token';
- const initialExpiresAt = Date.now() + 3 * 60 * 1000; // 3 minutes
-
- await db.syncMetadata.put({
- key: 'sync_config',
- enabled: true,
- userId: 'user1',
- deviceId: 'device1',
- deviceName: 'Test Device',
- email: 'test@example.com',
- token: initialToken,
- tokenExpiresAt: initialExpiresAt,
- lastSyncAt: null,
- vectorClock: {},
- conflictStrategy: 'last_write_wins',
- serverUrl: 'http://localhost:8787',
- consecutiveFailures: 0,
- lastFailureAt: null,
- lastFailureReason: null,
- nextRetryAt: null,
- });
-
- // Check if refresh is needed
- const needsRefresh = await tokenManager.needsRefresh();
- expect(needsRefresh).toBe(true);
-
- // Perform refresh
- const newToken = 'refreshed-token';
- const newExpiresAt = Date.now() + 60 * 60 * 1000; // 1 hour
-
- mockApi.refreshToken.mockResolvedValue({
- token: newToken,
- expiresAt: newExpiresAt,
- });
-
- const refreshed = await tokenManager.ensureValidToken();
- expect(refreshed).toBe(true);
-
- // Verify token was updated
- const config = await db.syncMetadata.get('sync_config') as SyncConfig;
- expect(config.token).toBe(newToken);
- expect(config.tokenExpiresAt).toBe(newExpiresAt);
-
- // Verify no longer needs refresh
- const stillNeedsRefresh = await tokenManager.needsRefresh();
- expect(stillNeedsRefresh).toBe(false);
- });
- });
-
- describe('Issue #2: Token Expiration Normalization Integration', () => {
- it('should normalize token expiration from seconds when refreshing token', async () => {
- await db.syncMetadata.put({
- key: 'sync_config',
- enabled: true,
- userId: 'user1',
- deviceId: 'device1',
- deviceName: 'Test Device',
- email: 'test@example.com',
- token: 'old-token',
- tokenExpiresAt: Date.now() + 2 * 60 * 1000, // 2 minutes (needs refresh)
- lastSyncAt: null,
- vectorClock: {},
- conflictStrategy: 'last_write_wins',
- serverUrl: 'http://localhost:8787',
- consecutiveFailures: 0,
- lastFailureAt: null,
- lastFailureReason: null,
- nextRetryAt: null,
- });
-
- // Mock refreshToken to return expiresAt in seconds (typical JWT format)
- const expiresAtSeconds = 1735689600; // Jan 1, 2025 00:00:00 UTC in seconds
- mockApi.refreshToken.mockResolvedValue({
- token: 'new-token',
- expiresAt: expiresAtSeconds,
- });
-
- const result = await tokenManager.ensureValidToken();
- expect(result).toBe(true);
-
- // Verify stored value was normalized to milliseconds
- const config = await db.syncMetadata.get('sync_config') as SyncConfig;
- expect(config.tokenExpiresAt).toBe(expiresAtSeconds * 1000);
- expect(config.tokenExpiresAt).toBe(1735689600000);
- });
-
- it('should handle token expiration already in milliseconds when refreshing', async () => {
- await db.syncMetadata.put({
- key: 'sync_config',
- enabled: true,
- userId: 'user1',
- deviceId: 'device1',
- deviceName: 'Test Device',
- email: 'test@example.com',
- token: 'old-token',
- tokenExpiresAt: Date.now() + 2 * 60 * 1000, // 2 minutes (needs refresh)
- lastSyncAt: null,
- vectorClock: {},
- conflictStrategy: 'last_write_wins',
- serverUrl: 'http://localhost:8787',
- consecutiveFailures: 0,
- lastFailureAt: null,
- lastFailureReason: null,
- nextRetryAt: null,
- });
-
- // Mock refreshToken to return expiresAt already in milliseconds
- const expiresAtMs = 1735689600000; // Jan 1, 2025 00:00:00 UTC in milliseconds
- mockApi.refreshToken.mockResolvedValue({
- token: 'new-token',
- expiresAt: expiresAtMs,
- });
-
- const result = await tokenManager.ensureValidToken();
- expect(result).toBe(true);
-
- // Verify stored value remained unchanged (already in milliseconds)
- const config = await db.syncMetadata.get('sync_config') as SyncConfig;
- expect(config.tokenExpiresAt).toBe(expiresAtMs);
- expect(config.tokenExpiresAt).toBe(1735689600000);
- });
-
- it('should normalize token expiration on 401 error recovery', async () => {
- await db.syncMetadata.put({
- key: 'sync_config',
- enabled: true,
- userId: 'user1',
- deviceId: 'device1',
- deviceName: 'Test Device',
- email: 'test@example.com',
- token: 'expired-token',
- tokenExpiresAt: Date.now() - 1000, // Already expired
- lastSyncAt: null,
- vectorClock: {},
- conflictStrategy: 'last_write_wins',
- serverUrl: 'http://localhost:8787',
- consecutiveFailures: 0,
- lastFailureAt: null,
- lastFailureReason: null,
- nextRetryAt: null,
- });
-
- // Mock refresh with token in seconds
- const expiresAtSeconds = 1735689600;
- mockApi.refreshToken.mockResolvedValue({
- token: 'refreshed-token-after-401',
- expiresAt: expiresAtSeconds,
- });
-
- const result = await tokenManager.handleUnauthorized();
- expect(result).toBe(true);
-
- // Verify normalization occurred
- const config = await db.syncMetadata.get('sync_config') as SyncConfig;
- expect(config.tokenExpiresAt).toBe(expiresAtSeconds * 1000);
- });
-
- it('should handle threshold boundary value (seconds) correctly', async () => {
- await db.syncMetadata.put({
- key: 'sync_config',
- enabled: true,
- userId: 'user1',
- deviceId: 'device1',
- deviceName: 'Test Device',
- email: 'test@example.com',
- token: 'old-token',
- tokenExpiresAt: Date.now() + 1 * 60 * 1000,
- lastSyncAt: null,
- vectorClock: {},
- conflictStrategy: 'last_write_wins',
- serverUrl: 'http://localhost:8787',
- consecutiveFailures: 0,
- lastFailureAt: null,
- lastFailureReason: null,
- nextRetryAt: null,
- });
-
- // Just below 10 billion threshold (should be treated as seconds)
- const expiresAtSeconds = 9_999_999_999;
- mockApi.refreshToken.mockResolvedValue({
- token: 'new-token',
- expiresAt: expiresAtSeconds,
- });
-
- await tokenManager.ensureValidToken();
-
- const config = await db.syncMetadata.get('sync_config') as SyncConfig;
- // Should be multiplied by 1000
- expect(config.tokenExpiresAt).toBe(expiresAtSeconds * 1000);
- });
-
- it('should handle threshold boundary value (milliseconds) correctly', async () => {
- await db.syncMetadata.put({
- key: 'sync_config',
- enabled: true,
- userId: 'user1',
- deviceId: 'device1',
- deviceName: 'Test Device',
- email: 'test@example.com',
- token: 'old-token',
- tokenExpiresAt: Date.now() + 1 * 60 * 1000,
- lastSyncAt: null,
- vectorClock: {},
- conflictStrategy: 'last_write_wins',
- serverUrl: 'http://localhost:8787',
- consecutiveFailures: 0,
- lastFailureAt: null,
- lastFailureReason: null,
- nextRetryAt: null,
- });
-
- // At threshold (should be treated as milliseconds)
- const expiresAtMs = 10_000_000_000;
- mockApi.refreshToken.mockResolvedValue({
- token: 'new-token',
- expiresAt: expiresAtMs,
- });
-
- await tokenManager.ensureValidToken();
-
- const config = await db.syncMetadata.get('sync_config') as SyncConfig;
- // Should NOT be multiplied
- expect(config.tokenExpiresAt).toBe(expiresAtMs);
- });
-
- it('should handle realistic JWT token expiration (1 hour from now in seconds)', async () => {
- await db.syncMetadata.put({
- key: 'sync_config',
- enabled: true,
- userId: 'user1',
- deviceId: 'device1',
- deviceName: 'Test Device',
- email: 'test@example.com',
- token: 'old-token',
- tokenExpiresAt: Date.now() + 1 * 60 * 1000,
- lastSyncAt: null,
- vectorClock: {},
- conflictStrategy: 'last_write_wins',
- serverUrl: 'http://localhost:8787',
- consecutiveFailures: 0,
- lastFailureAt: null,
- lastFailureReason: null,
- nextRetryAt: null,
- });
-
- // Typical JWT: current time + 1 hour (in seconds)
- const nowSeconds = Math.floor(Date.now() / 1000);
- const oneHourLaterSeconds = nowSeconds + 3600;
-
- mockApi.refreshToken.mockResolvedValue({
- token: 'new-jwt-token',
- expiresAt: oneHourLaterSeconds,
- });
-
- await tokenManager.ensureValidToken();
-
- const config = await db.syncMetadata.get('sync_config') as SyncConfig;
- // Should be normalized to milliseconds
- expect(config.tokenExpiresAt).toBe(oneHourLaterSeconds * 1000);
-
- // Verify it's approximately 1 hour from now
- const timeUntilExpiry = config.tokenExpiresAt - Date.now();
- expect(timeUntilExpiry).toBeGreaterThan(55 * 60 * 1000); // At least 55 minutes
- expect(timeUntilExpiry).toBeLessThanOrEqual(60 * 60 * 1000); // At most 60 minutes
- });
- });
-});
diff --git a/tests/ui/encryption-passphrase-dialog.test.tsx b/tests/ui/encryption-passphrase-dialog.test.tsx
index afdf25d7..ddb08e5b 100644
--- a/tests/ui/encryption-passphrase-dialog.test.tsx
+++ b/tests/ui/encryption-passphrase-dialog.test.tsx
@@ -10,18 +10,20 @@ import userEvent from '@testing-library/user-event';
// Use vi.hoisted to ensure these are available to vi.mock
const {
mockQueueExistingTasks,
- mockRequestSync,
+ mockSync,
mockGenerateEncryptionSalt,
mockStoreEncryptionConfig,
mockInitializeEncryptionFromPassphrase,
+ mockSetEncryptionSalt,
mockToastSuccess,
mockToastError,
} = vi.hoisted(() => ({
mockQueueExistingTasks: vi.fn(),
- mockRequestSync: vi.fn(),
+ mockSync: vi.fn(),
mockGenerateEncryptionSalt: vi.fn(),
mockStoreEncryptionConfig: vi.fn(),
mockInitializeEncryptionFromPassphrase: vi.fn(),
+ mockSetEncryptionSalt: vi.fn(),
mockToastSuccess: vi.fn(),
mockToastError: vi.fn(),
}));
@@ -37,12 +39,7 @@ vi.mock('sonner', () => ({
vi.mock('@/lib/sync/engine', () => ({
getSyncEngine: () => ({
queueExistingTasks: mockQueueExistingTasks,
- }),
-}));
-
-vi.mock('@/lib/sync/sync-coordinator', () => ({
- getSyncCoordinator: () => ({
- requestSync: mockRequestSync,
+ sync: mockSync,
}),
}));
@@ -52,12 +49,16 @@ vi.mock('@/lib/sync/crypto', () => ({
initializeEncryptionFromPassphrase: mockInitializeEncryptionFromPassphrase,
}));
+vi.mock('@/lib/sync/supabase-sync-client', () => ({
+ setEncryptionSalt: mockSetEncryptionSalt,
+}));
+
vi.mock('@/lib/db', () => ({
getDb: () => ({
syncMetadata: {
get: vi.fn().mockResolvedValue({
key: 'sync_config',
- token: 'test-token',
+ userId: 'user-123',
}),
},
}),
@@ -83,13 +84,8 @@ describe('EncryptionPassphraseDialog', () => {
mockStoreEncryptionConfig.mockResolvedValue(undefined);
mockInitializeEncryptionFromPassphrase.mockResolvedValue(true);
mockQueueExistingTasks.mockResolvedValue(0);
- mockRequestSync.mockResolvedValue(undefined);
-
- // Mock fetch for encryption salt upload
- global.fetch = vi.fn().mockResolvedValue({
- ok: true,
- json: async () => ({}),
- });
+ mockSync.mockResolvedValue(undefined);
+ mockSetEncryptionSalt.mockResolvedValue(undefined);
});
describe('Core Functionality', () => {
@@ -253,7 +249,7 @@ describe('EncryptionPassphraseDialog', () => {
});
describe('Issue #5: Timeout Cleanup - Memory Leak Prevention', () => {
- it('should trigger auto-sync after 1 second delay', async () => {
+ it('should trigger auto-sync after delay', async () => {
const user = userEvent.setup({ delay: null });
mockQueueExistingTasks.mockResolvedValue(3);
@@ -274,14 +270,14 @@ describe('EncryptionPassphraseDialog', () => {
});
// Auto-sync should not be called yet
- expect(mockRequestSync).not.toHaveBeenCalled();
+ expect(mockSync).not.toHaveBeenCalled();
- // Wait for the 1 second timeout to fire
+ // Wait for the timeout to fire
await new Promise(resolve => setTimeout(resolve, 1100));
// Now auto-sync should have been triggered
await waitFor(() => {
- expect(mockRequestSync).toHaveBeenCalledWith('auto');
+ expect(mockSync).toHaveBeenCalledWith('auto');
});
});
@@ -311,7 +307,7 @@ describe('EncryptionPassphraseDialog', () => {
await new Promise(resolve => setTimeout(resolve, 1100));
// Auto-sync should NOT be called because component was unmounted
- expect(mockRequestSync).not.toHaveBeenCalled();
+ expect(mockSync).not.toHaveBeenCalled();
});
it('should catch and log auto-sync errors without showing user toast', async () => {
@@ -319,7 +315,7 @@ describe('EncryptionPassphraseDialog', () => {
const consoleErrorSpy = vi.spyOn(console, 'error').mockImplementation(() => {});
mockQueueExistingTasks.mockResolvedValue(1);
- mockRequestSync.mockRejectedValue(new Error('Sync failed'));
+ mockSync.mockRejectedValue(new Error('Sync failed'));
render();
@@ -340,7 +336,7 @@ describe('EncryptionPassphraseDialog', () => {
await new Promise(resolve => setTimeout(resolve, 1100));
await waitFor(() => {
- expect(mockRequestSync).toHaveBeenCalled();
+ expect(mockSync).toHaveBeenCalled();
});
// Error should be logged but no user toast
diff --git a/tests/ui/oauth-buttons.test.tsx b/tests/ui/oauth-buttons.test.tsx
deleted file mode 100644
index 36f7d3c8..00000000
--- a/tests/ui/oauth-buttons.test.tsx
+++ /dev/null
@@ -1,591 +0,0 @@
-/**
- * Tests for OAuthButtons component
- * Tests OAuth provider button rendering, click handlers, loading states, and error handling
- */
-
-import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
-import { render, screen, waitFor } from '@testing-library/react';
-import userEvent from '@testing-library/user-event';
-import type { OAuthAuthData, OAuthHandshakeEvent } from '@/lib/sync/oauth-handshake';
-
-// Hoisted mocks
-const {
- mockFetch,
- mockSubscribeToOAuthHandshake,
- mockCanUsePopups,
- mockGetPlatformInfo,
- mockWindowOpen,
-} = vi.hoisted(() => ({
- mockFetch: vi.fn(),
- mockSubscribeToOAuthHandshake: vi.fn(),
- mockCanUsePopups: vi.fn(),
- mockGetPlatformInfo: vi.fn(),
- mockWindowOpen: vi.fn(),
-}));
-
-// Mock modules
-vi.mock('@/lib/sync/oauth-handshake', () => ({
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- subscribeToOAuthHandshake: (callback: any) => mockSubscribeToOAuthHandshake(callback),
-}));
-
-vi.mock('@/lib/pwa-detection', () => ({
- canUsePopups: () => mockCanUsePopups(),
- getPlatformInfo: () => mockGetPlatformInfo(),
-}));
-
-vi.mock('@/lib/oauth-config', () => ({
- OAUTH_STATE_CONFIG: {
- MAX_STATE_AGE_MS: 10 * 60 * 1000,
- MIN_STATE_LENGTH: 32,
- CLEANUP_INTERVAL_MS: 60 * 1000,
- },
- getOAuthEnvironment: () => 'local',
-}));
-
-vi.mock('@/lib/env-config', () => ({
- getEnvironmentConfig: () => ({
- apiBaseUrl: 'http://localhost:8787',
- oauthCallbackUrl: 'http://localhost:3000/auth/callback',
- isDevelopment: true,
- isProduction: false,
- isStaging: false,
- environment: 'development',
- }),
-}));
-
-// Import component after mocks
-import { OAuthButtons } from '@/components/sync/oauth-buttons';
-
-describe('OAuthButtons', () => {
- let oauthCallback: ((event: OAuthHandshakeEvent) => void) | null = null;
- let unsubscribeFn: ReturnType;
-
- beforeEach(() => {
- vi.clearAllMocks();
- oauthCallback = null;
-
- // Setup global fetch mock
- global.fetch = mockFetch;
-
- // Setup window.open mock
- mockWindowOpen.mockReturnValue({
- focus: vi.fn(),
- close: vi.fn(),
- });
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- global.window.open = mockWindowOpen as any;
-
- // Setup default mock implementations
- mockCanUsePopups.mockReturnValue(true);
- mockGetPlatformInfo.mockReturnValue({
- platform: 'desktop',
- standalone: false,
- mobile: false,
- canUsePopups: true,
- });
-
- // Capture OAuth callback
- unsubscribeFn = vi.fn();
- mockSubscribeToOAuthHandshake.mockImplementation((callback) => {
- oauthCallback = callback;
- return unsubscribeFn;
- });
-
- // Default successful fetch response
- mockFetch.mockResolvedValue({
- ok: true,
- json: async () => ({
- authUrl: 'https://accounts.google.com/oauth',
- state: 'test-state-token-12345678901234567890',
- }),
- });
- });
-
- afterEach(() => {
- vi.clearAllTimers();
- });
-
- describe('Button Rendering', () => {
- it('should render Google OAuth button', () => {
- render();
-
- const googleButton = screen.getByRole('button', { name: /continue with google/i });
- expect(googleButton).toBeInTheDocument();
- });
-
- it('should render Apple OAuth button', () => {
- render();
-
- const appleButton = screen.getByRole('button', { name: /continue with apple/i });
- expect(appleButton).toBeInTheDocument();
- });
-
- it('should render both buttons enabled by default', () => {
- render();
-
- const googleButton = screen.getByRole('button', { name: /continue with google/i });
- const appleButton = screen.getByRole('button', { name: /continue with apple/i });
-
- expect(googleButton).not.toBeDisabled();
- expect(appleButton).not.toBeDisabled();
- });
- });
-
- describe('Click Handlers', () => {
- it('should call onStart callback when Google button is clicked', async () => {
- const user = userEvent.setup();
- const onStart = vi.fn();
-
- render();
-
- const googleButton = screen.getByRole('button', { name: /continue with google/i });
- await user.click(googleButton);
-
- expect(onStart).toHaveBeenCalledWith('google');
- });
-
- it('should call onStart callback when Apple button is clicked', async () => {
- const user = userEvent.setup();
- const onStart = vi.fn();
-
- render();
-
- const appleButton = screen.getByRole('button', { name: /continue with apple/i });
- await user.click(appleButton);
-
- expect(onStart).toHaveBeenCalledWith('apple');
- });
-
- it('should fetch OAuth start endpoint for Google', async () => {
- const user = userEvent.setup();
-
- render();
-
- const googleButton = screen.getByRole('button', { name: /continue with google/i });
- await user.click(googleButton);
-
- await waitFor(() => {
- expect(mockFetch).toHaveBeenCalledWith(
- expect.stringContaining('/api/auth/oauth/google/start'),
- expect.objectContaining({
- method: 'GET',
- headers: { Accept: 'application/json' },
- credentials: 'include',
- })
- );
- });
- });
-
- it('should fetch OAuth start endpoint for Apple', async () => {
- const user = userEvent.setup();
-
- render();
-
- const appleButton = screen.getByRole('button', { name: /continue with apple/i });
- await user.click(appleButton);
-
- await waitFor(() => {
- expect(mockFetch).toHaveBeenCalledWith(
- expect.stringContaining('/api/auth/oauth/apple/start'),
- expect.objectContaining({
- method: 'GET',
- headers: { Accept: 'application/json' },
- credentials: 'include',
- })
- );
- });
- });
-
- it('should open popup window when popups are supported', async () => {
- const user = userEvent.setup();
- mockCanUsePopups.mockReturnValue(true);
-
- render();
-
- const googleButton = screen.getByRole('button', { name: /continue with google/i });
- await user.click(googleButton);
-
- await waitFor(() => {
- expect(mockWindowOpen).toHaveBeenCalledWith(
- 'https://accounts.google.com/oauth',
- 'google_oauth',
- expect.stringContaining('width=500')
- );
- });
- });
- });
-
- describe('Loading States', () => {
- it('should show loading text when Google OAuth is in progress', async () => {
- const user = userEvent.setup();
-
- render();
-
- const googleButton = screen.getByRole('button', { name: /continue with google/i });
- await user.click(googleButton);
-
- await waitFor(() => {
- expect(screen.getByText(/connecting\.\.\./i)).toBeInTheDocument();
- });
- });
-
- it('should show loading text when Apple OAuth is in progress', async () => {
- const user = userEvent.setup();
-
- render();
-
- const appleButton = screen.getByRole('button', { name: /continue with apple/i });
- await user.click(appleButton);
-
- await waitFor(() => {
- expect(screen.getByText(/connecting\.\.\./i)).toBeInTheDocument();
- });
- });
-
- it('should disable both buttons when one OAuth flow is in progress', async () => {
- const user = userEvent.setup();
-
- render();
-
- const googleButton = screen.getByRole('button', { name: /continue with google/i });
- await user.click(googleButton);
-
- await waitFor(() => {
- const buttons = screen.getAllByRole('button');
- buttons.forEach((button) => {
- expect(button).toBeDisabled();
- });
- });
- });
-
- it('should re-enable buttons after successful OAuth', async () => {
- const user = userEvent.setup();
-
- render();
-
- const googleButton = screen.getByRole('button', { name: /continue with google/i });
- await user.click(googleButton);
-
- // Wait for loading state
- await waitFor(() => {
- expect(screen.getByText(/connecting\.\.\./i)).toBeInTheDocument();
- });
-
- // Simulate successful OAuth
- const authData: OAuthAuthData = {
- userId: 'user123',
- deviceId: 'device123',
- email: 'test@example.com',
- token: 'oauth-token',
- expiresAt: Date.now() + 3600000,
- provider: 'google',
- };
-
- oauthCallback?.({
- status: 'success',
- authData,
- state: 'test-state-token-12345678901234567890',
- });
-
- await waitFor(() => {
- const buttons = screen.getAllByRole('button');
- buttons.forEach((button) => {
- expect(button).not.toBeDisabled();
- });
- });
- });
- });
-
- describe('Success Handling', () => {
- it('should call onSuccess callback with auth data', async () => {
- const user = userEvent.setup();
- const onSuccess = vi.fn();
-
- render();
-
- const googleButton = screen.getByRole('button', { name: /continue with google/i });
- await user.click(googleButton);
-
- const authData: OAuthAuthData = {
- userId: 'user123',
- deviceId: 'device123',
- email: 'test@example.com',
- token: 'oauth-token',
- expiresAt: Date.now() + 3600000,
- provider: 'google',
- };
-
- oauthCallback?.({
- status: 'success',
- authData,
- state: 'test-state-token-12345678901234567890',
- });
-
- await waitFor(() => {
- expect(onSuccess).toHaveBeenCalledWith(authData);
- });
- });
-
- it('should close popup on successful OAuth', async () => {
- const user = userEvent.setup();
- const mockPopup = {
- focus: vi.fn(),
- close: vi.fn(),
- };
- mockWindowOpen.mockReturnValue(mockPopup);
-
- render();
-
- const googleButton = screen.getByRole('button', { name: /continue with google/i });
- await user.click(googleButton);
-
- const authData: OAuthAuthData = {
- userId: 'user123',
- deviceId: 'device123',
- email: 'test@example.com',
- token: 'oauth-token',
- expiresAt: Date.now() + 3600000,
- provider: 'google',
- };
-
- oauthCallback?.({
- status: 'success',
- authData,
- state: 'test-state-token-12345678901234567890',
- });
-
- await waitFor(() => {
- expect(mockPopup.close).toHaveBeenCalled();
- });
- });
-
- it('should ignore OAuth result with mismatched provider', async () => {
- const user = userEvent.setup();
- const onSuccess = vi.fn();
- const consoleWarnSpy = vi.spyOn(console, 'warn').mockImplementation(() => {});
-
- render();
-
- const googleButton = screen.getByRole('button', { name: /continue with google/i });
- await user.click(googleButton);
-
- // Return Apple auth data when Google was requested
- const authData: OAuthAuthData = {
- userId: 'user123',
- deviceId: 'device123',
- email: 'test@example.com',
- token: 'oauth-token',
- expiresAt: Date.now() + 3600000,
- provider: 'apple', // Mismatch!
- };
-
- oauthCallback?.({
- status: 'success',
- authData,
- state: 'test-state-token-12345678901234567890',
- });
-
- await waitFor(() => {
- expect(consoleWarnSpy).toHaveBeenCalledWith(
- expect.stringContaining('Provider mismatch'),
- expect.any(Object)
- );
- });
-
- expect(onSuccess).not.toHaveBeenCalled();
- consoleWarnSpy.mockRestore();
- });
- });
-
- describe('Error Handling', () => {
- it('should call onError callback on OAuth failure', async () => {
- const user = userEvent.setup();
- const onError = vi.fn();
-
- render();
-
- const googleButton = screen.getByRole('button', { name: /continue with google/i });
- await user.click(googleButton);
-
- oauthCallback?.({
- status: 'error',
- error: 'Authentication failed',
- state: 'test-state-token-12345678901234567890',
- });
-
- await waitFor(() => {
- expect(onError).toHaveBeenCalledWith(expect.any(Error));
- expect(onError.mock.calls[0][0].message).toBe('Authentication failed');
- });
- });
-
- it('should handle network errors during fetch', async () => {
- const user = userEvent.setup();
- const onError = vi.fn();
-
- mockFetch.mockRejectedValue(new Error('Network error'));
-
- render();
-
- const googleButton = screen.getByRole('button', { name: /continue with google/i });
- await user.click(googleButton);
-
- await waitFor(() => {
- expect(onError).toHaveBeenCalledWith(expect.any(Error));
- expect(onError.mock.calls[0][0].message).toBe('Network error');
- });
- });
-
- it('should handle HTTP error responses', async () => {
- const user = userEvent.setup();
- const onError = vi.fn();
-
- mockFetch.mockResolvedValue({
- ok: false,
- status: 500,
- text: async () => 'Internal Server Error',
- });
-
- render();
-
- const googleButton = screen.getByRole('button', { name: /continue with google/i });
- await user.click(googleButton);
-
- await waitFor(() => {
- expect(onError).toHaveBeenCalledWith(expect.any(Error));
- expect(onError.mock.calls[0][0].message).toContain('500');
- });
- });
-
- it('should handle invalid state token from server', async () => {
- const user = userEvent.setup();
- const onError = vi.fn();
-
- mockFetch.mockResolvedValue({
- ok: true,
- json: async () => ({
- authUrl: 'https://accounts.google.com/oauth',
- state: 'short', // Too short
- }),
- });
-
- render();
-
- const googleButton = screen.getByRole('button', { name: /continue with google/i });
- await user.click(googleButton);
-
- await waitFor(() => {
- expect(onError).toHaveBeenCalledWith(expect.any(Error));
- expect(onError.mock.calls[0][0].message).toContain('Invalid state token');
- });
- });
-
- it('should handle blocked popup', async () => {
- const user = userEvent.setup();
- const onError = vi.fn();
-
- mockWindowOpen.mockReturnValue(null); // Popup blocked
-
- render();
-
- const googleButton = screen.getByRole('button', { name: /continue with google/i });
- await user.click(googleButton);
-
- await waitFor(() => {
- expect(onError).toHaveBeenCalledWith(expect.any(Error));
- expect(onError.mock.calls[0][0].message).toContain('Popup blocked');
- });
- });
-
- it('should clear loading state on error', async () => {
- const user = userEvent.setup();
-
- mockFetch.mockRejectedValue(new Error('Network error'));
-
- render();
-
- const googleButton = screen.getByRole('button', { name: /continue with google/i });
- await user.click(googleButton);
-
- await waitFor(() => {
- const buttons = screen.getAllByRole('button');
- buttons.forEach((button) => {
- expect(button).not.toBeDisabled();
- });
- });
- });
- });
-
- describe('Redirect Flow', () => {
- it('should redirect to auth URL when popups are not supported', async () => {
- const user = userEvent.setup();
- mockCanUsePopups.mockReturnValue(false);
-
- // Save original location and replace with mock
- const originalLocation = window.location;
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- delete (window as any).location;
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- window.location = { href: '', hostname: 'localhost' } as any;
-
- render();
-
- const googleButton = screen.getByRole('button', { name: /continue with google/i });
- await user.click(googleButton);
-
- await waitFor(() => {
- expect(window.location.href).toBe('https://accounts.google.com/oauth');
- });
-
- // Restore original location to prevent test contamination
- window.location = originalLocation;
- });
- });
-
- describe('Platform Detection', () => {
- it('should log platform information when initiating OAuth', async () => {
- const user = userEvent.setup();
- const consoleInfoSpy = vi.spyOn(console, 'info').mockImplementation(() => {});
-
- mockGetPlatformInfo.mockReturnValue({
- platform: 'ios',
- standalone: true,
- mobile: true,
- canUsePopups: false,
- });
-
- render();
-
- const googleButton = screen.getByRole('button', { name: /continue with google/i });
- await user.click(googleButton);
-
- await waitFor(() => {
- expect(consoleInfoSpy).toHaveBeenCalledWith(
- expect.stringContaining('Initiating flow'),
- expect.objectContaining({
- provider: 'google',
- platform: expect.objectContaining({
- platform: 'ios',
- standalone: true,
- }),
- })
- );
- });
-
- consoleInfoSpy.mockRestore();
- });
- });
-
- describe('Subscription Cleanup', () => {
- it('should unsubscribe from OAuth handshake on unmount', () => {
- const { unmount } = render();
-
- expect(mockSubscribeToOAuthHandshake).toHaveBeenCalled();
-
- unmount();
-
- expect(unsubscribeFn).toHaveBeenCalled();
- });
- });
-});
diff --git a/tests/ui/oauth-callback-handler.test.tsx b/tests/ui/oauth-callback-handler.test.tsx
deleted file mode 100644
index c01a30a1..00000000
--- a/tests/ui/oauth-callback-handler.test.tsx
+++ /dev/null
@@ -1,580 +0,0 @@
-/**
- * Tests for OAuthCallbackHandler component
- * Focus on token expiration normalization (Issue #2) and OAuth flow
- */
-
-import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
-import { render, waitFor } from '@testing-library/react';
-import type { OAuthAuthData } from '@/lib/sync/oauth-handshake';
-
-// Use vi.hoisted to ensure these are available to vi.mock
-const {
- mockRouterReplace,
- mockSearchParams,
- mockToast,
- mockSubscribeToOAuthHandshake,
- mockIsEncryptionConfigured,
-} = vi.hoisted(() => ({
- mockRouterReplace: vi.fn(),
- mockSearchParams: new URLSearchParams(),
- mockToast: {
- info: vi.fn(),
- success: vi.fn(),
- error: vi.fn(),
- },
- mockSubscribeToOAuthHandshake: vi.fn(),
- mockIsEncryptionConfigured: vi.fn(),
-}));
-
-// Mock modules
-vi.mock('next/navigation', () => ({
- useRouter: () => ({
- replace: mockRouterReplace,
- }),
- useSearchParams: () => mockSearchParams,
-}));
-
-vi.mock('sonner', () => ({
- toast: mockToast,
-}));
-
-vi.mock('@/lib/sync/oauth-handshake', () => ({
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- subscribeToOAuthHandshake: (callback: any) => mockSubscribeToOAuthHandshake(callback),
-}));
-
-vi.mock('@/lib/sync/crypto', () => ({
- isEncryptionConfigured: () => mockIsEncryptionConfigured(),
-}));
-
-// Now import the component and dependencies
-import { OAuthCallbackHandler } from '@/components/oauth-callback-handler';
-import { getDb } from '@/lib/db';
-
-describe('OAuthCallbackHandler', () => {
- let db: ReturnType;
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- let oauthCallback: any;
-
- beforeEach(async () => {
- vi.clearAllMocks();
- db = getDb();
-
- // Clear database
- await db.syncMetadata.clear();
- await db.tasks.clear();
-
- // Set up default mock values
- mockIsEncryptionConfigured.mockResolvedValue(false);
-
- // Capture the OAuth callback
- mockSubscribeToOAuthHandshake.mockImplementation((callback) => {
- oauthCallback = callback;
- return vi.fn(); // Return unsubscribe function
- });
- });
-
- afterEach(async () => {
- await db.syncMetadata.clear();
- await db.tasks.clear();
- });
-
- describe('Issue #2: Token Expiration Normalization', () => {
- it('should normalize token expiration from seconds to milliseconds', async () => {
- render();
-
- // Simulate OAuth success with token expiration in seconds (typical JWT format)
- const authData: OAuthAuthData = {
- userId: 'user123',
- deviceId: 'device123',
- email: 'test@example.com',
- token: 'jwt-token',
- expiresAt: 1735689600, // Jan 1, 2025 00:00:00 UTC in SECONDS
- provider: 'google',
- requiresEncryptionSetup: true,
- encryptionSalt: null,
- };
-
- await oauthCallback({
- status: 'success',
- authData,
- state: 'test-state-123',
- });
-
- // Wait for processing to complete
- await waitFor(async () => {
- const config = await db.syncMetadata.get('sync_config');
- expect(config).toBeDefined();
- });
-
- // Verify token expiration was normalized to milliseconds
- const config = await db.syncMetadata.get('sync_config');
- expect(config?.tokenExpiresAt).toBe(1735689600 * 1000); // Now in milliseconds
- expect(config?.tokenExpiresAt).toBe(1735689600000);
- });
-
- it('should keep token expiration unchanged if already in milliseconds', async () => {
- render();
-
- // Simulate OAuth success with token expiration already in milliseconds
- const authData: OAuthAuthData = {
- userId: 'user123',
- deviceId: 'device123',
- email: 'test@example.com',
- token: 'jwt-token',
- expiresAt: 1735689600000, // Already in MILLISECONDS
- provider: 'google',
- requiresEncryptionSetup: true,
- encryptionSalt: null,
- };
-
- await oauthCallback({
- status: 'success',
- authData,
- state: 'test-state-456',
- });
-
- await waitFor(async () => {
- const config = await db.syncMetadata.get('sync_config');
- expect(config).toBeDefined();
- });
-
- // Should remain unchanged (already in milliseconds)
- const config = await db.syncMetadata.get('sync_config');
- expect(config?.tokenExpiresAt).toBe(1735689600000);
- });
-
- it('should handle threshold boundary correctly (seconds)', async () => {
- render();
-
- // Just below the 10 billion threshold (should be treated as seconds)
- const authData: OAuthAuthData = {
- userId: 'user123',
- deviceId: 'device123',
- email: 'test@example.com',
- token: 'jwt-token',
- expiresAt: 9_999_999_999, // Just below threshold
- provider: 'google',
- requiresEncryptionSetup: true,
- encryptionSalt: null,
- };
-
- await oauthCallback({
- status: 'success',
- authData,
- state: 'test-state-789',
- });
-
- await waitFor(async () => {
- const config = await db.syncMetadata.get('sync_config');
- expect(config).toBeDefined();
- });
-
- // Should be multiplied by 1000
- const config = await db.syncMetadata.get('sync_config');
- expect(config?.tokenExpiresAt).toBe(9_999_999_999 * 1000);
- });
-
- it('should handle threshold boundary correctly (milliseconds)', async () => {
- render();
-
- // At threshold: 10 billion (should be treated as milliseconds)
- const authData: OAuthAuthData = {
- userId: 'user123',
- deviceId: 'device123',
- email: 'test@example.com',
- token: 'jwt-token',
- expiresAt: 10_000_000_000, // At threshold
- provider: 'google',
- requiresEncryptionSetup: true,
- encryptionSalt: null,
- };
-
- await oauthCallback({
- status: 'success',
- authData,
- state: 'test-state-abc',
- });
-
- await waitFor(async () => {
- const config = await db.syncMetadata.get('sync_config');
- expect(config).toBeDefined();
- });
-
- // Should NOT be multiplied (already in milliseconds)
- const config = await db.syncMetadata.get('sync_config');
- expect(config?.tokenExpiresAt).toBe(10_000_000_000);
- });
- });
-
- describe('OAuth Flow', () => {
- it('should process OAuth handshake success event', async () => {
- render();
-
- const authData: OAuthAuthData = {
- userId: 'user123',
- deviceId: 'device123',
- email: 'test@example.com',
- token: 'oauth-token',
- expiresAt: 1735689600,
- provider: 'google',
- requiresEncryptionSetup: true,
- encryptionSalt: null,
- };
-
- await oauthCallback({
- status: 'success',
- authData,
- state: 'test-state',
- });
-
- await waitFor(() => {
- expect(mockToast.info).toHaveBeenCalledWith('Processing OAuth for test@example.com...');
- });
-
- await waitFor(async () => {
- const config = await db.syncMetadata.get('sync_config');
- expect(config).toBeDefined();
- });
- });
-
- it('should store sync config in IndexedDB with correct fields', async () => {
- render();
-
- const authData: OAuthAuthData = {
- userId: 'user123',
- deviceId: 'device456',
- email: 'test@example.com',
- token: 'oauth-token-xyz',
- expiresAt: 1735689600,
- provider: 'apple',
- requiresEncryptionSetup: true,
- encryptionSalt: null,
- };
-
- await oauthCallback({
- status: 'success',
- authData,
- state: 'test-state',
- });
-
- await waitFor(async () => {
- const config = await db.syncMetadata.get('sync_config');
- expect(config).toMatchObject({
- key: 'sync_config',
- enabled: true,
- userId: 'user123',
- deviceId: 'device456',
- email: 'test@example.com',
- token: 'oauth-token-xyz',
- provider: 'apple',
- conflictStrategy: 'last_write_wins',
- consecutiveFailures: 0,
- lastFailureAt: null,
- lastFailureReason: null,
- nextRetryAt: null,
- });
- });
- });
-
- it('should show encryption dialog for new users', async () => {
- render();
-
- const authData: OAuthAuthData = {
- userId: 'user123',
- deviceId: 'device123',
- email: 'new-user@example.com',
- token: 'oauth-token',
- expiresAt: 1735689600,
- provider: 'google',
- requiresEncryptionSetup: true, // New user
- encryptionSalt: null,
- };
-
- await oauthCallback({
- status: 'success',
- authData,
- state: 'test-state',
- });
-
- await waitFor(() => {
- expect(mockToast.success).toHaveBeenCalledWith(
- 'Sync enabled successfully! Finish encryption setup to start syncing.'
- );
- });
- });
-
- it('should show encryption unlock dialog for existing users without local encryption', async () => {
- render();
- mockIsEncryptionConfigured.mockResolvedValue(false);
-
- const authData: OAuthAuthData = {
- userId: 'user123',
- deviceId: 'device123',
- email: 'existing@example.com',
- token: 'oauth-token',
- expiresAt: 1735689600,
- provider: 'google',
- requiresEncryptionSetup: false, // Existing user
- encryptionSalt: 'server-salt',
- };
-
- await oauthCallback({
- status: 'success',
- authData,
- state: 'test-state',
- });
-
- await waitFor(() => {
- expect(mockIsEncryptionConfigured).toHaveBeenCalled();
- });
- });
-
- it('should handle OAuth handshake errors', async () => {
- render();
-
- await oauthCallback({
- status: 'error',
- error: 'Authentication failed',
- state: 'test-state',
- });
-
- await waitFor(() => {
- expect(mockToast.error).toHaveBeenCalledWith('Authentication failed');
- });
- });
-
- it('should handle OAuth handshake errors with generic message', async () => {
- render();
-
- await oauthCallback({
- status: 'error',
- error: null,
- state: 'test-state',
- });
-
- await waitFor(() => {
- expect(mockToast.error).toHaveBeenCalledWith('Sign in failed. Please try again.');
- });
- });
- });
-
- describe('Server URL Detection', () => {
- it('should use localhost URL in development', async () => {
- // Mock window.location.hostname
- Object.defineProperty(window, 'location', {
- value: {
- hostname: 'localhost',
- origin: 'http://localhost:3000',
- },
- writable: true,
- });
-
- render();
-
- const authData: OAuthAuthData = {
- userId: 'user123',
- deviceId: 'device123',
- email: 'test@example.com',
- token: 'oauth-token',
- expiresAt: 1735689600,
- provider: 'google',
- requiresEncryptionSetup: true,
- encryptionSalt: null,
- };
-
- await oauthCallback({
- status: 'success',
- authData,
- state: 'test-state',
- });
-
- await waitFor(async () => {
- const config = await db.syncMetadata.get('sync_config');
- expect(config?.serverUrl).toBe('http://localhost:8787');
- });
- });
-
- it('should use window origin in production', async () => {
- Object.defineProperty(window, 'location', {
- value: {
- hostname: 'gsd.vinny.dev',
- origin: 'https://gsd.vinny.dev',
- },
- writable: true,
- });
-
- render();
-
- const authData: OAuthAuthData = {
- userId: 'user123',
- deviceId: 'device123',
- email: 'test@example.com',
- token: 'oauth-token',
- expiresAt: 1735689600,
- provider: 'google',
- requiresEncryptionSetup: true,
- encryptionSalt: null,
- };
-
- await oauthCallback({
- status: 'success',
- authData,
- state: 'test-state',
- });
-
- await waitFor(async () => {
- const config = await db.syncMetadata.get('sync_config');
- expect(config?.serverUrl).toBe('https://gsd.vinny.dev');
- });
- });
-
- it('should preserve existing serverUrl if already configured', async () => {
- // Pre-populate existing sync config
- await db.syncMetadata.put({
- key: 'sync_config',
- enabled: false,
- userId: 'user123',
- deviceId: 'device123',
- deviceName: 'Old Device',
- email: 'old@example.com',
- token: null,
- tokenExpiresAt: null,
- lastSyncAt: null,
- vectorClock: {},
- conflictStrategy: 'last_write_wins',
- serverUrl: 'https://custom-server.com',
- consecutiveFailures: 0,
- lastFailureAt: null,
- lastFailureReason: null,
- nextRetryAt: null,
- });
-
- render();
-
- const authData: OAuthAuthData = {
- userId: 'user123',
- deviceId: 'device789',
- email: 'test@example.com',
- token: 'oauth-token',
- expiresAt: 1735689600,
- provider: 'google',
- requiresEncryptionSetup: true,
- encryptionSalt: null,
- };
-
- await oauthCallback({
- status: 'success',
- authData,
- state: 'test-state',
- });
-
- await waitFor(async () => {
- const config = await db.syncMetadata.get('sync_config');
- // Should preserve custom server URL
- expect(config?.serverUrl).toBe('https://custom-server.com');
- });
- });
- });
-
- describe('Query Parameter Handling', () => {
- it('should replace URL when oauth_complete query param is present', () => {
- mockSearchParams.set('oauth_complete', 'true');
-
- render();
-
- expect(mockRouterReplace).toHaveBeenCalledWith('/');
- });
-
- it('should not replace URL when oauth_complete is absent', () => {
- mockSearchParams.delete('oauth_complete');
-
- render();
-
- expect(mockRouterReplace).not.toHaveBeenCalled();
- });
- });
-
- describe('Error Handling', () => {
- it('should handle database errors gracefully', async () => {
- const consoleErrorSpy = vi.spyOn(console, 'error').mockImplementation(() => {});
-
- // Mock database error
- vi.spyOn(db.syncMetadata, 'put').mockRejectedValue(new Error('Database error'));
-
- render();
-
- const authData: OAuthAuthData = {
- userId: 'user123',
- deviceId: 'device123',
- email: 'test@example.com',
- token: 'oauth-token',
- expiresAt: 1735689600,
- provider: 'google',
- requiresEncryptionSetup: true,
- encryptionSalt: null,
- };
-
- await oauthCallback({
- status: 'success',
- authData,
- state: 'test-state',
- });
-
- await waitFor(() => {
- expect(mockToast.error).toHaveBeenCalledWith(
- expect.stringContaining('Failed to process OAuth callback')
- );
- });
-
- consoleErrorSpy.mockRestore();
- });
-
- // Duplicate state prevention IS implemented via processingState check in component (lines 71-73)
- it('should prevent duplicate processing of same state', async () => {
- render();
-
- const authData: OAuthAuthData = {
- userId: 'user123',
- deviceId: 'device123',
- email: 'test@example.com',
- token: 'oauth-token',
- expiresAt: 1735689600,
- provider: 'google',
- requiresEncryptionSetup: true,
- encryptionSalt: null,
- };
-
- const sameState = 'duplicate-state';
-
- // First call - should process normally
- await oauthCallback({
- status: 'success',
- authData,
- state: sameState,
- });
-
- // Wait for first callback to fully process (state update + toast)
- await waitFor(() => {
- expect(mockToast.info).toHaveBeenCalledWith(
- expect.stringContaining('Processing OAuth')
- );
- });
-
- // Clear mocks after first call is fully processed
- mockToast.info.mockClear();
- mockToast.success.mockClear();
-
- // Call again with same state - should be blocked by processingState check
- await oauthCallback({
- status: 'success',
- authData,
- state: sameState,
- });
-
- // Second call should be ignored (no toasts)
- expect(mockToast.info).not.toHaveBeenCalled();
- expect(mockToast.success).not.toHaveBeenCalled();
- });
- });
-});
diff --git a/tests/ui/sync-auth-dialog.test.tsx b/tests/ui/sync-auth-dialog.test.tsx
deleted file mode 100644
index 220a8968..00000000
--- a/tests/ui/sync-auth-dialog.test.tsx
+++ /dev/null
@@ -1,733 +0,0 @@
-/**
- * Tests for SyncAuthDialog component
- * Tests authentication flow steps, provider selection, passphrase entry, error display and recovery
- */
-
-import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
-import { act, render, screen, waitFor } from '@testing-library/react';
-import userEvent from '@testing-library/user-event';
-import type { OAuthHandshakeEvent, OAuthAuthData } from '@/lib/sync/oauth-handshake';
-
-// Hoisted mocks
-const {
- mockGetDb,
- mockSubscribeToOAuthHandshake,
- mockIsEncryptionConfigured,
- mockGetCryptoManager,
- mockClearCryptoManager,
- mockToastSuccess,
- mockToastError,
- mockToastInfo,
-} = vi.hoisted(() => ({
- mockGetDb: vi.fn(),
- mockSubscribeToOAuthHandshake: vi.fn(),
- mockIsEncryptionConfigured: vi.fn(),
- mockGetCryptoManager: vi.fn(),
- mockClearCryptoManager: vi.fn(),
- mockToastSuccess: vi.fn(),
- mockToastError: vi.fn(),
- mockToastInfo: vi.fn(),
-}));
-
-// Mock modules
-vi.mock('@/lib/db', () => ({
- getDb: () => mockGetDb(),
-}));
-
-vi.mock('@/lib/sync/oauth-handshake', () => ({
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- subscribeToOAuthHandshake: (callback: any) => mockSubscribeToOAuthHandshake(callback),
-}));
-
-vi.mock('@/lib/sync/crypto', () => ({
- isEncryptionConfigured: () => mockIsEncryptionConfigured(),
- getCryptoManager: () => mockGetCryptoManager(),
- clearCryptoManager: () => mockClearCryptoManager(),
-}));
-
-vi.mock('sonner', () => ({
- toast: {
- success: mockToastSuccess,
- error: mockToastError,
- info: mockToastInfo,
- },
-}));
-
-// Mock OAuthButtons component
-vi.mock('@/components/sync/oauth-buttons', () => ({
- OAuthButtons: ({ onStart }: { onStart?: () => void }) => (
-
-
-
- ),
-}));
-
-// Mock EncryptionPassphraseDialog component
-vi.mock('@/components/sync/encryption-passphrase-dialog', () => ({
- EncryptionPassphraseDialog: ({
- isOpen,
- onComplete,
- onCancel,
- }: {
- isOpen: boolean;
- onComplete: () => void;
- onCancel: () => void;
- }) =>
- isOpen ? (
-
-
-
-
- ) : null,
-}));
-
-// Import component after mocks
-import { SyncAuthDialog } from '@/components/sync/sync-auth-dialog';
-
-describe('SyncAuthDialog', () => {
- let oauthCallback: ((event: OAuthHandshakeEvent) => void) | null = null;
- let unsubscribeFn: ReturnType;
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- let mockDb: any;
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- let mockCryptoManager: any;
-
- beforeEach(() => {
- vi.clearAllMocks();
- oauthCallback = null;
-
- // Setup mock database
- mockDb = {
- syncMetadata: {
- get: vi.fn(),
- put: vi.fn(),
- delete: vi.fn(),
- },
- };
- mockGetDb.mockReturnValue(mockDb);
-
- // Setup mock crypto manager
- mockCryptoManager = {
- isInitialized: vi.fn().mockReturnValue(false),
- clear: vi.fn(),
- };
- mockGetCryptoManager.mockReturnValue(mockCryptoManager);
-
- // Setup OAuth subscription
- unsubscribeFn = vi.fn();
- mockSubscribeToOAuthHandshake.mockImplementation((callback) => {
- oauthCallback = callback;
- return unsubscribeFn;
- });
-
- // Default: no encryption configured
- mockIsEncryptionConfigured.mockResolvedValue(false);
-
- // Default: no sync config
- mockDb.syncMetadata.get.mockResolvedValue(null);
- });
-
- afterEach(() => {
- vi.clearAllTimers();
- });
-
- describe('Dialog Rendering', () => {
- it('should not render when isOpen is false', () => {
- render();
-
- expect(screen.queryByText('Sync Settings')).not.toBeInTheDocument();
- });
-
- it('should render when isOpen is true', async () => {
- render();
-
- await waitFor(() => {
- expect(screen.getByText('Sync Settings')).toBeInTheDocument();
- });
- });
-
- it('should show enable sync message when not authenticated', async () => {
- render();
-
- await waitFor(() => {
- expect(screen.getByText('Enable cloud sync')).toBeInTheDocument();
- });
- });
-
- it('should show manage account message when authenticated', async () => {
- mockDb.syncMetadata.get.mockResolvedValue({
- key: 'sync_config',
- enabled: true,
- email: 'test@example.com',
- provider: 'google',
- });
-
- render();
-
- await waitFor(() => {
- expect(screen.getByText('Manage your sync account')).toBeInTheDocument();
- });
- });
-
- it('should call onClose when close button is clicked', async () => {
- const user = userEvent.setup();
- const onClose = vi.fn();
-
- render();
-
- await waitFor(() => {
- expect(screen.getByLabelText('Close')).toBeInTheDocument();
- });
-
- const closeButton = screen.getByLabelText('Close');
- await user.click(closeButton);
-
- expect(onClose).toHaveBeenCalled();
- });
-
- it('should call onClose when backdrop is clicked', async () => {
- const user = userEvent.setup();
- const onClose = vi.fn();
-
- render();
-
- await waitFor(() => {
- expect(screen.getByText('Sync Settings')).toBeInTheDocument();
- });
-
- // Find the backdrop by its class
- const backdrop = document.querySelector('.fixed.inset-0.z-50.bg-black\\/50');
- if (backdrop) {
- await user.click(backdrop as Element);
- expect(onClose).toHaveBeenCalled();
- }
- });
- });
-
- describe('Authentication Flow - Not Authenticated', () => {
- it('should render OAuth buttons when not authenticated', async () => {
- render();
-
- await waitFor(() => {
- expect(screen.getByTestId('oauth-buttons')).toBeInTheDocument();
- });
- });
-
- it('should show encryption information message', async () => {
- render();
-
- await waitFor(() => {
- expect(screen.getByText('🔐 End-to-end encrypted')).toBeInTheDocument();
- expect(
- screen.getByText(/Your tasks are encrypted on your device before syncing/i)
- ).toBeInTheDocument();
- });
- });
-
- it('should set loading state when OAuth starts', async () => {
- const user = userEvent.setup();
-
- render();
-
- await waitFor(() => {
- expect(screen.getByTestId('oauth-buttons')).toBeInTheDocument();
- });
-
- const signInButton = screen.getByText('Sign in with Google');
- await user.click(signInButton);
-
- // Loading spinner should appear
- await waitFor(() => {
- const spinner = document.querySelector('.animate-spin');
- expect(spinner).toBeInTheDocument();
- });
- });
-
- it('should handle successful OAuth authentication', async () => {
- const user = userEvent.setup();
- const onSuccess = vi.fn();
-
- render();
-
- await waitFor(() => {
- expect(screen.getByTestId('oauth-buttons')).toBeInTheDocument();
- });
-
- const signInButton = screen.getByText('Sign in with Google');
- await user.click(signInButton);
-
- // Simulate successful OAuth
- const authData: OAuthAuthData = {
- userId: 'user123',
- deviceId: 'device123',
- email: 'test@example.com',
- token: 'oauth-token',
- expiresAt: Date.now() + 3600000,
- provider: 'google',
- };
-
- await act(async () => {
- oauthCallback?.({
- status: 'success',
- authData,
- state: 'test-state-token',
- });
- });
-
- await waitFor(() => {
- expect(mockToastSuccess).toHaveBeenCalledWith(
- expect.stringContaining('Signed in as test@example.com')
- );
- expect(onSuccess).toHaveBeenCalled();
- });
- });
-
- it('should display error message on OAuth failure', async () => {
- const user = userEvent.setup();
-
- render();
-
- await waitFor(() => {
- expect(screen.getByTestId('oauth-buttons')).toBeInTheDocument();
- });
-
- const signInButton = screen.getByText('Sign in with Google');
- await user.click(signInButton);
-
- // Simulate OAuth error
- await act(async () => {
- oauthCallback?.({
- status: 'error',
- error: 'Authentication failed',
- state: 'test-state-token',
- });
- });
-
- await waitFor(() => {
- expect(screen.getByText('Authentication failed')).toBeInTheDocument();
- expect(mockToastError).toHaveBeenCalledWith('Authentication failed');
- });
- });
-
- it('should clear error when starting new OAuth flow', async () => {
- const user = userEvent.setup();
-
- render();
-
- await waitFor(() => {
- expect(screen.getByTestId('oauth-buttons')).toBeInTheDocument();
- });
-
- const signInButton = screen.getByText('Sign in with Google');
-
- // First attempt - error
- await user.click(signInButton);
- await act(async () => {
- oauthCallback?.({
- status: 'error',
- error: 'First error',
- state: 'state1',
- });
- });
-
- await waitFor(() => {
- expect(screen.getByText('First error')).toBeInTheDocument();
- });
-
- // Second attempt - should clear error
- await user.click(signInButton);
-
- await waitFor(() => {
- expect(screen.queryByText('First error')).not.toBeInTheDocument();
- });
- });
- });
-
- describe('Authentication Flow - Authenticated', () => {
- beforeEach(() => {
- mockDb.syncMetadata.get.mockResolvedValue({
- key: 'sync_config',
- enabled: true,
- email: 'test@example.com',
- provider: 'google',
- });
- });
-
- it('should display signed in user email', async () => {
- render();
-
- await waitFor(() => {
- expect(screen.getByText('Signed in as')).toBeInTheDocument();
- expect(screen.getByText('test@example.com')).toBeInTheDocument();
- });
- });
-
- it('should display provider information', async () => {
- render();
-
- await waitFor(() => {
- expect(screen.getByText('via google')).toBeInTheDocument();
- });
- });
-
- it('should render logout button when authenticated', async () => {
- render();
-
- await waitFor(() => {
- expect(screen.getByRole('button', { name: /logout/i })).toBeInTheDocument();
- });
- });
-
- it('should handle logout successfully', async () => {
- const user = userEvent.setup();
- const onSuccess = vi.fn();
-
- render();
-
- await waitFor(() => {
- expect(screen.getByRole('button', { name: /logout/i })).toBeInTheDocument();
- });
-
- const logoutButton = screen.getByRole('button', { name: /logout/i });
- await user.click(logoutButton);
-
- await waitFor(() => {
- expect(mockDb.syncMetadata.delete).toHaveBeenCalledWith('sync_config');
- expect(mockDb.syncMetadata.delete).toHaveBeenCalledWith('encryption_salt');
- expect(mockClearCryptoManager).toHaveBeenCalled();
- expect(mockToastSuccess).toHaveBeenCalledWith('Logged out successfully');
- expect(onSuccess).toHaveBeenCalled();
- });
- });
-
- it('should show loading state during logout', async () => {
- const user = userEvent.setup();
-
- // Make delete operation slow to catch loading state
- mockDb.syncMetadata.delete.mockImplementation(() =>
- new Promise(resolve => setTimeout(resolve, 100))
- );
-
- render();
-
- await waitFor(() => {
- expect(screen.getByRole('button', { name: /logout/i })).toBeInTheDocument();
- });
-
- const logoutButton = screen.getByRole('button', { name: /logout/i });
- await user.click(logoutButton);
-
- // Check for loading text immediately
- expect(screen.getByText('Logging out...')).toBeInTheDocument();
- });
-
- it('should handle logout error', async () => {
- const user = userEvent.setup();
- mockDb.syncMetadata.delete.mockRejectedValue(new Error('Delete failed'));
-
- render();
-
- await waitFor(() => {
- expect(screen.getByRole('button', { name: /logout/i })).toBeInTheDocument();
- });
-
- const logoutButton = screen.getByRole('button', { name: /logout/i });
- await user.click(logoutButton);
-
- await waitFor(() => {
- expect(screen.getByText('Delete failed')).toBeInTheDocument();
- });
- });
-
- it('should disable logout button while logging out', async () => {
- const user = userEvent.setup();
-
- render();
-
- await waitFor(() => {
- expect(screen.getByRole('button', { name: /logout/i })).toBeInTheDocument();
- });
-
- const logoutButton = screen.getByRole('button', { name: /logout/i });
- await user.click(logoutButton);
-
- await waitFor(() => {
- expect(logoutButton).toBeDisabled();
- });
- });
- });
-
- describe('Encryption Passphrase Flow', () => {
- beforeEach(() => {
- mockDb.syncMetadata.get.mockResolvedValue({
- key: 'sync_config',
- enabled: true,
- email: 'test@example.com',
- provider: 'google',
- });
- mockIsEncryptionConfigured.mockResolvedValue(true);
- mockCryptoManager.isInitialized.mockReturnValue(false);
- });
-
- it('should show encryption dialog when encryption is configured but not initialized', async () => {
- render();
-
- await waitFor(() => {
- expect(screen.getByTestId('encryption-dialog')).toBeInTheDocument();
- expect(mockToastInfo).toHaveBeenCalledWith(
- 'Please enter your encryption passphrase to unlock sync.'
- );
- });
- });
-
- it('should not show encryption dialog when crypto manager is initialized', async () => {
- mockCryptoManager.isInitialized.mockReturnValue(true);
-
- render();
-
- await waitFor(() => {
- expect(screen.getByText('test@example.com')).toBeInTheDocument();
- });
-
- expect(screen.queryByTestId('encryption-dialog')).not.toBeInTheDocument();
- });
-
- it('should handle encryption dialog completion', async () => {
- const user = userEvent.setup();
- const onSuccess = vi.fn();
-
- render();
-
- await waitFor(() => {
- expect(screen.getByTestId('encryption-dialog')).toBeInTheDocument();
- });
-
- const completeButton = screen.getByText('Complete Encryption');
- await user.click(completeButton);
-
- await waitFor(() => {
- expect(mockToastSuccess).toHaveBeenCalledWith(
- 'Encryption unlocked. You can close this dialog.'
- );
- expect(onSuccess).toHaveBeenCalled();
- });
- });
-
- it('should handle encryption dialog cancellation', async () => {
- const user = userEvent.setup();
-
- render();
-
- await waitFor(() => {
- expect(screen.getByTestId('encryption-dialog')).toBeInTheDocument();
- });
-
- const cancelButton = screen.getByText('Cancel Encryption');
- await user.click(cancelButton);
-
- await waitFor(() => {
- expect(screen.queryByTestId('encryption-dialog')).not.toBeInTheDocument();
- });
- });
-
- it('should refresh sync status after encryption completion', async () => {
- const user = userEvent.setup();
-
- render();
-
- await waitFor(() => {
- expect(screen.getByTestId('encryption-dialog')).toBeInTheDocument();
- });
-
- const completeButton = screen.getByText('Complete Encryption');
- await user.click(completeButton);
-
- await waitFor(() => {
- expect(mockDb.syncMetadata.get).toHaveBeenCalledWith('sync_config');
- });
- });
- });
-
- describe('OAuth State Management', () => {
- it('should ignore OAuth events with mismatched state', async () => {
- const user = userEvent.setup();
- const onSuccess = vi.fn();
-
- render();
-
- await waitFor(() => {
- expect(screen.getByTestId('oauth-buttons')).toBeInTheDocument();
- });
-
- const signInButton = screen.getByText('Sign in with Google');
- await user.click(signInButton);
-
- // First OAuth success sets active state
- const authData1: OAuthAuthData = {
- userId: 'user123',
- deviceId: 'device123',
- email: 'test@example.com',
- token: 'oauth-token',
- expiresAt: Date.now() + 3600000,
- provider: 'google',
- };
-
- await act(async () => {
- oauthCallback?.({
- status: 'success',
- authData: authData1,
- state: 'state1',
- });
- });
-
- await waitFor(() => {
- expect(onSuccess).toHaveBeenCalledTimes(1);
- });
-
- // Second OAuth event with different state should be ignored
- await act(async () => {
- oauthCallback?.({
- status: 'error',
- error: 'Should be ignored',
- state: 'state2',
- });
- });
-
- // onSuccess should not be called again
- expect(onSuccess).toHaveBeenCalledTimes(1);
- expect(screen.queryByText('Should be ignored')).not.toBeInTheDocument();
- });
-
- it('should only process OAuth events when dialog is open', async () => {
- const { rerender } = render();
-
- // Trigger OAuth event while dialog is closed
- const authData: OAuthAuthData = {
- userId: 'user123',
- deviceId: 'device123',
- email: 'test@example.com',
- token: 'oauth-token',
- expiresAt: Date.now() + 3600000,
- provider: 'google',
- };
-
- await act(async () => {
- oauthCallback?.({
- status: 'success',
- authData,
- state: 'test-state',
- });
- });
-
- // Should not show success toast
- expect(mockToastSuccess).not.toHaveBeenCalled();
-
- // Open dialog
- rerender();
-
- // Still should not process the old event
- await waitFor(() => {
- expect(screen.getByText('Sync Settings')).toBeInTheDocument();
- });
-
- expect(mockToastSuccess).not.toHaveBeenCalled();
- });
- });
-
- describe('Subscription Cleanup', () => {
- it('should unsubscribe from OAuth handshake on unmount', () => {
- const { unmount } = render();
-
- expect(mockSubscribeToOAuthHandshake).toHaveBeenCalled();
-
- unmount();
-
- expect(unsubscribeFn).toHaveBeenCalled();
- });
-
- it('should not subscribe when not mounted', () => {
- render();
-
- // Should wait for mounted state before subscribing
- expect(mockSubscribeToOAuthHandshake).toHaveBeenCalled();
- });
- });
-
- describe('Status Refresh', () => {
- it('should refresh status after successful OAuth', async () => {
- const user = userEvent.setup();
-
- render();
-
- await waitFor(() => {
- expect(screen.getByTestId('oauth-buttons')).toBeInTheDocument();
- });
-
- const signInButton = screen.getByText('Sign in with Google');
- await user.click(signInButton);
-
- // Simulate successful OAuth
- const authData: OAuthAuthData = {
- userId: 'user123',
- deviceId: 'device123',
- email: 'test@example.com',
- token: 'oauth-token',
- expiresAt: Date.now() + 3600000,
- provider: 'google',
- };
-
- // Update mock to return sync config after OAuth
- mockDb.syncMetadata.get.mockResolvedValue({
- key: 'sync_config',
- enabled: true,
- email: 'test@example.com',
- provider: 'google',
- });
-
- await act(async () => {
- oauthCallback?.({
- status: 'success',
- authData,
- state: 'test-state',
- });
- });
-
- // Wait for the status refresh (happens after 600ms timeout)
- await waitFor(() => {
- expect(mockDb.syncMetadata.get).toHaveBeenCalledWith('sync_config');
- }, { timeout: 2000 });
- });
- });
-
- describe('Error Display', () => {
- beforeEach(() => {
- mockDb.syncMetadata.get.mockResolvedValue({
- key: 'sync_config',
- enabled: true,
- email: 'test@example.com',
- provider: 'google',
- });
- });
-
- it('should display error in authenticated state', async () => {
- mockDb.syncMetadata.delete.mockRejectedValue(new Error('Network error'));
-
- render();
-
- await waitFor(() => {
- expect(screen.getByText('test@example.com')).toBeInTheDocument();
- });
-
- const logoutButton = screen.getByRole('button', { name: /logout/i });
-
- const user = userEvent.setup();
- await user.click(logoutButton);
-
- await waitFor(() => {
- const errorElement = screen.getByText('Network error');
- expect(errorElement).toBeInTheDocument();
- expect(errorElement.closest('div')).toHaveClass('bg-red-50');
- }, { timeout: 2000 });
- });
- });
-});
diff --git a/worker/.gitignore b/worker/.gitignore
deleted file mode 100644
index f89d68e5..00000000
--- a/worker/.gitignore
+++ /dev/null
@@ -1,11 +0,0 @@
-node_modules
-dist
-.wrangler
-.dev.vars
-*.log
-.DS_Store
-secrets.txt
-secrets-*.txt
-
-# Secret management scripts (contain sensitive credentials)
-set-google-client-id.sh
diff --git a/worker/QUICKSTART.sh b/worker/QUICKSTART.sh
deleted file mode 100755
index 655f1b03..00000000
--- a/worker/QUICKSTART.sh
+++ /dev/null
@@ -1,97 +0,0 @@
-#!/bin/bash
-# GSD Sync Worker - Quick Setup Script
-# Run this in your terminal after authenticating with Cloudflare
-
-set -e # Exit on error
-
-echo "🚀 GSD Sync Worker Setup"
-echo "========================"
-echo ""
-
-# Check if wrangler is authenticated
-echo "📋 Step 1: Checking Cloudflare authentication..."
-if ! npx wrangler whoami > /dev/null 2>&1; then
- echo "❌ Not authenticated. Please run:"
- echo " npx wrangler login"
- exit 1
-fi
-echo "✅ Authenticated"
-echo ""
-
-# Create D1 Database
-echo "📋 Step 2: Creating D1 database..."
-echo "Running: npx wrangler d1 create gsd-sync"
-echo ""
-echo "⚠️ IMPORTANT: Copy the database_id from the output below"
-echo " and update wrangler.toml line 14"
-echo ""
-npx wrangler d1 create gsd-sync
-echo ""
-read -p "Press Enter after updating wrangler.toml with database_id..."
-
-# Create KV Namespace
-echo ""
-echo "📋 Step 3: Creating KV namespace..."
-echo "Running: npx wrangler kv namespace create KV"
-echo ""
-echo "⚠️ IMPORTANT: Copy the 'id' from the output below"
-echo " and update wrangler.toml line 29"
-echo ""
-npx wrangler kv namespace create "KV"
-echo ""
-read -p "Press Enter after updating wrangler.toml with KV id..."
-
-# Create R2 Bucket
-echo ""
-echo "📋 Step 4: Creating R2 bucket..."
-npx wrangler r2 bucket create gsd-backups
-echo "✅ R2 bucket created"
-echo ""
-
-# Set secrets
-echo "📋 Step 5: Setting secrets..."
-echo ""
-echo "Generating secure random secrets..."
-JWT_SECRET=$(openssl rand -base64 32)
-ENCRYPTION_SALT=$(openssl rand -base64 32)
-
-echo ""
-echo "⚠️ SAVE THESE SECRETS IN A SECURE LOCATION:"
-echo " JWT_SECRET: $JWT_SECRET"
-echo " ENCRYPTION_SALT: $ENCRYPTION_SALT"
-echo ""
-read -p "Press Enter to set JWT_SECRET..."
-echo "$JWT_SECRET" | npx wrangler secret put JWT_SECRET
-
-echo ""
-read -p "Press Enter to set ENCRYPTION_SALT..."
-echo "$ENCRYPTION_SALT" | npx wrangler secret put ENCRYPTION_SALT
-
-echo "✅ Secrets configured"
-echo ""
-
-# Apply database schema
-echo "📋 Step 6: Applying database schema..."
-echo "Applying to local database..."
-npx wrangler d1 execute gsd-sync --local --file=./schema.sql
-echo ""
-echo "Applying to remote database..."
-npx wrangler d1 execute gsd-sync --remote --file=./schema.sql
-echo "✅ Schema applied"
-echo ""
-
-# Deploy
-echo "📋 Step 7: Deploying to Cloudflare..."
-npx wrangler deploy
-echo "✅ Deployed!"
-echo ""
-
-echo "🎉 Setup Complete!"
-echo ""
-echo "Next steps:"
-echo "1. Note your Worker URL from the output above"
-echo "2. Update src/middleware/cors.ts with your domain"
-echo "3. Redeploy with: npx wrangler deploy"
-echo "4. Test with: curl https://your-worker-url/health"
-echo ""
-echo "📚 See SETUP.md for detailed documentation"
diff --git a/worker/README.md b/worker/README.md
deleted file mode 100644
index 0063bda4..00000000
--- a/worker/README.md
+++ /dev/null
@@ -1,329 +0,0 @@
-# GSD Sync Worker
-
-Cloudflare Worker backend for GSD Task Manager secure sync feature.
-
-## Features
-
-- **End-to-end encryption**: Server never sees plaintext task data
-- **Vector clocks**: Robust conflict detection for distributed sync
-- **JWT authentication**: Secure token-based auth with revocation
-- **Rate limiting**: Per-user, per-endpoint protection
-- **Device management**: Multi-device support with remote revocation
-- **Automatic cleanup**: Scheduled cron jobs for old data
-- **Multi-environment support**: Separate development, staging, and production deployments
-
-## Quick Start (Multi-Environment Setup)
-
-### Automated Setup for All Environments
-
-The easiest way to set up all environments (development, staging, production):
-
-```bash
-cd worker
-npm install
-
-# Authenticate with Cloudflare
-npx wrangler login
-
-# Run automated setup (creates all resources and sets secrets)
-npm run setup:all
-```
-
-This will:
-- Create D1 databases, KV namespaces, and R2 buckets for all environments
-- Generate and set JWT secrets for each environment
-- Apply database schemas
-- Update `wrangler.toml` with resource IDs
-
-### Deploy to All Environments
-
-```bash
-# Deploy to all environments sequentially
-npm run deploy:all
-```
-
-Or deploy to individual environments:
-
-```bash
-npm run deploy # Development
-npm run deploy:staging # Staging
-npm run deploy:production # Production
-```
-
-### Monitor Logs
-
-```bash
-npm run tail # Development
-npm run tail:staging # Staging
-npm run tail:production # Production
-```
-
-## Manual Setup (Advanced)
-
-If you prefer manual setup or need to configure a single environment:
-
-### Prerequisites
-
-- [Bun](https://bun.sh) (latest version)
-- Cloudflare account with Workers enabled
-- Wrangler CLI installed globally: `bun add -g wrangler`
-
-### Installation
-
-```bash
-cd worker
-npm install
-```
-
-### Configure Cloudflare Resources
-
-1. **Create D1 Database**:
-```bash
-npx wrangler d1 create gsd-sync-dev
-```
-
-Copy the `database_id` from the output and update `wrangler.toml`.
-
-2. **Create KV Namespace**:
-```bash
-npx wrangler kv namespace create "KV" --env development
-```
-
-Copy the `id` from the output and update `wrangler.toml`.
-
-3. **Create R2 Bucket**:
-```bash
-npx wrangler r2 bucket create gsd-backups-dev
-```
-
-4. **Set Secrets**:
-```bash
-# Generate a secure random secret (at least 32 characters)
-wrangler secret put JWT_SECRET
-
-# Optional: additional salt for server-side operations
-wrangler secret put ENCRYPTION_SALT
-```
-
-### Apply Database Schema
-
-```bash
-# Local development database
-bun db:migrations:apply
-
-# Remote production database
-bun db:migrations:apply:remote
-```
-
-## Development
-
-### Run locally
-```bash
-bun dev
-```
-
-The worker will be available at `http://localhost:8787`.
-
-### Type checking
-```bash
-bun typecheck
-```
-
-## Deployment
-
-### Deploy to staging
-```bash
-bun deploy:staging
-```
-
-### Deploy to production
-```bash
-bun deploy:production
-```
-
-### View logs
-```bash
-bun tail
-```
-
-## API Endpoints
-
-### Authentication
-
-**GET /api/auth/oauth/:provider/start**
-- Start OAuth flow (`provider` is `google` or `apple`)
-- Returns: redirect
-
-**GET /api/auth/oauth/callback**
-**POST /api/auth/oauth/callback**
-- OAuth callback handler
-- Returns: `{ success, stateId }` (stored for result polling)
-
-**GET /api/auth/oauth/result**
-- Retrieve OAuth result by state
-- Returns: `{ userId, deviceId, token, expiresAt, encryptionSalt? }`
-
-**POST /api/auth/refresh**
-- Refresh JWT token
-- Headers: `Authorization: Bearer `
-- Returns: `{ token, expiresAt }`
-
-**POST /api/auth/logout**
-- Logout and revoke token
-- Headers: `Authorization: Bearer `
-- Returns: `{ success: true }`
-
-**GET /api/auth/encryption-salt**
-- Fetch encryption salt for authenticated user
-- Headers: `Authorization: Bearer `
-- Returns: `{ encryptionSalt }`
-
-### Sync Operations
-
-**POST /api/sync/push**
-- Push local changes to server
-- Headers: `Authorization: Bearer `
-- Body: `{ deviceId, operations[], clientVectorClock }`
-- Returns: `{ accepted[], rejected[], conflicts[], serverVectorClock }`
-
-**POST /api/sync/pull**
-- Pull remote changes from server
-- Headers: `Authorization: Bearer `
-- Body: `{ deviceId, lastVectorClock, sinceTimestamp?, limit?, cursor? }`
-- Returns: `{ tasks[], deletedTaskIds[], serverVectorClock, conflicts[], hasMore, nextCursor? }`
-
-**POST /api/sync/resolve**
-- Resolve a conflict
-- Headers: `Authorization: Bearer `
-- Body: `{ taskId, resolution, mergedTask? }`
-- Returns: `{ success: true }`
-
-**GET /api/sync/status**
-- Get sync status
-- Headers: `Authorization: Bearer `
-- Returns: `{ lastSyncAt, pendingPushCount, pendingPullCount, conflictCount, deviceCount, storageUsed, storageQuota }`
-
-### Device Management
-
-**GET /api/devices**
-- List user's devices
-- Headers: `Authorization: Bearer `
-- Returns: `{ devices: DeviceInfo[] }`
-
-**DELETE /api/devices/:id**
-- Revoke a device
-- Headers: `Authorization: Bearer `
-- Returns: `{ success: true }`
-
-## Architecture
-
-```
-┌─────────────────────────────────────────────────────────┐
-│ Cloudflare Worker │
-├─────────────────────────────────────────────────────────┤
-│ Router (itty-router) │
-│ ├─ Auth Endpoints │
-│ ├─ Sync Endpoints (with auth + rate limiting) │
-│ └─ Device Management │
-├─────────────────────────────────────────────────────────┤
-│ Middleware │
-│ ├─ CORS & Security Headers │
-│ ├─ JWT Authentication │
-│ └─ Rate Limiting (KV-based) │
-├─────────────────────────────────────────────────────────┤
-│ Handlers │
-│ ├─ oidc.ts (initiate, callback, result) │
-│ ├─ auth (refresh, logout) │
-│ └─ sync.ts (push, pull, resolve, status, devices) │
-├─────────────────────────────────────────────────────────┤
-│ Utilities │
-│ ├─ crypto.ts (ID generation) │
-│ ├─ jwt.ts (token creation, verification) │
-│ └─ vector-clock.ts (conflict detection) │
-└─────────────────────────────────────────────────────────┘
- │ │ │
- ▼ ▼ ▼
- ┌─────────┐ ┌──────────┐ ┌──────────┐
- │ D1 │ │ KV │ │ R2 │
- │ (Tasks) │ │(Sessions)│ │(Backups) │
- └─────────┘ └──────────┘ └──────────┘
-```
-
-## Security
-
-### Encryption
-- **Client-side**: AES-256-GCM with PBKDF2 key derivation (600k iterations)
-- **Server-side**: No passwords stored (OAuth-only)
-- **Transport**: TLS 1.3 enforced
-
-### Authentication
-- JWT tokens with 7-day expiry
-- Token revocation via KV store
-- Device-specific sessions
-- Refresh token rotation
-
-### Rate Limiting
-- Per-user, per-endpoint limits
-- Configurable windows and thresholds
-- 429 responses with Retry-After headers
-
-### Data Protection
-- Zero-knowledge architecture (server never sees plaintext)
-- Encrypted blobs only
-- SHA-256 checksums for integrity
-- Soft deletes with 30-day retention
-
-## Monitoring
-
-### Metrics (via Cloudflare Analytics)
-- Request count per endpoint
-- Error rates
-- Response times
-- CPU usage
-
-### Logs
-- All auth failures logged
-- Conflict resolutions tracked
-- Cleanup tasks logged
-
-### Alerts
-- High error rate (>5%)
-- Rate limit exceeded frequently
-- Database errors
-
-## Troubleshooting
-
-### "Database not found" error
-- Ensure D1 database is created and ID is in `wrangler.toml`
-- Run migrations: `bun db:migrations:apply:remote`
-
-### "KV namespace not found" error
-- Create KV namespace: `wrangler kv:namespace create "KV"`
-- Update `wrangler.toml` with the namespace ID
-
-### "JWT_SECRET not set" error
-- Set secret: `wrangler secret put JWT_SECRET`
-
-### Rate limit issues
-- Adjust limits in `src/middleware/rate-limit.ts`
-- Check KV storage for rate limit keys
-
-## Cost Estimation
-
-### Free Tier (sufficient for MVP)
-- Workers: 100k requests/day
-- D1: 5GB storage, 5M reads/day
-- KV: 100k reads/day, 1k writes/day
-- R2: 10GB storage
-
-### Paid Tier (1000+ users)
-- Workers: $5/month (10M requests)
-- D1: $5/month (10GB + 25M reads)
-- KV: $5/month (1M writes)
-- R2: ~$1.50/month (100GB)
-
-**Total: ~$15-20/month for 1000 active users**
-
-## License
-
-MIT
diff --git a/worker/SETUP.md b/worker/SETUP.md
deleted file mode 100644
index 51515429..00000000
--- a/worker/SETUP.md
+++ /dev/null
@@ -1,266 +0,0 @@
-# Cloudflare Worker Setup Guide
-
-Follow these steps to deploy your GSD Sync Worker to Cloudflare.
-
-## Prerequisites
-
-- Cloudflare account (free tier is sufficient)
-- [Bun](https://bun.sh) (latest version)
-
-## Step 1: Authenticate with Cloudflare
-
-Open your terminal in the `worker` directory and run:
-
-```bash
-cd /Users/vinnycarpenter/Projects/gsd-taskmanager/worker
-npx wrangler login
-```
-
-This will:
-1. Open your browser to Cloudflare
-2. Ask you to authorize Wrangler
-3. Save your credentials locally
-
-**Alternative: API Token Method**
-
-If you prefer using an API token:
-
-1. Go to https://dash.cloudflare.com/profile/api-tokens
-2. Click "Create Token"
-3. Use the "Edit Cloudflare Workers" template
-4. Copy the token and save it:
-
-```bash
-export CLOUDFLARE_API_TOKEN="your-token-here"
-# Add to your ~/.zshrc or ~/.bashrc to make it permanent
-echo 'export CLOUDFLARE_API_TOKEN="your-token-here"' >> ~/.zshrc
-```
-
-## Step 2: Create D1 Database
-
-```bash
-npx wrangler d1 create gsd-sync
-```
-
-**Expected output:**
-```
-✅ Successfully created DB 'gsd-sync'!
-
-[[d1_databases]]
-binding = "DB"
-database_name = "gsd-sync"
-database_id = "abc123-def456-ghi789"
-```
-
-**Action required:** Copy the `database_id` and update `wrangler.toml` line 14:
-```toml
-database_id = "abc123-def456-ghi789" # Replace with your actual ID
-```
-
-## Step 3: Create KV Namespace
-
-```bash
-npx wrangler kv namespace create "KV"
-```
-
-**Expected output:**
-```
-🌀 Creating namespace with title "gsd-sync-worker-KV"
-✨ Success!
-Add the following to your configuration file in your kv_namespaces array:
-{ binding = "KV", id = "xyz123abc456" }
-```
-
-**Action required:** Copy the `id` and update `wrangler.toml` line 29:
-```toml
-id = "xyz123abc456" # Replace with your actual ID
-```
-
-## Step 4: Create R2 Bucket (for backups)
-
-```bash
-npx wrangler r2 bucket create gsd-backups
-```
-
-**Expected output:**
-```
-✅ Created bucket 'gsd-backups'
-```
-
-No configuration change needed - the bucket name in `wrangler.toml` is already correct.
-
-## Step 5: Set Secrets
-
-Generate and set secure secrets for JWT signing and encryption:
-
-```bash
-# Generate a random secret (or use your own 32+ character string)
-# On macOS/Linux:
-JWT_SECRET=$(openssl rand -base64 32)
-echo $JWT_SECRET
-
-# Set the secret
-npx wrangler secret put JWT_SECRET
-# Paste the secret when prompted
-
-# Optional: Encryption salt (for additional server-side security)
-ENCRYPTION_SALT=$(openssl rand -base64 32)
-echo $ENCRYPTION_SALT
-
-npx wrangler secret put ENCRYPTION_SALT
-# Paste the salt when prompted
-```
-
-**Important:** Save these secrets somewhere secure (password manager). If you lose them, users will need to re-authenticate.
-
-## Step 6: Apply Database Schema
-
-```bash
-# Apply schema to local development database
-npx wrangler d1 execute gsd-sync --local --file=./schema.sql
-
-# Apply schema to remote production database
-npx wrangler d1 execute gsd-sync --remote --file=./schema.sql
-```
-
-**Expected output:**
-```
-🌀 Executing on remote database gsd-sync (abc123-def456-ghi789):
-🚣 Executed 7 commands in 0.234ms
-```
-
-## Step 7: Test Locally
-
-```bash
-bun dev
-```
-
-**Expected output:**
-```
-⎔ Starting local server...
-[wrangler:inf] Ready on http://localhost:8787
-```
-
-Test the health endpoint:
-```bash
-curl http://localhost:8787/health
-```
-
-Should return:
-```json
-{"status":"ok","timestamp":1234567890}
-```
-
-## Step 8: Deploy to Production
-
-```bash
-# First deployment (creates the worker)
-npx wrangler deploy
-
-# Or deploy to staging first
-npx wrangler deploy --env staging
-```
-
-**Expected output:**
-```
-Total Upload: xx.xx KiB / gzip: xx.xx KiB
-Uploaded gsd-sync-worker (x.xx sec)
-Published gsd-sync-worker (x.xx sec)
- https://gsd-sync-worker.your-subdomain.workers.dev
-```
-
-**Save this URL!** You'll need it for the client-side configuration.
-
-## Step 9: Configure CORS (Important!)
-
-Update `src/middleware/cors.ts` line 3 to restrict to your domain:
-
-```typescript
-'Access-Control-Allow-Origin': 'https://gsd.vinny.dev', // Your actual domain
-```
-
-Then redeploy:
-```bash
-npx wrangler deploy
-```
-
-## Step 10: Test Production Deployment
-
-```bash
-# Test health endpoint
-curl https://gsd-sync-worker.your-subdomain.workers.dev/health
-
-# Test auth-protected endpoint (replace with your worker URL and OAuth token)
-curl https://gsd-sync-worker.your-subdomain.workers.dev/api/sync/status \
- -H "Authorization: Bearer "
-```
-
-## Troubleshooting
-
-### "Database not found" error
-- Make sure you updated the `database_id` in `wrangler.toml`
-- Verify the database exists: `npx wrangler d1 list`
-
-### "KV namespace not found" error
-- Make sure you updated the KV namespace `id` in `wrangler.toml`
-- Verify it exists: `npx wrangler kv namespace list`
-
-### "JWT_SECRET not set" error
-- Make sure you ran `npx wrangler secret put JWT_SECRET`
-- Verify secrets: `npx wrangler secret list`
-
-### CORS errors from browser
-- Update `src/middleware/cors.ts` with your actual domain
-- Redeploy the worker
-
-### Rate limit errors during testing
-- Temporarily increase limits in `src/middleware/rate-limit.ts`
-- Or wait for the rate limit window to reset (60 seconds)
-
-## Monitoring and Logs
-
-### View real-time logs
-```bash
-npx wrangler tail
-```
-
-### View Cloudflare dashboard
-https://dash.cloudflare.com/
-
-Navigate to: Workers & Pages → gsd-sync-worker
-
-## Next Steps
-
-Once deployed successfully:
-
-1. ✅ Note your worker URL
-2. ✅ Test all endpoints (see README.md for API docs)
-3. ✅ Configure client-side sync to use this URL
-4. ✅ Set up monitoring/alerts in Cloudflare dashboard
-
-## Cost Tracking
-
-### View current usage
-https://dash.cloudflare.com/ → Workers & Pages → Usage
-
-### Free tier limits
-- 100k requests/day
-- 5GB D1 storage
-- 100k KV reads/day
-- 10GB R2 storage
-
-You'll get email alerts if you approach these limits.
-
-## Security Checklist
-
-- [ ] JWT_SECRET is set and secure (32+ characters)
-- [ ] ENCRYPTION_SALT is set (optional but recommended)
-- [ ] CORS is restricted to your domain (not '*')
-- [ ] Secrets are saved in password manager
-- [ ] Worker URL is noted for client configuration
-- [ ] Test registration/login flow works
-- [ ] Monitor logs for errors
-
----
-
-**Need help?** Check the main README.md or create an issue.
diff --git a/worker/bun.lock b/worker/bun.lock
deleted file mode 100644
index d3475d99..00000000
--- a/worker/bun.lock
+++ /dev/null
@@ -1,336 +0,0 @@
-{
- "lockfileVersion": 1,
- "configVersion": 0,
- "workspaces": {
- "": {
- "name": "gsd-sync-worker",
- "dependencies": {
- "itty-router": "^5.0.18",
- "jose": "^5.10.0",
- "zod": "^4.3.5",
- },
- "devDependencies": {
- "@cloudflare/workers-types": "^4.20250110.0",
- "@types/node": "^25.0.3",
- "typescript": "^5.9.3",
- "vitest": "^4.0.16",
- "wrangler": "^4.43.0",
- },
- },
- },
- "packages": {
- "@cloudflare/kv-asset-handler": ["@cloudflare/kv-asset-handler@0.4.2", "", {}, "sha512-SIOD2DxrRRwQ+jgzlXCqoEFiKOFqaPjhnNTGKXSRLvp1HiOvapLaFG2kEr9dYQTYe8rKrd9uvDUzmAITeNyaHQ=="],
-
- "@cloudflare/unenv-preset": ["@cloudflare/unenv-preset@2.11.0", "", { "peerDependencies": { "unenv": "2.0.0-rc.24", "workerd": "^1.20260115.0" }, "optionalPeers": ["workerd"] }, "sha512-z3hxFajL765VniNPGV0JRStZolNz63gU3B3AktwoGdDlnQvz5nP+Ah4RL04PONlZQjwmDdGHowEStJ94+RsaJg=="],
-
- "@cloudflare/workerd-darwin-64": ["@cloudflare/workerd-darwin-64@1.20260120.0", "", { "os": "darwin", "cpu": "x64" }, "sha512-JLHx3p5dpwz4wjVSis45YNReftttnI3ndhdMh5BUbbpdreN/g0jgxNt5Qp9tDFqEKl++N63qv+hxJiIIvSLR+Q=="],
-
- "@cloudflare/workerd-darwin-arm64": ["@cloudflare/workerd-darwin-arm64@1.20260120.0", "", { "os": "darwin", "cpu": "arm64" }, "sha512-1Md2tCRhZjwajsZNOiBeOVGiS3zbpLPzUDjHr4+XGTXWOA6FzzwScJwQZLa0Doc28Cp4Nr1n7xGL0Dwiz1XuOA=="],
-
- "@cloudflare/workerd-linux-64": ["@cloudflare/workerd-linux-64@1.20260120.0", "", { "os": "linux", "cpu": "x64" }, "sha512-O0mIfJfvU7F8N5siCoRDaVDuI12wkz2xlG4zK6/Ct7U9c9FiE0ViXNFWXFQm5PPj+qbkNRyhjUwhP+GCKTk5EQ=="],
-
- "@cloudflare/workerd-linux-arm64": ["@cloudflare/workerd-linux-arm64@1.20260120.0", "", { "os": "linux", "cpu": "arm64" }, "sha512-aRHO/7bjxVpjZEmVVcpmhbzpN6ITbFCxuLLZSW0H9O0C0w40cDCClWSi19T87Ax/PQcYjFNT22pTewKsupkckA=="],
-
- "@cloudflare/workerd-windows-64": ["@cloudflare/workerd-windows-64@1.20260120.0", "", { "os": "win32", "cpu": "x64" }, "sha512-ASZIz1E8sqZQqQCgcfY1PJbBpUDrxPt8NZ+lqNil0qxnO4qX38hbCsdDF2/TDAuq0Txh7nu8ztgTelfNDlb4EA=="],
-
- "@cloudflare/workers-types": ["@cloudflare/workers-types@4.20260124.0", "", {}, "sha512-h6TJlew6AtGuEXFc+k5ifalk+tg3fkg0lla6XbMAb2AKKfJGwlFNTwW2xyT/Ha92KY631CIJ+Ace08DPdFohdA=="],
-
- "@cspotcode/source-map-support": ["@cspotcode/source-map-support@0.8.1", "", { "dependencies": { "@jridgewell/trace-mapping": "0.3.9" } }, "sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw=="],
-
- "@emnapi/runtime": ["@emnapi/runtime@1.7.1", "", { "dependencies": { "tslib": "^2.4.0" } }, "sha512-PVtJr5CmLwYAU9PZDMITZoR5iAOShYREoR45EyyLrbntV50mdePTgUn4AmOw90Ifcj+x2kRjdzr1HP3RrNiHGA=="],
-
- "@esbuild/aix-ppc64": ["@esbuild/aix-ppc64@0.27.0", "", { "os": "aix", "cpu": "ppc64" }, "sha512-KuZrd2hRjz01y5JK9mEBSD3Vj3mbCvemhT466rSuJYeE/hjuBrHfjjcjMdTm/sz7au+++sdbJZJmuBwQLuw68A=="],
-
- "@esbuild/android-arm": ["@esbuild/android-arm@0.27.0", "", { "os": "android", "cpu": "arm" }, "sha512-j67aezrPNYWJEOHUNLPj9maeJte7uSMM6gMoxfPC9hOg8N02JuQi/T7ewumf4tNvJadFkvLZMlAq73b9uwdMyQ=="],
-
- "@esbuild/android-arm64": ["@esbuild/android-arm64@0.27.0", "", { "os": "android", "cpu": "arm64" }, "sha512-CC3vt4+1xZrs97/PKDkl0yN7w8edvU2vZvAFGD16n9F0Cvniy5qvzRXjfO1l94efczkkQE6g1x0i73Qf5uthOQ=="],
-
- "@esbuild/android-x64": ["@esbuild/android-x64@0.27.0", "", { "os": "android", "cpu": "x64" }, "sha512-wurMkF1nmQajBO1+0CJmcN17U4BP6GqNSROP8t0X/Jiw2ltYGLHpEksp9MpoBqkrFR3kv2/te6Sha26k3+yZ9Q=="],
-
- "@esbuild/darwin-arm64": ["@esbuild/darwin-arm64@0.27.0", "", { "os": "darwin", "cpu": "arm64" }, "sha512-uJOQKYCcHhg07DL7i8MzjvS2LaP7W7Pn/7uA0B5S1EnqAirJtbyw4yC5jQ5qcFjHK9l6o/MX9QisBg12kNkdHg=="],
-
- "@esbuild/darwin-x64": ["@esbuild/darwin-x64@0.27.0", "", { "os": "darwin", "cpu": "x64" }, "sha512-8mG6arH3yB/4ZXiEnXof5MK72dE6zM9cDvUcPtxhUZsDjESl9JipZYW60C3JGreKCEP+p8P/72r69m4AZGJd5g=="],
-
- "@esbuild/freebsd-arm64": ["@esbuild/freebsd-arm64@0.27.0", "", { "os": "freebsd", "cpu": "arm64" }, "sha512-9FHtyO988CwNMMOE3YIeci+UV+x5Zy8fI2qHNpsEtSF83YPBmE8UWmfYAQg6Ux7Gsmd4FejZqnEUZCMGaNQHQw=="],
-
- "@esbuild/freebsd-x64": ["@esbuild/freebsd-x64@0.27.0", "", { "os": "freebsd", "cpu": "x64" }, "sha512-zCMeMXI4HS/tXvJz8vWGexpZj2YVtRAihHLk1imZj4efx1BQzN76YFeKqlDr3bUWI26wHwLWPd3rwh6pe4EV7g=="],
-
- "@esbuild/linux-arm": ["@esbuild/linux-arm@0.27.0", "", { "os": "linux", "cpu": "arm" }, "sha512-t76XLQDpxgmq2cNXKTVEB7O7YMb42atj2Re2Haf45HkaUpjM2J0UuJZDuaGbPbamzZ7bawyGFUkodL+zcE+jvQ=="],
-
- "@esbuild/linux-arm64": ["@esbuild/linux-arm64@0.27.0", "", { "os": "linux", "cpu": "arm64" }, "sha512-AS18v0V+vZiLJyi/4LphvBE+OIX682Pu7ZYNsdUHyUKSoRwdnOsMf6FDekwoAFKej14WAkOef3zAORJgAtXnlQ=="],
-
- "@esbuild/linux-ia32": ["@esbuild/linux-ia32@0.27.0", "", { "os": "linux", "cpu": "ia32" }, "sha512-Mz1jxqm/kfgKkc/KLHC5qIujMvnnarD9ra1cEcrs7qshTUSksPihGrWHVG5+osAIQ68577Zpww7SGapmzSt4Nw=="],
-
- "@esbuild/linux-loong64": ["@esbuild/linux-loong64@0.27.0", "", { "os": "linux", "cpu": "none" }, "sha512-QbEREjdJeIreIAbdG2hLU1yXm1uu+LTdzoq1KCo4G4pFOLlvIspBm36QrQOar9LFduavoWX2msNFAAAY9j4BDg=="],
-
- "@esbuild/linux-mips64el": ["@esbuild/linux-mips64el@0.27.0", "", { "os": "linux", "cpu": "none" }, "sha512-sJz3zRNe4tO2wxvDpH/HYJilb6+2YJxo/ZNbVdtFiKDufzWq4JmKAiHy9iGoLjAV7r/W32VgaHGkk35cUXlNOg=="],
-
- "@esbuild/linux-ppc64": ["@esbuild/linux-ppc64@0.27.0", "", { "os": "linux", "cpu": "ppc64" }, "sha512-z9N10FBD0DCS2dmSABDBb5TLAyF1/ydVb+N4pi88T45efQ/w4ohr/F/QYCkxDPnkhkp6AIpIcQKQ8F0ANoA2JA=="],
-
- "@esbuild/linux-riscv64": ["@esbuild/linux-riscv64@0.27.0", "", { "os": "linux", "cpu": "none" }, "sha512-pQdyAIZ0BWIC5GyvVFn5awDiO14TkT/19FTmFcPdDec94KJ1uZcmFs21Fo8auMXzD4Tt+diXu1LW1gHus9fhFQ=="],
-
- "@esbuild/linux-s390x": ["@esbuild/linux-s390x@0.27.0", "", { "os": "linux", "cpu": "s390x" }, "sha512-hPlRWR4eIDDEci953RI1BLZitgi5uqcsjKMxwYfmi4LcwyWo2IcRP+lThVnKjNtk90pLS8nKdroXYOqW+QQH+w=="],
-
- "@esbuild/linux-x64": ["@esbuild/linux-x64@0.27.0", "", { "os": "linux", "cpu": "x64" }, "sha512-1hBWx4OUJE2cab++aVZ7pObD6s+DK4mPGpemtnAORBvb5l/g5xFGk0vc0PjSkrDs0XaXj9yyob3d14XqvnQ4gw=="],
-
- "@esbuild/netbsd-arm64": ["@esbuild/netbsd-arm64@0.27.0", "", { "os": "none", "cpu": "arm64" }, "sha512-6m0sfQfxfQfy1qRuecMkJlf1cIzTOgyaeXaiVaaki8/v+WB+U4hc6ik15ZW6TAllRlg/WuQXxWj1jx6C+dfy3w=="],
-
- "@esbuild/netbsd-x64": ["@esbuild/netbsd-x64@0.27.0", "", { "os": "none", "cpu": "x64" }, "sha512-xbbOdfn06FtcJ9d0ShxxvSn2iUsGd/lgPIO2V3VZIPDbEaIj1/3nBBe1AwuEZKXVXkMmpr6LUAgMkLD/4D2PPA=="],
-
- "@esbuild/openbsd-arm64": ["@esbuild/openbsd-arm64@0.27.0", "", { "os": "openbsd", "cpu": "arm64" }, "sha512-fWgqR8uNbCQ/GGv0yhzttj6sU/9Z5/Sv/VGU3F5OuXK6J6SlriONKrQ7tNlwBrJZXRYk5jUhuWvF7GYzGguBZQ=="],
-
- "@esbuild/openbsd-x64": ["@esbuild/openbsd-x64@0.27.0", "", { "os": "openbsd", "cpu": "x64" }, "sha512-aCwlRdSNMNxkGGqQajMUza6uXzR/U0dIl1QmLjPtRbLOx3Gy3otfFu/VjATy4yQzo9yFDGTxYDo1FfAD9oRD2A=="],
-
- "@esbuild/openharmony-arm64": ["@esbuild/openharmony-arm64@0.27.0", "", { "os": "none", "cpu": "arm64" }, "sha512-nyvsBccxNAsNYz2jVFYwEGuRRomqZ149A39SHWk4hV0jWxKM0hjBPm3AmdxcbHiFLbBSwG6SbpIcUbXjgyECfA=="],
-
- "@esbuild/sunos-x64": ["@esbuild/sunos-x64@0.27.0", "", { "os": "sunos", "cpu": "x64" }, "sha512-Q1KY1iJafM+UX6CFEL+F4HRTgygmEW568YMqDA5UV97AuZSm21b7SXIrRJDwXWPzr8MGr75fUZPV67FdtMHlHA=="],
-
- "@esbuild/win32-arm64": ["@esbuild/win32-arm64@0.27.0", "", { "os": "win32", "cpu": "arm64" }, "sha512-W1eyGNi6d+8kOmZIwi/EDjrL9nxQIQ0MiGqe/AWc6+IaHloxHSGoeRgDRKHFISThLmsewZ5nHFvGFWdBYlgKPg=="],
-
- "@esbuild/win32-ia32": ["@esbuild/win32-ia32@0.27.0", "", { "os": "win32", "cpu": "ia32" }, "sha512-30z1aKL9h22kQhilnYkORFYt+3wp7yZsHWus+wSKAJR8JtdfI76LJ4SBdMsCopTR3z/ORqVu5L1vtnHZWVj4cQ=="],
-
- "@esbuild/win32-x64": ["@esbuild/win32-x64@0.27.0", "", { "os": "win32", "cpu": "x64" }, "sha512-aIitBcjQeyOhMTImhLZmtxfdOcuNRpwlPNmlFKPcHQYPhEssw75Cl1TSXJXpMkzaua9FUetx/4OQKq7eJul5Cg=="],
-
- "@img/colour": ["@img/colour@1.0.0", "", {}, "sha512-A5P/LfWGFSl6nsckYtjw9da+19jB8hkJ6ACTGcDfEJ0aE+l2n2El7dsVM7UVHZQ9s2lmYMWlrS21YLy2IR1LUw=="],
-
- "@img/sharp-darwin-arm64": ["@img/sharp-darwin-arm64@0.34.5", "", { "optionalDependencies": { "@img/sharp-libvips-darwin-arm64": "1.2.4" }, "os": "darwin", "cpu": "arm64" }, "sha512-imtQ3WMJXbMY4fxb/Ndp6HBTNVtWCUI0WdobyheGf5+ad6xX8VIDO8u2xE4qc/fr08CKG/7dDseFtn6M6g/r3w=="],
-
- "@img/sharp-darwin-x64": ["@img/sharp-darwin-x64@0.34.5", "", { "optionalDependencies": { "@img/sharp-libvips-darwin-x64": "1.2.4" }, "os": "darwin", "cpu": "x64" }, "sha512-YNEFAF/4KQ/PeW0N+r+aVVsoIY0/qxxikF2SWdp+NRkmMB7y9LBZAVqQ4yhGCm/H3H270OSykqmQMKLBhBJDEw=="],
-
- "@img/sharp-libvips-darwin-arm64": ["@img/sharp-libvips-darwin-arm64@1.2.4", "", { "os": "darwin", "cpu": "arm64" }, "sha512-zqjjo7RatFfFoP0MkQ51jfuFZBnVE2pRiaydKJ1G/rHZvnsrHAOcQALIi9sA5co5xenQdTugCvtb1cuf78Vf4g=="],
-
- "@img/sharp-libvips-darwin-x64": ["@img/sharp-libvips-darwin-x64@1.2.4", "", { "os": "darwin", "cpu": "x64" }, "sha512-1IOd5xfVhlGwX+zXv2N93k0yMONvUlANylbJw1eTah8K/Jtpi15KC+WSiaX/nBmbm2HxRM1gZ0nSdjSsrZbGKg=="],
-
- "@img/sharp-libvips-linux-arm": ["@img/sharp-libvips-linux-arm@1.2.4", "", { "os": "linux", "cpu": "arm" }, "sha512-bFI7xcKFELdiNCVov8e44Ia4u2byA+l3XtsAj+Q8tfCwO6BQ8iDojYdvoPMqsKDkuoOo+X6HZA0s0q11ANMQ8A=="],
-
- "@img/sharp-libvips-linux-arm64": ["@img/sharp-libvips-linux-arm64@1.2.4", "", { "os": "linux", "cpu": "arm64" }, "sha512-excjX8DfsIcJ10x1Kzr4RcWe1edC9PquDRRPx3YVCvQv+U5p7Yin2s32ftzikXojb1PIFc/9Mt28/y+iRklkrw=="],
-
- "@img/sharp-libvips-linux-ppc64": ["@img/sharp-libvips-linux-ppc64@1.2.4", "", { "os": "linux", "cpu": "ppc64" }, "sha512-FMuvGijLDYG6lW+b/UvyilUWu5Ayu+3r2d1S8notiGCIyYU/76eig1UfMmkZ7vwgOrzKzlQbFSuQfgm7GYUPpA=="],
-
- "@img/sharp-libvips-linux-riscv64": ["@img/sharp-libvips-linux-riscv64@1.2.4", "", { "os": "linux", "cpu": "none" }, "sha512-oVDbcR4zUC0ce82teubSm+x6ETixtKZBh/qbREIOcI3cULzDyb18Sr/Wcyx7NRQeQzOiHTNbZFF1UwPS2scyGA=="],
-
- "@img/sharp-libvips-linux-s390x": ["@img/sharp-libvips-linux-s390x@1.2.4", "", { "os": "linux", "cpu": "s390x" }, "sha512-qmp9VrzgPgMoGZyPvrQHqk02uyjA0/QrTO26Tqk6l4ZV0MPWIW6LTkqOIov+J1yEu7MbFQaDpwdwJKhbJvuRxQ=="],
-
- "@img/sharp-libvips-linux-x64": ["@img/sharp-libvips-linux-x64@1.2.4", "", { "os": "linux", "cpu": "x64" }, "sha512-tJxiiLsmHc9Ax1bz3oaOYBURTXGIRDODBqhveVHonrHJ9/+k89qbLl0bcJns+e4t4rvaNBxaEZsFtSfAdquPrw=="],
-
- "@img/sharp-libvips-linuxmusl-arm64": ["@img/sharp-libvips-linuxmusl-arm64@1.2.4", "", { "os": "linux", "cpu": "arm64" }, "sha512-FVQHuwx1IIuNow9QAbYUzJ+En8KcVm9Lk5+uGUQJHaZmMECZmOlix9HnH7n1TRkXMS0pGxIJokIVB9SuqZGGXw=="],
-
- "@img/sharp-libvips-linuxmusl-x64": ["@img/sharp-libvips-linuxmusl-x64@1.2.4", "", { "os": "linux", "cpu": "x64" }, "sha512-+LpyBk7L44ZIXwz/VYfglaX/okxezESc6UxDSoyo2Ks6Jxc4Y7sGjpgU9s4PMgqgjj1gZCylTieNamqA1MF7Dg=="],
-
- "@img/sharp-linux-arm": ["@img/sharp-linux-arm@0.34.5", "", { "optionalDependencies": { "@img/sharp-libvips-linux-arm": "1.2.4" }, "os": "linux", "cpu": "arm" }, "sha512-9dLqsvwtg1uuXBGZKsxem9595+ujv0sJ6Vi8wcTANSFpwV/GONat5eCkzQo/1O6zRIkh0m/8+5BjrRr7jDUSZw=="],
-
- "@img/sharp-linux-arm64": ["@img/sharp-linux-arm64@0.34.5", "", { "optionalDependencies": { "@img/sharp-libvips-linux-arm64": "1.2.4" }, "os": "linux", "cpu": "arm64" }, "sha512-bKQzaJRY/bkPOXyKx5EVup7qkaojECG6NLYswgktOZjaXecSAeCWiZwwiFf3/Y+O1HrauiE3FVsGxFg8c24rZg=="],
-
- "@img/sharp-linux-ppc64": ["@img/sharp-linux-ppc64@0.34.5", "", { "optionalDependencies": { "@img/sharp-libvips-linux-ppc64": "1.2.4" }, "os": "linux", "cpu": "ppc64" }, "sha512-7zznwNaqW6YtsfrGGDA6BRkISKAAE1Jo0QdpNYXNMHu2+0dTrPflTLNkpc8l7MUP5M16ZJcUvysVWWrMefZquA=="],
-
- "@img/sharp-linux-riscv64": ["@img/sharp-linux-riscv64@0.34.5", "", { "optionalDependencies": { "@img/sharp-libvips-linux-riscv64": "1.2.4" }, "os": "linux", "cpu": "none" }, "sha512-51gJuLPTKa7piYPaVs8GmByo7/U7/7TZOq+cnXJIHZKavIRHAP77e3N2HEl3dgiqdD/w0yUfiJnII77PuDDFdw=="],
-
- "@img/sharp-linux-s390x": ["@img/sharp-linux-s390x@0.34.5", "", { "optionalDependencies": { "@img/sharp-libvips-linux-s390x": "1.2.4" }, "os": "linux", "cpu": "s390x" }, "sha512-nQtCk0PdKfho3eC5MrbQoigJ2gd1CgddUMkabUj+rBevs8tZ2cULOx46E7oyX+04WGfABgIwmMC0VqieTiR4jg=="],
-
- "@img/sharp-linux-x64": ["@img/sharp-linux-x64@0.34.5", "", { "optionalDependencies": { "@img/sharp-libvips-linux-x64": "1.2.4" }, "os": "linux", "cpu": "x64" }, "sha512-MEzd8HPKxVxVenwAa+JRPwEC7QFjoPWuS5NZnBt6B3pu7EG2Ge0id1oLHZpPJdn3OQK+BQDiw9zStiHBTJQQQQ=="],
-
- "@img/sharp-linuxmusl-arm64": ["@img/sharp-linuxmusl-arm64@0.34.5", "", { "optionalDependencies": { "@img/sharp-libvips-linuxmusl-arm64": "1.2.4" }, "os": "linux", "cpu": "arm64" }, "sha512-fprJR6GtRsMt6Kyfq44IsChVZeGN97gTD331weR1ex1c1rypDEABN6Tm2xa1wE6lYb5DdEnk03NZPqA7Id21yg=="],
-
- "@img/sharp-linuxmusl-x64": ["@img/sharp-linuxmusl-x64@0.34.5", "", { "optionalDependencies": { "@img/sharp-libvips-linuxmusl-x64": "1.2.4" }, "os": "linux", "cpu": "x64" }, "sha512-Jg8wNT1MUzIvhBFxViqrEhWDGzqymo3sV7z7ZsaWbZNDLXRJZoRGrjulp60YYtV4wfY8VIKcWidjojlLcWrd8Q=="],
-
- "@img/sharp-wasm32": ["@img/sharp-wasm32@0.34.5", "", { "dependencies": { "@emnapi/runtime": "^1.7.0" }, "cpu": "none" }, "sha512-OdWTEiVkY2PHwqkbBI8frFxQQFekHaSSkUIJkwzclWZe64O1X4UlUjqqqLaPbUpMOQk6FBu/HtlGXNblIs0huw=="],
-
- "@img/sharp-win32-arm64": ["@img/sharp-win32-arm64@0.34.5", "", { "os": "win32", "cpu": "arm64" }, "sha512-WQ3AgWCWYSb2yt+IG8mnC6Jdk9Whs7O0gxphblsLvdhSpSTtmu69ZG1Gkb6NuvxsNACwiPV6cNSZNzt0KPsw7g=="],
-
- "@img/sharp-win32-ia32": ["@img/sharp-win32-ia32@0.34.5", "", { "os": "win32", "cpu": "ia32" }, "sha512-FV9m/7NmeCmSHDD5j4+4pNI8Cp3aW+JvLoXcTUo0IqyjSfAZJ8dIUmijx1qaJsIiU+Hosw6xM5KijAWRJCSgNg=="],
-
- "@img/sharp-win32-x64": ["@img/sharp-win32-x64@0.34.5", "", { "os": "win32", "cpu": "x64" }, "sha512-+29YMsqY2/9eFEiW93eqWnuLcWcufowXewwSNIT6UwZdUUCrM3oFjMWH/Z6/TMmb4hlFenmfAVbpWeup2jryCw=="],
-
- "@jridgewell/resolve-uri": ["@jridgewell/resolve-uri@3.1.2", "", {}, "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw=="],
-
- "@jridgewell/sourcemap-codec": ["@jridgewell/sourcemap-codec@1.5.5", "", {}, "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og=="],
-
- "@jridgewell/trace-mapping": ["@jridgewell/trace-mapping@0.3.9", "", { "dependencies": { "@jridgewell/resolve-uri": "^3.0.3", "@jridgewell/sourcemap-codec": "^1.4.10" } }, "sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ=="],
-
- "@poppinss/colors": ["@poppinss/colors@4.1.5", "", { "dependencies": { "kleur": "^4.1.5" } }, "sha512-FvdDqtcRCtz6hThExcFOgW0cWX+xwSMWcRuQe5ZEb2m7cVQOAVZOIMt+/v9RxGiD9/OY16qJBXK4CVKWAPalBw=="],
-
- "@poppinss/dumper": ["@poppinss/dumper@0.6.5", "", { "dependencies": { "@poppinss/colors": "^4.1.5", "@sindresorhus/is": "^7.0.2", "supports-color": "^10.0.0" } }, "sha512-NBdYIb90J7LfOI32dOewKI1r7wnkiH6m920puQ3qHUeZkxNkQiFnXVWoE6YtFSv6QOiPPf7ys6i+HWWecDz7sw=="],
-
- "@poppinss/exception": ["@poppinss/exception@1.2.2", "", {}, "sha512-m7bpKCD4QMlFCjA/nKTs23fuvoVFoA83brRKmObCUNmi/9tVu8Ve3w4YQAnJu4q3Tjf5fr685HYIC/IA2zHRSg=="],
-
- "@rollup/rollup-android-arm-eabi": ["@rollup/rollup-android-arm-eabi@4.56.0", "", { "os": "android", "cpu": "arm" }, "sha512-LNKIPA5k8PF1+jAFomGe3qN3bbIgJe/IlpDBwuVjrDKrJhVWywgnJvflMt/zkbVNLFtF1+94SljYQS6e99klnw=="],
-
- "@rollup/rollup-android-arm64": ["@rollup/rollup-android-arm64@4.56.0", "", { "os": "android", "cpu": "arm64" }, "sha512-lfbVUbelYqXlYiU/HApNMJzT1E87UPGvzveGg2h0ktUNlOCxKlWuJ9jtfvs1sKHdwU4fzY7Pl8sAl49/XaEk6Q=="],
-
- "@rollup/rollup-darwin-arm64": ["@rollup/rollup-darwin-arm64@4.56.0", "", { "os": "darwin", "cpu": "arm64" }, "sha512-EgxD1ocWfhoD6xSOeEEwyE7tDvwTgZc8Bss7wCWe+uc7wO8G34HHCUH+Q6cHqJubxIAnQzAsyUsClt0yFLu06w=="],
-
- "@rollup/rollup-darwin-x64": ["@rollup/rollup-darwin-x64@4.56.0", "", { "os": "darwin", "cpu": "x64" }, "sha512-1vXe1vcMOssb/hOF8iv52A7feWW2xnu+c8BV4t1F//m9QVLTfNVpEdja5ia762j/UEJe2Z1jAmEqZAK42tVW3g=="],
-
- "@rollup/rollup-freebsd-arm64": ["@rollup/rollup-freebsd-arm64@4.56.0", "", { "os": "freebsd", "cpu": "arm64" }, "sha512-bof7fbIlvqsyv/DtaXSck4VYQ9lPtoWNFCB/JY4snlFuJREXfZnm+Ej6yaCHfQvofJDXLDMTVxWscVSuQvVWUQ=="],
-
- "@rollup/rollup-freebsd-x64": ["@rollup/rollup-freebsd-x64@4.56.0", "", { "os": "freebsd", "cpu": "x64" }, "sha512-KNa6lYHloW+7lTEkYGa37fpvPq+NKG/EHKM8+G/g9WDU7ls4sMqbVRV78J6LdNuVaeeK5WB9/9VAFbKxcbXKYg=="],
-
- "@rollup/rollup-linux-arm-gnueabihf": ["@rollup/rollup-linux-arm-gnueabihf@4.56.0", "", { "os": "linux", "cpu": "arm" }, "sha512-E8jKK87uOvLrrLN28jnAAAChNq5LeCd2mGgZF+fGF5D507WlG/Noct3lP/QzQ6MrqJ5BCKNwI9ipADB6jyiq2A=="],
-
- "@rollup/rollup-linux-arm-musleabihf": ["@rollup/rollup-linux-arm-musleabihf@4.56.0", "", { "os": "linux", "cpu": "arm" }, "sha512-jQosa5FMYF5Z6prEpTCCmzCXz6eKr/tCBssSmQGEeozA9tkRUty/5Vx06ibaOP9RCrW1Pvb8yp3gvZhHwTDsJw=="],
-
- "@rollup/rollup-linux-arm64-gnu": ["@rollup/rollup-linux-arm64-gnu@4.56.0", "", { "os": "linux", "cpu": "arm64" }, "sha512-uQVoKkrC1KGEV6udrdVahASIsaF8h7iLG0U0W+Xn14ucFwi6uS539PsAr24IEF9/FoDtzMeeJXJIBo5RkbNWvQ=="],
-
- "@rollup/rollup-linux-arm64-musl": ["@rollup/rollup-linux-arm64-musl@4.56.0", "", { "os": "linux", "cpu": "arm64" }, "sha512-vLZ1yJKLxhQLFKTs42RwTwa6zkGln+bnXc8ueFGMYmBTLfNu58sl5/eXyxRa2RarTkJbXl8TKPgfS6V5ijNqEA=="],
-
- "@rollup/rollup-linux-loong64-gnu": ["@rollup/rollup-linux-loong64-gnu@4.56.0", "", { "os": "linux", "cpu": "none" }, "sha512-FWfHOCub564kSE3xJQLLIC/hbKqHSVxy8vY75/YHHzWvbJL7aYJkdgwD/xGfUlL5UV2SB7otapLrcCj2xnF1dg=="],
-
- "@rollup/rollup-linux-loong64-musl": ["@rollup/rollup-linux-loong64-musl@4.56.0", "", { "os": "linux", "cpu": "none" }, "sha512-z1EkujxIh7nbrKL1lmIpqFTc/sr0u8Uk0zK/qIEFldbt6EDKWFk/pxFq3gYj4Bjn3aa9eEhYRlL3H8ZbPT1xvA=="],
-
- "@rollup/rollup-linux-ppc64-gnu": ["@rollup/rollup-linux-ppc64-gnu@4.56.0", "", { "os": "linux", "cpu": "ppc64" }, "sha512-iNFTluqgdoQC7AIE8Q34R3AuPrJGJirj5wMUErxj22deOcY7XwZRaqYmB6ZKFHoVGqRcRd0mqO+845jAibKCkw=="],
-
- "@rollup/rollup-linux-ppc64-musl": ["@rollup/rollup-linux-ppc64-musl@4.56.0", "", { "os": "linux", "cpu": "ppc64" }, "sha512-MtMeFVlD2LIKjp2sE2xM2slq3Zxf9zwVuw0jemsxvh1QOpHSsSzfNOTH9uYW9i1MXFxUSMmLpeVeUzoNOKBaWg=="],
-
- "@rollup/rollup-linux-riscv64-gnu": ["@rollup/rollup-linux-riscv64-gnu@4.56.0", "", { "os": "linux", "cpu": "none" }, "sha512-in+v6wiHdzzVhYKXIk5U74dEZHdKN9KH0Q4ANHOTvyXPG41bajYRsy7a8TPKbYPl34hU7PP7hMVHRvv/5aCSew=="],
-
- "@rollup/rollup-linux-riscv64-musl": ["@rollup/rollup-linux-riscv64-musl@4.56.0", "", { "os": "linux", "cpu": "none" }, "sha512-yni2raKHB8m9NQpI9fPVwN754mn6dHQSbDTwxdr9SE0ks38DTjLMMBjrwvB5+mXrX+C0npX0CVeCUcvvvD8CNQ=="],
-
- "@rollup/rollup-linux-s390x-gnu": ["@rollup/rollup-linux-s390x-gnu@4.56.0", "", { "os": "linux", "cpu": "s390x" }, "sha512-zhLLJx9nQPu7wezbxt2ut+CI4YlXi68ndEve16tPc/iwoylWS9B3FxpLS2PkmfYgDQtosah07Mj9E0khc3Y+vQ=="],
-
- "@rollup/rollup-linux-x64-gnu": ["@rollup/rollup-linux-x64-gnu@4.56.0", "", { "os": "linux", "cpu": "x64" }, "sha512-MVC6UDp16ZSH7x4rtuJPAEoE1RwS8N4oK9DLHy3FTEdFoUTCFVzMfJl/BVJ330C+hx8FfprA5Wqx4FhZXkj2Kw=="],
-
- "@rollup/rollup-linux-x64-musl": ["@rollup/rollup-linux-x64-musl@4.56.0", "", { "os": "linux", "cpu": "x64" }, "sha512-ZhGH1eA4Qv0lxaV00azCIS1ChedK0V32952Md3FtnxSqZTBTd6tgil4nZT5cU8B+SIw3PFYkvyR4FKo2oyZIHA=="],
-
- "@rollup/rollup-openbsd-x64": ["@rollup/rollup-openbsd-x64@4.56.0", "", { "os": "openbsd", "cpu": "x64" }, "sha512-O16XcmyDeFI9879pEcmtWvD/2nyxR9mF7Gs44lf1vGGx8Vg2DRNx11aVXBEqOQhWb92WN4z7fW/q4+2NYzCbBA=="],
-
- "@rollup/rollup-openharmony-arm64": ["@rollup/rollup-openharmony-arm64@4.56.0", "", { "os": "none", "cpu": "arm64" }, "sha512-LhN/Reh+7F3RCgQIRbgw8ZMwUwyqJM+8pXNT6IIJAqm2IdKkzpCh/V9EdgOMBKuebIrzswqy4ATlrDgiOwbRcQ=="],
-
- "@rollup/rollup-win32-arm64-msvc": ["@rollup/rollup-win32-arm64-msvc@4.56.0", "", { "os": "win32", "cpu": "arm64" }, "sha512-kbFsOObXp3LBULg1d3JIUQMa9Kv4UitDmpS+k0tinPBz3watcUiV2/LUDMMucA6pZO3WGE27P7DsfaN54l9ing=="],
-
- "@rollup/rollup-win32-ia32-msvc": ["@rollup/rollup-win32-ia32-msvc@4.56.0", "", { "os": "win32", "cpu": "ia32" }, "sha512-vSSgny54D6P4vf2izbtFm/TcWYedw7f8eBrOiGGecyHyQB9q4Kqentjaj8hToe+995nob/Wv48pDqL5a62EWtg=="],
-
- "@rollup/rollup-win32-x64-gnu": ["@rollup/rollup-win32-x64-gnu@4.56.0", "", { "os": "win32", "cpu": "x64" }, "sha512-FeCnkPCTHQJFbiGG49KjV5YGW/8b9rrXAM2Mz2kiIoktq2qsJxRD5giEMEOD2lPdgs72upzefaUvS+nc8E3UzQ=="],
-
- "@rollup/rollup-win32-x64-msvc": ["@rollup/rollup-win32-x64-msvc@4.56.0", "", { "os": "win32", "cpu": "x64" }, "sha512-H8AE9Ur/t0+1VXujj90w0HrSOuv0Nq9r1vSZF2t5km20NTfosQsGGUXDaKdQZzwuLts7IyL1fYT4hM95TI9c4g=="],
-
- "@sindresorhus/is": ["@sindresorhus/is@7.1.1", "", {}, "sha512-rO92VvpgMc3kfiTjGT52LEtJ8Yc5kCWhZjLQ3LwlA4pSgPpQO7bVpYXParOD8Jwf+cVQECJo3yP/4I8aZtUQTQ=="],
-
- "@speed-highlight/core": ["@speed-highlight/core@1.2.12", "", {}, "sha512-uilwrK0Ygyri5dToHYdZSjcvpS2ZwX0w5aSt3GCEN9hrjxWCoeV4Z2DTXuxjwbntaLQIEEAlCeNQss5SoHvAEA=="],
-
- "@standard-schema/spec": ["@standard-schema/spec@1.1.0", "", {}, "sha512-l2aFy5jALhniG5HgqrD6jXLi/rUWrKvqN/qJx6yoJsgKhblVd+iqqU4RCXavm/jPityDo5TCvKMnpjKnOriy0w=="],
-
- "@types/chai": ["@types/chai@5.2.3", "", { "dependencies": { "@types/deep-eql": "*", "assertion-error": "^2.0.1" } }, "sha512-Mw558oeA9fFbv65/y4mHtXDs9bPnFMZAL/jxdPFUpOHHIXX91mcgEHbS5Lahr+pwZFR8A7GQleRWeI6cGFC2UA=="],
-
- "@types/deep-eql": ["@types/deep-eql@4.0.2", "", {}, "sha512-c9h9dVVMigMPc4bwTvC5dxqtqJZwQPePsWjPlpSOnojbor6pGqdk541lfA7AqFQr5pB1BRdq0juY9db81BwyFw=="],
-
- "@types/estree": ["@types/estree@1.0.8", "", {}, "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w=="],
-
- "@types/node": ["@types/node@25.0.10", "", { "dependencies": { "undici-types": "~7.16.0" } }, "sha512-zWW5KPngR/yvakJgGOmZ5vTBemDoSqF3AcV/LrO5u5wTWyEAVVh+IT39G4gtyAkh3CtTZs8aX/yRM82OfzHJRg=="],
-
- "@vitest/expect": ["@vitest/expect@4.0.18", "", { "dependencies": { "@standard-schema/spec": "^1.0.0", "@types/chai": "^5.2.2", "@vitest/spy": "4.0.18", "@vitest/utils": "4.0.18", "chai": "^6.2.1", "tinyrainbow": "^3.0.3" } }, "sha512-8sCWUyckXXYvx4opfzVY03EOiYVxyNrHS5QxX3DAIi5dpJAAkyJezHCP77VMX4HKA2LDT/Jpfo8i2r5BE3GnQQ=="],
-
- "@vitest/mocker": ["@vitest/mocker@4.0.18", "", { "dependencies": { "@vitest/spy": "4.0.18", "estree-walker": "^3.0.3", "magic-string": "^0.30.21" }, "peerDependencies": { "msw": "^2.4.9", "vite": "^6.0.0 || ^7.0.0-0" }, "optionalPeers": ["msw", "vite"] }, "sha512-HhVd0MDnzzsgevnOWCBj5Otnzobjy5wLBe4EdeeFGv8luMsGcYqDuFRMcttKWZA5vVO8RFjexVovXvAM4JoJDQ=="],
-
- "@vitest/pretty-format": ["@vitest/pretty-format@4.0.18", "", { "dependencies": { "tinyrainbow": "^3.0.3" } }, "sha512-P24GK3GulZWC5tz87ux0m8OADrQIUVDPIjjj65vBXYG17ZeU3qD7r+MNZ1RNv4l8CGU2vtTRqixrOi9fYk/yKw=="],
-
- "@vitest/runner": ["@vitest/runner@4.0.18", "", { "dependencies": { "@vitest/utils": "4.0.18", "pathe": "^2.0.3" } }, "sha512-rpk9y12PGa22Jg6g5M3UVVnTS7+zycIGk9ZNGN+m6tZHKQb7jrP7/77WfZy13Y/EUDd52NDsLRQhYKtv7XfPQw=="],
-
- "@vitest/snapshot": ["@vitest/snapshot@4.0.18", "", { "dependencies": { "@vitest/pretty-format": "4.0.18", "magic-string": "^0.30.21", "pathe": "^2.0.3" } }, "sha512-PCiV0rcl7jKQjbgYqjtakly6T1uwv/5BQ9SwBLekVg/EaYeQFPiXcgrC2Y7vDMA8dM1SUEAEV82kgSQIlXNMvA=="],
-
- "@vitest/spy": ["@vitest/spy@4.0.18", "", {}, "sha512-cbQt3PTSD7P2OARdVW3qWER5EGq7PHlvE+QfzSC0lbwO+xnt7+XH06ZzFjFRgzUX//JmpxrCu92VdwvEPlWSNw=="],
-
- "@vitest/utils": ["@vitest/utils@4.0.18", "", { "dependencies": { "@vitest/pretty-format": "4.0.18", "tinyrainbow": "^3.0.3" } }, "sha512-msMRKLMVLWygpK3u2Hybgi4MNjcYJvwTb0Ru09+fOyCXIgT5raYP041DRRdiJiI3k/2U6SEbAETB3YtBrUkCFA=="],
-
- "assertion-error": ["assertion-error@2.0.1", "", {}, "sha512-Izi8RQcffqCeNVgFigKli1ssklIbpHnCYc6AknXGYoB6grJqyeby7jv12JUQgmTAnIDnbck1uxksT4dzN3PWBA=="],
-
- "blake3-wasm": ["blake3-wasm@2.1.5", "", {}, "sha512-F1+K8EbfOZE49dtoPtmxUQrpXaBIl3ICvasLh+nJta0xkz+9kF/7uet9fLnwKqhDrmj6g+6K3Tw9yQPUg2ka5g=="],
-
- "chai": ["chai@6.2.2", "", {}, "sha512-NUPRluOfOiTKBKvWPtSD4PhFvWCqOi0BGStNWs57X9js7XGTprSmFoz5F0tWhR4WPjNeR9jXqdC7/UpSJTnlRg=="],
-
- "cookie": ["cookie@1.0.2", "", {}, "sha512-9Kr/j4O16ISv8zBBhJoi4bXOYNTkFLOqSL3UDB0njXxCXNezjeyVrJyGOWtgfs/q2km1gwBcfH8q1yEGoMYunA=="],
-
- "detect-libc": ["detect-libc@2.1.2", "", {}, "sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ=="],
-
- "error-stack-parser-es": ["error-stack-parser-es@1.0.5", "", {}, "sha512-5qucVt2XcuGMcEGgWI7i+yZpmpByQ8J1lHhcL7PwqCwu9FPP3VUXzT4ltHe5i2z9dePwEHcDVOAfSnHsOlCXRA=="],
-
- "es-module-lexer": ["es-module-lexer@1.7.0", "", {}, "sha512-jEQoCwk8hyb2AZziIOLhDqpm5+2ww5uIE6lkO/6jcOCusfk6LhMHpXXfBLXTZ7Ydyt0j4VoUQv6uGNYbdW+kBA=="],
-
- "esbuild": ["esbuild@0.27.0", "", { "optionalDependencies": { "@esbuild/aix-ppc64": "0.27.0", "@esbuild/android-arm": "0.27.0", "@esbuild/android-arm64": "0.27.0", "@esbuild/android-x64": "0.27.0", "@esbuild/darwin-arm64": "0.27.0", "@esbuild/darwin-x64": "0.27.0", "@esbuild/freebsd-arm64": "0.27.0", "@esbuild/freebsd-x64": "0.27.0", "@esbuild/linux-arm": "0.27.0", "@esbuild/linux-arm64": "0.27.0", "@esbuild/linux-ia32": "0.27.0", "@esbuild/linux-loong64": "0.27.0", "@esbuild/linux-mips64el": "0.27.0", "@esbuild/linux-ppc64": "0.27.0", "@esbuild/linux-riscv64": "0.27.0", "@esbuild/linux-s390x": "0.27.0", "@esbuild/linux-x64": "0.27.0", "@esbuild/netbsd-arm64": "0.27.0", "@esbuild/netbsd-x64": "0.27.0", "@esbuild/openbsd-arm64": "0.27.0", "@esbuild/openbsd-x64": "0.27.0", "@esbuild/openharmony-arm64": "0.27.0", "@esbuild/sunos-x64": "0.27.0", "@esbuild/win32-arm64": "0.27.0", "@esbuild/win32-ia32": "0.27.0", "@esbuild/win32-x64": "0.27.0" }, "bin": { "esbuild": "bin/esbuild" } }, "sha512-jd0f4NHbD6cALCyGElNpGAOtWxSq46l9X/sWB0Nzd5er4Kz2YTm+Vl0qKFT9KUJvD8+fiO8AvoHhFvEatfVixA=="],
-
- "estree-walker": ["estree-walker@3.0.3", "", { "dependencies": { "@types/estree": "^1.0.0" } }, "sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g=="],
-
- "expect-type": ["expect-type@1.3.0", "", {}, "sha512-knvyeauYhqjOYvQ66MznSMs83wmHrCycNEN6Ao+2AeYEfxUIkuiVxdEa1qlGEPK+We3n0THiDciYSsCcgW/DoA=="],
-
- "fdir": ["fdir@6.5.0", "", { "peerDependencies": { "picomatch": "^3 || ^4" }, "optionalPeers": ["picomatch"] }, "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg=="],
-
- "fsevents": ["fsevents@2.3.3", "", { "os": "darwin" }, "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw=="],
-
- "itty-router": ["itty-router@5.0.22", "", {}, "sha512-9hmdGErWdYDOurGYxSbqLhy4EFReIwk71hMZTJ5b+zfa2zjMNV1ftFno2b8VjAQvX615gNB8Qxbl9JMRqHnIVA=="],
-
- "jose": ["jose@5.10.0", "", {}, "sha512-s+3Al/p9g32Iq+oqXxkW//7jk2Vig6FF1CFqzVXoTUXt2qz89YWbL+OwS17NFYEvxC35n0FKeGO2LGYSxeM2Gg=="],
-
- "kleur": ["kleur@4.1.5", "", {}, "sha512-o+NO+8WrRiQEE4/7nwRJhN1HWpVmJm511pBHUxPLtp0BUISzlBplORYSmTclCnJvQq2tKu/sgl3xVpkc7ZWuQQ=="],
-
- "magic-string": ["magic-string@0.30.21", "", { "dependencies": { "@jridgewell/sourcemap-codec": "^1.5.5" } }, "sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ=="],
-
- "miniflare": ["miniflare@4.20260120.0", "", { "dependencies": { "@cspotcode/source-map-support": "0.8.1", "sharp": "^0.34.5", "undici": "7.18.2", "workerd": "1.20260120.0", "ws": "8.18.0", "youch": "4.1.0-beta.10", "zod": "^3.25.76" }, "bin": { "miniflare": "bootstrap.js" } }, "sha512-XXZyE2pDKMtP5OLuv0LPHEAzIYhov4jrYjcqrhhqtxGGtXneWOHvXIPo+eV8sqwqWd3R7j4DlEKcyb+87BR49Q=="],
-
- "nanoid": ["nanoid@3.3.11", "", { "bin": { "nanoid": "bin/nanoid.cjs" } }, "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w=="],
-
- "obug": ["obug@2.1.1", "", {}, "sha512-uTqF9MuPraAQ+IsnPf366RG4cP9RtUi7MLO1N3KEc+wb0a6yKpeL0lmk2IB1jY5KHPAlTc6T/JRdC/YqxHNwkQ=="],
-
- "path-to-regexp": ["path-to-regexp@6.3.0", "", {}, "sha512-Yhpw4T9C6hPpgPeA28us07OJeqZ5EzQTkbfwuhsUg0c237RomFoETJgmp2sa3F/41gfLE6G5cqcYwznmeEeOlQ=="],
-
- "pathe": ["pathe@2.0.3", "", {}, "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w=="],
-
- "picocolors": ["picocolors@1.1.1", "", {}, "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA=="],
-
- "picomatch": ["picomatch@4.0.3", "", {}, "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q=="],
-
- "postcss": ["postcss@8.5.6", "", { "dependencies": { "nanoid": "^3.3.11", "picocolors": "^1.1.1", "source-map-js": "^1.2.1" } }, "sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg=="],
-
- "rollup": ["rollup@4.56.0", "", { "dependencies": { "@types/estree": "1.0.8" }, "optionalDependencies": { "@rollup/rollup-android-arm-eabi": "4.56.0", "@rollup/rollup-android-arm64": "4.56.0", "@rollup/rollup-darwin-arm64": "4.56.0", "@rollup/rollup-darwin-x64": "4.56.0", "@rollup/rollup-freebsd-arm64": "4.56.0", "@rollup/rollup-freebsd-x64": "4.56.0", "@rollup/rollup-linux-arm-gnueabihf": "4.56.0", "@rollup/rollup-linux-arm-musleabihf": "4.56.0", "@rollup/rollup-linux-arm64-gnu": "4.56.0", "@rollup/rollup-linux-arm64-musl": "4.56.0", "@rollup/rollup-linux-loong64-gnu": "4.56.0", "@rollup/rollup-linux-loong64-musl": "4.56.0", "@rollup/rollup-linux-ppc64-gnu": "4.56.0", "@rollup/rollup-linux-ppc64-musl": "4.56.0", "@rollup/rollup-linux-riscv64-gnu": "4.56.0", "@rollup/rollup-linux-riscv64-musl": "4.56.0", "@rollup/rollup-linux-s390x-gnu": "4.56.0", "@rollup/rollup-linux-x64-gnu": "4.56.0", "@rollup/rollup-linux-x64-musl": "4.56.0", "@rollup/rollup-openbsd-x64": "4.56.0", "@rollup/rollup-openharmony-arm64": "4.56.0", "@rollup/rollup-win32-arm64-msvc": "4.56.0", "@rollup/rollup-win32-ia32-msvc": "4.56.0", "@rollup/rollup-win32-x64-gnu": "4.56.0", "@rollup/rollup-win32-x64-msvc": "4.56.0", "fsevents": "~2.3.2" }, "bin": { "rollup": "dist/bin/rollup" } }, "sha512-9FwVqlgUHzbXtDg9RCMgodF3Ua4Na6Gau+Sdt9vyCN4RhHfVKX2DCHy3BjMLTDd47ITDhYAnTwGulWTblJSDLg=="],
-
- "semver": ["semver@7.7.3", "", { "bin": "bin/semver.js" }, "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q=="],
-
- "sharp": ["sharp@0.34.5", "", { "dependencies": { "@img/colour": "^1.0.0", "detect-libc": "^2.1.2", "semver": "^7.7.3" }, "optionalDependencies": { "@img/sharp-darwin-arm64": "0.34.5", "@img/sharp-darwin-x64": "0.34.5", "@img/sharp-libvips-darwin-arm64": "1.2.4", "@img/sharp-libvips-darwin-x64": "1.2.4", "@img/sharp-libvips-linux-arm": "1.2.4", "@img/sharp-libvips-linux-arm64": "1.2.4", "@img/sharp-libvips-linux-ppc64": "1.2.4", "@img/sharp-libvips-linux-riscv64": "1.2.4", "@img/sharp-libvips-linux-s390x": "1.2.4", "@img/sharp-libvips-linux-x64": "1.2.4", "@img/sharp-libvips-linuxmusl-arm64": "1.2.4", "@img/sharp-libvips-linuxmusl-x64": "1.2.4", "@img/sharp-linux-arm": "0.34.5", "@img/sharp-linux-arm64": "0.34.5", "@img/sharp-linux-ppc64": "0.34.5", "@img/sharp-linux-riscv64": "0.34.5", "@img/sharp-linux-s390x": "0.34.5", "@img/sharp-linux-x64": "0.34.5", "@img/sharp-linuxmusl-arm64": "0.34.5", "@img/sharp-linuxmusl-x64": "0.34.5", "@img/sharp-wasm32": "0.34.5", "@img/sharp-win32-arm64": "0.34.5", "@img/sharp-win32-ia32": "0.34.5", "@img/sharp-win32-x64": "0.34.5" } }, "sha512-Ou9I5Ft9WNcCbXrU9cMgPBcCK8LiwLqcbywW3t4oDV37n1pzpuNLsYiAV8eODnjbtQlSDwZ2cUEeQz4E54Hltg=="],
-
- "siginfo": ["siginfo@2.0.0", "", {}, "sha512-ybx0WO1/8bSBLEWXZvEd7gMW3Sn3JFlW3TvX1nREbDLRNQNaeNN8WK0meBwPdAaOI7TtRRRJn/Es1zhrrCHu7g=="],
-
- "source-map-js": ["source-map-js@1.2.1", "", {}, "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA=="],
-
- "stackback": ["stackback@0.0.2", "", {}, "sha512-1XMJE5fQo1jGH6Y/7ebnwPOBEkIEnT4QF32d5R1+VXdXveM0IBMJt8zfaxX1P3QhVwrYe+576+jkANtSS2mBbw=="],
-
- "std-env": ["std-env@3.10.0", "", {}, "sha512-5GS12FdOZNliM5mAOxFRg7Ir0pWz8MdpYm6AY6VPkGpbA7ZzmbzNcBJQ0GPvvyWgcY7QAhCgf9Uy89I03faLkg=="],
-
- "supports-color": ["supports-color@10.2.2", "", {}, "sha512-SS+jx45GF1QjgEXQx4NJZV9ImqmO2NPz5FNsIHrsDjh2YsHnawpan7SNQ1o8NuhrbHZy9AZhIoCUiCeaW/C80g=="],
-
- "tinybench": ["tinybench@2.9.0", "", {}, "sha512-0+DUvqWMValLmha6lr4kD8iAMK1HzV0/aKnCtWb9v9641TnP/MFb7Pc2bxoxQjTXAErryXVgUOfv2YqNllqGeg=="],
-
- "tinyexec": ["tinyexec@1.0.2", "", {}, "sha512-W/KYk+NFhkmsYpuHq5JykngiOCnxeVL8v8dFnqxSD8qEEdRfXk1SDM6JzNqcERbcGYj9tMrDQBYV9cjgnunFIg=="],
-
- "tinyglobby": ["tinyglobby@0.2.15", "", { "dependencies": { "fdir": "^6.5.0", "picomatch": "^4.0.3" } }, "sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ=="],
-
- "tinyrainbow": ["tinyrainbow@3.0.3", "", {}, "sha512-PSkbLUoxOFRzJYjjxHJt9xro7D+iilgMX/C9lawzVuYiIdcihh9DXmVibBe8lmcFrRi/VzlPjBxbN7rH24q8/Q=="],
-
- "tslib": ["tslib@2.8.1", "", {}, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="],
-
- "typescript": ["typescript@5.9.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw=="],
-
- "undici": ["undici@7.18.2", "", {}, "sha512-y+8YjDFzWdQlSE9N5nzKMT3g4a5UBX1HKowfdXh0uvAnTaqqwqB92Jt4UXBAeKekDs5IaDKyJFR4X1gYVCgXcw=="],
-
- "undici-types": ["undici-types@7.16.0", "", {}, "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw=="],
-
- "unenv": ["unenv@2.0.0-rc.24", "", { "dependencies": { "pathe": "^2.0.3" } }, "sha512-i7qRCmY42zmCwnYlh9H2SvLEypEFGye5iRmEMKjcGi7zk9UquigRjFtTLz0TYqr0ZGLZhaMHl/foy1bZR+Cwlw=="],
-
- "vite": ["vite@7.3.1", "", { "dependencies": { "esbuild": "^0.27.0", "fdir": "^6.5.0", "picomatch": "^4.0.3", "postcss": "^8.5.6", "rollup": "^4.43.0", "tinyglobby": "^0.2.15" }, "optionalDependencies": { "fsevents": "~2.3.3" }, "peerDependencies": { "@types/node": "^20.19.0 || >=22.12.0", "jiti": ">=1.21.0", "less": "^4.0.0", "lightningcss": "^1.21.0", "sass": "^1.70.0", "sass-embedded": "^1.70.0", "stylus": ">=0.54.8", "sugarss": "^5.0.0", "terser": "^5.16.0", "tsx": "^4.8.1", "yaml": "^2.4.2" }, "optionalPeers": ["@types/node", "jiti", "less", "lightningcss", "sass", "sass-embedded", "stylus", "sugarss", "terser", "tsx", "yaml"], "bin": { "vite": "bin/vite.js" } }, "sha512-w+N7Hifpc3gRjZ63vYBXA56dvvRlNWRczTdmCBBa+CotUzAPf5b7YMdMR/8CQoeYE5LX3W4wj6RYTgonm1b9DA=="],
-
- "vitest": ["vitest@4.0.18", "", { "dependencies": { "@vitest/expect": "4.0.18", "@vitest/mocker": "4.0.18", "@vitest/pretty-format": "4.0.18", "@vitest/runner": "4.0.18", "@vitest/snapshot": "4.0.18", "@vitest/spy": "4.0.18", "@vitest/utils": "4.0.18", "es-module-lexer": "^1.7.0", "expect-type": "^1.2.2", "magic-string": "^0.30.21", "obug": "^2.1.1", "pathe": "^2.0.3", "picomatch": "^4.0.3", "std-env": "^3.10.0", "tinybench": "^2.9.0", "tinyexec": "^1.0.2", "tinyglobby": "^0.2.15", "tinyrainbow": "^3.0.3", "vite": "^6.0.0 || ^7.0.0", "why-is-node-running": "^2.3.0" }, "peerDependencies": { "@edge-runtime/vm": "*", "@opentelemetry/api": "^1.9.0", "@types/node": "^20.0.0 || ^22.0.0 || >=24.0.0", "@vitest/browser-playwright": "4.0.18", "@vitest/browser-preview": "4.0.18", "@vitest/browser-webdriverio": "4.0.18", "@vitest/ui": "4.0.18", "happy-dom": "*", "jsdom": "*" }, "optionalPeers": ["@edge-runtime/vm", "@opentelemetry/api", "@types/node", "@vitest/browser-playwright", "@vitest/browser-preview", "@vitest/browser-webdriverio", "@vitest/ui", "happy-dom", "jsdom"], "bin": { "vitest": "vitest.mjs" } }, "sha512-hOQuK7h0FGKgBAas7v0mSAsnvrIgAvWmRFjmzpJ7SwFHH3g1k2u37JtYwOwmEKhK6ZO3v9ggDBBm0La1LCK4uQ=="],
-
- "why-is-node-running": ["why-is-node-running@2.3.0", "", { "dependencies": { "siginfo": "^2.0.0", "stackback": "0.0.2" }, "bin": { "why-is-node-running": "cli.js" } }, "sha512-hUrmaWBdVDcxvYqnyh09zunKzROWjbZTiNy8dBEjkS7ehEDQibXJ7XvlmtbwuTclUiIyN+CyXQD4Vmko8fNm8w=="],
-
- "workerd": ["workerd@1.20260120.0", "", { "optionalDependencies": { "@cloudflare/workerd-darwin-64": "1.20260120.0", "@cloudflare/workerd-darwin-arm64": "1.20260120.0", "@cloudflare/workerd-linux-64": "1.20260120.0", "@cloudflare/workerd-linux-arm64": "1.20260120.0", "@cloudflare/workerd-windows-64": "1.20260120.0" }, "bin": { "workerd": "bin/workerd" } }, "sha512-R6X/VQOkwLTBGLp4VRUwLQZZVxZ9T9J8pGiJ6GQUMaRkY7TVWrCSkVfoNMM1/YyFsY5UYhhPoQe5IehnhZ3Pdw=="],
-
- "wrangler": ["wrangler@4.60.0", "", { "dependencies": { "@cloudflare/kv-asset-handler": "0.4.2", "@cloudflare/unenv-preset": "2.11.0", "blake3-wasm": "2.1.5", "esbuild": "0.27.0", "miniflare": "4.20260120.0", "path-to-regexp": "6.3.0", "unenv": "2.0.0-rc.24", "workerd": "1.20260120.0" }, "optionalDependencies": { "fsevents": "~2.3.2" }, "peerDependencies": { "@cloudflare/workers-types": "^4.20260120.0" }, "optionalPeers": ["@cloudflare/workers-types"], "bin": { "wrangler": "bin/wrangler.js", "wrangler2": "bin/wrangler.js" } }, "sha512-n4kibm/xY0Qd5G2K/CbAQeVeOIlwPNVglmFjlDRCCYk3hZh8IggO/rg8AXt/vByK2Sxsugl5Z7yvgWxrUbmS6g=="],
-
- "ws": ["ws@8.18.0", "", { "peerDependencies": { "bufferutil": "^4.0.1", "utf-8-validate": ">=5.0.2" }, "optionalPeers": ["bufferutil", "utf-8-validate"] }, "sha512-8VbfWfHLbbwu3+N6OKsOMpBdT4kXPDDB9cJk2bJ6mh9ucxdlnNvH1e+roYkKmN9Nxw2yjz7VzeO9oOz2zJ04Pw=="],
-
- "youch": ["youch@4.1.0-beta.10", "", { "dependencies": { "@poppinss/colors": "^4.1.5", "@poppinss/dumper": "^0.6.4", "@speed-highlight/core": "^1.2.7", "cookie": "^1.0.2", "youch-core": "^0.3.3" } }, "sha512-rLfVLB4FgQneDr0dv1oddCVZmKjcJ6yX6mS4pU82Mq/Dt9a3cLZQ62pDBL4AUO+uVrCvtWz3ZFUL2HFAFJ/BXQ=="],
-
- "youch-core": ["youch-core@0.3.3", "", { "dependencies": { "@poppinss/exception": "^1.2.2", "error-stack-parser-es": "^1.0.5" } }, "sha512-ho7XuGjLaJ2hWHoK8yFnsUGy2Y5uDpqSTq1FkHLK4/oqKtyUU1AFbOOxY4IpC9f0fTLjwYbslUz0Po5BpD1wrA=="],
-
- "zod": ["zod@4.3.6", "", {}, "sha512-rftlrkhHZOcjDwkGlnUtZZkvaPHCsDATp4pGpuOOMDaTdDDXF91wuVDJoWoPsKX/3YPQ5fHuF3STjcYyKr+Qhg=="],
-
- "miniflare/zod": ["zod@3.25.76", "", {}, "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ=="],
- }
-}
diff --git a/worker/deploy-all-envs.sh b/worker/deploy-all-envs.sh
deleted file mode 100755
index 4a66db39..00000000
--- a/worker/deploy-all-envs.sh
+++ /dev/null
@@ -1,222 +0,0 @@
-#!/bin/bash
-# GSD Sync Worker - Deploy to All Environments
-# Sequentially deploys to development, staging, and production
-
-set -e # Exit on error
-
-# Colors for output
-RED='\033[0;31m'
-GREEN='\033[0;32m'
-BLUE='\033[0;34m'
-YELLOW='\033[1;33m'
-NC='\033[0m' # No Color
-
-# Track deployment status (indexed arrays for Bash 3.2 compatibility)
-DEPLOY_STATUS_DEV=""
-DEPLOY_STATUS_STAGING=""
-DEPLOY_STATUS_PROD=""
-DEPLOY_URL_DEV=""
-DEPLOY_URL_STAGING=""
-DEPLOY_URL_PROD=""
-
-ENVIRONMENTS=("development" "staging" "production")
-
-echo -e "${BLUE}╔════════════════════════════════════════════════════════════╗${NC}"
-echo -e "${BLUE}║ GSD Sync Worker - Deploy All Environments ║${NC}"
-echo -e "${BLUE}╔════════════════════════════════════════════════════════════╗${NC}"
-echo ""
-
-# ========================================
-# Pre-flight Checks
-# ========================================
-echo -e "${BLUE}[Pre-flight]${NC} Running checks..."
-echo ""
-
-# Check authentication
-echo -e " ${BLUE}→${NC} Checking Cloudflare authentication..."
-if ! npx wrangler whoami > /dev/null 2>&1; then
- echo -e " ${RED}✗ Not authenticated${NC}"
- echo ""
- echo "Please run: npx wrangler login"
- exit 1
-fi
-echo -e " ${GREEN}✓${NC} Authenticated"
-
-# Check TypeScript compilation
-echo -e " ${BLUE}→${NC} Running TypeScript type check..."
-if npm run typecheck > /dev/null 2>&1; then
- echo -e " ${GREEN}✓${NC} TypeScript compilation successful"
-else
- echo -e " ${RED}✗ TypeScript errors found${NC}"
- echo ""
- echo "Please fix TypeScript errors before deploying:"
- npm run typecheck
- exit 1
-fi
-
-echo ""
-echo -e "${GREEN}✓ Pre-flight checks passed${NC}"
-echo ""
-
-# ========================================
-# Deploy to Each Environment
-# ========================================
-for i in 0 1 2; do
- ENV="${ENVIRONMENTS[$i]}"
- ENV_NUM=$((i + 1))
- ENV_UPPER=$(echo "$ENV" | tr '[:lower:]' '[:upper:]')
-
- echo ""
- echo -e "${YELLOW}════════════════════════════════════════════════════════${NC}"
- echo -e "${YELLOW}[${ENV_NUM}/3] Deploying to ${ENV_UPPER}${NC}"
- echo -e "${YELLOW}════════════════════════════════════════════════════════${NC}"
- echo ""
-
- # Deploy (show output in real-time)
- echo -e " ${BLUE}→${NC} Running wrangler deploy..."
- echo ""
-
- if [ "$ENV" = "development" ]; then
- # Development uses default env
- npx wrangler deploy --env development
- DEPLOY_EXIT_CODE=$?
- else
- npx wrangler deploy --env "$ENV"
- DEPLOY_EXIT_CODE=$?
- fi
-
- echo ""
-
- # Check if deployment succeeded
- if [ $DEPLOY_EXIT_CODE -eq 0 ]; then
- WORKER_URL="N/A" # We'll show success without trying to extract URL for now
-
- # Store status and URL based on environment
- case "$ENV" in
- "development")
- DEPLOY_STATUS_DEV="✓"
- DEPLOY_URL_DEV="$WORKER_URL"
- ;;
- "staging")
- DEPLOY_STATUS_STAGING="✓"
- DEPLOY_URL_STAGING="$WORKER_URL"
- ;;
- "production")
- DEPLOY_STATUS_PROD="✓"
- DEPLOY_URL_PROD="$WORKER_URL"
- ;;
- esac
-
- echo -e "${GREEN}✓ Deployed successfully to ${ENV}${NC}"
- echo -e " URL: ${WORKER_URL}"
- else
- # Store failure status
- case "$ENV" in
- "development")
- DEPLOY_STATUS_DEV="✗"
- DEPLOY_URL_DEV="Failed"
- ;;
- "staging")
- DEPLOY_STATUS_STAGING="✗"
- DEPLOY_URL_STAGING="Failed"
- ;;
- "production")
- DEPLOY_STATUS_PROD="✗"
- DEPLOY_URL_PROD="Failed"
- ;;
- esac
-
- echo -e "${RED}✗ Deployment to ${ENV} failed${NC}"
- echo ""
- echo "See error output above."
- echo ""
-
- # Ask if user wants to continue
- read -p "Continue with remaining environments? (y/N): " -n 1 -r
- echo
- if [[ ! $REPLY =~ ^[Yy]$ ]]; then
- echo ""
- echo -e "${RED}Deployment aborted${NC}"
- exit 1
- fi
- fi
- echo ""
-done
-
-# ========================================
-# Summary
-# ========================================
-echo ""
-echo -e "${BLUE}╔════════════════════════════════════════════════════════════╗${NC}"
-echo -e "${BLUE}║ Deployment Summary ║${NC}"
-echo -e "${BLUE}╔════════════════════════════════════════════════════════════╗${NC}"
-echo ""
-
-ALL_SUCCESS=true
-
-for i in 0 1 2; do
- ENV="${ENVIRONMENTS[$i]}"
- ENV_UPPER=$(echo "$ENV" | tr '[:lower:]' '[:upper:]')
-
- # Get status and URL based on environment
- case "$ENV" in
- "development")
- STATUS="$DEPLOY_STATUS_DEV"
- URL="$DEPLOY_URL_DEV"
- ;;
- "staging")
- STATUS="$DEPLOY_STATUS_STAGING"
- URL="$DEPLOY_URL_STAGING"
- ;;
- "production")
- STATUS="$DEPLOY_STATUS_PROD"
- URL="$DEPLOY_URL_PROD"
- ;;
- esac
-
- if [ "$STATUS" = "✓" ]; then
- echo -e "${GREEN}${STATUS}${NC} ${ENV_UPPER}"
- echo -e " ${URL}"
- else
- echo -e "${RED}${STATUS}${NC} ${ENV_UPPER}"
- echo -e " Deployment failed"
- ALL_SUCCESS=false
- fi
- echo ""
-done
-
-echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
-echo ""
-
-if [ "$ALL_SUCCESS" = true ]; then
- echo -e "${GREEN}✓ All environments deployed successfully!${NC}"
- echo ""
- echo -e "${BLUE}Next Steps:${NC}"
- echo " 1. Check the worker URLs in the Cloudflare dashboard"
- echo " 2. Test each environment's /health endpoint"
- echo " 3. Verify OAuth configuration"
- echo " 4. Test sync functionality in each environment"
- echo ""
- echo -e "${BLUE}View workers:${NC}"
- echo " https://dash.cloudflare.com/workers"
- echo ""
- exit 0
-else
- echo -e "${YELLOW}⚠ Some deployments failed${NC}"
- echo ""
- echo "Please check the errors above and redeploy failed environments individually:"
-
- # Check each environment
- if [ "$DEPLOY_STATUS_DEV" != "✓" ]; then
- echo " npm run deploy # development"
- fi
- if [ "$DEPLOY_STATUS_STAGING" != "✓" ]; then
- echo " npm run deploy:staging # staging"
- fi
- if [ "$DEPLOY_STATUS_PROD" != "✓" ]; then
- echo " npm run deploy:production # production"
- fi
-
- echo ""
- exit 1
-fi
diff --git a/worker/migrations/002_oauth_migration.sql b/worker/migrations/002_oauth_migration.sql
deleted file mode 100644
index ed3fb343..00000000
--- a/worker/migrations/002_oauth_migration.sql
+++ /dev/null
@@ -1,37 +0,0 @@
--- Migration: Add OAuth support and remove password authentication
--- Version: 2.0.0
--- Date: 2025-10-15
-
--- Step 1: Create new users table with OAuth support
-CREATE TABLE IF NOT EXISTS users_new (
- id TEXT PRIMARY KEY,
- email TEXT UNIQUE NOT NULL,
- auth_provider TEXT NOT NULL CHECK(auth_provider IN ('google', 'apple')),
- provider_user_id TEXT NOT NULL,
- created_at INTEGER NOT NULL,
- updated_at INTEGER NOT NULL,
- last_login_at INTEGER,
- account_status TEXT DEFAULT 'active' CHECK(account_status IN ('active', 'suspended', 'deleted')),
- UNIQUE(auth_provider, provider_user_id)
-);
-
--- Step 2: Copy existing users (if any - for migration purposes)
--- Note: Existing password-based users will need to re-authenticate with OAuth
--- This is safe because the application is not yet in production
-
--- Step 3: Drop old users table
-DROP TABLE IF EXISTS users;
-
--- Step 4: Rename new table
-ALTER TABLE users_new RENAME TO users;
-
--- Step 5: Create indexes
-CREATE INDEX IF NOT EXISTS idx_users_email ON users(email);
-CREATE INDEX IF NOT EXISTS idx_users_status ON users(account_status);
-CREATE INDEX IF NOT EXISTS idx_users_provider ON users(auth_provider, provider_user_id);
-
--- Step 6: Clean up devices table (orphaned devices from old password-based users)
--- Will be recreated when users sign in with OAuth
-
--- Note: sync_operations, encrypted_tasks, sync_metadata, and conflict_log tables
--- remain unchanged as they reference users by ID, which is preserved
diff --git a/worker/migrations/003_add_encryption_salt.sql b/worker/migrations/003_add_encryption_salt.sql
deleted file mode 100644
index 09299288..00000000
--- a/worker/migrations/003_add_encryption_salt.sql
+++ /dev/null
@@ -1,2 +0,0 @@
--- Add encryption_salt column to users table
-ALTER TABLE users ADD COLUMN encryption_salt TEXT;
diff --git a/worker/package-lock.json b/worker/package-lock.json
deleted file mode 100644
index fbd3efd5..00000000
--- a/worker/package-lock.json
+++ /dev/null
@@ -1,1589 +0,0 @@
-{
- "name": "gsd-sync-worker",
- "version": "1.1.0",
- "lockfileVersion": 3,
- "requires": true,
- "packages": {
- "": {
- "name": "gsd-sync-worker",
- "version": "1.1.0",
- "license": "MIT",
- "dependencies": {
- "itty-router": "^5.0.18",
- "jose": "^5.10.0",
- "zod": "^3.25.76"
- },
- "devDependencies": {
- "@cloudflare/workers-types": "^4.20250110.0",
- "@types/node": "^25.0.3",
- "typescript": "^5.9.3",
- "wrangler": "^4.43.0"
- }
- },
- "node_modules/@cloudflare/kv-asset-handler": {
- "version": "0.4.0",
- "resolved": "https://registry.npmjs.org/@cloudflare/kv-asset-handler/-/kv-asset-handler-0.4.0.tgz",
- "integrity": "sha512-+tv3z+SPp+gqTIcImN9o0hqE9xyfQjI1XD9pL6NuKjua9B1y7mNYv0S9cP+QEbA4ppVgGZEmKOvHX5G5Ei1CVA==",
- "dev": true,
- "license": "MIT OR Apache-2.0",
- "dependencies": {
- "mime": "^3.0.0"
- },
- "engines": {
- "node": ">=18.0.0"
- }
- },
- "node_modules/@cloudflare/unenv-preset": {
- "version": "2.7.10",
- "resolved": "https://registry.npmjs.org/@cloudflare/unenv-preset/-/unenv-preset-2.7.10.tgz",
- "integrity": "sha512-mvsNAiJSduC/9yxv1ZpCxwgAXgcuoDvkl8yaHjxoLpFxXy2ugc6TZK20EKgv4yO0vZhAEKwqJm+eGOzf8Oc45w==",
- "dev": true,
- "license": "MIT OR Apache-2.0",
- "peerDependencies": {
- "unenv": "2.0.0-rc.24",
- "workerd": "^1.20251106.1"
- },
- "peerDependenciesMeta": {
- "workerd": {
- "optional": true
- }
- }
- },
- "node_modules/@cloudflare/workerd-darwin-64": {
- "version": "1.20251109.0",
- "resolved": "https://registry.npmjs.org/@cloudflare/workerd-darwin-64/-/workerd-darwin-64-1.20251109.0.tgz",
- "integrity": "sha512-GAYXHOgPTJm6F+mOt0/Zf+rL+xPfMp8zAxGN4pqkzJ6QVQA/mNVMMuj22dI5x8+Ey+lCulKC3rNs4K3VE12hlA==",
- "cpu": [
- "x64"
- ],
- "dev": true,
- "license": "Apache-2.0",
- "optional": true,
- "os": [
- "darwin"
- ],
- "engines": {
- "node": ">=16"
- }
- },
- "node_modules/@cloudflare/workerd-darwin-arm64": {
- "version": "1.20251109.0",
- "resolved": "https://registry.npmjs.org/@cloudflare/workerd-darwin-arm64/-/workerd-darwin-arm64-1.20251109.0.tgz",
- "integrity": "sha512-fpLJvZi3i+btgrXJcOtKYrbmdnHVTKpaZigoKIcpBX4mbwxUh/GVbrCmOqLebr57asQC+PmBfghUEYniqRgnhA==",
- "cpu": [
- "arm64"
- ],
- "dev": true,
- "license": "Apache-2.0",
- "optional": true,
- "os": [
- "darwin"
- ],
- "engines": {
- "node": ">=16"
- }
- },
- "node_modules/@cloudflare/workerd-linux-64": {
- "version": "1.20251109.0",
- "resolved": "https://registry.npmjs.org/@cloudflare/workerd-linux-64/-/workerd-linux-64-1.20251109.0.tgz",
- "integrity": "sha512-5NjCnXQoaySFAGGn10w0rPfmEhTSKTP/k7f3aduvt1syt462+66X7luOME/k2x5EB/Z5L8xvwf3/LejSSZ4EVA==",
- "cpu": [
- "x64"
- ],
- "dev": true,
- "license": "Apache-2.0",
- "optional": true,
- "os": [
- "linux"
- ],
- "engines": {
- "node": ">=16"
- }
- },
- "node_modules/@cloudflare/workerd-linux-arm64": {
- "version": "1.20251109.0",
- "resolved": "https://registry.npmjs.org/@cloudflare/workerd-linux-arm64/-/workerd-linux-arm64-1.20251109.0.tgz",
- "integrity": "sha512-f2AeJlpSwrEvEV57+JU+vRPL8c/Dv8nwY4XW+YwnzPo2TpbI/zzqloPXQ6PY79ftDfEsJJPzQuaDDPq3UOGJQA==",
- "cpu": [
- "arm64"
- ],
- "dev": true,
- "license": "Apache-2.0",
- "optional": true,
- "os": [
- "linux"
- ],
- "engines": {
- "node": ">=16"
- }
- },
- "node_modules/@cloudflare/workerd-windows-64": {
- "version": "1.20251109.0",
- "resolved": "https://registry.npmjs.org/@cloudflare/workerd-windows-64/-/workerd-windows-64-1.20251109.0.tgz",
- "integrity": "sha512-IGo/lzbYoeJdfLkpaKLoeG6C7Rwcf5kXjzV0wO8fLUSmlfOLQvXTIehWc7EkbHFHjPapDqYqR0KsmbizBi68Lg==",
- "cpu": [
- "x64"
- ],
- "dev": true,
- "license": "Apache-2.0",
- "optional": true,
- "os": [
- "win32"
- ],
- "engines": {
- "node": ">=16"
- }
- },
- "node_modules/@cloudflare/workers-types": {
- "version": "4.20251115.0",
- "resolved": "https://registry.npmjs.org/@cloudflare/workers-types/-/workers-types-4.20251115.0.tgz",
- "integrity": "sha512-aM7jp7IfKhqKvfSaK1IhVTbSzxB6KQ4gX8e/W29tOuZk+YHlYXuRd/bMm4hWkfd7B1HWNWdsx1GTaEUoZIuVsw==",
- "dev": true,
- "license": "MIT OR Apache-2.0",
- "peer": true
- },
- "node_modules/@cspotcode/source-map-support": {
- "version": "0.8.1",
- "resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz",
- "integrity": "sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "@jridgewell/trace-mapping": "0.3.9"
- },
- "engines": {
- "node": ">=12"
- }
- },
- "node_modules/@emnapi/runtime": {
- "version": "1.7.1",
- "resolved": "https://registry.npmjs.org/@emnapi/runtime/-/runtime-1.7.1.tgz",
- "integrity": "sha512-PVtJr5CmLwYAU9PZDMITZoR5iAOShYREoR45EyyLrbntV50mdePTgUn4AmOw90Ifcj+x2kRjdzr1HP3RrNiHGA==",
- "dev": true,
- "license": "MIT",
- "optional": true,
- "dependencies": {
- "tslib": "^2.4.0"
- }
- },
- "node_modules/@esbuild/aix-ppc64": {
- "version": "0.25.4",
- "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.25.4.tgz",
- "integrity": "sha512-1VCICWypeQKhVbE9oW/sJaAmjLxhVqacdkvPLEjwlttjfwENRSClS8EjBz0KzRyFSCPDIkuXW34Je/vk7zdB7Q==",
- "cpu": [
- "ppc64"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "aix"
- ],
- "engines": {
- "node": ">=18"
- }
- },
- "node_modules/@esbuild/android-arm": {
- "version": "0.25.4",
- "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.25.4.tgz",
- "integrity": "sha512-QNdQEps7DfFwE3hXiU4BZeOV68HHzYwGd0Nthhd3uCkkEKK7/R6MTgM0P7H7FAs5pU/DIWsviMmEGxEoxIZ+ZQ==",
- "cpu": [
- "arm"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "android"
- ],
- "engines": {
- "node": ">=18"
- }
- },
- "node_modules/@esbuild/android-arm64": {
- "version": "0.25.4",
- "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.25.4.tgz",
- "integrity": "sha512-bBy69pgfhMGtCnwpC/x5QhfxAz/cBgQ9enbtwjf6V9lnPI/hMyT9iWpR1arm0l3kttTr4L0KSLpKmLp/ilKS9A==",
- "cpu": [
- "arm64"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "android"
- ],
- "engines": {
- "node": ">=18"
- }
- },
- "node_modules/@esbuild/android-x64": {
- "version": "0.25.4",
- "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.25.4.tgz",
- "integrity": "sha512-TVhdVtQIFuVpIIR282btcGC2oGQoSfZfmBdTip2anCaVYcqWlZXGcdcKIUklfX2wj0JklNYgz39OBqh2cqXvcQ==",
- "cpu": [
- "x64"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "android"
- ],
- "engines": {
- "node": ">=18"
- }
- },
- "node_modules/@esbuild/darwin-arm64": {
- "version": "0.25.4",
- "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.25.4.tgz",
- "integrity": "sha512-Y1giCfM4nlHDWEfSckMzeWNdQS31BQGs9/rouw6Ub91tkK79aIMTH3q9xHvzH8d0wDru5Ci0kWB8b3up/nl16g==",
- "cpu": [
- "arm64"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "darwin"
- ],
- "engines": {
- "node": ">=18"
- }
- },
- "node_modules/@esbuild/darwin-x64": {
- "version": "0.25.4",
- "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.25.4.tgz",
- "integrity": "sha512-CJsry8ZGM5VFVeyUYB3cdKpd/H69PYez4eJh1W/t38vzutdjEjtP7hB6eLKBoOdxcAlCtEYHzQ/PJ/oU9I4u0A==",
- "cpu": [
- "x64"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "darwin"
- ],
- "engines": {
- "node": ">=18"
- }
- },
- "node_modules/@esbuild/freebsd-arm64": {
- "version": "0.25.4",
- "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.25.4.tgz",
- "integrity": "sha512-yYq+39NlTRzU2XmoPW4l5Ifpl9fqSk0nAJYM/V/WUGPEFfek1epLHJIkTQM6bBs1swApjO5nWgvr843g6TjxuQ==",
- "cpu": [
- "arm64"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "freebsd"
- ],
- "engines": {
- "node": ">=18"
- }
- },
- "node_modules/@esbuild/freebsd-x64": {
- "version": "0.25.4",
- "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.25.4.tgz",
- "integrity": "sha512-0FgvOJ6UUMflsHSPLzdfDnnBBVoCDtBTVyn/MrWloUNvq/5SFmh13l3dvgRPkDihRxb77Y17MbqbCAa2strMQQ==",
- "cpu": [
- "x64"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "freebsd"
- ],
- "engines": {
- "node": ">=18"
- }
- },
- "node_modules/@esbuild/linux-arm": {
- "version": "0.25.4",
- "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.25.4.tgz",
- "integrity": "sha512-kro4c0P85GMfFYqW4TWOpvmF8rFShbWGnrLqlzp4X1TNWjRY3JMYUfDCtOxPKOIY8B0WC8HN51hGP4I4hz4AaQ==",
- "cpu": [
- "arm"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "linux"
- ],
- "engines": {
- "node": ">=18"
- }
- },
- "node_modules/@esbuild/linux-arm64": {
- "version": "0.25.4",
- "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.25.4.tgz",
- "integrity": "sha512-+89UsQTfXdmjIvZS6nUnOOLoXnkUTB9hR5QAeLrQdzOSWZvNSAXAtcRDHWtqAUtAmv7ZM1WPOOeSxDzzzMogiQ==",
- "cpu": [
- "arm64"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "linux"
- ],
- "engines": {
- "node": ">=18"
- }
- },
- "node_modules/@esbuild/linux-ia32": {
- "version": "0.25.4",
- "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.25.4.tgz",
- "integrity": "sha512-yTEjoapy8UP3rv8dB0ip3AfMpRbyhSN3+hY8mo/i4QXFeDxmiYbEKp3ZRjBKcOP862Ua4b1PDfwlvbuwY7hIGQ==",
- "cpu": [
- "ia32"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "linux"
- ],
- "engines": {
- "node": ">=18"
- }
- },
- "node_modules/@esbuild/linux-loong64": {
- "version": "0.25.4",
- "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.25.4.tgz",
- "integrity": "sha512-NeqqYkrcGzFwi6CGRGNMOjWGGSYOpqwCjS9fvaUlX5s3zwOtn1qwg1s2iE2svBe4Q/YOG1q6875lcAoQK/F4VA==",
- "cpu": [
- "loong64"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "linux"
- ],
- "engines": {
- "node": ">=18"
- }
- },
- "node_modules/@esbuild/linux-mips64el": {
- "version": "0.25.4",
- "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.25.4.tgz",
- "integrity": "sha512-IcvTlF9dtLrfL/M8WgNI/qJYBENP3ekgsHbYUIzEzq5XJzzVEV/fXY9WFPfEEXmu3ck2qJP8LG/p3Q8f7Zc2Xg==",
- "cpu": [
- "mips64el"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "linux"
- ],
- "engines": {
- "node": ">=18"
- }
- },
- "node_modules/@esbuild/linux-ppc64": {
- "version": "0.25.4",
- "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.25.4.tgz",
- "integrity": "sha512-HOy0aLTJTVtoTeGZh4HSXaO6M95qu4k5lJcH4gxv56iaycfz1S8GO/5Jh6X4Y1YiI0h7cRyLi+HixMR+88swag==",
- "cpu": [
- "ppc64"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "linux"
- ],
- "engines": {
- "node": ">=18"
- }
- },
- "node_modules/@esbuild/linux-riscv64": {
- "version": "0.25.4",
- "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.25.4.tgz",
- "integrity": "sha512-i8JUDAufpz9jOzo4yIShCTcXzS07vEgWzyX3NH2G7LEFVgrLEhjwL3ajFE4fZI3I4ZgiM7JH3GQ7ReObROvSUA==",
- "cpu": [
- "riscv64"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "linux"
- ],
- "engines": {
- "node": ">=18"
- }
- },
- "node_modules/@esbuild/linux-s390x": {
- "version": "0.25.4",
- "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.25.4.tgz",
- "integrity": "sha512-jFnu+6UbLlzIjPQpWCNh5QtrcNfMLjgIavnwPQAfoGx4q17ocOU9MsQ2QVvFxwQoWpZT8DvTLooTvmOQXkO51g==",
- "cpu": [
- "s390x"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "linux"
- ],
- "engines": {
- "node": ">=18"
- }
- },
- "node_modules/@esbuild/linux-x64": {
- "version": "0.25.4",
- "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.25.4.tgz",
- "integrity": "sha512-6e0cvXwzOnVWJHq+mskP8DNSrKBr1bULBvnFLpc1KY+d+irZSgZ02TGse5FsafKS5jg2e4pbvK6TPXaF/A6+CA==",
- "cpu": [
- "x64"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "linux"
- ],
- "engines": {
- "node": ">=18"
- }
- },
- "node_modules/@esbuild/netbsd-arm64": {
- "version": "0.25.4",
- "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.25.4.tgz",
- "integrity": "sha512-vUnkBYxZW4hL/ie91hSqaSNjulOnYXE1VSLusnvHg2u3jewJBz3YzB9+oCw8DABeVqZGg94t9tyZFoHma8gWZQ==",
- "cpu": [
- "arm64"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "netbsd"
- ],
- "engines": {
- "node": ">=18"
- }
- },
- "node_modules/@esbuild/netbsd-x64": {
- "version": "0.25.4",
- "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.25.4.tgz",
- "integrity": "sha512-XAg8pIQn5CzhOB8odIcAm42QsOfa98SBeKUdo4xa8OvX8LbMZqEtgeWE9P/Wxt7MlG2QqvjGths+nq48TrUiKw==",
- "cpu": [
- "x64"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "netbsd"
- ],
- "engines": {
- "node": ">=18"
- }
- },
- "node_modules/@esbuild/openbsd-arm64": {
- "version": "0.25.4",
- "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.25.4.tgz",
- "integrity": "sha512-Ct2WcFEANlFDtp1nVAXSNBPDxyU+j7+tId//iHXU2f/lN5AmO4zLyhDcpR5Cz1r08mVxzt3Jpyt4PmXQ1O6+7A==",
- "cpu": [
- "arm64"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "openbsd"
- ],
- "engines": {
- "node": ">=18"
- }
- },
- "node_modules/@esbuild/openbsd-x64": {
- "version": "0.25.4",
- "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.25.4.tgz",
- "integrity": "sha512-xAGGhyOQ9Otm1Xu8NT1ifGLnA6M3sJxZ6ixylb+vIUVzvvd6GOALpwQrYrtlPouMqd/vSbgehz6HaVk4+7Afhw==",
- "cpu": [
- "x64"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "openbsd"
- ],
- "engines": {
- "node": ">=18"
- }
- },
- "node_modules/@esbuild/sunos-x64": {
- "version": "0.25.4",
- "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.25.4.tgz",
- "integrity": "sha512-Mw+tzy4pp6wZEK0+Lwr76pWLjrtjmJyUB23tHKqEDP74R3q95luY/bXqXZeYl4NYlvwOqoRKlInQialgCKy67Q==",
- "cpu": [
- "x64"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "sunos"
- ],
- "engines": {
- "node": ">=18"
- }
- },
- "node_modules/@esbuild/win32-arm64": {
- "version": "0.25.4",
- "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.25.4.tgz",
- "integrity": "sha512-AVUP428VQTSddguz9dO9ngb+E5aScyg7nOeJDrF1HPYu555gmza3bDGMPhmVXL8svDSoqPCsCPjb265yG/kLKQ==",
- "cpu": [
- "arm64"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "win32"
- ],
- "engines": {
- "node": ">=18"
- }
- },
- "node_modules/@esbuild/win32-ia32": {
- "version": "0.25.4",
- "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.25.4.tgz",
- "integrity": "sha512-i1sW+1i+oWvQzSgfRcxxG2k4I9n3O9NRqy8U+uugaT2Dy7kLO9Y7wI72haOahxceMX8hZAzgGou1FhndRldxRg==",
- "cpu": [
- "ia32"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "win32"
- ],
- "engines": {
- "node": ">=18"
- }
- },
- "node_modules/@esbuild/win32-x64": {
- "version": "0.25.4",
- "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.25.4.tgz",
- "integrity": "sha512-nOT2vZNw6hJ+z43oP1SPea/G/6AbN6X+bGNhNuq8NtRHy4wsMhw765IKLNmnjek7GvjWBYQ8Q5VBoYTFg9y1UQ==",
- "cpu": [
- "x64"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "win32"
- ],
- "engines": {
- "node": ">=18"
- }
- },
- "node_modules/@img/sharp-darwin-arm64": {
- "version": "0.33.5",
- "resolved": "https://registry.npmjs.org/@img/sharp-darwin-arm64/-/sharp-darwin-arm64-0.33.5.tgz",
- "integrity": "sha512-UT4p+iz/2H4twwAoLCqfA9UH5pI6DggwKEGuaPy7nCVQ8ZsiY5PIcrRvD1DzuY3qYL07NtIQcWnBSY/heikIFQ==",
- "cpu": [
- "arm64"
- ],
- "dev": true,
- "license": "Apache-2.0",
- "optional": true,
- "os": [
- "darwin"
- ],
- "engines": {
- "node": "^18.17.0 || ^20.3.0 || >=21.0.0"
- },
- "funding": {
- "url": "https://opencollective.com/libvips"
- },
- "optionalDependencies": {
- "@img/sharp-libvips-darwin-arm64": "1.0.4"
- }
- },
- "node_modules/@img/sharp-darwin-x64": {
- "version": "0.33.5",
- "resolved": "https://registry.npmjs.org/@img/sharp-darwin-x64/-/sharp-darwin-x64-0.33.5.tgz",
- "integrity": "sha512-fyHac4jIc1ANYGRDxtiqelIbdWkIuQaI84Mv45KvGRRxSAa7o7d1ZKAOBaYbnepLC1WqxfpimdeWfvqqSGwR2Q==",
- "cpu": [
- "x64"
- ],
- "dev": true,
- "license": "Apache-2.0",
- "optional": true,
- "os": [
- "darwin"
- ],
- "engines": {
- "node": "^18.17.0 || ^20.3.0 || >=21.0.0"
- },
- "funding": {
- "url": "https://opencollective.com/libvips"
- },
- "optionalDependencies": {
- "@img/sharp-libvips-darwin-x64": "1.0.4"
- }
- },
- "node_modules/@img/sharp-libvips-darwin-arm64": {
- "version": "1.0.4",
- "resolved": "https://registry.npmjs.org/@img/sharp-libvips-darwin-arm64/-/sharp-libvips-darwin-arm64-1.0.4.tgz",
- "integrity": "sha512-XblONe153h0O2zuFfTAbQYAX2JhYmDHeWikp1LM9Hul9gVPjFY427k6dFEcOL72O01QxQsWi761svJ/ev9xEDg==",
- "cpu": [
- "arm64"
- ],
- "dev": true,
- "license": "LGPL-3.0-or-later",
- "optional": true,
- "os": [
- "darwin"
- ],
- "funding": {
- "url": "https://opencollective.com/libvips"
- }
- },
- "node_modules/@img/sharp-libvips-darwin-x64": {
- "version": "1.0.4",
- "resolved": "https://registry.npmjs.org/@img/sharp-libvips-darwin-x64/-/sharp-libvips-darwin-x64-1.0.4.tgz",
- "integrity": "sha512-xnGR8YuZYfJGmWPvmlunFaWJsb9T/AO2ykoP3Fz/0X5XV2aoYBPkX6xqCQvUTKKiLddarLaxpzNe+b1hjeWHAQ==",
- "cpu": [
- "x64"
- ],
- "dev": true,
- "license": "LGPL-3.0-or-later",
- "optional": true,
- "os": [
- "darwin"
- ],
- "funding": {
- "url": "https://opencollective.com/libvips"
- }
- },
- "node_modules/@img/sharp-libvips-linux-arm": {
- "version": "1.0.5",
- "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-arm/-/sharp-libvips-linux-arm-1.0.5.tgz",
- "integrity": "sha512-gvcC4ACAOPRNATg/ov8/MnbxFDJqf/pDePbBnuBDcjsI8PssmjoKMAz4LtLaVi+OnSb5FK/yIOamqDwGmXW32g==",
- "cpu": [
- "arm"
- ],
- "dev": true,
- "license": "LGPL-3.0-or-later",
- "optional": true,
- "os": [
- "linux"
- ],
- "funding": {
- "url": "https://opencollective.com/libvips"
- }
- },
- "node_modules/@img/sharp-libvips-linux-arm64": {
- "version": "1.0.4",
- "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-arm64/-/sharp-libvips-linux-arm64-1.0.4.tgz",
- "integrity": "sha512-9B+taZ8DlyyqzZQnoeIvDVR/2F4EbMepXMc/NdVbkzsJbzkUjhXv/70GQJ7tdLA4YJgNP25zukcxpX2/SueNrA==",
- "cpu": [
- "arm64"
- ],
- "dev": true,
- "license": "LGPL-3.0-or-later",
- "optional": true,
- "os": [
- "linux"
- ],
- "funding": {
- "url": "https://opencollective.com/libvips"
- }
- },
- "node_modules/@img/sharp-libvips-linux-s390x": {
- "version": "1.0.4",
- "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-s390x/-/sharp-libvips-linux-s390x-1.0.4.tgz",
- "integrity": "sha512-u7Wz6ntiSSgGSGcjZ55im6uvTrOxSIS8/dgoVMoiGE9I6JAfU50yH5BoDlYA1tcuGS7g/QNtetJnxA6QEsCVTA==",
- "cpu": [
- "s390x"
- ],
- "dev": true,
- "license": "LGPL-3.0-or-later",
- "optional": true,
- "os": [
- "linux"
- ],
- "funding": {
- "url": "https://opencollective.com/libvips"
- }
- },
- "node_modules/@img/sharp-libvips-linux-x64": {
- "version": "1.0.4",
- "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-x64/-/sharp-libvips-linux-x64-1.0.4.tgz",
- "integrity": "sha512-MmWmQ3iPFZr0Iev+BAgVMb3ZyC4KeFc3jFxnNbEPas60e1cIfevbtuyf9nDGIzOaW9PdnDciJm+wFFaTlj5xYw==",
- "cpu": [
- "x64"
- ],
- "dev": true,
- "license": "LGPL-3.0-or-later",
- "optional": true,
- "os": [
- "linux"
- ],
- "funding": {
- "url": "https://opencollective.com/libvips"
- }
- },
- "node_modules/@img/sharp-libvips-linuxmusl-arm64": {
- "version": "1.0.4",
- "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linuxmusl-arm64/-/sharp-libvips-linuxmusl-arm64-1.0.4.tgz",
- "integrity": "sha512-9Ti+BbTYDcsbp4wfYib8Ctm1ilkugkA/uscUn6UXK1ldpC1JjiXbLfFZtRlBhjPZ5o1NCLiDbg8fhUPKStHoTA==",
- "cpu": [
- "arm64"
- ],
- "dev": true,
- "license": "LGPL-3.0-or-later",
- "optional": true,
- "os": [
- "linux"
- ],
- "funding": {
- "url": "https://opencollective.com/libvips"
- }
- },
- "node_modules/@img/sharp-libvips-linuxmusl-x64": {
- "version": "1.0.4",
- "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linuxmusl-x64/-/sharp-libvips-linuxmusl-x64-1.0.4.tgz",
- "integrity": "sha512-viYN1KX9m+/hGkJtvYYp+CCLgnJXwiQB39damAO7WMdKWlIhmYTfHjwSbQeUK/20vY154mwezd9HflVFM1wVSw==",
- "cpu": [
- "x64"
- ],
- "dev": true,
- "license": "LGPL-3.0-or-later",
- "optional": true,
- "os": [
- "linux"
- ],
- "funding": {
- "url": "https://opencollective.com/libvips"
- }
- },
- "node_modules/@img/sharp-linux-arm": {
- "version": "0.33.5",
- "resolved": "https://registry.npmjs.org/@img/sharp-linux-arm/-/sharp-linux-arm-0.33.5.tgz",
- "integrity": "sha512-JTS1eldqZbJxjvKaAkxhZmBqPRGmxgu+qFKSInv8moZ2AmT5Yib3EQ1c6gp493HvrvV8QgdOXdyaIBrhvFhBMQ==",
- "cpu": [
- "arm"
- ],
- "dev": true,
- "license": "Apache-2.0",
- "optional": true,
- "os": [
- "linux"
- ],
- "engines": {
- "node": "^18.17.0 || ^20.3.0 || >=21.0.0"
- },
- "funding": {
- "url": "https://opencollective.com/libvips"
- },
- "optionalDependencies": {
- "@img/sharp-libvips-linux-arm": "1.0.5"
- }
- },
- "node_modules/@img/sharp-linux-arm64": {
- "version": "0.33.5",
- "resolved": "https://registry.npmjs.org/@img/sharp-linux-arm64/-/sharp-linux-arm64-0.33.5.tgz",
- "integrity": "sha512-JMVv+AMRyGOHtO1RFBiJy/MBsgz0x4AWrT6QoEVVTyh1E39TrCUpTRI7mx9VksGX4awWASxqCYLCV4wBZHAYxA==",
- "cpu": [
- "arm64"
- ],
- "dev": true,
- "license": "Apache-2.0",
- "optional": true,
- "os": [
- "linux"
- ],
- "engines": {
- "node": "^18.17.0 || ^20.3.0 || >=21.0.0"
- },
- "funding": {
- "url": "https://opencollective.com/libvips"
- },
- "optionalDependencies": {
- "@img/sharp-libvips-linux-arm64": "1.0.4"
- }
- },
- "node_modules/@img/sharp-linux-s390x": {
- "version": "0.33.5",
- "resolved": "https://registry.npmjs.org/@img/sharp-linux-s390x/-/sharp-linux-s390x-0.33.5.tgz",
- "integrity": "sha512-y/5PCd+mP4CA/sPDKl2961b+C9d+vPAveS33s6Z3zfASk2j5upL6fXVPZi7ztePZ5CuH+1kW8JtvxgbuXHRa4Q==",
- "cpu": [
- "s390x"
- ],
- "dev": true,
- "license": "Apache-2.0",
- "optional": true,
- "os": [
- "linux"
- ],
- "engines": {
- "node": "^18.17.0 || ^20.3.0 || >=21.0.0"
- },
- "funding": {
- "url": "https://opencollective.com/libvips"
- },
- "optionalDependencies": {
- "@img/sharp-libvips-linux-s390x": "1.0.4"
- }
- },
- "node_modules/@img/sharp-linux-x64": {
- "version": "0.33.5",
- "resolved": "https://registry.npmjs.org/@img/sharp-linux-x64/-/sharp-linux-x64-0.33.5.tgz",
- "integrity": "sha512-opC+Ok5pRNAzuvq1AG0ar+1owsu842/Ab+4qvU879ippJBHvyY5n2mxF1izXqkPYlGuP/M556uh53jRLJmzTWA==",
- "cpu": [
- "x64"
- ],
- "dev": true,
- "license": "Apache-2.0",
- "optional": true,
- "os": [
- "linux"
- ],
- "engines": {
- "node": "^18.17.0 || ^20.3.0 || >=21.0.0"
- },
- "funding": {
- "url": "https://opencollective.com/libvips"
- },
- "optionalDependencies": {
- "@img/sharp-libvips-linux-x64": "1.0.4"
- }
- },
- "node_modules/@img/sharp-linuxmusl-arm64": {
- "version": "0.33.5",
- "resolved": "https://registry.npmjs.org/@img/sharp-linuxmusl-arm64/-/sharp-linuxmusl-arm64-0.33.5.tgz",
- "integrity": "sha512-XrHMZwGQGvJg2V/oRSUfSAfjfPxO+4DkiRh6p2AFjLQztWUuY/o8Mq0eMQVIY7HJ1CDQUJlxGGZRw1a5bqmd1g==",
- "cpu": [
- "arm64"
- ],
- "dev": true,
- "license": "Apache-2.0",
- "optional": true,
- "os": [
- "linux"
- ],
- "engines": {
- "node": "^18.17.0 || ^20.3.0 || >=21.0.0"
- },
- "funding": {
- "url": "https://opencollective.com/libvips"
- },
- "optionalDependencies": {
- "@img/sharp-libvips-linuxmusl-arm64": "1.0.4"
- }
- },
- "node_modules/@img/sharp-linuxmusl-x64": {
- "version": "0.33.5",
- "resolved": "https://registry.npmjs.org/@img/sharp-linuxmusl-x64/-/sharp-linuxmusl-x64-0.33.5.tgz",
- "integrity": "sha512-WT+d/cgqKkkKySYmqoZ8y3pxx7lx9vVejxW/W4DOFMYVSkErR+w7mf2u8m/y4+xHe7yY9DAXQMWQhpnMuFfScw==",
- "cpu": [
- "x64"
- ],
- "dev": true,
- "license": "Apache-2.0",
- "optional": true,
- "os": [
- "linux"
- ],
- "engines": {
- "node": "^18.17.0 || ^20.3.0 || >=21.0.0"
- },
- "funding": {
- "url": "https://opencollective.com/libvips"
- },
- "optionalDependencies": {
- "@img/sharp-libvips-linuxmusl-x64": "1.0.4"
- }
- },
- "node_modules/@img/sharp-wasm32": {
- "version": "0.33.5",
- "resolved": "https://registry.npmjs.org/@img/sharp-wasm32/-/sharp-wasm32-0.33.5.tgz",
- "integrity": "sha512-ykUW4LVGaMcU9lu9thv85CbRMAwfeadCJHRsg2GmeRa/cJxsVY9Rbd57JcMxBkKHag5U/x7TSBpScF4U8ElVzg==",
- "cpu": [
- "wasm32"
- ],
- "dev": true,
- "license": "Apache-2.0 AND LGPL-3.0-or-later AND MIT",
- "optional": true,
- "dependencies": {
- "@emnapi/runtime": "^1.2.0"
- },
- "engines": {
- "node": "^18.17.0 || ^20.3.0 || >=21.0.0"
- },
- "funding": {
- "url": "https://opencollective.com/libvips"
- }
- },
- "node_modules/@img/sharp-win32-ia32": {
- "version": "0.33.5",
- "resolved": "https://registry.npmjs.org/@img/sharp-win32-ia32/-/sharp-win32-ia32-0.33.5.tgz",
- "integrity": "sha512-T36PblLaTwuVJ/zw/LaH0PdZkRz5rd3SmMHX8GSmR7vtNSP5Z6bQkExdSK7xGWyxLw4sUknBuugTelgw2faBbQ==",
- "cpu": [
- "ia32"
- ],
- "dev": true,
- "license": "Apache-2.0 AND LGPL-3.0-or-later",
- "optional": true,
- "os": [
- "win32"
- ],
- "engines": {
- "node": "^18.17.0 || ^20.3.0 || >=21.0.0"
- },
- "funding": {
- "url": "https://opencollective.com/libvips"
- }
- },
- "node_modules/@img/sharp-win32-x64": {
- "version": "0.33.5",
- "resolved": "https://registry.npmjs.org/@img/sharp-win32-x64/-/sharp-win32-x64-0.33.5.tgz",
- "integrity": "sha512-MpY/o8/8kj+EcnxwvrP4aTJSWw/aZ7JIGR4aBeZkZw5B7/Jn+tY9/VNwtcoGmdT7GfggGIU4kygOMSbYnOrAbg==",
- "cpu": [
- "x64"
- ],
- "dev": true,
- "license": "Apache-2.0 AND LGPL-3.0-or-later",
- "optional": true,
- "os": [
- "win32"
- ],
- "engines": {
- "node": "^18.17.0 || ^20.3.0 || >=21.0.0"
- },
- "funding": {
- "url": "https://opencollective.com/libvips"
- }
- },
- "node_modules/@jridgewell/resolve-uri": {
- "version": "3.1.2",
- "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz",
- "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=6.0.0"
- }
- },
- "node_modules/@jridgewell/sourcemap-codec": {
- "version": "1.5.5",
- "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz",
- "integrity": "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/@jridgewell/trace-mapping": {
- "version": "0.3.9",
- "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz",
- "integrity": "sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "@jridgewell/resolve-uri": "^3.0.3",
- "@jridgewell/sourcemap-codec": "^1.4.10"
- }
- },
- "node_modules/@poppinss/colors": {
- "version": "4.1.5",
- "resolved": "https://registry.npmjs.org/@poppinss/colors/-/colors-4.1.5.tgz",
- "integrity": "sha512-FvdDqtcRCtz6hThExcFOgW0cWX+xwSMWcRuQe5ZEb2m7cVQOAVZOIMt+/v9RxGiD9/OY16qJBXK4CVKWAPalBw==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "kleur": "^4.1.5"
- }
- },
- "node_modules/@poppinss/dumper": {
- "version": "0.6.5",
- "resolved": "https://registry.npmjs.org/@poppinss/dumper/-/dumper-0.6.5.tgz",
- "integrity": "sha512-NBdYIb90J7LfOI32dOewKI1r7wnkiH6m920puQ3qHUeZkxNkQiFnXVWoE6YtFSv6QOiPPf7ys6i+HWWecDz7sw==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "@poppinss/colors": "^4.1.5",
- "@sindresorhus/is": "^7.0.2",
- "supports-color": "^10.0.0"
- }
- },
- "node_modules/@poppinss/exception": {
- "version": "1.2.2",
- "resolved": "https://registry.npmjs.org/@poppinss/exception/-/exception-1.2.2.tgz",
- "integrity": "sha512-m7bpKCD4QMlFCjA/nKTs23fuvoVFoA83brRKmObCUNmi/9tVu8Ve3w4YQAnJu4q3Tjf5fr685HYIC/IA2zHRSg==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/@sindresorhus/is": {
- "version": "7.1.1",
- "resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-7.1.1.tgz",
- "integrity": "sha512-rO92VvpgMc3kfiTjGT52LEtJ8Yc5kCWhZjLQ3LwlA4pSgPpQO7bVpYXParOD8Jwf+cVQECJo3yP/4I8aZtUQTQ==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=18"
- },
- "funding": {
- "url": "https://github.com/sindresorhus/is?sponsor=1"
- }
- },
- "node_modules/@speed-highlight/core": {
- "version": "1.2.12",
- "resolved": "https://registry.npmjs.org/@speed-highlight/core/-/core-1.2.12.tgz",
- "integrity": "sha512-uilwrK0Ygyri5dToHYdZSjcvpS2ZwX0w5aSt3GCEN9hrjxWCoeV4Z2DTXuxjwbntaLQIEEAlCeNQss5SoHvAEA==",
- "dev": true,
- "license": "CC0-1.0"
- },
- "node_modules/@types/node": {
- "version": "25.0.3",
- "resolved": "https://registry.npmjs.org/@types/node/-/node-25.0.3.tgz",
- "integrity": "sha512-W609buLVRVmeW693xKfzHeIV6nJGGz98uCPfeXI1ELMLXVeKYZ9m15fAMSaUPBHYLGFsVRcMmSCksQOrZV9BYA==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "undici-types": "~7.16.0"
- }
- },
- "node_modules/acorn": {
- "version": "8.14.0",
- "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.14.0.tgz",
- "integrity": "sha512-cl669nCJTZBsL97OF4kUQm5g5hC2uihk0NxY3WENAC0TYdILVkAyHymAntgxGkl7K+t0cXIrH5siy5S4XkFycA==",
- "dev": true,
- "license": "MIT",
- "bin": {
- "acorn": "bin/acorn"
- },
- "engines": {
- "node": ">=0.4.0"
- }
- },
- "node_modules/acorn-walk": {
- "version": "8.3.2",
- "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.3.2.tgz",
- "integrity": "sha512-cjkyv4OtNCIeqhHrfS81QWXoCBPExR/J62oyEqepVw8WaQeSqpW2uhuLPh1m9eWhDuOo/jUXVTlifvesOWp/4A==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=0.4.0"
- }
- },
- "node_modules/blake3-wasm": {
- "version": "2.1.5",
- "resolved": "https://registry.npmjs.org/blake3-wasm/-/blake3-wasm-2.1.5.tgz",
- "integrity": "sha512-F1+K8EbfOZE49dtoPtmxUQrpXaBIl3ICvasLh+nJta0xkz+9kF/7uet9fLnwKqhDrmj6g+6K3Tw9yQPUg2ka5g==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/color": {
- "version": "4.2.3",
- "resolved": "https://registry.npmjs.org/color/-/color-4.2.3.tgz",
- "integrity": "sha512-1rXeuUUiGGrykh+CeBdu5Ie7OJwinCgQY0bc7GCRxy5xVHy+moaqkpL/jqQq0MtQOeYcrqEz4abc5f0KtU7W4A==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "color-convert": "^2.0.1",
- "color-string": "^1.9.0"
- },
- "engines": {
- "node": ">=12.5.0"
- }
- },
- "node_modules/color-convert": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
- "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "color-name": "~1.1.4"
- },
- "engines": {
- "node": ">=7.0.0"
- }
- },
- "node_modules/color-name": {
- "version": "1.1.4",
- "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
- "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/color-string": {
- "version": "1.9.1",
- "resolved": "https://registry.npmjs.org/color-string/-/color-string-1.9.1.tgz",
- "integrity": "sha512-shrVawQFojnZv6xM40anx4CkoDP+fZsw/ZerEMsW/pyzsRbElpsL/DBVW7q3ExxwusdNXI3lXpuhEZkzs8p5Eg==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "color-name": "^1.0.0",
- "simple-swizzle": "^0.2.2"
- }
- },
- "node_modules/cookie": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/cookie/-/cookie-1.0.2.tgz",
- "integrity": "sha512-9Kr/j4O16ISv8zBBhJoi4bXOYNTkFLOqSL3UDB0njXxCXNezjeyVrJyGOWtgfs/q2km1gwBcfH8q1yEGoMYunA==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=18"
- }
- },
- "node_modules/detect-libc": {
- "version": "2.1.2",
- "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.1.2.tgz",
- "integrity": "sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ==",
- "dev": true,
- "license": "Apache-2.0",
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/error-stack-parser-es": {
- "version": "1.0.5",
- "resolved": "https://registry.npmjs.org/error-stack-parser-es/-/error-stack-parser-es-1.0.5.tgz",
- "integrity": "sha512-5qucVt2XcuGMcEGgWI7i+yZpmpByQ8J1lHhcL7PwqCwu9FPP3VUXzT4ltHe5i2z9dePwEHcDVOAfSnHsOlCXRA==",
- "dev": true,
- "license": "MIT",
- "funding": {
- "url": "https://github.com/sponsors/antfu"
- }
- },
- "node_modules/esbuild": {
- "version": "0.25.4",
- "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.25.4.tgz",
- "integrity": "sha512-8pgjLUcUjcgDg+2Q4NYXnPbo/vncAY4UmyaCm0jZevERqCHZIaWwdJHkf8XQtu4AxSKCdvrUbT0XUr1IdZzI8Q==",
- "dev": true,
- "hasInstallScript": true,
- "license": "MIT",
- "bin": {
- "esbuild": "bin/esbuild"
- },
- "engines": {
- "node": ">=18"
- },
- "optionalDependencies": {
- "@esbuild/aix-ppc64": "0.25.4",
- "@esbuild/android-arm": "0.25.4",
- "@esbuild/android-arm64": "0.25.4",
- "@esbuild/android-x64": "0.25.4",
- "@esbuild/darwin-arm64": "0.25.4",
- "@esbuild/darwin-x64": "0.25.4",
- "@esbuild/freebsd-arm64": "0.25.4",
- "@esbuild/freebsd-x64": "0.25.4",
- "@esbuild/linux-arm": "0.25.4",
- "@esbuild/linux-arm64": "0.25.4",
- "@esbuild/linux-ia32": "0.25.4",
- "@esbuild/linux-loong64": "0.25.4",
- "@esbuild/linux-mips64el": "0.25.4",
- "@esbuild/linux-ppc64": "0.25.4",
- "@esbuild/linux-riscv64": "0.25.4",
- "@esbuild/linux-s390x": "0.25.4",
- "@esbuild/linux-x64": "0.25.4",
- "@esbuild/netbsd-arm64": "0.25.4",
- "@esbuild/netbsd-x64": "0.25.4",
- "@esbuild/openbsd-arm64": "0.25.4",
- "@esbuild/openbsd-x64": "0.25.4",
- "@esbuild/sunos-x64": "0.25.4",
- "@esbuild/win32-arm64": "0.25.4",
- "@esbuild/win32-ia32": "0.25.4",
- "@esbuild/win32-x64": "0.25.4"
- }
- },
- "node_modules/exit-hook": {
- "version": "2.2.1",
- "resolved": "https://registry.npmjs.org/exit-hook/-/exit-hook-2.2.1.tgz",
- "integrity": "sha512-eNTPlAD67BmP31LDINZ3U7HSF8l57TxOY2PmBJ1shpCvpnxBF93mWCE8YHBnXs8qiUZJc9WDcWIeC3a2HIAMfw==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=6"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/fsevents": {
- "version": "2.3.3",
- "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz",
- "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==",
- "dev": true,
- "hasInstallScript": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "darwin"
- ],
- "engines": {
- "node": "^8.16.0 || ^10.6.0 || >=11.0.0"
- }
- },
- "node_modules/glob-to-regexp": {
- "version": "0.4.1",
- "resolved": "https://registry.npmjs.org/glob-to-regexp/-/glob-to-regexp-0.4.1.tgz",
- "integrity": "sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw==",
- "dev": true,
- "license": "BSD-2-Clause"
- },
- "node_modules/is-arrayish": {
- "version": "0.3.4",
- "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.3.4.tgz",
- "integrity": "sha512-m6UrgzFVUYawGBh1dUsWR5M2Clqic9RVXC/9f8ceNlv2IcO9j9J/z8UoCLPqtsPBFNzEpfR3xftohbfqDx8EQA==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/itty-router": {
- "version": "5.0.22",
- "resolved": "https://registry.npmjs.org/itty-router/-/itty-router-5.0.22.tgz",
- "integrity": "sha512-9hmdGErWdYDOurGYxSbqLhy4EFReIwk71hMZTJ5b+zfa2zjMNV1ftFno2b8VjAQvX615gNB8Qxbl9JMRqHnIVA==",
- "license": "MIT"
- },
- "node_modules/jose": {
- "version": "5.10.0",
- "resolved": "https://registry.npmjs.org/jose/-/jose-5.10.0.tgz",
- "integrity": "sha512-s+3Al/p9g32Iq+oqXxkW//7jk2Vig6FF1CFqzVXoTUXt2qz89YWbL+OwS17NFYEvxC35n0FKeGO2LGYSxeM2Gg==",
- "license": "MIT",
- "funding": {
- "url": "https://github.com/sponsors/panva"
- }
- },
- "node_modules/kleur": {
- "version": "4.1.5",
- "resolved": "https://registry.npmjs.org/kleur/-/kleur-4.1.5.tgz",
- "integrity": "sha512-o+NO+8WrRiQEE4/7nwRJhN1HWpVmJm511pBHUxPLtp0BUISzlBplORYSmTclCnJvQq2tKu/sgl3xVpkc7ZWuQQ==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=6"
- }
- },
- "node_modules/mime": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/mime/-/mime-3.0.0.tgz",
- "integrity": "sha512-jSCU7/VB1loIWBZe14aEYHU/+1UMEHoaO7qxCOVJOw9GgH72VAWppxNcjU+x9a2k3GSIBXNKxXQFqRvvZ7vr3A==",
- "dev": true,
- "license": "MIT",
- "bin": {
- "mime": "cli.js"
- },
- "engines": {
- "node": ">=10.0.0"
- }
- },
- "node_modules/miniflare": {
- "version": "4.20251109.0",
- "resolved": "https://registry.npmjs.org/miniflare/-/miniflare-4.20251109.0.tgz",
- "integrity": "sha512-fm0J/IFrrx7RT1w3SIoDM5m7zPCa2wBtxBApy6G0QVjd2tx8w0WGlMFop6R49XyTfF1q3LRHCjFMfzJ8YS0RzQ==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "@cspotcode/source-map-support": "0.8.1",
- "acorn": "8.14.0",
- "acorn-walk": "8.3.2",
- "exit-hook": "2.2.1",
- "glob-to-regexp": "0.4.1",
- "sharp": "^0.33.5",
- "stoppable": "1.1.0",
- "undici": "7.14.0",
- "workerd": "1.20251109.0",
- "ws": "8.18.0",
- "youch": "4.1.0-beta.10",
- "zod": "3.22.3"
- },
- "bin": {
- "miniflare": "bootstrap.js"
- },
- "engines": {
- "node": ">=18.0.0"
- }
- },
- "node_modules/miniflare/node_modules/zod": {
- "version": "3.22.3",
- "resolved": "https://registry.npmjs.org/zod/-/zod-3.22.3.tgz",
- "integrity": "sha512-EjIevzuJRiRPbVH4mGc8nApb/lVLKVpmUhAaR5R5doKGfAnGJ6Gr3CViAVjP+4FWSxCsybeWQdcgCtbX+7oZug==",
- "dev": true,
- "license": "MIT",
- "funding": {
- "url": "https://github.com/sponsors/colinhacks"
- }
- },
- "node_modules/path-to-regexp": {
- "version": "6.3.0",
- "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-6.3.0.tgz",
- "integrity": "sha512-Yhpw4T9C6hPpgPeA28us07OJeqZ5EzQTkbfwuhsUg0c237RomFoETJgmp2sa3F/41gfLE6G5cqcYwznmeEeOlQ==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/pathe": {
- "version": "2.0.3",
- "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz",
- "integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/semver": {
- "version": "7.7.3",
- "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.3.tgz",
- "integrity": "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==",
- "dev": true,
- "license": "ISC",
- "bin": {
- "semver": "bin/semver.js"
- },
- "engines": {
- "node": ">=10"
- }
- },
- "node_modules/sharp": {
- "version": "0.33.5",
- "resolved": "https://registry.npmjs.org/sharp/-/sharp-0.33.5.tgz",
- "integrity": "sha512-haPVm1EkS9pgvHrQ/F3Xy+hgcuMV0Wm9vfIBSiwZ05k+xgb0PkBQpGsAA/oWdDobNaZTH5ppvHtzCFbnSEwHVw==",
- "dev": true,
- "hasInstallScript": true,
- "license": "Apache-2.0",
- "dependencies": {
- "color": "^4.2.3",
- "detect-libc": "^2.0.3",
- "semver": "^7.6.3"
- },
- "engines": {
- "node": "^18.17.0 || ^20.3.0 || >=21.0.0"
- },
- "funding": {
- "url": "https://opencollective.com/libvips"
- },
- "optionalDependencies": {
- "@img/sharp-darwin-arm64": "0.33.5",
- "@img/sharp-darwin-x64": "0.33.5",
- "@img/sharp-libvips-darwin-arm64": "1.0.4",
- "@img/sharp-libvips-darwin-x64": "1.0.4",
- "@img/sharp-libvips-linux-arm": "1.0.5",
- "@img/sharp-libvips-linux-arm64": "1.0.4",
- "@img/sharp-libvips-linux-s390x": "1.0.4",
- "@img/sharp-libvips-linux-x64": "1.0.4",
- "@img/sharp-libvips-linuxmusl-arm64": "1.0.4",
- "@img/sharp-libvips-linuxmusl-x64": "1.0.4",
- "@img/sharp-linux-arm": "0.33.5",
- "@img/sharp-linux-arm64": "0.33.5",
- "@img/sharp-linux-s390x": "0.33.5",
- "@img/sharp-linux-x64": "0.33.5",
- "@img/sharp-linuxmusl-arm64": "0.33.5",
- "@img/sharp-linuxmusl-x64": "0.33.5",
- "@img/sharp-wasm32": "0.33.5",
- "@img/sharp-win32-ia32": "0.33.5",
- "@img/sharp-win32-x64": "0.33.5"
- }
- },
- "node_modules/simple-swizzle": {
- "version": "0.2.4",
- "resolved": "https://registry.npmjs.org/simple-swizzle/-/simple-swizzle-0.2.4.tgz",
- "integrity": "sha512-nAu1WFPQSMNr2Zn9PGSZK9AGn4t/y97lEm+MXTtUDwfP0ksAIX4nO+6ruD9Jwut4C49SB1Ws+fbXsm/yScWOHw==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "is-arrayish": "^0.3.1"
- }
- },
- "node_modules/stoppable": {
- "version": "1.1.0",
- "resolved": "https://registry.npmjs.org/stoppable/-/stoppable-1.1.0.tgz",
- "integrity": "sha512-KXDYZ9dszj6bzvnEMRYvxgeTHU74QBFL54XKtP3nyMuJ81CFYtABZ3bAzL2EdFUaEwJOBOgENyFj3R7oTzDyyw==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=4",
- "npm": ">=6"
- }
- },
- "node_modules/supports-color": {
- "version": "10.2.2",
- "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-10.2.2.tgz",
- "integrity": "sha512-SS+jx45GF1QjgEXQx4NJZV9ImqmO2NPz5FNsIHrsDjh2YsHnawpan7SNQ1o8NuhrbHZy9AZhIoCUiCeaW/C80g==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=18"
- },
- "funding": {
- "url": "https://github.com/chalk/supports-color?sponsor=1"
- }
- },
- "node_modules/tslib": {
- "version": "2.8.1",
- "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz",
- "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==",
- "dev": true,
- "license": "0BSD",
- "optional": true
- },
- "node_modules/typescript": {
- "version": "5.9.3",
- "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.9.3.tgz",
- "integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==",
- "dev": true,
- "license": "Apache-2.0",
- "bin": {
- "tsc": "bin/tsc",
- "tsserver": "bin/tsserver"
- },
- "engines": {
- "node": ">=14.17"
- }
- },
- "node_modules/undici": {
- "version": "7.14.0",
- "resolved": "https://registry.npmjs.org/undici/-/undici-7.14.0.tgz",
- "integrity": "sha512-Vqs8HTzjpQXZeXdpsfChQTlafcMQaaIwnGwLam1wudSSjlJeQ3bw1j+TLPePgrCnCpUXx7Ba5Pdpf5OBih62NQ==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=20.18.1"
- }
- },
- "node_modules/undici-types": {
- "version": "7.16.0",
- "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.16.0.tgz",
- "integrity": "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/unenv": {
- "version": "2.0.0-rc.24",
- "resolved": "https://registry.npmjs.org/unenv/-/unenv-2.0.0-rc.24.tgz",
- "integrity": "sha512-i7qRCmY42zmCwnYlh9H2SvLEypEFGye5iRmEMKjcGi7zk9UquigRjFtTLz0TYqr0ZGLZhaMHl/foy1bZR+Cwlw==",
- "dev": true,
- "license": "MIT",
- "peer": true,
- "dependencies": {
- "pathe": "^2.0.3"
- }
- },
- "node_modules/workerd": {
- "version": "1.20251109.0",
- "resolved": "https://registry.npmjs.org/workerd/-/workerd-1.20251109.0.tgz",
- "integrity": "sha512-VfazMiymlzos0c1t9AhNi0w8gN9+ZbCVLdEE0VDOsI22WYa6yj+pYOhpZzI/mOzCGmk/o1eNjLMkfjWli6aRVg==",
- "dev": true,
- "hasInstallScript": true,
- "license": "Apache-2.0",
- "peer": true,
- "bin": {
- "workerd": "bin/workerd"
- },
- "engines": {
- "node": ">=16"
- },
- "optionalDependencies": {
- "@cloudflare/workerd-darwin-64": "1.20251109.0",
- "@cloudflare/workerd-darwin-arm64": "1.20251109.0",
- "@cloudflare/workerd-linux-64": "1.20251109.0",
- "@cloudflare/workerd-linux-arm64": "1.20251109.0",
- "@cloudflare/workerd-windows-64": "1.20251109.0"
- }
- },
- "node_modules/wrangler": {
- "version": "4.47.0",
- "resolved": "https://registry.npmjs.org/wrangler/-/wrangler-4.47.0.tgz",
- "integrity": "sha512-JP0U8oqUETK9D+ZbrSjFFOxGdufYsS6HsT0vLU1IAQrban9a6woMHdBZlGNn/lt8QA70xv1uFiJK8DUMPzC73A==",
- "dev": true,
- "license": "MIT OR Apache-2.0",
- "dependencies": {
- "@cloudflare/kv-asset-handler": "0.4.0",
- "@cloudflare/unenv-preset": "2.7.10",
- "blake3-wasm": "2.1.5",
- "esbuild": "0.25.4",
- "miniflare": "4.20251109.0",
- "path-to-regexp": "6.3.0",
- "unenv": "2.0.0-rc.24",
- "workerd": "1.20251109.0"
- },
- "bin": {
- "wrangler": "bin/wrangler.js",
- "wrangler2": "bin/wrangler.js"
- },
- "engines": {
- "node": ">=18.0.0"
- },
- "optionalDependencies": {
- "fsevents": "~2.3.2"
- },
- "peerDependencies": {
- "@cloudflare/workers-types": "^4.20251109.0"
- },
- "peerDependenciesMeta": {
- "@cloudflare/workers-types": {
- "optional": true
- }
- }
- },
- "node_modules/ws": {
- "version": "8.18.0",
- "resolved": "https://registry.npmjs.org/ws/-/ws-8.18.0.tgz",
- "integrity": "sha512-8VbfWfHLbbwu3+N6OKsOMpBdT4kXPDDB9cJk2bJ6mh9ucxdlnNvH1e+roYkKmN9Nxw2yjz7VzeO9oOz2zJ04Pw==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=10.0.0"
- },
- "peerDependencies": {
- "bufferutil": "^4.0.1",
- "utf-8-validate": ">=5.0.2"
- },
- "peerDependenciesMeta": {
- "bufferutil": {
- "optional": true
- },
- "utf-8-validate": {
- "optional": true
- }
- }
- },
- "node_modules/youch": {
- "version": "4.1.0-beta.10",
- "resolved": "https://registry.npmjs.org/youch/-/youch-4.1.0-beta.10.tgz",
- "integrity": "sha512-rLfVLB4FgQneDr0dv1oddCVZmKjcJ6yX6mS4pU82Mq/Dt9a3cLZQ62pDBL4AUO+uVrCvtWz3ZFUL2HFAFJ/BXQ==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "@poppinss/colors": "^4.1.5",
- "@poppinss/dumper": "^0.6.4",
- "@speed-highlight/core": "^1.2.7",
- "cookie": "^1.0.2",
- "youch-core": "^0.3.3"
- }
- },
- "node_modules/youch-core": {
- "version": "0.3.3",
- "resolved": "https://registry.npmjs.org/youch-core/-/youch-core-0.3.3.tgz",
- "integrity": "sha512-ho7XuGjLaJ2hWHoK8yFnsUGy2Y5uDpqSTq1FkHLK4/oqKtyUU1AFbOOxY4IpC9f0fTLjwYbslUz0Po5BpD1wrA==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "@poppinss/exception": "^1.2.2",
- "error-stack-parser-es": "^1.0.5"
- }
- },
- "node_modules/zod": {
- "version": "3.25.76",
- "resolved": "https://registry.npmjs.org/zod/-/zod-3.25.76.tgz",
- "integrity": "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ==",
- "license": "MIT",
- "funding": {
- "url": "https://github.com/sponsors/colinhacks"
- }
- }
- }
-}
diff --git a/worker/package.json b/worker/package.json
deleted file mode 100644
index 430b1974..00000000
--- a/worker/package.json
+++ /dev/null
@@ -1,36 +0,0 @@
-{
- "name": "gsd-sync-worker",
- "version": "1.2.0",
- "description": "Cloudflare Worker for GSD Task Manager sync backend",
- "main": "src/index.ts",
- "scripts": {
- "dev": "wrangler dev",
- "deploy": "wrangler deploy --env development",
- "deploy:staging": "wrangler deploy --env staging",
- "deploy:production": "wrangler deploy --env production",
- "deploy:all": "bash deploy-all-envs.sh",
- "setup:all": "bash setup-all-envs.sh",
- "db:migrations:create": "wrangler d1 migrations create gsd-sync",
- "db:migrations:apply": "wrangler d1 migrations apply gsd-sync --local",
- "db:migrations:apply:remote": "wrangler d1 migrations apply gsd-sync --remote",
- "tail": "wrangler tail",
- "tail:staging": "wrangler tail --env staging",
- "tail:production": "wrangler tail --env production",
- "typecheck": "tsc --noEmit",
- "test": "vitest run",
- "test:watch": "vitest"
- },
- "dependencies": {
- "itty-router": "^5.0.22",
- "jose": "^5.10.0",
- "zod": "^4.3.6"
- },
- "devDependencies": {
- "@cloudflare/workers-types": "^4.20260124.0",
- "@types/node": "^25.0.10",
- "typescript": "^5.9.3",
- "vitest": "^4.0.18",
- "wrangler": "^4.60.0"
- },
- "license": "MIT"
-}
diff --git a/worker/pnpm-lock.yaml b/worker/pnpm-lock.yaml
deleted file mode 100644
index 0c8355a9..00000000
--- a/worker/pnpm-lock.yaml
+++ /dev/null
@@ -1,1780 +0,0 @@
-lockfileVersion: '9.0'
-
-settings:
- autoInstallPeers: true
- excludeLinksFromLockfile: false
-
-importers:
-
- .:
- dependencies:
- itty-router:
- specifier: ^5.0.18
- version: 5.0.22
- jose:
- specifier: ^5.10.0
- version: 5.10.0
- zod:
- specifier: ^4.3.5
- version: 4.3.5
- devDependencies:
- '@cloudflare/workers-types':
- specifier: ^4.20250110.0
- version: 4.20260103.0
- '@types/node':
- specifier: ^25.0.3
- version: 25.0.3
- typescript:
- specifier: ^5.9.3
- version: 5.9.3
- vitest:
- specifier: ^4.0.16
- version: 4.0.16(@types/node@25.0.3)
- wrangler:
- specifier: ^4.43.0
- version: 4.54.0(@cloudflare/workers-types@4.20260103.0)
-
-packages:
-
- '@cloudflare/kv-asset-handler@0.4.1':
- resolution: {integrity: sha512-Nu8ahitGFFJztxUml9oD/DLb7Z28C8cd8F46IVQ7y5Btz575pvMY8AqZsXkX7Gds29eCKdMgIHjIvzskHgPSFg==}
- engines: {node: '>=18.0.0'}
-
- '@cloudflare/unenv-preset@2.7.13':
- resolution: {integrity: sha512-NulO1H8R/DzsJguLC0ndMuk4Ufv0KSlN+E54ay9rn9ZCQo0kpAPwwh3LhgpZ96a3Dr6L9LqW57M4CqC34iLOvw==}
- peerDependencies:
- unenv: 2.0.0-rc.24
- workerd: ^1.20251202.0
- peerDependenciesMeta:
- workerd:
- optional: true
-
- '@cloudflare/workerd-darwin-64@1.20251210.0':
- resolution: {integrity: sha512-Nn9X1moUDERA9xtFdCQ2XpQXgAS9pOjiCxvOT8sVx9UJLAiBLkfSCGbpsYdarODGybXCpjRlc77Yppuolvt7oQ==}
- engines: {node: '>=16'}
- cpu: [x64]
- os: [darwin]
-
- '@cloudflare/workerd-darwin-arm64@1.20251210.0':
- resolution: {integrity: sha512-Mg8iYIZQFnbevq/ls9eW/eneWTk/EE13Pej1MwfkY5et0jVpdHnvOLywy/o+QtMJFef1AjsqXGULwAneYyBfHw==}
- engines: {node: '>=16'}
- cpu: [arm64]
- os: [darwin]
-
- '@cloudflare/workerd-linux-64@1.20251210.0':
- resolution: {integrity: sha512-kjC2fCZhZ2Gkm1biwk2qByAYpGguK5Gf5ic8owzSCUw0FOUfQxTZUT9Lp3gApxsfTLbbnLBrX/xzWjywH9QR4g==}
- engines: {node: '>=16'}
- cpu: [x64]
- os: [linux]
-
- '@cloudflare/workerd-linux-arm64@1.20251210.0':
- resolution: {integrity: sha512-2IB37nXi7PZVQLa1OCuO7/6pNxqisRSO8DmCQ5x/3sezI5op1vwOxAcb1osAnuVsVN9bbvpw70HJvhKruFJTuA==}
- engines: {node: '>=16'}
- cpu: [arm64]
- os: [linux]
-
- '@cloudflare/workerd-windows-64@1.20251210.0':
- resolution: {integrity: sha512-Uaz6/9XE+D6E7pCY4OvkCuJHu7HcSDzeGcCGY1HLhojXhHd7yL52c3yfiyJdS8hPatiAa0nn5qSI/42+aTdDSw==}
- engines: {node: '>=16'}
- cpu: [x64]
- os: [win32]
-
- '@cloudflare/workers-types@4.20260103.0':
- resolution: {integrity: sha512-jANmoGpJcXARnwlkvrQOeWyjYD1quTfHcs+++Z544XRHOSfLc4XSlts7snIhbiIGgA5bo66zDhraF+9lKUr2hw==}
-
- '@cspotcode/source-map-support@0.8.1':
- resolution: {integrity: sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==}
- engines: {node: '>=12'}
-
- '@emnapi/runtime@1.8.1':
- resolution: {integrity: sha512-mehfKSMWjjNol8659Z8KxEMrdSJDDot5SXMq00dM8BN4o+CLNXQ0xH2V7EchNHV4RmbZLmmPdEaXZc5H2FXmDg==}
-
- '@esbuild/aix-ppc64@0.27.0':
- resolution: {integrity: sha512-KuZrd2hRjz01y5JK9mEBSD3Vj3mbCvemhT466rSuJYeE/hjuBrHfjjcjMdTm/sz7au+++sdbJZJmuBwQLuw68A==}
- engines: {node: '>=18'}
- cpu: [ppc64]
- os: [aix]
-
- '@esbuild/aix-ppc64@0.27.2':
- resolution: {integrity: sha512-GZMB+a0mOMZs4MpDbj8RJp4cw+w1WV5NYD6xzgvzUJ5Ek2jerwfO2eADyI6ExDSUED+1X8aMbegahsJi+8mgpw==}
- engines: {node: '>=18'}
- cpu: [ppc64]
- os: [aix]
-
- '@esbuild/android-arm64@0.27.0':
- resolution: {integrity: sha512-CC3vt4+1xZrs97/PKDkl0yN7w8edvU2vZvAFGD16n9F0Cvniy5qvzRXjfO1l94efczkkQE6g1x0i73Qf5uthOQ==}
- engines: {node: '>=18'}
- cpu: [arm64]
- os: [android]
-
- '@esbuild/android-arm64@0.27.2':
- resolution: {integrity: sha512-pvz8ZZ7ot/RBphf8fv60ljmaoydPU12VuXHImtAs0XhLLw+EXBi2BLe3OYSBslR4rryHvweW5gmkKFwTiFy6KA==}
- engines: {node: '>=18'}
- cpu: [arm64]
- os: [android]
-
- '@esbuild/android-arm@0.27.0':
- resolution: {integrity: sha512-j67aezrPNYWJEOHUNLPj9maeJte7uSMM6gMoxfPC9hOg8N02JuQi/T7ewumf4tNvJadFkvLZMlAq73b9uwdMyQ==}
- engines: {node: '>=18'}
- cpu: [arm]
- os: [android]
-
- '@esbuild/android-arm@0.27.2':
- resolution: {integrity: sha512-DVNI8jlPa7Ujbr1yjU2PfUSRtAUZPG9I1RwW4F4xFB1Imiu2on0ADiI/c3td+KmDtVKNbi+nffGDQMfcIMkwIA==}
- engines: {node: '>=18'}
- cpu: [arm]
- os: [android]
-
- '@esbuild/android-x64@0.27.0':
- resolution: {integrity: sha512-wurMkF1nmQajBO1+0CJmcN17U4BP6GqNSROP8t0X/Jiw2ltYGLHpEksp9MpoBqkrFR3kv2/te6Sha26k3+yZ9Q==}
- engines: {node: '>=18'}
- cpu: [x64]
- os: [android]
-
- '@esbuild/android-x64@0.27.2':
- resolution: {integrity: sha512-z8Ank4Byh4TJJOh4wpz8g2vDy75zFL0TlZlkUkEwYXuPSgX8yzep596n6mT7905kA9uHZsf/o2OJZubl2l3M7A==}
- engines: {node: '>=18'}
- cpu: [x64]
- os: [android]
-
- '@esbuild/darwin-arm64@0.27.0':
- resolution: {integrity: sha512-uJOQKYCcHhg07DL7i8MzjvS2LaP7W7Pn/7uA0B5S1EnqAirJtbyw4yC5jQ5qcFjHK9l6o/MX9QisBg12kNkdHg==}
- engines: {node: '>=18'}
- cpu: [arm64]
- os: [darwin]
-
- '@esbuild/darwin-arm64@0.27.2':
- resolution: {integrity: sha512-davCD2Zc80nzDVRwXTcQP/28fiJbcOwvdolL0sOiOsbwBa72kegmVU0Wrh1MYrbuCL98Omp5dVhQFWRKR2ZAlg==}
- engines: {node: '>=18'}
- cpu: [arm64]
- os: [darwin]
-
- '@esbuild/darwin-x64@0.27.0':
- resolution: {integrity: sha512-8mG6arH3yB/4ZXiEnXof5MK72dE6zM9cDvUcPtxhUZsDjESl9JipZYW60C3JGreKCEP+p8P/72r69m4AZGJd5g==}
- engines: {node: '>=18'}
- cpu: [x64]
- os: [darwin]
-
- '@esbuild/darwin-x64@0.27.2':
- resolution: {integrity: sha512-ZxtijOmlQCBWGwbVmwOF/UCzuGIbUkqB1faQRf5akQmxRJ1ujusWsb3CVfk/9iZKr2L5SMU5wPBi1UWbvL+VQA==}
- engines: {node: '>=18'}
- cpu: [x64]
- os: [darwin]
-
- '@esbuild/freebsd-arm64@0.27.0':
- resolution: {integrity: sha512-9FHtyO988CwNMMOE3YIeci+UV+x5Zy8fI2qHNpsEtSF83YPBmE8UWmfYAQg6Ux7Gsmd4FejZqnEUZCMGaNQHQw==}
- engines: {node: '>=18'}
- cpu: [arm64]
- os: [freebsd]
-
- '@esbuild/freebsd-arm64@0.27.2':
- resolution: {integrity: sha512-lS/9CN+rgqQ9czogxlMcBMGd+l8Q3Nj1MFQwBZJyoEKI50XGxwuzznYdwcav6lpOGv5BqaZXqvBSiB/kJ5op+g==}
- engines: {node: '>=18'}
- cpu: [arm64]
- os: [freebsd]
-
- '@esbuild/freebsd-x64@0.27.0':
- resolution: {integrity: sha512-zCMeMXI4HS/tXvJz8vWGexpZj2YVtRAihHLk1imZj4efx1BQzN76YFeKqlDr3bUWI26wHwLWPd3rwh6pe4EV7g==}
- engines: {node: '>=18'}
- cpu: [x64]
- os: [freebsd]
-
- '@esbuild/freebsd-x64@0.27.2':
- resolution: {integrity: sha512-tAfqtNYb4YgPnJlEFu4c212HYjQWSO/w/h/lQaBK7RbwGIkBOuNKQI9tqWzx7Wtp7bTPaGC6MJvWI608P3wXYA==}
- engines: {node: '>=18'}
- cpu: [x64]
- os: [freebsd]
-
- '@esbuild/linux-arm64@0.27.0':
- resolution: {integrity: sha512-AS18v0V+vZiLJyi/4LphvBE+OIX682Pu7ZYNsdUHyUKSoRwdnOsMf6FDekwoAFKej14WAkOef3zAORJgAtXnlQ==}
- engines: {node: '>=18'}
- cpu: [arm64]
- os: [linux]
-
- '@esbuild/linux-arm64@0.27.2':
- resolution: {integrity: sha512-hYxN8pr66NsCCiRFkHUAsxylNOcAQaxSSkHMMjcpx0si13t1LHFphxJZUiGwojB1a/Hd5OiPIqDdXONia6bhTw==}
- engines: {node: '>=18'}
- cpu: [arm64]
- os: [linux]
-
- '@esbuild/linux-arm@0.27.0':
- resolution: {integrity: sha512-t76XLQDpxgmq2cNXKTVEB7O7YMb42atj2Re2Haf45HkaUpjM2J0UuJZDuaGbPbamzZ7bawyGFUkodL+zcE+jvQ==}
- engines: {node: '>=18'}
- cpu: [arm]
- os: [linux]
-
- '@esbuild/linux-arm@0.27.2':
- resolution: {integrity: sha512-vWfq4GaIMP9AIe4yj1ZUW18RDhx6EPQKjwe7n8BbIecFtCQG4CfHGaHuh7fdfq+y3LIA2vGS/o9ZBGVxIDi9hw==}
- engines: {node: '>=18'}
- cpu: [arm]
- os: [linux]
-
- '@esbuild/linux-ia32@0.27.0':
- resolution: {integrity: sha512-Mz1jxqm/kfgKkc/KLHC5qIujMvnnarD9ra1cEcrs7qshTUSksPihGrWHVG5+osAIQ68577Zpww7SGapmzSt4Nw==}
- engines: {node: '>=18'}
- cpu: [ia32]
- os: [linux]
-
- '@esbuild/linux-ia32@0.27.2':
- resolution: {integrity: sha512-MJt5BRRSScPDwG2hLelYhAAKh9imjHK5+NE/tvnRLbIqUWa+0E9N4WNMjmp/kXXPHZGqPLxggwVhz7QP8CTR8w==}
- engines: {node: '>=18'}
- cpu: [ia32]
- os: [linux]
-
- '@esbuild/linux-loong64@0.27.0':
- resolution: {integrity: sha512-QbEREjdJeIreIAbdG2hLU1yXm1uu+LTdzoq1KCo4G4pFOLlvIspBm36QrQOar9LFduavoWX2msNFAAAY9j4BDg==}
- engines: {node: '>=18'}
- cpu: [loong64]
- os: [linux]
-
- '@esbuild/linux-loong64@0.27.2':
- resolution: {integrity: sha512-lugyF1atnAT463aO6KPshVCJK5NgRnU4yb3FUumyVz+cGvZbontBgzeGFO1nF+dPueHD367a2ZXe1NtUkAjOtg==}
- engines: {node: '>=18'}
- cpu: [loong64]
- os: [linux]
-
- '@esbuild/linux-mips64el@0.27.0':
- resolution: {integrity: sha512-sJz3zRNe4tO2wxvDpH/HYJilb6+2YJxo/ZNbVdtFiKDufzWq4JmKAiHy9iGoLjAV7r/W32VgaHGkk35cUXlNOg==}
- engines: {node: '>=18'}
- cpu: [mips64el]
- os: [linux]
-
- '@esbuild/linux-mips64el@0.27.2':
- resolution: {integrity: sha512-nlP2I6ArEBewvJ2gjrrkESEZkB5mIoaTswuqNFRv/WYd+ATtUpe9Y09RnJvgvdag7he0OWgEZWhviS1OTOKixw==}
- engines: {node: '>=18'}
- cpu: [mips64el]
- os: [linux]
-
- '@esbuild/linux-ppc64@0.27.0':
- resolution: {integrity: sha512-z9N10FBD0DCS2dmSABDBb5TLAyF1/ydVb+N4pi88T45efQ/w4ohr/F/QYCkxDPnkhkp6AIpIcQKQ8F0ANoA2JA==}
- engines: {node: '>=18'}
- cpu: [ppc64]
- os: [linux]
-
- '@esbuild/linux-ppc64@0.27.2':
- resolution: {integrity: sha512-C92gnpey7tUQONqg1n6dKVbx3vphKtTHJaNG2Ok9lGwbZil6DrfyecMsp9CrmXGQJmZ7iiVXvvZH6Ml5hL6XdQ==}
- engines: {node: '>=18'}
- cpu: [ppc64]
- os: [linux]
-
- '@esbuild/linux-riscv64@0.27.0':
- resolution: {integrity: sha512-pQdyAIZ0BWIC5GyvVFn5awDiO14TkT/19FTmFcPdDec94KJ1uZcmFs21Fo8auMXzD4Tt+diXu1LW1gHus9fhFQ==}
- engines: {node: '>=18'}
- cpu: [riscv64]
- os: [linux]
-
- '@esbuild/linux-riscv64@0.27.2':
- resolution: {integrity: sha512-B5BOmojNtUyN8AXlK0QJyvjEZkWwy/FKvakkTDCziX95AowLZKR6aCDhG7LeF7uMCXEJqwa8Bejz5LTPYm8AvA==}
- engines: {node: '>=18'}
- cpu: [riscv64]
- os: [linux]
-
- '@esbuild/linux-s390x@0.27.0':
- resolution: {integrity: sha512-hPlRWR4eIDDEci953RI1BLZitgi5uqcsjKMxwYfmi4LcwyWo2IcRP+lThVnKjNtk90pLS8nKdroXYOqW+QQH+w==}
- engines: {node: '>=18'}
- cpu: [s390x]
- os: [linux]
-
- '@esbuild/linux-s390x@0.27.2':
- resolution: {integrity: sha512-p4bm9+wsPwup5Z8f4EpfN63qNagQ47Ua2znaqGH6bqLlmJ4bx97Y9JdqxgGZ6Y8xVTixUnEkoKSHcpRlDnNr5w==}
- engines: {node: '>=18'}
- cpu: [s390x]
- os: [linux]
-
- '@esbuild/linux-x64@0.27.0':
- resolution: {integrity: sha512-1hBWx4OUJE2cab++aVZ7pObD6s+DK4mPGpemtnAORBvb5l/g5xFGk0vc0PjSkrDs0XaXj9yyob3d14XqvnQ4gw==}
- engines: {node: '>=18'}
- cpu: [x64]
- os: [linux]
-
- '@esbuild/linux-x64@0.27.2':
- resolution: {integrity: sha512-uwp2Tip5aPmH+NRUwTcfLb+W32WXjpFejTIOWZFw/v7/KnpCDKG66u4DLcurQpiYTiYwQ9B7KOeMJvLCu/OvbA==}
- engines: {node: '>=18'}
- cpu: [x64]
- os: [linux]
-
- '@esbuild/netbsd-arm64@0.27.0':
- resolution: {integrity: sha512-6m0sfQfxfQfy1qRuecMkJlf1cIzTOgyaeXaiVaaki8/v+WB+U4hc6ik15ZW6TAllRlg/WuQXxWj1jx6C+dfy3w==}
- engines: {node: '>=18'}
- cpu: [arm64]
- os: [netbsd]
-
- '@esbuild/netbsd-arm64@0.27.2':
- resolution: {integrity: sha512-Kj6DiBlwXrPsCRDeRvGAUb/LNrBASrfqAIok+xB0LxK8CHqxZ037viF13ugfsIpePH93mX7xfJp97cyDuTZ3cw==}
- engines: {node: '>=18'}
- cpu: [arm64]
- os: [netbsd]
-
- '@esbuild/netbsd-x64@0.27.0':
- resolution: {integrity: sha512-xbbOdfn06FtcJ9d0ShxxvSn2iUsGd/lgPIO2V3VZIPDbEaIj1/3nBBe1AwuEZKXVXkMmpr6LUAgMkLD/4D2PPA==}
- engines: {node: '>=18'}
- cpu: [x64]
- os: [netbsd]
-
- '@esbuild/netbsd-x64@0.27.2':
- resolution: {integrity: sha512-HwGDZ0VLVBY3Y+Nw0JexZy9o/nUAWq9MlV7cahpaXKW6TOzfVno3y3/M8Ga8u8Yr7GldLOov27xiCnqRZf0tCA==}
- engines: {node: '>=18'}
- cpu: [x64]
- os: [netbsd]
-
- '@esbuild/openbsd-arm64@0.27.0':
- resolution: {integrity: sha512-fWgqR8uNbCQ/GGv0yhzttj6sU/9Z5/Sv/VGU3F5OuXK6J6SlriONKrQ7tNlwBrJZXRYk5jUhuWvF7GYzGguBZQ==}
- engines: {node: '>=18'}
- cpu: [arm64]
- os: [openbsd]
-
- '@esbuild/openbsd-arm64@0.27.2':
- resolution: {integrity: sha512-DNIHH2BPQ5551A7oSHD0CKbwIA/Ox7+78/AWkbS5QoRzaqlev2uFayfSxq68EkonB+IKjiuxBFoV8ESJy8bOHA==}
- engines: {node: '>=18'}
- cpu: [arm64]
- os: [openbsd]
-
- '@esbuild/openbsd-x64@0.27.0':
- resolution: {integrity: sha512-aCwlRdSNMNxkGGqQajMUza6uXzR/U0dIl1QmLjPtRbLOx3Gy3otfFu/VjATy4yQzo9yFDGTxYDo1FfAD9oRD2A==}
- engines: {node: '>=18'}
- cpu: [x64]
- os: [openbsd]
-
- '@esbuild/openbsd-x64@0.27.2':
- resolution: {integrity: sha512-/it7w9Nb7+0KFIzjalNJVR5bOzA9Vay+yIPLVHfIQYG/j+j9VTH84aNB8ExGKPU4AzfaEvN9/V4HV+F+vo8OEg==}
- engines: {node: '>=18'}
- cpu: [x64]
- os: [openbsd]
-
- '@esbuild/openharmony-arm64@0.27.0':
- resolution: {integrity: sha512-nyvsBccxNAsNYz2jVFYwEGuRRomqZ149A39SHWk4hV0jWxKM0hjBPm3AmdxcbHiFLbBSwG6SbpIcUbXjgyECfA==}
- engines: {node: '>=18'}
- cpu: [arm64]
- os: [openharmony]
-
- '@esbuild/openharmony-arm64@0.27.2':
- resolution: {integrity: sha512-LRBbCmiU51IXfeXk59csuX/aSaToeG7w48nMwA6049Y4J4+VbWALAuXcs+qcD04rHDuSCSRKdmY63sruDS5qag==}
- engines: {node: '>=18'}
- cpu: [arm64]
- os: [openharmony]
-
- '@esbuild/sunos-x64@0.27.0':
- resolution: {integrity: sha512-Q1KY1iJafM+UX6CFEL+F4HRTgygmEW568YMqDA5UV97AuZSm21b7SXIrRJDwXWPzr8MGr75fUZPV67FdtMHlHA==}
- engines: {node: '>=18'}
- cpu: [x64]
- os: [sunos]
-
- '@esbuild/sunos-x64@0.27.2':
- resolution: {integrity: sha512-kMtx1yqJHTmqaqHPAzKCAkDaKsffmXkPHThSfRwZGyuqyIeBvf08KSsYXl+abf5HDAPMJIPnbBfXvP2ZC2TfHg==}
- engines: {node: '>=18'}
- cpu: [x64]
- os: [sunos]
-
- '@esbuild/win32-arm64@0.27.0':
- resolution: {integrity: sha512-W1eyGNi6d+8kOmZIwi/EDjrL9nxQIQ0MiGqe/AWc6+IaHloxHSGoeRgDRKHFISThLmsewZ5nHFvGFWdBYlgKPg==}
- engines: {node: '>=18'}
- cpu: [arm64]
- os: [win32]
-
- '@esbuild/win32-arm64@0.27.2':
- resolution: {integrity: sha512-Yaf78O/B3Kkh+nKABUF++bvJv5Ijoy9AN1ww904rOXZFLWVc5OLOfL56W+C8F9xn5JQZa3UX6m+IktJnIb1Jjg==}
- engines: {node: '>=18'}
- cpu: [arm64]
- os: [win32]
-
- '@esbuild/win32-ia32@0.27.0':
- resolution: {integrity: sha512-30z1aKL9h22kQhilnYkORFYt+3wp7yZsHWus+wSKAJR8JtdfI76LJ4SBdMsCopTR3z/ORqVu5L1vtnHZWVj4cQ==}
- engines: {node: '>=18'}
- cpu: [ia32]
- os: [win32]
-
- '@esbuild/win32-ia32@0.27.2':
- resolution: {integrity: sha512-Iuws0kxo4yusk7sw70Xa2E2imZU5HoixzxfGCdxwBdhiDgt9vX9VUCBhqcwY7/uh//78A1hMkkROMJq9l27oLQ==}
- engines: {node: '>=18'}
- cpu: [ia32]
- os: [win32]
-
- '@esbuild/win32-x64@0.27.0':
- resolution: {integrity: sha512-aIitBcjQeyOhMTImhLZmtxfdOcuNRpwlPNmlFKPcHQYPhEssw75Cl1TSXJXpMkzaua9FUetx/4OQKq7eJul5Cg==}
- engines: {node: '>=18'}
- cpu: [x64]
- os: [win32]
-
- '@esbuild/win32-x64@0.27.2':
- resolution: {integrity: sha512-sRdU18mcKf7F+YgheI/zGf5alZatMUTKj/jNS6l744f9u3WFu4v7twcUI9vu4mknF4Y9aDlblIie0IM+5xxaqQ==}
- engines: {node: '>=18'}
- cpu: [x64]
- os: [win32]
-
- '@img/sharp-darwin-arm64@0.33.5':
- resolution: {integrity: sha512-UT4p+iz/2H4twwAoLCqfA9UH5pI6DggwKEGuaPy7nCVQ8ZsiY5PIcrRvD1DzuY3qYL07NtIQcWnBSY/heikIFQ==}
- engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
- cpu: [arm64]
- os: [darwin]
-
- '@img/sharp-darwin-x64@0.33.5':
- resolution: {integrity: sha512-fyHac4jIc1ANYGRDxtiqelIbdWkIuQaI84Mv45KvGRRxSAa7o7d1ZKAOBaYbnepLC1WqxfpimdeWfvqqSGwR2Q==}
- engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
- cpu: [x64]
- os: [darwin]
-
- '@img/sharp-libvips-darwin-arm64@1.0.4':
- resolution: {integrity: sha512-XblONe153h0O2zuFfTAbQYAX2JhYmDHeWikp1LM9Hul9gVPjFY427k6dFEcOL72O01QxQsWi761svJ/ev9xEDg==}
- cpu: [arm64]
- os: [darwin]
-
- '@img/sharp-libvips-darwin-x64@1.0.4':
- resolution: {integrity: sha512-xnGR8YuZYfJGmWPvmlunFaWJsb9T/AO2ykoP3Fz/0X5XV2aoYBPkX6xqCQvUTKKiLddarLaxpzNe+b1hjeWHAQ==}
- cpu: [x64]
- os: [darwin]
-
- '@img/sharp-libvips-linux-arm64@1.0.4':
- resolution: {integrity: sha512-9B+taZ8DlyyqzZQnoeIvDVR/2F4EbMepXMc/NdVbkzsJbzkUjhXv/70GQJ7tdLA4YJgNP25zukcxpX2/SueNrA==}
- cpu: [arm64]
- os: [linux]
-
- '@img/sharp-libvips-linux-arm@1.0.5':
- resolution: {integrity: sha512-gvcC4ACAOPRNATg/ov8/MnbxFDJqf/pDePbBnuBDcjsI8PssmjoKMAz4LtLaVi+OnSb5FK/yIOamqDwGmXW32g==}
- cpu: [arm]
- os: [linux]
-
- '@img/sharp-libvips-linux-s390x@1.0.4':
- resolution: {integrity: sha512-u7Wz6ntiSSgGSGcjZ55im6uvTrOxSIS8/dgoVMoiGE9I6JAfU50yH5BoDlYA1tcuGS7g/QNtetJnxA6QEsCVTA==}
- cpu: [s390x]
- os: [linux]
-
- '@img/sharp-libvips-linux-x64@1.0.4':
- resolution: {integrity: sha512-MmWmQ3iPFZr0Iev+BAgVMb3ZyC4KeFc3jFxnNbEPas60e1cIfevbtuyf9nDGIzOaW9PdnDciJm+wFFaTlj5xYw==}
- cpu: [x64]
- os: [linux]
-
- '@img/sharp-libvips-linuxmusl-arm64@1.0.4':
- resolution: {integrity: sha512-9Ti+BbTYDcsbp4wfYib8Ctm1ilkugkA/uscUn6UXK1ldpC1JjiXbLfFZtRlBhjPZ5o1NCLiDbg8fhUPKStHoTA==}
- cpu: [arm64]
- os: [linux]
-
- '@img/sharp-libvips-linuxmusl-x64@1.0.4':
- resolution: {integrity: sha512-viYN1KX9m+/hGkJtvYYp+CCLgnJXwiQB39damAO7WMdKWlIhmYTfHjwSbQeUK/20vY154mwezd9HflVFM1wVSw==}
- cpu: [x64]
- os: [linux]
-
- '@img/sharp-linux-arm64@0.33.5':
- resolution: {integrity: sha512-JMVv+AMRyGOHtO1RFBiJy/MBsgz0x4AWrT6QoEVVTyh1E39TrCUpTRI7mx9VksGX4awWASxqCYLCV4wBZHAYxA==}
- engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
- cpu: [arm64]
- os: [linux]
-
- '@img/sharp-linux-arm@0.33.5':
- resolution: {integrity: sha512-JTS1eldqZbJxjvKaAkxhZmBqPRGmxgu+qFKSInv8moZ2AmT5Yib3EQ1c6gp493HvrvV8QgdOXdyaIBrhvFhBMQ==}
- engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
- cpu: [arm]
- os: [linux]
-
- '@img/sharp-linux-s390x@0.33.5':
- resolution: {integrity: sha512-y/5PCd+mP4CA/sPDKl2961b+C9d+vPAveS33s6Z3zfASk2j5upL6fXVPZi7ztePZ5CuH+1kW8JtvxgbuXHRa4Q==}
- engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
- cpu: [s390x]
- os: [linux]
-
- '@img/sharp-linux-x64@0.33.5':
- resolution: {integrity: sha512-opC+Ok5pRNAzuvq1AG0ar+1owsu842/Ab+4qvU879ippJBHvyY5n2mxF1izXqkPYlGuP/M556uh53jRLJmzTWA==}
- engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
- cpu: [x64]
- os: [linux]
-
- '@img/sharp-linuxmusl-arm64@0.33.5':
- resolution: {integrity: sha512-XrHMZwGQGvJg2V/oRSUfSAfjfPxO+4DkiRh6p2AFjLQztWUuY/o8Mq0eMQVIY7HJ1CDQUJlxGGZRw1a5bqmd1g==}
- engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
- cpu: [arm64]
- os: [linux]
-
- '@img/sharp-linuxmusl-x64@0.33.5':
- resolution: {integrity: sha512-WT+d/cgqKkkKySYmqoZ8y3pxx7lx9vVejxW/W4DOFMYVSkErR+w7mf2u8m/y4+xHe7yY9DAXQMWQhpnMuFfScw==}
- engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
- cpu: [x64]
- os: [linux]
-
- '@img/sharp-wasm32@0.33.5':
- resolution: {integrity: sha512-ykUW4LVGaMcU9lu9thv85CbRMAwfeadCJHRsg2GmeRa/cJxsVY9Rbd57JcMxBkKHag5U/x7TSBpScF4U8ElVzg==}
- engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
- cpu: [wasm32]
-
- '@img/sharp-win32-ia32@0.33.5':
- resolution: {integrity: sha512-T36PblLaTwuVJ/zw/LaH0PdZkRz5rd3SmMHX8GSmR7vtNSP5Z6bQkExdSK7xGWyxLw4sUknBuugTelgw2faBbQ==}
- engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
- cpu: [ia32]
- os: [win32]
-
- '@img/sharp-win32-x64@0.33.5':
- resolution: {integrity: sha512-MpY/o8/8kj+EcnxwvrP4aTJSWw/aZ7JIGR4aBeZkZw5B7/Jn+tY9/VNwtcoGmdT7GfggGIU4kygOMSbYnOrAbg==}
- engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
- cpu: [x64]
- os: [win32]
-
- '@jridgewell/resolve-uri@3.1.2':
- resolution: {integrity: sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==}
- engines: {node: '>=6.0.0'}
-
- '@jridgewell/sourcemap-codec@1.5.5':
- resolution: {integrity: sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==}
-
- '@jridgewell/trace-mapping@0.3.9':
- resolution: {integrity: sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==}
-
- '@poppinss/colors@4.1.6':
- resolution: {integrity: sha512-H9xkIdFswbS8n1d6vmRd8+c10t2Qe+rZITbbDHHkQixH5+2x1FDGmi/0K+WgWiqQFKPSlIYB7jlH6Kpfn6Fleg==}
-
- '@poppinss/dumper@0.6.5':
- resolution: {integrity: sha512-NBdYIb90J7LfOI32dOewKI1r7wnkiH6m920puQ3qHUeZkxNkQiFnXVWoE6YtFSv6QOiPPf7ys6i+HWWecDz7sw==}
-
- '@poppinss/exception@1.2.3':
- resolution: {integrity: sha512-dCED+QRChTVatE9ibtoaxc+WkdzOSjYTKi/+uacHWIsfodVfpsueo3+DKpgU5Px8qXjgmXkSvhXvSCz3fnP9lw==}
-
- '@rollup/rollup-android-arm-eabi@4.54.0':
- resolution: {integrity: sha512-OywsdRHrFvCdvsewAInDKCNyR3laPA2mc9bRYJ6LBp5IyvF3fvXbbNR0bSzHlZVFtn6E0xw2oZlyjg4rKCVcng==}
- cpu: [arm]
- os: [android]
-
- '@rollup/rollup-android-arm64@4.54.0':
- resolution: {integrity: sha512-Skx39Uv+u7H224Af+bDgNinitlmHyQX1K/atIA32JP3JQw6hVODX5tkbi2zof/E69M1qH2UoN3Xdxgs90mmNYw==}
- cpu: [arm64]
- os: [android]
-
- '@rollup/rollup-darwin-arm64@4.54.0':
- resolution: {integrity: sha512-k43D4qta/+6Fq+nCDhhv9yP2HdeKeP56QrUUTW7E6PhZP1US6NDqpJj4MY0jBHlJivVJD5P8NxrjuobZBJTCRw==}
- cpu: [arm64]
- os: [darwin]
-
- '@rollup/rollup-darwin-x64@4.54.0':
- resolution: {integrity: sha512-cOo7biqwkpawslEfox5Vs8/qj83M/aZCSSNIWpVzfU2CYHa2G3P1UN5WF01RdTHSgCkri7XOlTdtk17BezlV3A==}
- cpu: [x64]
- os: [darwin]
-
- '@rollup/rollup-freebsd-arm64@4.54.0':
- resolution: {integrity: sha512-miSvuFkmvFbgJ1BevMa4CPCFt5MPGw094knM64W9I0giUIMMmRYcGW/JWZDriaw/k1kOBtsWh1z6nIFV1vPNtA==}
- cpu: [arm64]
- os: [freebsd]
-
- '@rollup/rollup-freebsd-x64@4.54.0':
- resolution: {integrity: sha512-KGXIs55+b/ZfZsq9aR026tmr/+7tq6VG6MsnrvF4H8VhwflTIuYh+LFUlIsRdQSgrgmtM3fVATzEAj4hBQlaqQ==}
- cpu: [x64]
- os: [freebsd]
-
- '@rollup/rollup-linux-arm-gnueabihf@4.54.0':
- resolution: {integrity: sha512-EHMUcDwhtdRGlXZsGSIuXSYwD5kOT9NVnx9sqzYiwAc91wfYOE1g1djOEDseZJKKqtHAHGwnGPQu3kytmfaXLQ==}
- cpu: [arm]
- os: [linux]
-
- '@rollup/rollup-linux-arm-musleabihf@4.54.0':
- resolution: {integrity: sha512-+pBrqEjaakN2ySv5RVrj/qLytYhPKEUwk+e3SFU5jTLHIcAtqh2rLrd/OkbNuHJpsBgxsD8ccJt5ga/SeG0JmA==}
- cpu: [arm]
- os: [linux]
-
- '@rollup/rollup-linux-arm64-gnu@4.54.0':
- resolution: {integrity: sha512-NSqc7rE9wuUaRBsBp5ckQ5CVz5aIRKCwsoa6WMF7G01sX3/qHUw/z4pv+D+ahL1EIKy6Enpcnz1RY8pf7bjwng==}
- cpu: [arm64]
- os: [linux]
-
- '@rollup/rollup-linux-arm64-musl@4.54.0':
- resolution: {integrity: sha512-gr5vDbg3Bakga5kbdpqx81m2n9IX8M6gIMlQQIXiLTNeQW6CucvuInJ91EuCJ/JYvc+rcLLsDFcfAD1K7fMofg==}
- cpu: [arm64]
- os: [linux]
-
- '@rollup/rollup-linux-loong64-gnu@4.54.0':
- resolution: {integrity: sha512-gsrtB1NA3ZYj2vq0Rzkylo9ylCtW/PhpLEivlgWe0bpgtX5+9j9EZa0wtZiCjgu6zmSeZWyI/e2YRX1URozpIw==}
- cpu: [loong64]
- os: [linux]
-
- '@rollup/rollup-linux-ppc64-gnu@4.54.0':
- resolution: {integrity: sha512-y3qNOfTBStmFNq+t4s7Tmc9hW2ENtPg8FeUD/VShI7rKxNW7O4fFeaYbMsd3tpFlIg1Q8IapFgy7Q9i2BqeBvA==}
- cpu: [ppc64]
- os: [linux]
-
- '@rollup/rollup-linux-riscv64-gnu@4.54.0':
- resolution: {integrity: sha512-89sepv7h2lIVPsFma8iwmccN7Yjjtgz0Rj/Ou6fEqg3HDhpCa+Et+YSufy27i6b0Wav69Qv4WBNl3Rs6pwhebQ==}
- cpu: [riscv64]
- os: [linux]
-
- '@rollup/rollup-linux-riscv64-musl@4.54.0':
- resolution: {integrity: sha512-ZcU77ieh0M2Q8Ur7D5X7KvK+UxbXeDHwiOt/CPSBTI1fBmeDMivW0dPkdqkT4rOgDjrDDBUed9x4EgraIKoR2A==}
- cpu: [riscv64]
- os: [linux]
-
- '@rollup/rollup-linux-s390x-gnu@4.54.0':
- resolution: {integrity: sha512-2AdWy5RdDF5+4YfG/YesGDDtbyJlC9LHmL6rZw6FurBJ5n4vFGupsOBGfwMRjBYH7qRQowT8D/U4LoSvVwOhSQ==}
- cpu: [s390x]
- os: [linux]
-
- '@rollup/rollup-linux-x64-gnu@4.54.0':
- resolution: {integrity: sha512-WGt5J8Ij/rvyqpFexxk3ffKqqbLf9AqrTBbWDk7ApGUzaIs6V+s2s84kAxklFwmMF/vBNGrVdYgbblCOFFezMQ==}
- cpu: [x64]
- os: [linux]
-
- '@rollup/rollup-linux-x64-musl@4.54.0':
- resolution: {integrity: sha512-JzQmb38ATzHjxlPHuTH6tE7ojnMKM2kYNzt44LO/jJi8BpceEC8QuXYA908n8r3CNuG/B3BV8VR3Hi1rYtmPiw==}
- cpu: [x64]
- os: [linux]
-
- '@rollup/rollup-openharmony-arm64@4.54.0':
- resolution: {integrity: sha512-huT3fd0iC7jigGh7n3q/+lfPcXxBi+om/Rs3yiFxjvSxbSB6aohDFXbWvlspaqjeOh+hx7DDHS+5Es5qRkWkZg==}
- cpu: [arm64]
- os: [openharmony]
-
- '@rollup/rollup-win32-arm64-msvc@4.54.0':
- resolution: {integrity: sha512-c2V0W1bsKIKfbLMBu/WGBz6Yci8nJ/ZJdheE0EwB73N3MvHYKiKGs3mVilX4Gs70eGeDaMqEob25Tw2Gb9Nqyw==}
- cpu: [arm64]
- os: [win32]
-
- '@rollup/rollup-win32-ia32-msvc@4.54.0':
- resolution: {integrity: sha512-woEHgqQqDCkAzrDhvDipnSirm5vxUXtSKDYTVpZG3nUdW/VVB5VdCYA2iReSj/u3yCZzXID4kuKG7OynPnB3WQ==}
- cpu: [ia32]
- os: [win32]
-
- '@rollup/rollup-win32-x64-gnu@4.54.0':
- resolution: {integrity: sha512-dzAc53LOuFvHwbCEOS0rPbXp6SIhAf2txMP5p6mGyOXXw5mWY8NGGbPMPrs4P1WItkfApDathBj/NzMLUZ9rtQ==}
- cpu: [x64]
- os: [win32]
-
- '@rollup/rollup-win32-x64-msvc@4.54.0':
- resolution: {integrity: sha512-hYT5d3YNdSh3mbCU1gwQyPgQd3T2ne0A3KG8KSBdav5TiBg6eInVmV+TeR5uHufiIgSFg0XsOWGW5/RhNcSvPg==}
- cpu: [x64]
- os: [win32]
-
- '@sindresorhus/is@7.2.0':
- resolution: {integrity: sha512-P1Cz1dWaFfR4IR+U13mqqiGsLFf1KbayybWwdd2vfctdV6hDpUkgCY0nKOLLTMSoRd/jJNjtbqzf13K8DCCXQw==}
- engines: {node: '>=18'}
-
- '@speed-highlight/core@1.2.14':
- resolution: {integrity: sha512-G4ewlBNhUtlLvrJTb88d2mdy2KRijzs4UhnlrOSRT4bmjh/IqNElZa3zkrZ+TC47TwtlDWzVLFADljF1Ijp5hA==}
-
- '@standard-schema/spec@1.1.0':
- resolution: {integrity: sha512-l2aFy5jALhniG5HgqrD6jXLi/rUWrKvqN/qJx6yoJsgKhblVd+iqqU4RCXavm/jPityDo5TCvKMnpjKnOriy0w==}
-
- '@types/chai@5.2.3':
- resolution: {integrity: sha512-Mw558oeA9fFbv65/y4mHtXDs9bPnFMZAL/jxdPFUpOHHIXX91mcgEHbS5Lahr+pwZFR8A7GQleRWeI6cGFC2UA==}
-
- '@types/deep-eql@4.0.2':
- resolution: {integrity: sha512-c9h9dVVMigMPc4bwTvC5dxqtqJZwQPePsWjPlpSOnojbor6pGqdk541lfA7AqFQr5pB1BRdq0juY9db81BwyFw==}
-
- '@types/estree@1.0.8':
- resolution: {integrity: sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==}
-
- '@types/node@25.0.3':
- resolution: {integrity: sha512-W609buLVRVmeW693xKfzHeIV6nJGGz98uCPfeXI1ELMLXVeKYZ9m15fAMSaUPBHYLGFsVRcMmSCksQOrZV9BYA==}
-
- '@vitest/expect@4.0.16':
- resolution: {integrity: sha512-eshqULT2It7McaJkQGLkPjPjNph+uevROGuIMJdG3V+0BSR2w9u6J9Lwu+E8cK5TETlfou8GRijhafIMhXsimA==}
-
- '@vitest/mocker@4.0.16':
- resolution: {integrity: sha512-yb6k4AZxJTB+q9ycAvsoxGn+j/po0UaPgajllBgt1PzoMAAmJGYFdDk0uCcRcxb3BrME34I6u8gHZTQlkqSZpg==}
- peerDependencies:
- msw: ^2.4.9
- vite: ^6.0.0 || ^7.0.0-0
- peerDependenciesMeta:
- msw:
- optional: true
- vite:
- optional: true
-
- '@vitest/pretty-format@4.0.16':
- resolution: {integrity: sha512-eNCYNsSty9xJKi/UdVD8Ou16alu7AYiS2fCPRs0b1OdhJiV89buAXQLpTbe+X8V9L6qrs9CqyvU7OaAopJYPsA==}
-
- '@vitest/runner@4.0.16':
- resolution: {integrity: sha512-VWEDm5Wv9xEo80ctjORcTQRJ539EGPB3Pb9ApvVRAY1U/WkHXmmYISqU5E79uCwcW7xYUV38gwZD+RV755fu3Q==}
-
- '@vitest/snapshot@4.0.16':
- resolution: {integrity: sha512-sf6NcrYhYBsSYefxnry+DR8n3UV4xWZwWxYbCJUt2YdvtqzSPR7VfGrY0zsv090DAbjFZsi7ZaMi1KnSRyK1XA==}
-
- '@vitest/spy@4.0.16':
- resolution: {integrity: sha512-4jIOWjKP0ZUaEmJm00E0cOBLU+5WE0BpeNr3XN6TEF05ltro6NJqHWxXD0kA8/Zc8Nh23AT8WQxwNG+WeROupw==}
-
- '@vitest/utils@4.0.16':
- resolution: {integrity: sha512-h8z9yYhV3e1LEfaQ3zdypIrnAg/9hguReGZoS7Gl0aBG5xgA410zBqECqmaF/+RkTggRsfnzc1XaAHA6bmUufA==}
-
- acorn-walk@8.3.2:
- resolution: {integrity: sha512-cjkyv4OtNCIeqhHrfS81QWXoCBPExR/J62oyEqepVw8WaQeSqpW2uhuLPh1m9eWhDuOo/jUXVTlifvesOWp/4A==}
- engines: {node: '>=0.4.0'}
-
- acorn@8.14.0:
- resolution: {integrity: sha512-cl669nCJTZBsL97OF4kUQm5g5hC2uihk0NxY3WENAC0TYdILVkAyHymAntgxGkl7K+t0cXIrH5siy5S4XkFycA==}
- engines: {node: '>=0.4.0'}
- hasBin: true
-
- assertion-error@2.0.1:
- resolution: {integrity: sha512-Izi8RQcffqCeNVgFigKli1ssklIbpHnCYc6AknXGYoB6grJqyeby7jv12JUQgmTAnIDnbck1uxksT4dzN3PWBA==}
- engines: {node: '>=12'}
-
- blake3-wasm@2.1.5:
- resolution: {integrity: sha512-F1+K8EbfOZE49dtoPtmxUQrpXaBIl3ICvasLh+nJta0xkz+9kF/7uet9fLnwKqhDrmj6g+6K3Tw9yQPUg2ka5g==}
-
- chai@6.2.2:
- resolution: {integrity: sha512-NUPRluOfOiTKBKvWPtSD4PhFvWCqOi0BGStNWs57X9js7XGTprSmFoz5F0tWhR4WPjNeR9jXqdC7/UpSJTnlRg==}
- engines: {node: '>=18'}
-
- color-convert@2.0.1:
- resolution: {integrity: sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==}
- engines: {node: '>=7.0.0'}
-
- color-name@1.1.4:
- resolution: {integrity: sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==}
-
- color-string@1.9.1:
- resolution: {integrity: sha512-shrVawQFojnZv6xM40anx4CkoDP+fZsw/ZerEMsW/pyzsRbElpsL/DBVW7q3ExxwusdNXI3lXpuhEZkzs8p5Eg==}
-
- color@4.2.3:
- resolution: {integrity: sha512-1rXeuUUiGGrykh+CeBdu5Ie7OJwinCgQY0bc7GCRxy5xVHy+moaqkpL/jqQq0MtQOeYcrqEz4abc5f0KtU7W4A==}
- engines: {node: '>=12.5.0'}
-
- cookie@1.1.1:
- resolution: {integrity: sha512-ei8Aos7ja0weRpFzJnEA9UHJ/7XQmqglbRwnf2ATjcB9Wq874VKH9kfjjirM6UhU2/E5fFYadylyhFldcqSidQ==}
- engines: {node: '>=18'}
-
- detect-libc@2.1.2:
- resolution: {integrity: sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ==}
- engines: {node: '>=8'}
-
- error-stack-parser-es@1.0.5:
- resolution: {integrity: sha512-5qucVt2XcuGMcEGgWI7i+yZpmpByQ8J1lHhcL7PwqCwu9FPP3VUXzT4ltHe5i2z9dePwEHcDVOAfSnHsOlCXRA==}
-
- es-module-lexer@1.7.0:
- resolution: {integrity: sha512-jEQoCwk8hyb2AZziIOLhDqpm5+2ww5uIE6lkO/6jcOCusfk6LhMHpXXfBLXTZ7Ydyt0j4VoUQv6uGNYbdW+kBA==}
-
- esbuild@0.27.0:
- resolution: {integrity: sha512-jd0f4NHbD6cALCyGElNpGAOtWxSq46l9X/sWB0Nzd5er4Kz2YTm+Vl0qKFT9KUJvD8+fiO8AvoHhFvEatfVixA==}
- engines: {node: '>=18'}
- hasBin: true
-
- esbuild@0.27.2:
- resolution: {integrity: sha512-HyNQImnsOC7X9PMNaCIeAm4ISCQXs5a5YasTXVliKv4uuBo1dKrG0A+uQS8M5eXjVMnLg3WgXaKvprHlFJQffw==}
- engines: {node: '>=18'}
- hasBin: true
-
- estree-walker@3.0.3:
- resolution: {integrity: sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==}
-
- exit-hook@2.2.1:
- resolution: {integrity: sha512-eNTPlAD67BmP31LDINZ3U7HSF8l57TxOY2PmBJ1shpCvpnxBF93mWCE8YHBnXs8qiUZJc9WDcWIeC3a2HIAMfw==}
- engines: {node: '>=6'}
-
- expect-type@1.3.0:
- resolution: {integrity: sha512-knvyeauYhqjOYvQ66MznSMs83wmHrCycNEN6Ao+2AeYEfxUIkuiVxdEa1qlGEPK+We3n0THiDciYSsCcgW/DoA==}
- engines: {node: '>=12.0.0'}
-
- fdir@6.5.0:
- resolution: {integrity: sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==}
- engines: {node: '>=12.0.0'}
- peerDependencies:
- picomatch: ^3 || ^4
- peerDependenciesMeta:
- picomatch:
- optional: true
-
- fsevents@2.3.3:
- resolution: {integrity: sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==}
- engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0}
- os: [darwin]
-
- glob-to-regexp@0.4.1:
- resolution: {integrity: sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw==}
-
- is-arrayish@0.3.4:
- resolution: {integrity: sha512-m6UrgzFVUYawGBh1dUsWR5M2Clqic9RVXC/9f8ceNlv2IcO9j9J/z8UoCLPqtsPBFNzEpfR3xftohbfqDx8EQA==}
-
- itty-router@5.0.22:
- resolution: {integrity: sha512-9hmdGErWdYDOurGYxSbqLhy4EFReIwk71hMZTJ5b+zfa2zjMNV1ftFno2b8VjAQvX615gNB8Qxbl9JMRqHnIVA==}
-
- jose@5.10.0:
- resolution: {integrity: sha512-s+3Al/p9g32Iq+oqXxkW//7jk2Vig6FF1CFqzVXoTUXt2qz89YWbL+OwS17NFYEvxC35n0FKeGO2LGYSxeM2Gg==}
-
- kleur@4.1.5:
- resolution: {integrity: sha512-o+NO+8WrRiQEE4/7nwRJhN1HWpVmJm511pBHUxPLtp0BUISzlBplORYSmTclCnJvQq2tKu/sgl3xVpkc7ZWuQQ==}
- engines: {node: '>=6'}
-
- magic-string@0.30.21:
- resolution: {integrity: sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ==}
-
- mime@3.0.0:
- resolution: {integrity: sha512-jSCU7/VB1loIWBZe14aEYHU/+1UMEHoaO7qxCOVJOw9GgH72VAWppxNcjU+x9a2k3GSIBXNKxXQFqRvvZ7vr3A==}
- engines: {node: '>=10.0.0'}
- hasBin: true
-
- miniflare@4.20251210.0:
- resolution: {integrity: sha512-k6kIoXwGVqlPZb0hcn+X7BmnK+8BjIIkusQPY22kCo2RaQJ/LzAjtxHQdGXerlHSnJyQivDQsL6BJHMpQfUFyw==}
- engines: {node: '>=18.0.0'}
- hasBin: true
-
- nanoid@3.3.11:
- resolution: {integrity: sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==}
- engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1}
- hasBin: true
-
- obug@2.1.1:
- resolution: {integrity: sha512-uTqF9MuPraAQ+IsnPf366RG4cP9RtUi7MLO1N3KEc+wb0a6yKpeL0lmk2IB1jY5KHPAlTc6T/JRdC/YqxHNwkQ==}
-
- path-to-regexp@6.3.0:
- resolution: {integrity: sha512-Yhpw4T9C6hPpgPeA28us07OJeqZ5EzQTkbfwuhsUg0c237RomFoETJgmp2sa3F/41gfLE6G5cqcYwznmeEeOlQ==}
-
- pathe@2.0.3:
- resolution: {integrity: sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==}
-
- picocolors@1.1.1:
- resolution: {integrity: sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==}
-
- picomatch@4.0.3:
- resolution: {integrity: sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==}
- engines: {node: '>=12'}
-
- postcss@8.5.6:
- resolution: {integrity: sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==}
- engines: {node: ^10 || ^12 || >=14}
-
- rollup@4.54.0:
- resolution: {integrity: sha512-3nk8Y3a9Ea8szgKhinMlGMhGMw89mqule3KWczxhIzqudyHdCIOHw8WJlj/r329fACjKLEh13ZSk7oE22kyeIw==}
- engines: {node: '>=18.0.0', npm: '>=8.0.0'}
- hasBin: true
-
- semver@7.7.3:
- resolution: {integrity: sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==}
- engines: {node: '>=10'}
- hasBin: true
-
- sharp@0.33.5:
- resolution: {integrity: sha512-haPVm1EkS9pgvHrQ/F3Xy+hgcuMV0Wm9vfIBSiwZ05k+xgb0PkBQpGsAA/oWdDobNaZTH5ppvHtzCFbnSEwHVw==}
- engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
-
- siginfo@2.0.0:
- resolution: {integrity: sha512-ybx0WO1/8bSBLEWXZvEd7gMW3Sn3JFlW3TvX1nREbDLRNQNaeNN8WK0meBwPdAaOI7TtRRRJn/Es1zhrrCHu7g==}
-
- simple-swizzle@0.2.4:
- resolution: {integrity: sha512-nAu1WFPQSMNr2Zn9PGSZK9AGn4t/y97lEm+MXTtUDwfP0ksAIX4nO+6ruD9Jwut4C49SB1Ws+fbXsm/yScWOHw==}
-
- source-map-js@1.2.1:
- resolution: {integrity: sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==}
- engines: {node: '>=0.10.0'}
-
- stackback@0.0.2:
- resolution: {integrity: sha512-1XMJE5fQo1jGH6Y/7ebnwPOBEkIEnT4QF32d5R1+VXdXveM0IBMJt8zfaxX1P3QhVwrYe+576+jkANtSS2mBbw==}
-
- std-env@3.10.0:
- resolution: {integrity: sha512-5GS12FdOZNliM5mAOxFRg7Ir0pWz8MdpYm6AY6VPkGpbA7ZzmbzNcBJQ0GPvvyWgcY7QAhCgf9Uy89I03faLkg==}
-
- stoppable@1.1.0:
- resolution: {integrity: sha512-KXDYZ9dszj6bzvnEMRYvxgeTHU74QBFL54XKtP3nyMuJ81CFYtABZ3bAzL2EdFUaEwJOBOgENyFj3R7oTzDyyw==}
- engines: {node: '>=4', npm: '>=6'}
-
- supports-color@10.2.2:
- resolution: {integrity: sha512-SS+jx45GF1QjgEXQx4NJZV9ImqmO2NPz5FNsIHrsDjh2YsHnawpan7SNQ1o8NuhrbHZy9AZhIoCUiCeaW/C80g==}
- engines: {node: '>=18'}
-
- tinybench@2.9.0:
- resolution: {integrity: sha512-0+DUvqWMValLmha6lr4kD8iAMK1HzV0/aKnCtWb9v9641TnP/MFb7Pc2bxoxQjTXAErryXVgUOfv2YqNllqGeg==}
-
- tinyexec@1.0.2:
- resolution: {integrity: sha512-W/KYk+NFhkmsYpuHq5JykngiOCnxeVL8v8dFnqxSD8qEEdRfXk1SDM6JzNqcERbcGYj9tMrDQBYV9cjgnunFIg==}
- engines: {node: '>=18'}
-
- tinyglobby@0.2.15:
- resolution: {integrity: sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==}
- engines: {node: '>=12.0.0'}
-
- tinyrainbow@3.0.3:
- resolution: {integrity: sha512-PSkbLUoxOFRzJYjjxHJt9xro7D+iilgMX/C9lawzVuYiIdcihh9DXmVibBe8lmcFrRi/VzlPjBxbN7rH24q8/Q==}
- engines: {node: '>=14.0.0'}
-
- tslib@2.8.1:
- resolution: {integrity: sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==}
-
- typescript@5.9.3:
- resolution: {integrity: sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==}
- engines: {node: '>=14.17'}
- hasBin: true
-
- undici-types@7.16.0:
- resolution: {integrity: sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw==}
-
- undici@7.14.0:
- resolution: {integrity: sha512-Vqs8HTzjpQXZeXdpsfChQTlafcMQaaIwnGwLam1wudSSjlJeQ3bw1j+TLPePgrCnCpUXx7Ba5Pdpf5OBih62NQ==}
- engines: {node: '>=20.18.1'}
-
- unenv@2.0.0-rc.24:
- resolution: {integrity: sha512-i7qRCmY42zmCwnYlh9H2SvLEypEFGye5iRmEMKjcGi7zk9UquigRjFtTLz0TYqr0ZGLZhaMHl/foy1bZR+Cwlw==}
-
- vite@7.3.0:
- resolution: {integrity: sha512-dZwN5L1VlUBewiP6H9s2+B3e3Jg96D0vzN+Ry73sOefebhYr9f94wwkMNN/9ouoU8pV1BqA1d1zGk8928cx0rg==}
- engines: {node: ^20.19.0 || >=22.12.0}
- hasBin: true
- peerDependencies:
- '@types/node': ^20.19.0 || >=22.12.0
- jiti: '>=1.21.0'
- less: ^4.0.0
- lightningcss: ^1.21.0
- sass: ^1.70.0
- sass-embedded: ^1.70.0
- stylus: '>=0.54.8'
- sugarss: ^5.0.0
- terser: ^5.16.0
- tsx: ^4.8.1
- yaml: ^2.4.2
- peerDependenciesMeta:
- '@types/node':
- optional: true
- jiti:
- optional: true
- less:
- optional: true
- lightningcss:
- optional: true
- sass:
- optional: true
- sass-embedded:
- optional: true
- stylus:
- optional: true
- sugarss:
- optional: true
- terser:
- optional: true
- tsx:
- optional: true
- yaml:
- optional: true
-
- vitest@4.0.16:
- resolution: {integrity: sha512-E4t7DJ9pESL6E3I8nFjPa4xGUd3PmiWDLsDztS2qXSJWfHtbQnwAWylaBvSNY48I3vr8PTqIZlyK8TE3V3CA4Q==}
- engines: {node: ^20.0.0 || ^22.0.0 || >=24.0.0}
- hasBin: true
- peerDependencies:
- '@edge-runtime/vm': '*'
- '@opentelemetry/api': ^1.9.0
- '@types/node': ^20.0.0 || ^22.0.0 || >=24.0.0
- '@vitest/browser-playwright': 4.0.16
- '@vitest/browser-preview': 4.0.16
- '@vitest/browser-webdriverio': 4.0.16
- '@vitest/ui': 4.0.16
- happy-dom: '*'
- jsdom: '*'
- peerDependenciesMeta:
- '@edge-runtime/vm':
- optional: true
- '@opentelemetry/api':
- optional: true
- '@types/node':
- optional: true
- '@vitest/browser-playwright':
- optional: true
- '@vitest/browser-preview':
- optional: true
- '@vitest/browser-webdriverio':
- optional: true
- '@vitest/ui':
- optional: true
- happy-dom:
- optional: true
- jsdom:
- optional: true
-
- why-is-node-running@2.3.0:
- resolution: {integrity: sha512-hUrmaWBdVDcxvYqnyh09zunKzROWjbZTiNy8dBEjkS7ehEDQibXJ7XvlmtbwuTclUiIyN+CyXQD4Vmko8fNm8w==}
- engines: {node: '>=8'}
- hasBin: true
-
- workerd@1.20251210.0:
- resolution: {integrity: sha512-9MUUneP1BnRE9XAYi94FXxHmiLGbO75EHQZsgWqSiOXjoXSqJCw8aQbIEPxCy19TclEl/kHUFYce8ST2W+Qpjw==}
- engines: {node: '>=16'}
- hasBin: true
-
- wrangler@4.54.0:
- resolution: {integrity: sha512-bANFsjDwJLbprYoBK+hUDZsVbUv2SqJd8QvArLIcZk+fPq4h/Ohtj5vkKXD3k0s2bD1DXLk08D+hYmeNH+xC6A==}
- engines: {node: '>=20.0.0'}
- hasBin: true
- peerDependencies:
- '@cloudflare/workers-types': ^4.20251210.0
- peerDependenciesMeta:
- '@cloudflare/workers-types':
- optional: true
-
- ws@8.18.0:
- resolution: {integrity: sha512-8VbfWfHLbbwu3+N6OKsOMpBdT4kXPDDB9cJk2bJ6mh9ucxdlnNvH1e+roYkKmN9Nxw2yjz7VzeO9oOz2zJ04Pw==}
- engines: {node: '>=10.0.0'}
- peerDependencies:
- bufferutil: ^4.0.1
- utf-8-validate: '>=5.0.2'
- peerDependenciesMeta:
- bufferutil:
- optional: true
- utf-8-validate:
- optional: true
-
- youch-core@0.3.3:
- resolution: {integrity: sha512-ho7XuGjLaJ2hWHoK8yFnsUGy2Y5uDpqSTq1FkHLK4/oqKtyUU1AFbOOxY4IpC9f0fTLjwYbslUz0Po5BpD1wrA==}
-
- youch@4.1.0-beta.10:
- resolution: {integrity: sha512-rLfVLB4FgQneDr0dv1oddCVZmKjcJ6yX6mS4pU82Mq/Dt9a3cLZQ62pDBL4AUO+uVrCvtWz3ZFUL2HFAFJ/BXQ==}
-
- zod@3.22.3:
- resolution: {integrity: sha512-EjIevzuJRiRPbVH4mGc8nApb/lVLKVpmUhAaR5R5doKGfAnGJ6Gr3CViAVjP+4FWSxCsybeWQdcgCtbX+7oZug==}
-
- zod@4.3.5:
- resolution: {integrity: sha512-k7Nwx6vuWx1IJ9Bjuf4Zt1PEllcwe7cls3VNzm4CQ1/hgtFUK2bRNG3rvnpPUhFjmqJKAKtjV576KnUkHocg/g==}
-
-snapshots:
-
- '@cloudflare/kv-asset-handler@0.4.1':
- dependencies:
- mime: 3.0.0
-
- '@cloudflare/unenv-preset@2.7.13(unenv@2.0.0-rc.24)(workerd@1.20251210.0)':
- dependencies:
- unenv: 2.0.0-rc.24
- optionalDependencies:
- workerd: 1.20251210.0
-
- '@cloudflare/workerd-darwin-64@1.20251210.0':
- optional: true
-
- '@cloudflare/workerd-darwin-arm64@1.20251210.0':
- optional: true
-
- '@cloudflare/workerd-linux-64@1.20251210.0':
- optional: true
-
- '@cloudflare/workerd-linux-arm64@1.20251210.0':
- optional: true
-
- '@cloudflare/workerd-windows-64@1.20251210.0':
- optional: true
-
- '@cloudflare/workers-types@4.20260103.0': {}
-
- '@cspotcode/source-map-support@0.8.1':
- dependencies:
- '@jridgewell/trace-mapping': 0.3.9
-
- '@emnapi/runtime@1.8.1':
- dependencies:
- tslib: 2.8.1
- optional: true
-
- '@esbuild/aix-ppc64@0.27.0':
- optional: true
-
- '@esbuild/aix-ppc64@0.27.2':
- optional: true
-
- '@esbuild/android-arm64@0.27.0':
- optional: true
-
- '@esbuild/android-arm64@0.27.2':
- optional: true
-
- '@esbuild/android-arm@0.27.0':
- optional: true
-
- '@esbuild/android-arm@0.27.2':
- optional: true
-
- '@esbuild/android-x64@0.27.0':
- optional: true
-
- '@esbuild/android-x64@0.27.2':
- optional: true
-
- '@esbuild/darwin-arm64@0.27.0':
- optional: true
-
- '@esbuild/darwin-arm64@0.27.2':
- optional: true
-
- '@esbuild/darwin-x64@0.27.0':
- optional: true
-
- '@esbuild/darwin-x64@0.27.2':
- optional: true
-
- '@esbuild/freebsd-arm64@0.27.0':
- optional: true
-
- '@esbuild/freebsd-arm64@0.27.2':
- optional: true
-
- '@esbuild/freebsd-x64@0.27.0':
- optional: true
-
- '@esbuild/freebsd-x64@0.27.2':
- optional: true
-
- '@esbuild/linux-arm64@0.27.0':
- optional: true
-
- '@esbuild/linux-arm64@0.27.2':
- optional: true
-
- '@esbuild/linux-arm@0.27.0':
- optional: true
-
- '@esbuild/linux-arm@0.27.2':
- optional: true
-
- '@esbuild/linux-ia32@0.27.0':
- optional: true
-
- '@esbuild/linux-ia32@0.27.2':
- optional: true
-
- '@esbuild/linux-loong64@0.27.0':
- optional: true
-
- '@esbuild/linux-loong64@0.27.2':
- optional: true
-
- '@esbuild/linux-mips64el@0.27.0':
- optional: true
-
- '@esbuild/linux-mips64el@0.27.2':
- optional: true
-
- '@esbuild/linux-ppc64@0.27.0':
- optional: true
-
- '@esbuild/linux-ppc64@0.27.2':
- optional: true
-
- '@esbuild/linux-riscv64@0.27.0':
- optional: true
-
- '@esbuild/linux-riscv64@0.27.2':
- optional: true
-
- '@esbuild/linux-s390x@0.27.0':
- optional: true
-
- '@esbuild/linux-s390x@0.27.2':
- optional: true
-
- '@esbuild/linux-x64@0.27.0':
- optional: true
-
- '@esbuild/linux-x64@0.27.2':
- optional: true
-
- '@esbuild/netbsd-arm64@0.27.0':
- optional: true
-
- '@esbuild/netbsd-arm64@0.27.2':
- optional: true
-
- '@esbuild/netbsd-x64@0.27.0':
- optional: true
-
- '@esbuild/netbsd-x64@0.27.2':
- optional: true
-
- '@esbuild/openbsd-arm64@0.27.0':
- optional: true
-
- '@esbuild/openbsd-arm64@0.27.2':
- optional: true
-
- '@esbuild/openbsd-x64@0.27.0':
- optional: true
-
- '@esbuild/openbsd-x64@0.27.2':
- optional: true
-
- '@esbuild/openharmony-arm64@0.27.0':
- optional: true
-
- '@esbuild/openharmony-arm64@0.27.2':
- optional: true
-
- '@esbuild/sunos-x64@0.27.0':
- optional: true
-
- '@esbuild/sunos-x64@0.27.2':
- optional: true
-
- '@esbuild/win32-arm64@0.27.0':
- optional: true
-
- '@esbuild/win32-arm64@0.27.2':
- optional: true
-
- '@esbuild/win32-ia32@0.27.0':
- optional: true
-
- '@esbuild/win32-ia32@0.27.2':
- optional: true
-
- '@esbuild/win32-x64@0.27.0':
- optional: true
-
- '@esbuild/win32-x64@0.27.2':
- optional: true
-
- '@img/sharp-darwin-arm64@0.33.5':
- optionalDependencies:
- '@img/sharp-libvips-darwin-arm64': 1.0.4
- optional: true
-
- '@img/sharp-darwin-x64@0.33.5':
- optionalDependencies:
- '@img/sharp-libvips-darwin-x64': 1.0.4
- optional: true
-
- '@img/sharp-libvips-darwin-arm64@1.0.4':
- optional: true
-
- '@img/sharp-libvips-darwin-x64@1.0.4':
- optional: true
-
- '@img/sharp-libvips-linux-arm64@1.0.4':
- optional: true
-
- '@img/sharp-libvips-linux-arm@1.0.5':
- optional: true
-
- '@img/sharp-libvips-linux-s390x@1.0.4':
- optional: true
-
- '@img/sharp-libvips-linux-x64@1.0.4':
- optional: true
-
- '@img/sharp-libvips-linuxmusl-arm64@1.0.4':
- optional: true
-
- '@img/sharp-libvips-linuxmusl-x64@1.0.4':
- optional: true
-
- '@img/sharp-linux-arm64@0.33.5':
- optionalDependencies:
- '@img/sharp-libvips-linux-arm64': 1.0.4
- optional: true
-
- '@img/sharp-linux-arm@0.33.5':
- optionalDependencies:
- '@img/sharp-libvips-linux-arm': 1.0.5
- optional: true
-
- '@img/sharp-linux-s390x@0.33.5':
- optionalDependencies:
- '@img/sharp-libvips-linux-s390x': 1.0.4
- optional: true
-
- '@img/sharp-linux-x64@0.33.5':
- optionalDependencies:
- '@img/sharp-libvips-linux-x64': 1.0.4
- optional: true
-
- '@img/sharp-linuxmusl-arm64@0.33.5':
- optionalDependencies:
- '@img/sharp-libvips-linuxmusl-arm64': 1.0.4
- optional: true
-
- '@img/sharp-linuxmusl-x64@0.33.5':
- optionalDependencies:
- '@img/sharp-libvips-linuxmusl-x64': 1.0.4
- optional: true
-
- '@img/sharp-wasm32@0.33.5':
- dependencies:
- '@emnapi/runtime': 1.8.1
- optional: true
-
- '@img/sharp-win32-ia32@0.33.5':
- optional: true
-
- '@img/sharp-win32-x64@0.33.5':
- optional: true
-
- '@jridgewell/resolve-uri@3.1.2': {}
-
- '@jridgewell/sourcemap-codec@1.5.5': {}
-
- '@jridgewell/trace-mapping@0.3.9':
- dependencies:
- '@jridgewell/resolve-uri': 3.1.2
- '@jridgewell/sourcemap-codec': 1.5.5
-
- '@poppinss/colors@4.1.6':
- dependencies:
- kleur: 4.1.5
-
- '@poppinss/dumper@0.6.5':
- dependencies:
- '@poppinss/colors': 4.1.6
- '@sindresorhus/is': 7.2.0
- supports-color: 10.2.2
-
- '@poppinss/exception@1.2.3': {}
-
- '@rollup/rollup-android-arm-eabi@4.54.0':
- optional: true
-
- '@rollup/rollup-android-arm64@4.54.0':
- optional: true
-
- '@rollup/rollup-darwin-arm64@4.54.0':
- optional: true
-
- '@rollup/rollup-darwin-x64@4.54.0':
- optional: true
-
- '@rollup/rollup-freebsd-arm64@4.54.0':
- optional: true
-
- '@rollup/rollup-freebsd-x64@4.54.0':
- optional: true
-
- '@rollup/rollup-linux-arm-gnueabihf@4.54.0':
- optional: true
-
- '@rollup/rollup-linux-arm-musleabihf@4.54.0':
- optional: true
-
- '@rollup/rollup-linux-arm64-gnu@4.54.0':
- optional: true
-
- '@rollup/rollup-linux-arm64-musl@4.54.0':
- optional: true
-
- '@rollup/rollup-linux-loong64-gnu@4.54.0':
- optional: true
-
- '@rollup/rollup-linux-ppc64-gnu@4.54.0':
- optional: true
-
- '@rollup/rollup-linux-riscv64-gnu@4.54.0':
- optional: true
-
- '@rollup/rollup-linux-riscv64-musl@4.54.0':
- optional: true
-
- '@rollup/rollup-linux-s390x-gnu@4.54.0':
- optional: true
-
- '@rollup/rollup-linux-x64-gnu@4.54.0':
- optional: true
-
- '@rollup/rollup-linux-x64-musl@4.54.0':
- optional: true
-
- '@rollup/rollup-openharmony-arm64@4.54.0':
- optional: true
-
- '@rollup/rollup-win32-arm64-msvc@4.54.0':
- optional: true
-
- '@rollup/rollup-win32-ia32-msvc@4.54.0':
- optional: true
-
- '@rollup/rollup-win32-x64-gnu@4.54.0':
- optional: true
-
- '@rollup/rollup-win32-x64-msvc@4.54.0':
- optional: true
-
- '@sindresorhus/is@7.2.0': {}
-
- '@speed-highlight/core@1.2.14': {}
-
- '@standard-schema/spec@1.1.0': {}
-
- '@types/chai@5.2.3':
- dependencies:
- '@types/deep-eql': 4.0.2
- assertion-error: 2.0.1
-
- '@types/deep-eql@4.0.2': {}
-
- '@types/estree@1.0.8': {}
-
- '@types/node@25.0.3':
- dependencies:
- undici-types: 7.16.0
-
- '@vitest/expect@4.0.16':
- dependencies:
- '@standard-schema/spec': 1.1.0
- '@types/chai': 5.2.3
- '@vitest/spy': 4.0.16
- '@vitest/utils': 4.0.16
- chai: 6.2.2
- tinyrainbow: 3.0.3
-
- '@vitest/mocker@4.0.16(vite@7.3.0(@types/node@25.0.3))':
- dependencies:
- '@vitest/spy': 4.0.16
- estree-walker: 3.0.3
- magic-string: 0.30.21
- optionalDependencies:
- vite: 7.3.0(@types/node@25.0.3)
-
- '@vitest/pretty-format@4.0.16':
- dependencies:
- tinyrainbow: 3.0.3
-
- '@vitest/runner@4.0.16':
- dependencies:
- '@vitest/utils': 4.0.16
- pathe: 2.0.3
-
- '@vitest/snapshot@4.0.16':
- dependencies:
- '@vitest/pretty-format': 4.0.16
- magic-string: 0.30.21
- pathe: 2.0.3
-
- '@vitest/spy@4.0.16': {}
-
- '@vitest/utils@4.0.16':
- dependencies:
- '@vitest/pretty-format': 4.0.16
- tinyrainbow: 3.0.3
-
- acorn-walk@8.3.2: {}
-
- acorn@8.14.0: {}
-
- assertion-error@2.0.1: {}
-
- blake3-wasm@2.1.5: {}
-
- chai@6.2.2: {}
-
- color-convert@2.0.1:
- dependencies:
- color-name: 1.1.4
-
- color-name@1.1.4: {}
-
- color-string@1.9.1:
- dependencies:
- color-name: 1.1.4
- simple-swizzle: 0.2.4
-
- color@4.2.3:
- dependencies:
- color-convert: 2.0.1
- color-string: 1.9.1
-
- cookie@1.1.1: {}
-
- detect-libc@2.1.2: {}
-
- error-stack-parser-es@1.0.5: {}
-
- es-module-lexer@1.7.0: {}
-
- esbuild@0.27.0:
- optionalDependencies:
- '@esbuild/aix-ppc64': 0.27.0
- '@esbuild/android-arm': 0.27.0
- '@esbuild/android-arm64': 0.27.0
- '@esbuild/android-x64': 0.27.0
- '@esbuild/darwin-arm64': 0.27.0
- '@esbuild/darwin-x64': 0.27.0
- '@esbuild/freebsd-arm64': 0.27.0
- '@esbuild/freebsd-x64': 0.27.0
- '@esbuild/linux-arm': 0.27.0
- '@esbuild/linux-arm64': 0.27.0
- '@esbuild/linux-ia32': 0.27.0
- '@esbuild/linux-loong64': 0.27.0
- '@esbuild/linux-mips64el': 0.27.0
- '@esbuild/linux-ppc64': 0.27.0
- '@esbuild/linux-riscv64': 0.27.0
- '@esbuild/linux-s390x': 0.27.0
- '@esbuild/linux-x64': 0.27.0
- '@esbuild/netbsd-arm64': 0.27.0
- '@esbuild/netbsd-x64': 0.27.0
- '@esbuild/openbsd-arm64': 0.27.0
- '@esbuild/openbsd-x64': 0.27.0
- '@esbuild/openharmony-arm64': 0.27.0
- '@esbuild/sunos-x64': 0.27.0
- '@esbuild/win32-arm64': 0.27.0
- '@esbuild/win32-ia32': 0.27.0
- '@esbuild/win32-x64': 0.27.0
-
- esbuild@0.27.2:
- optionalDependencies:
- '@esbuild/aix-ppc64': 0.27.2
- '@esbuild/android-arm': 0.27.2
- '@esbuild/android-arm64': 0.27.2
- '@esbuild/android-x64': 0.27.2
- '@esbuild/darwin-arm64': 0.27.2
- '@esbuild/darwin-x64': 0.27.2
- '@esbuild/freebsd-arm64': 0.27.2
- '@esbuild/freebsd-x64': 0.27.2
- '@esbuild/linux-arm': 0.27.2
- '@esbuild/linux-arm64': 0.27.2
- '@esbuild/linux-ia32': 0.27.2
- '@esbuild/linux-loong64': 0.27.2
- '@esbuild/linux-mips64el': 0.27.2
- '@esbuild/linux-ppc64': 0.27.2
- '@esbuild/linux-riscv64': 0.27.2
- '@esbuild/linux-s390x': 0.27.2
- '@esbuild/linux-x64': 0.27.2
- '@esbuild/netbsd-arm64': 0.27.2
- '@esbuild/netbsd-x64': 0.27.2
- '@esbuild/openbsd-arm64': 0.27.2
- '@esbuild/openbsd-x64': 0.27.2
- '@esbuild/openharmony-arm64': 0.27.2
- '@esbuild/sunos-x64': 0.27.2
- '@esbuild/win32-arm64': 0.27.2
- '@esbuild/win32-ia32': 0.27.2
- '@esbuild/win32-x64': 0.27.2
-
- estree-walker@3.0.3:
- dependencies:
- '@types/estree': 1.0.8
-
- exit-hook@2.2.1: {}
-
- expect-type@1.3.0: {}
-
- fdir@6.5.0(picomatch@4.0.3):
- optionalDependencies:
- picomatch: 4.0.3
-
- fsevents@2.3.3:
- optional: true
-
- glob-to-regexp@0.4.1: {}
-
- is-arrayish@0.3.4: {}
-
- itty-router@5.0.22: {}
-
- jose@5.10.0: {}
-
- kleur@4.1.5: {}
-
- magic-string@0.30.21:
- dependencies:
- '@jridgewell/sourcemap-codec': 1.5.5
-
- mime@3.0.0: {}
-
- miniflare@4.20251210.0:
- dependencies:
- '@cspotcode/source-map-support': 0.8.1
- acorn: 8.14.0
- acorn-walk: 8.3.2
- exit-hook: 2.2.1
- glob-to-regexp: 0.4.1
- sharp: 0.33.5
- stoppable: 1.1.0
- undici: 7.14.0
- workerd: 1.20251210.0
- ws: 8.18.0
- youch: 4.1.0-beta.10
- zod: 3.22.3
- transitivePeerDependencies:
- - bufferutil
- - utf-8-validate
-
- nanoid@3.3.11: {}
-
- obug@2.1.1: {}
-
- path-to-regexp@6.3.0: {}
-
- pathe@2.0.3: {}
-
- picocolors@1.1.1: {}
-
- picomatch@4.0.3: {}
-
- postcss@8.5.6:
- dependencies:
- nanoid: 3.3.11
- picocolors: 1.1.1
- source-map-js: 1.2.1
-
- rollup@4.54.0:
- dependencies:
- '@types/estree': 1.0.8
- optionalDependencies:
- '@rollup/rollup-android-arm-eabi': 4.54.0
- '@rollup/rollup-android-arm64': 4.54.0
- '@rollup/rollup-darwin-arm64': 4.54.0
- '@rollup/rollup-darwin-x64': 4.54.0
- '@rollup/rollup-freebsd-arm64': 4.54.0
- '@rollup/rollup-freebsd-x64': 4.54.0
- '@rollup/rollup-linux-arm-gnueabihf': 4.54.0
- '@rollup/rollup-linux-arm-musleabihf': 4.54.0
- '@rollup/rollup-linux-arm64-gnu': 4.54.0
- '@rollup/rollup-linux-arm64-musl': 4.54.0
- '@rollup/rollup-linux-loong64-gnu': 4.54.0
- '@rollup/rollup-linux-ppc64-gnu': 4.54.0
- '@rollup/rollup-linux-riscv64-gnu': 4.54.0
- '@rollup/rollup-linux-riscv64-musl': 4.54.0
- '@rollup/rollup-linux-s390x-gnu': 4.54.0
- '@rollup/rollup-linux-x64-gnu': 4.54.0
- '@rollup/rollup-linux-x64-musl': 4.54.0
- '@rollup/rollup-openharmony-arm64': 4.54.0
- '@rollup/rollup-win32-arm64-msvc': 4.54.0
- '@rollup/rollup-win32-ia32-msvc': 4.54.0
- '@rollup/rollup-win32-x64-gnu': 4.54.0
- '@rollup/rollup-win32-x64-msvc': 4.54.0
- fsevents: 2.3.3
-
- semver@7.7.3: {}
-
- sharp@0.33.5:
- dependencies:
- color: 4.2.3
- detect-libc: 2.1.2
- semver: 7.7.3
- optionalDependencies:
- '@img/sharp-darwin-arm64': 0.33.5
- '@img/sharp-darwin-x64': 0.33.5
- '@img/sharp-libvips-darwin-arm64': 1.0.4
- '@img/sharp-libvips-darwin-x64': 1.0.4
- '@img/sharp-libvips-linux-arm': 1.0.5
- '@img/sharp-libvips-linux-arm64': 1.0.4
- '@img/sharp-libvips-linux-s390x': 1.0.4
- '@img/sharp-libvips-linux-x64': 1.0.4
- '@img/sharp-libvips-linuxmusl-arm64': 1.0.4
- '@img/sharp-libvips-linuxmusl-x64': 1.0.4
- '@img/sharp-linux-arm': 0.33.5
- '@img/sharp-linux-arm64': 0.33.5
- '@img/sharp-linux-s390x': 0.33.5
- '@img/sharp-linux-x64': 0.33.5
- '@img/sharp-linuxmusl-arm64': 0.33.5
- '@img/sharp-linuxmusl-x64': 0.33.5
- '@img/sharp-wasm32': 0.33.5
- '@img/sharp-win32-ia32': 0.33.5
- '@img/sharp-win32-x64': 0.33.5
-
- siginfo@2.0.0: {}
-
- simple-swizzle@0.2.4:
- dependencies:
- is-arrayish: 0.3.4
-
- source-map-js@1.2.1: {}
-
- stackback@0.0.2: {}
-
- std-env@3.10.0: {}
-
- stoppable@1.1.0: {}
-
- supports-color@10.2.2: {}
-
- tinybench@2.9.0: {}
-
- tinyexec@1.0.2: {}
-
- tinyglobby@0.2.15:
- dependencies:
- fdir: 6.5.0(picomatch@4.0.3)
- picomatch: 4.0.3
-
- tinyrainbow@3.0.3: {}
-
- tslib@2.8.1:
- optional: true
-
- typescript@5.9.3: {}
-
- undici-types@7.16.0: {}
-
- undici@7.14.0: {}
-
- unenv@2.0.0-rc.24:
- dependencies:
- pathe: 2.0.3
-
- vite@7.3.0(@types/node@25.0.3):
- dependencies:
- esbuild: 0.27.2
- fdir: 6.5.0(picomatch@4.0.3)
- picomatch: 4.0.3
- postcss: 8.5.6
- rollup: 4.54.0
- tinyglobby: 0.2.15
- optionalDependencies:
- '@types/node': 25.0.3
- fsevents: 2.3.3
-
- vitest@4.0.16(@types/node@25.0.3):
- dependencies:
- '@vitest/expect': 4.0.16
- '@vitest/mocker': 4.0.16(vite@7.3.0(@types/node@25.0.3))
- '@vitest/pretty-format': 4.0.16
- '@vitest/runner': 4.0.16
- '@vitest/snapshot': 4.0.16
- '@vitest/spy': 4.0.16
- '@vitest/utils': 4.0.16
- es-module-lexer: 1.7.0
- expect-type: 1.3.0
- magic-string: 0.30.21
- obug: 2.1.1
- pathe: 2.0.3
- picomatch: 4.0.3
- std-env: 3.10.0
- tinybench: 2.9.0
- tinyexec: 1.0.2
- tinyglobby: 0.2.15
- tinyrainbow: 3.0.3
- vite: 7.3.0(@types/node@25.0.3)
- why-is-node-running: 2.3.0
- optionalDependencies:
- '@types/node': 25.0.3
- transitivePeerDependencies:
- - jiti
- - less
- - lightningcss
- - msw
- - sass
- - sass-embedded
- - stylus
- - sugarss
- - terser
- - tsx
- - yaml
-
- why-is-node-running@2.3.0:
- dependencies:
- siginfo: 2.0.0
- stackback: 0.0.2
-
- workerd@1.20251210.0:
- optionalDependencies:
- '@cloudflare/workerd-darwin-64': 1.20251210.0
- '@cloudflare/workerd-darwin-arm64': 1.20251210.0
- '@cloudflare/workerd-linux-64': 1.20251210.0
- '@cloudflare/workerd-linux-arm64': 1.20251210.0
- '@cloudflare/workerd-windows-64': 1.20251210.0
-
- wrangler@4.54.0(@cloudflare/workers-types@4.20260103.0):
- dependencies:
- '@cloudflare/kv-asset-handler': 0.4.1
- '@cloudflare/unenv-preset': 2.7.13(unenv@2.0.0-rc.24)(workerd@1.20251210.0)
- blake3-wasm: 2.1.5
- esbuild: 0.27.0
- miniflare: 4.20251210.0
- path-to-regexp: 6.3.0
- unenv: 2.0.0-rc.24
- workerd: 1.20251210.0
- optionalDependencies:
- '@cloudflare/workers-types': 4.20260103.0
- fsevents: 2.3.3
- transitivePeerDependencies:
- - bufferutil
- - utf-8-validate
-
- ws@8.18.0: {}
-
- youch-core@0.3.3:
- dependencies:
- '@poppinss/exception': 1.2.3
- error-stack-parser-es: 1.0.5
-
- youch@4.1.0-beta.10:
- dependencies:
- '@poppinss/colors': 4.1.6
- '@poppinss/dumper': 0.6.5
- '@speed-highlight/core': 1.2.14
- cookie: 1.1.1
- youch-core: 0.3.3
-
- zod@3.22.3: {}
-
- zod@4.3.5: {}
diff --git a/worker/schema.sql b/worker/schema.sql
deleted file mode 100644
index 394e5456..00000000
--- a/worker/schema.sql
+++ /dev/null
@@ -1,100 +0,0 @@
--- GSD Task Manager Sync Database Schema
--- Database: Cloudflare D1
--- Version: 1.0.0
-
--- Users table (authentication and account management)
-CREATE TABLE IF NOT EXISTS users (
- id TEXT PRIMARY KEY,
- email TEXT UNIQUE NOT NULL,
- password_hash TEXT NOT NULL,
- salt TEXT NOT NULL,
- created_at INTEGER NOT NULL,
- updated_at INTEGER NOT NULL,
- last_login_at INTEGER,
- account_status TEXT DEFAULT 'active' CHECK(account_status IN ('active', 'suspended', 'deleted'))
-);
-
-CREATE INDEX IF NOT EXISTS idx_users_email ON users(email);
-CREATE INDEX IF NOT EXISTS idx_users_status ON users(account_status);
-
--- Devices table (track user devices for selective sync)
-CREATE TABLE IF NOT EXISTS devices (
- id TEXT PRIMARY KEY,
- user_id TEXT NOT NULL,
- device_name TEXT,
- device_fingerprint TEXT,
- last_seen_at INTEGER NOT NULL,
- created_at INTEGER NOT NULL,
- is_active INTEGER DEFAULT 1,
- FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE
-);
-
-CREATE INDEX IF NOT EXISTS idx_devices_user ON devices(user_id);
-CREATE INDEX IF NOT EXISTS idx_devices_active ON devices(user_id, is_active);
-
--- Sync operations table (central conflict resolution log)
-CREATE TABLE IF NOT EXISTS sync_operations (
- id TEXT PRIMARY KEY,
- user_id TEXT NOT NULL,
- device_id TEXT NOT NULL,
- operation_type TEXT NOT NULL CHECK(operation_type IN ('push', 'pull', 'resolve')),
- vector_clock TEXT NOT NULL,
- created_at INTEGER NOT NULL,
- FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE,
- FOREIGN KEY (device_id) REFERENCES devices(id) ON DELETE CASCADE
-);
-
-CREATE INDEX IF NOT EXISTS idx_sync_ops_user ON sync_operations(user_id, created_at DESC);
-CREATE INDEX IF NOT EXISTS idx_sync_ops_device ON sync_operations(device_id, created_at DESC);
-
--- Encrypted task blobs (stores encrypted task data)
-CREATE TABLE IF NOT EXISTS encrypted_tasks (
- id TEXT PRIMARY KEY,
- user_id TEXT NOT NULL,
- encrypted_blob TEXT NOT NULL,
- nonce TEXT NOT NULL,
- version INTEGER NOT NULL DEFAULT 1,
- vector_clock TEXT NOT NULL,
- deleted_at INTEGER,
- created_at INTEGER NOT NULL,
- updated_at INTEGER NOT NULL,
- last_modified_device TEXT,
- checksum TEXT NOT NULL,
- FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE
-);
-
-CREATE INDEX IF NOT EXISTS idx_enc_tasks_user ON encrypted_tasks(user_id, updated_at DESC);
-CREATE INDEX IF NOT EXISTS idx_enc_tasks_version ON encrypted_tasks(id, version);
-CREATE INDEX IF NOT EXISTS idx_enc_tasks_deleted ON encrypted_tasks(user_id, deleted_at) WHERE deleted_at IS NOT NULL;
-
--- Sync metadata (track last successful sync per device)
-CREATE TABLE IF NOT EXISTS sync_metadata (
- user_id TEXT NOT NULL,
- device_id TEXT NOT NULL,
- last_sync_at INTEGER NOT NULL,
- last_pull_vector TEXT NOT NULL,
- last_push_vector TEXT NOT NULL,
- sync_status TEXT DEFAULT 'success' CHECK(sync_status IN ('success', 'conflict', 'error')),
- PRIMARY KEY (user_id, device_id),
- FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE,
- FOREIGN KEY (device_id) REFERENCES devices(id) ON DELETE CASCADE
-);
-
-CREATE INDEX IF NOT EXISTS idx_sync_meta_status ON sync_metadata(sync_status);
-
--- Conflict log (audit trail for manual resolution if needed)
-CREATE TABLE IF NOT EXISTS conflict_log (
- id TEXT PRIMARY KEY,
- user_id TEXT NOT NULL,
- task_id TEXT NOT NULL,
- conflict_type TEXT NOT NULL CHECK(conflict_type IN ('concurrent_edit', 'delete_edit', 'duplicate_id')),
- device_a TEXT NOT NULL,
- device_b TEXT NOT NULL,
- resolution TEXT NOT NULL CHECK(resolution IN ('auto_merge', 'last_write_wins', 'manual')),
- resolved_at INTEGER NOT NULL,
- resolution_data TEXT,
- FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE
-);
-
-CREATE INDEX IF NOT EXISTS idx_conflicts_user ON conflict_log(user_id, resolved_at DESC);
-CREATE INDEX IF NOT EXISTS idx_conflicts_task ON conflict_log(task_id);
diff --git a/worker/set-secrets.sh b/worker/set-secrets.sh
deleted file mode 100755
index 701c09e7..00000000
--- a/worker/set-secrets.sh
+++ /dev/null
@@ -1,44 +0,0 @@
-#!/bin/bash
-# Generate and set secrets for GSD Sync Worker
-
-echo "🔐 Generating secure secrets..."
-echo ""
-
-# Generate random secrets
-JWT_SECRET=$(openssl rand -base64 32)
-ENCRYPTION_SALT=$(openssl rand -base64 32)
-
-echo "Generated secrets:"
-echo "─────────────────────────────────────────────────────────"
-echo "JWT_SECRET: $JWT_SECRET"
-echo "ENCRYPTION_SALT: $ENCRYPTION_SALT"
-echo "─────────────────────────────────────────────────────────"
-echo ""
-echo "⚠️ SAVE THESE SOMEWHERE SECURE (password manager)!"
-echo ""
-read -p "Press Enter to set JWT_SECRET in Cloudflare..."
-
-# Set JWT_SECRET
-echo "$JWT_SECRET" | npx wrangler secret put JWT_SECRET
-
-echo ""
-read -p "Press Enter to set ENCRYPTION_SALT in Cloudflare..."
-
-# Set ENCRYPTION_SALT
-echo "$ENCRYPTION_SALT" | npx wrangler secret put ENCRYPTION_SALT
-
-echo ""
-echo "✅ Secrets set successfully!"
-echo ""
-echo "Secrets saved to: ./secrets.txt (DO NOT COMMIT THIS FILE)"
-cat > secrets.txt << EOF
-# GSD Sync Worker Secrets
-# Generated: $(date)
-# DO NOT COMMIT TO GIT
-
-JWT_SECRET=$JWT_SECRET
-ENCRYPTION_SALT=$ENCRYPTION_SALT
-EOF
-
-echo ""
-echo "Next step: Redeploy the worker with 'npx wrangler deploy'"
diff --git a/worker/setup-all-envs.sh b/worker/setup-all-envs.sh
deleted file mode 100755
index 2c74f41c..00000000
--- a/worker/setup-all-envs.sh
+++ /dev/null
@@ -1,312 +0,0 @@
-#!/bin/bash
-# GSD Sync Worker - Multi-Environment Setup Script
-# Creates resources for development, staging, and production environments
-
-set -e # Exit on error
-
-# Colors for output
-RED='\033[0;31m'
-GREEN='\033[0;32m'
-BLUE='\033[0;34m'
-YELLOW='\033[1;33m'
-NC='\033[0m' # No Color
-
-echo -e "${BLUE}╔════════════════════════════════════════════════════════════╗${NC}"
-echo -e "${BLUE}║ GSD Sync Worker - Multi-Environment Setup ║${NC}"
-echo -e "${BLUE}╔════════════════════════════════════════════════════════════╗${NC}"
-echo ""
-
-# ========================================
-# Step 1: Check Authentication
-# ========================================
-echo -e "${BLUE}[1/4]${NC} Checking Cloudflare authentication..."
-if ! npx wrangler whoami > /dev/null 2>&1; then
- echo -e "${RED}✗ Not authenticated${NC}"
- echo ""
- echo "Please run: npx wrangler login"
- exit 1
-fi
-echo -e "${GREEN}✓ Authenticated${NC}"
-echo ""
-
-# ========================================
-# Step 2: Create Resources for All Environments
-# ========================================
-echo -e "${BLUE}[2/4]${NC} Creating resources for all environments..."
-echo ""
-
-# Variables to store created resource IDs
-DB_ID_DEV=""
-DB_ID_STAGING=""
-DB_ID_PROD=""
-KV_ID_DEV=""
-KV_ID_STAGING=""
-KV_ID_PROD=""
-
-ENVIRONMENTS=("development" "staging" "production")
-ENV_SUFFIXES=("dev" "staging" "production")
-
-for i in 0 1 2; do
- ENV="${ENVIRONMENTS[$i]}"
- SUFFIX="${ENV_SUFFIXES[$i]}"
-
- echo -e "${YELLOW}────────────────────────────────────────────────────────${NC}"
- echo -e "${YELLOW}Setting up ${ENV} environment${NC}"
- echo -e "${YELLOW}────────────────────────────────────────────────────────${NC}"
- echo ""
-
- # Create D1 Database
- echo -e " ${BLUE}→${NC} Creating D1 database: gsd-sync-${SUFFIX}"
- DB_OUTPUT=$(npx wrangler d1 create "gsd-sync-${SUFFIX}" 2>&1 || true)
-
- # Extract database ID from output
- if echo "$DB_OUTPUT" | grep -q "already exists"; then
- echo -e " ${YELLOW}⚠${NC} Database already exists, fetching ID..."
- DB_ID=$(npx wrangler d1 list | grep "gsd-sync-${SUFFIX}" | awk '{print $2}' || echo "")
- else
- DB_ID=$(echo "$DB_OUTPUT" | grep "database_id" | sed -E 's/.*database_id = "([^"]+)".*/\1/')
- fi
-
- if [ -n "$DB_ID" ]; then
- # Store in environment-specific variable
- case "$SUFFIX" in
- "dev")
- DB_ID_DEV="$DB_ID"
- ;;
- "staging")
- DB_ID_STAGING="$DB_ID"
- ;;
- "production")
- DB_ID_PROD="$DB_ID"
- ;;
- esac
- echo -e " ${GREEN}✓${NC} Database ID: ${DB_ID}"
- else
- echo -e " ${RED}✗${NC} Failed to create/find database"
- exit 1
- fi
- echo ""
-
- # Create KV Namespace
- echo -e " ${BLUE}→${NC} Creating KV namespace for ${ENV}"
- KV_OUTPUT=$(npx wrangler kv namespace create "KV" --env "${ENV}" 2>&1 || true)
-
- # Extract KV ID from output
- if echo "$KV_OUTPUT" | grep -q "already exists"; then
- echo -e " ${YELLOW}⚠${NC} KV namespace already exists, fetching ID..."
- # KV namespaces are named as "{env}-KV" (e.g., "development-KV", "staging-KV", "production-KV")
- KV_LIST=$(npx wrangler kv namespace list 2>/dev/null || echo "[]")
- KV_ID=$(echo "$KV_LIST" | grep -B 1 "\"title\": \"${ENV}-KV\"" | grep '"id"' | sed -E 's/.*"id": "([^"]+)".*/\1/' | head -1)
- else
- KV_ID=$(echo "$KV_OUTPUT" | grep -oE 'id = "[^"]+"' | sed 's/id = "\(.*\)"/\1/')
- fi
-
- if [ -n "$KV_ID" ]; then
- # Store in environment-specific variable
- case "$SUFFIX" in
- "dev")
- KV_ID_DEV="$KV_ID"
- ;;
- "staging")
- KV_ID_STAGING="$KV_ID"
- ;;
- "production")
- KV_ID_PROD="$KV_ID"
- ;;
- esac
- echo -e " ${GREEN}✓${NC} KV ID: ${KV_ID}"
- else
- echo -e " ${RED}✗${NC} Failed to create/find KV namespace"
- exit 1
- fi
- echo ""
-
- # Create R2 Bucket
- echo -e " ${BLUE}→${NC} Creating R2 bucket: gsd-backups-${SUFFIX}"
- if npx wrangler r2 bucket create "gsd-backups-${SUFFIX}" 2>&1 | grep -q "already exists"; then
- echo -e " ${YELLOW}⚠${NC} R2 bucket already exists"
- else
- echo -e " ${GREEN}✓${NC} R2 bucket created"
- fi
- echo ""
-done
-
-# ========================================
-# Step 3: Update wrangler.toml
-# ========================================
-echo ""
-echo -e "${BLUE}[3/4]${NC} Updating wrangler.toml with resource IDs..."
-echo ""
-
-# Update each environment in wrangler.toml
-for i in 0 1 2; do
- ENV="${ENVIRONMENTS[$i]}"
- SUFFIX="${ENV_SUFFIXES[$i]}"
-
- # Get the appropriate IDs based on suffix
- case "$SUFFIX" in
- "dev")
- DB_ID="$DB_ID_DEV"
- KV_ID="$KV_ID_DEV"
- ;;
- "staging")
- DB_ID="$DB_ID_STAGING"
- KV_ID="$KV_ID_STAGING"
- ;;
- "production")
- DB_ID="$DB_ID_PROD"
- KV_ID="$KV_ID_PROD"
- ;;
- esac
-
- echo -e " ${BLUE}→${NC} Updating ${ENV} environment..."
-
- # Convert ENV to uppercase for placeholder matching
- ENV_UPPER=$(echo "$ENV" | tr '[:lower:]' '[:upper:]')
-
- # Update database_id and KV_id
- if [[ "$OSTYPE" == "darwin"* ]]; then
- # macOS
- sed -i '' "s/REPLACE_WITH_${ENV_UPPER}_DB_ID/${DB_ID}/" wrangler.toml
- sed -i '' "s/REPLACE_WITH_${ENV_UPPER}_KV_ID/${KV_ID}/" wrangler.toml
- else
- # Linux
- sed -i "s/REPLACE_WITH_${ENV_UPPER}_DB_ID/${DB_ID}/" wrangler.toml
- sed -i "s/REPLACE_WITH_${ENV_UPPER}_KV_ID/${KV_ID}/" wrangler.toml
- fi
-
- echo -e " ${GREEN}✓${NC} Updated wrangler.toml for ${ENV}"
-done
-
-echo -e "${GREEN}✓ wrangler.toml updated${NC}"
-echo ""
-
-# ========================================
-# Step 4: Set Secrets & Apply Schema
-# ========================================
-echo -e "${BLUE}[4/4]${NC} Setting secrets and applying database schema..."
-echo ""
-
-for i in 0 1 2; do
- ENV="${ENVIRONMENTS[$i]}"
- SUFFIX="${ENV_SUFFIXES[$i]}"
-
- echo -e "${YELLOW}────────────────────────────────────────────────────────${NC}"
- echo -e "${YELLOW}Configuring ${ENV} environment${NC}"
- echo -e "${YELLOW}────────────────────────────────────────────────────────${NC}"
- echo ""
-
- # Generate unique secrets for each environment
- echo -e " ${BLUE}→${NC} Generating secrets for ${ENV}..."
- JWT_SECRET=$(openssl rand -base64 32)
- ENCRYPTION_SALT=$(openssl rand -base64 32)
-
- echo -e " ${GREEN}✓${NC} Generated JWT_SECRET and ENCRYPTION_SALT"
- echo ""
-
- # Save secrets to environment-specific file
- SECRETS_FILE="secrets-${SUFFIX}.txt"
- ENV_UPPER=$(echo "$ENV" | tr '[:lower:]' '[:upper:]')
- cat > "$SECRETS_FILE" << EOF
-# GSD Sync Worker Secrets - ${ENV_UPPER} ENVIRONMENT
-# Generated: $(date)
-# DO NOT COMMIT TO GIT
-
-JWT_SECRET=${JWT_SECRET}
-ENCRYPTION_SALT=${ENCRYPTION_SALT}
-
-# Set these manually via Cloudflare dashboard or wrangler CLI:
-# GOOGLE_CLIENT_SECRET=your_google_client_secret
-# APPLE_CLIENT_ID=your_apple_client_id
-# APPLE_TEAM_ID=your_apple_team_id
-# APPLE_KEY_ID=your_apple_key_id
-# APPLE_PRIVATE_KEY=your_apple_private_key
-EOF
-
- echo -e " ${GREEN}✓${NC} Secrets saved to: ${SECRETS_FILE}"
- echo ""
-
- # Set secrets via wrangler
- echo -e " ${BLUE}→${NC} Setting JWT_SECRET for ${ENV}..."
- echo "$JWT_SECRET" | npx wrangler secret put JWT_SECRET --env "${ENV}" > /dev/null
- echo -e " ${GREEN}✓${NC} JWT_SECRET set"
-
- echo -e " ${BLUE}→${NC} Setting ENCRYPTION_SALT for ${ENV}..."
- echo "$ENCRYPTION_SALT" | npx wrangler secret put ENCRYPTION_SALT --env "${ENV}" > /dev/null
- echo -e " ${GREEN}✓${NC} ENCRYPTION_SALT set"
- echo ""
-
- # Apply database schema
- echo -e " ${BLUE}→${NC} Applying database schema to ${ENV}..."
-
- # Apply migrations if migrations folder exists, otherwise use schema.sql
- if [ -d "migrations" ] && [ "$(ls -A migrations)" ]; then
- echo -e " ${BLUE}→${NC} Applying migrations to gsd-sync-${SUFFIX}..."
- npx wrangler d1 migrations apply "gsd-sync-${SUFFIX}" --remote > /dev/null 2>&1
- elif [ -f "schema.sql" ]; then
- echo -e " ${BLUE}→${NC} Applying schema.sql to gsd-sync-${SUFFIX}..."
- npx wrangler d1 execute "gsd-sync-${SUFFIX}" --remote --file=./schema.sql > /dev/null 2>&1
- fi
-
- echo -e " ${GREEN}✓${NC} Database schema applied"
- echo ""
-done
-
-# ========================================
-# Summary
-# ========================================
-echo ""
-echo -e "${GREEN}╔════════════════════════════════════════════════════════════╗${NC}"
-echo -e "${GREEN}║ ✓ Multi-Environment Setup Complete! ║${NC}"
-echo -e "${GREEN}╔════════════════════════════════════════════════════════════╗${NC}"
-echo ""
-echo -e "${BLUE}Resources Created:${NC}"
-echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
-
-for i in 0 1 2; do
- ENV="${ENVIRONMENTS[$i]}"
- SUFFIX="${ENV_SUFFIXES[$i]}"
-
- # Get the appropriate IDs based on suffix
- case "$SUFFIX" in
- "dev")
- DB_ID="$DB_ID_DEV"
- KV_ID="$KV_ID_DEV"
- ;;
- "staging")
- DB_ID="$DB_ID_STAGING"
- KV_ID="$KV_ID_STAGING"
- ;;
- "production")
- DB_ID="$DB_ID_PROD"
- KV_ID="$KV_ID_PROD"
- ;;
- esac
-
- ENV_UPPER=$(echo "$ENV" | tr '[:lower:]' '[:upper:]')
- echo ""
- echo -e "${YELLOW}${ENV_UPPER} Environment:${NC}"
- echo -e " Worker: gsd-sync-worker-${SUFFIX}"
- echo -e " D1 Database: gsd-sync-${SUFFIX} (${DB_ID})"
- echo -e " KV Namespace: ${KV_ID}"
- echo -e " R2 Bucket: gsd-backups-${SUFFIX}"
- echo -e " Secrets: See secrets-${SUFFIX}.txt"
-done
-
-echo ""
-echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
-echo ""
-echo -e "${BLUE}Next Steps:${NC}"
-echo " 1. Review and commit the updated wrangler.toml"
-echo " 2. Store the secrets files in a secure location (DO NOT commit)"
-echo " 3. Deploy all environments:"
-echo " npm run deploy:all"
-echo ""
-echo " Or deploy individually:"
-echo " npm run deploy # Development"
-echo " npm run deploy:staging # Staging"
-echo " npm run deploy:production # Production"
-echo ""
-echo -e "${YELLOW}⚠ Remember to set OAuth secrets manually:${NC}"
-echo " wrangler secret put GOOGLE_CLIENT_SECRET --env "
-echo ""
diff --git a/worker/src/__tests__/schemas.test.ts b/worker/src/__tests__/schemas.test.ts
deleted file mode 100644
index 2cfa6034..00000000
--- a/worker/src/__tests__/schemas.test.ts
+++ /dev/null
@@ -1,222 +0,0 @@
-import { describe, it, expect } from 'vitest';
-import {
- vectorClockSchema,
- syncOperationSchema,
- pushRequestSchema,
- pullRequestSchema,
- resolveRequestSchema,
- updateDeviceRequestSchema,
-} from '../schemas';
-
-describe('vectorClockSchema', () => {
- it('accepts valid vector clock with string keys and non-negative integers', () => {
- const validClock = { device1: 0, device2: 5, device3: 100 };
- expect(() => vectorClockSchema.parse(validClock)).not.toThrow();
- expect(vectorClockSchema.parse(validClock)).toEqual(validClock);
- });
-
- it('accepts empty object as valid vector clock', () => {
- expect(() => vectorClockSchema.parse({})).not.toThrow();
- });
-
- it('rejects negative values', () => {
- expect(() => vectorClockSchema.parse({ device: -1 })).toThrow();
- });
-
- it('rejects non-integer values', () => {
- expect(() => vectorClockSchema.parse({ device: 1.5 })).toThrow();
- });
-
- it('rejects non-number values', () => {
- expect(() => vectorClockSchema.parse({ device: 'not a number' })).toThrow();
- });
-});
-
-describe('syncOperationSchema', () => {
- it('accepts valid create operation', () => {
- const validOp = {
- type: 'create',
- taskId: 'task-123',
- encryptedBlob: 'encrypted-data',
- nonce: 'random-nonce',
- vectorClock: { device1: 1 },
- checksum: 'abc123',
- };
- expect(() => syncOperationSchema.parse(validOp)).not.toThrow();
- });
-
- it('accepts valid update operation', () => {
- const validOp = {
- type: 'update',
- taskId: 'task-123',
- vectorClock: { device1: 2 },
- };
- expect(() => syncOperationSchema.parse(validOp)).not.toThrow();
- });
-
- it('accepts valid delete operation', () => {
- const validOp = {
- type: 'delete',
- taskId: 'task-123',
- vectorClock: { device1: 3 },
- };
- expect(() => syncOperationSchema.parse(validOp)).not.toThrow();
- });
-
- it('rejects invalid operation type', () => {
- const invalidOp = {
- type: 'invalid',
- taskId: 'task-123',
- vectorClock: {},
- };
- expect(() => syncOperationSchema.parse(invalidOp)).toThrow();
- });
-
- it('rejects empty taskId', () => {
- const invalidOp = {
- type: 'create',
- taskId: '',
- vectorClock: {},
- };
- expect(() => syncOperationSchema.parse(invalidOp)).toThrow();
- });
-});
-
-describe('pushRequestSchema', () => {
- it('accepts valid push request', () => {
- const validRequest = {
- deviceId: 'device-123',
- operations: [
- {
- type: 'create',
- taskId: 'task-1',
- vectorClock: { device1: 1 },
- },
- ],
- clientVectorClock: { device1: 1 },
- };
- expect(() => pushRequestSchema.parse(validRequest)).not.toThrow();
- });
-
- it('accepts push request with empty operations array', () => {
- const validRequest = {
- deviceId: 'device-123',
- operations: [],
- clientVectorClock: {},
- };
- expect(() => pushRequestSchema.parse(validRequest)).not.toThrow();
- });
-
- it('rejects empty deviceId', () => {
- const invalidRequest = {
- deviceId: '',
- operations: [],
- clientVectorClock: {},
- };
- expect(() => pushRequestSchema.parse(invalidRequest)).toThrow();
- });
-});
-
-describe('pullRequestSchema', () => {
- it('accepts valid pull request with required fields', () => {
- const validRequest = {
- deviceId: 'device-123',
- lastVectorClock: { device1: 5 },
- };
- expect(() => pullRequestSchema.parse(validRequest)).not.toThrow();
- });
-
- it('accepts pull request with optional fields', () => {
- const validRequest = {
- deviceId: 'device-123',
- lastVectorClock: { device1: 5 },
- sinceTimestamp: 1704067200,
- limit: 50,
- cursor: 'cursor-abc',
- };
- expect(() => pullRequestSchema.parse(validRequest)).not.toThrow();
- });
-
- it('rejects limit exceeding 100', () => {
- const invalidRequest = {
- deviceId: 'device-123',
- lastVectorClock: {},
- limit: 101,
- };
- expect(() => pullRequestSchema.parse(invalidRequest)).toThrow();
- });
-
- it('rejects non-positive sinceTimestamp', () => {
- const invalidRequest = {
- deviceId: 'device-123',
- lastVectorClock: {},
- sinceTimestamp: 0,
- };
- expect(() => pullRequestSchema.parse(invalidRequest)).toThrow();
- });
-});
-
-describe('resolveRequestSchema', () => {
- it('accepts keep_local resolution', () => {
- const validRequest = {
- taskId: 'task-123',
- resolution: 'keep_local',
- };
- expect(() => resolveRequestSchema.parse(validRequest)).not.toThrow();
- });
-
- it('accepts keep_remote resolution', () => {
- const validRequest = {
- taskId: 'task-123',
- resolution: 'keep_remote',
- };
- expect(() => resolveRequestSchema.parse(validRequest)).not.toThrow();
- });
-
- it('accepts merge resolution with mergedTask', () => {
- const validRequest = {
- taskId: 'task-123',
- resolution: 'merge',
- mergedTask: {
- encryptedBlob: 'merged-data',
- nonce: 'merged-nonce',
- vectorClock: { device1: 10, device2: 5 },
- checksum: 'merged-checksum',
- },
- };
- expect(() => resolveRequestSchema.parse(validRequest)).not.toThrow();
- });
-
- it('rejects invalid resolution type', () => {
- const invalidRequest = {
- taskId: 'task-123',
- resolution: 'invalid',
- };
- expect(() => resolveRequestSchema.parse(invalidRequest)).toThrow();
- });
-
- it('rejects empty taskId', () => {
- const invalidRequest = {
- taskId: '',
- resolution: 'keep_local',
- };
- expect(() => resolveRequestSchema.parse(invalidRequest)).toThrow();
- });
-});
-
-describe('updateDeviceRequestSchema', () => {
- it('accepts valid device name update', () => {
- const validRequest = { name: 'New Device Name' };
- expect(() => updateDeviceRequestSchema.parse(validRequest)).not.toThrow();
- });
-
- it('rejects empty name', () => {
- const invalidRequest = { name: '' };
- expect(() => updateDeviceRequestSchema.parse(invalidRequest)).toThrow();
- });
-
- it('rejects name exceeding 100 characters', () => {
- const invalidRequest = { name: 'a'.repeat(101) };
- expect(() => updateDeviceRequestSchema.parse(invalidRequest)).toThrow();
- });
-});
diff --git a/worker/src/config.ts b/worker/src/config.ts
deleted file mode 100644
index f1d633e1..00000000
--- a/worker/src/config.ts
+++ /dev/null
@@ -1,135 +0,0 @@
-/**
- * Worker configuration constants
- * Centralizes allowed origins, timeout values, and other configuration
- */
-
-// Allowed origins for CORS
-export const ALLOWED_ORIGINS = [
- 'https://gsd.vinny.dev', // Production
- 'https://gsd-dev.vinny.dev', // Development/Staging
- 'http://localhost:3000', // Local development
- 'http://127.0.0.1:3000', // Local development (alternative)
-];
-
-// Session and token TTL values (in seconds)
-export const TTL = {
- SESSION: 60 * 60 * 24 * 7, // 7 days - session lifetime
- OAUTH_STATE: 1800, // 30 minutes - OAuth state validity (increased for iPad PWA context switching)
- REVOCATION: 60 * 60 * 24 * 7, // 7 days - keep revocation records
-} as const;
-
-export const OAUTH_COOKIE = {
- name: 'gsd_oauth_session',
- maxAge: TTL.OAUTH_STATE,
-} as const;
-
-// Cleanup retention periods (in days)
-export const RETENTION = {
- DELETED_TASKS: 30, // Clean up soft-deleted tasks after 30 days
- CONFLICT_LOGS: 90, // Clean up resolved conflicts after 90 days
- INACTIVE_DEVICES: 180, // Clean up inactive devices after 6 months
-} as const;
-
-// Rate limiting configuration
-export const RATE_LIMITS = {
- SYNC_OPERATIONS: {
- maxRequests: 100, // Max requests per window
- windowMs: 60 * 1000, // 1 minute window
- },
- REFRESH_OPERATIONS: {
- maxRequests: 20, // Max refresh attempts per window
- windowMs: 60 * 60 * 1000, // 1 hour window
- },
-} as const;
-
-// Storage quotas (in bytes)
-export const STORAGE = {
- DEFAULT_QUOTA: 10 * 1024 * 1024, // 10MB default quota per user
- TASK_SIZE_ESTIMATE: 1024, // Rough estimate per task
-} as const;
-
-// Sync payload limits (defense-in-depth against abuse)
-export const SYNC_LIMITS = {
- MAX_OPERATIONS_PER_PUSH: 200,
- MAX_ENCRYPTED_BLOB_CHARS: 400_000, // ~300KB payload (base64)
- MAX_NONCE_CHARS: 64,
- MAX_CHECKSUM_CHARS: 128,
- MAX_TASK_ID_CHARS: 128,
- MAX_VECTOR_CLOCK_ENTRIES: 100,
-} as const;
-
-// OAuth provider configurations
-export const GOOGLE_CONFIG = {
- issuer: 'https://accounts.google.com',
- authorization_endpoint: 'https://accounts.google.com/o/oauth2/v2/auth',
- token_endpoint: 'https://oauth2.googleapis.com/token',
- userinfo_endpoint: 'https://openidconnect.googleapis.com/v1/userinfo',
- jwks_uri: 'https://www.googleapis.com/oauth2/v3/certs',
- scope: 'openid email profile',
-} as const;
-
-export const APPLE_CONFIG = {
- issuer: 'https://appleid.apple.com',
- authorization_endpoint: 'https://appleid.apple.com/auth/authorize',
- token_endpoint: 'https://appleid.apple.com/auth/token',
- jwks_uri: 'https://appleid.apple.com/auth/keys',
- scope: 'openid email name',
-} as const;
-
-// Allowed development ports for localhost
-const ALLOWED_DEV_PORTS = ['3000', '3001', '5173', '8080'];
-
-/**
- * Check if an origin is allowed
- * @param origin - The origin to check
- * @param environment - Optional environment string ('development', 'staging', 'production')
- */
-export function isOriginAllowed(
- origin: string | null | undefined,
- environment?: string
-): boolean {
- if (!origin) return false;
-
- // Check exact match in allowed list
- if (ALLOWED_ORIGINS.includes(origin)) {
- return true;
- }
-
- // Only allow specific localhost ports in development environment
- // In staging/production, only the specific localhost:3000 from ALLOWED_ORIGINS is allowed
- if (environment === 'development') {
- if (
- origin.startsWith('http://localhost:') ||
- origin.startsWith('http://127.0.0.1:')
- ) {
- try {
- const url = new URL(origin);
- const port = url.port || '80';
- return ALLOWED_DEV_PORTS.includes(port);
- } catch {
- // URL constructor throws on invalid strings - treat as not allowed
- return false;
- }
- }
- }
-
- return false;
-}
-
-/**
- * Get the appropriate redirect URI based on origin
- */
-export function getRedirectUri(
- origin: string | null | undefined,
- fallback: string,
- environment?: string
-): string {
- if (!origin) return fallback;
-
- // Use origin-specific callback for allowed origins
- if (isOriginAllowed(origin, environment)) {
- return `${origin}/oauth-callback.html`;
- }
-
- return fallback;
-}
diff --git a/worker/src/constants/security.ts b/worker/src/constants/security.ts
deleted file mode 100644
index 1d248382..00000000
--- a/worker/src/constants/security.ts
+++ /dev/null
@@ -1,108 +0,0 @@
-/**
- * Security-related constants for the Worker
- * Centralizes security configuration values
- */
-
-/**
- * CORS and HTTP security headers
- */
-export const SECURITY_HEADERS = {
- /** CORS preflight cache duration (24 hours) */
- CORS_MAX_AGE_SECONDS: 86400,
-
- /** HSTS max-age directive (1 year) */
- HSTS_MAX_AGE_SECONDS: 31536000,
-} as const;
-
-/**
- * Cryptographic buffer sizes
- */
-export const CRYPTO_BUFFER = {
- /** Salt buffer size in bytes (256 bits) */
- SALT_BYTES: 32,
-
- /** Random ID buffer size in bytes (128 bits) */
- ID_BYTES: 16,
-
- /** OAuth state token length in characters */
- STATE_TOKEN_LENGTH: 32,
-
- /** PKCE code verifier length */
- CODE_VERIFIER_LENGTH: 43,
-} as const;
-
-/**
- * JWT configuration
- */
-export const JWT_CONFIG = {
- /** Apple JWT expiration in seconds (1 hour) */
- APPLE_JWT_EXP_SECONDS: 3600,
-} as const;
-
-/**
- * PBKDF2 configuration - OWASP 2023 compliant
- * Aligned across client, MCP server, and Worker for consistency
- */
-export const PBKDF2_CONFIG = {
- /** PBKDF2 iterations - OWASP 2023 recommendation for SHA-256 */
- ITERATIONS: 600_000,
-
- /** Derived key length in bits (AES-256) */
- KEY_LENGTH_BITS: 256,
-
- /** Hash algorithm */
- HASH_ALGORITHM: 'SHA-256',
-} as const;
-
-/**
- * Cookie security configuration
- *
- * SECURITY TRADE-OFFS FOR TOKEN STORAGE:
- *
- * Option 1: localStorage/IndexedDB (Current Implementation)
- * - Pros: Works offline, PWA-friendly, accessible to JavaScript
- * - Cons: Vulnerable to XSS attacks if malicious script runs
- * - Mitigated by: React's built-in XSS protection, CSP headers
- *
- * Option 2: HttpOnly Cookies
- * - Pros: Not accessible to JavaScript (XSS-proof for tokens)
- * - Cons: Breaks offline-first PWA, cookies sent on every request
- * - Would require: Server-side session management
- *
- * DECISION: localStorage/IndexedDB chosen because:
- * 1. GSD is an offline-first PWA - tokens must be accessible when offline
- * 2. MCP server integration requires JavaScript access to tokens
- * 3. XSS risk is mitigated by React's escaping and CSP headers
- * 4. All sensitive data (tasks) is encrypted client-side
- *
- * If you need maximum XSS protection and don't require offline:
- * Set COOKIE_CONFIG.USE_HTTP_ONLY = true and implement server sessions
- */
-export const COOKIE_CONFIG = {
- /** Use HttpOnly cookies instead of localStorage (breaks offline PWA) */
- USE_HTTP_ONLY: false,
-
- /** SameSite attribute for cookies */
- SAME_SITE: 'Lax' as const,
-
- /** Secure flag (HTTPS only) */
- SECURE: true,
-
- /** Cookie path */
- PATH: '/',
-
- /** Session cookie name */
- SESSION_COOKIE_NAME: 'gsd_session',
-} as const;
-
-/**
- * @deprecated Use PBKDF2_CONFIG instead
- * Kept for backward compatibility
- */
-export const WORKER_CRYPTO = {
- /** @deprecated Use PBKDF2_CONFIG.ITERATIONS */
- PBKDF2_ITERATIONS: PBKDF2_CONFIG.ITERATIONS,
-
- /** @deprecated Use PBKDF2_CONFIG.KEY_LENGTH_BITS */
- KEY_LENGTH_BITS: PBKDF2_CONFIG.KEY_LENGTH_BITS,
-} as const;
diff --git a/worker/src/handlers/cleanup.ts b/worker/src/handlers/cleanup.ts
deleted file mode 100644
index ca1f86e9..00000000
--- a/worker/src/handlers/cleanup.ts
+++ /dev/null
@@ -1,199 +0,0 @@
-/**
- * Cleanup handler for scheduled tasks
- * Removes soft-deleted tasks, old conflict logs, and inactive devices
- */
-
-import type { Env } from '../types';
-import { RETENTION } from '../config';
-import { createLogger } from '../utils/logger';
-
-const logger = createLogger('CLEANUP');
-
-export interface CleanupResult {
- deletedTasks: number;
- conflictLogs: number;
- inactiveDevices: number;
- duration: number;
-}
-
-/**
- * Run all cleanup tasks
- */
-export async function runCleanup(env: Env): Promise {
- const startTime = Date.now();
-
- logger.info('Starting scheduled cleanup tasks', {
- deletedTasksRetentionDays: RETENTION.DELETED_TASKS,
- conflictLogsRetentionDays: RETENTION.CONFLICT_LOGS,
- inactiveDevicesRetentionDays: RETENTION.INACTIVE_DEVICES,
- });
-
- const result: CleanupResult = {
- deletedTasks: 0,
- conflictLogs: 0,
- inactiveDevices: 0,
- duration: 0,
- };
-
- try {
- // Clean up soft-deleted tasks older than retention period
- result.deletedTasks = await cleanupDeletedTasks(env);
-
- // Clean up old conflict logs
- result.conflictLogs = await cleanupConflictLogs(env);
-
- // Clean up inactive devices
- result.inactiveDevices = await cleanupInactiveDevices(env);
-
- result.duration = Date.now() - startTime;
-
- logger.info('Cleanup tasks completed successfully', {
- deletedTasks: result.deletedTasks,
- conflictLogs: result.conflictLogs,
- inactiveDevices: result.inactiveDevices,
- duration: `${result.duration}ms`,
- });
-
- return result;
- } catch (error) {
- result.duration = Date.now() - startTime;
- logger.error('Cleanup tasks failed', error as Error, {
- partialResult: result,
- duration: `${result.duration}ms`,
- });
- throw error;
- }
-}
-
-/**
- * Clean up soft-deleted tasks older than retention period
- */
-async function cleanupDeletedTasks(env: Env): Promise {
- const thresholdMs = Date.now() - RETENTION.DELETED_TASKS * 24 * 60 * 60 * 1000;
-
- logger.info('Cleaning up soft-deleted tasks', {
- thresholdDate: new Date(thresholdMs).toISOString(),
- retentionDays: RETENTION.DELETED_TASKS,
- });
-
- try {
- // First, count how many tasks will be deleted
- const countResult = await env.DB.prepare(
- 'SELECT COUNT(*) as count FROM encrypted_tasks WHERE deleted_at IS NOT NULL AND deleted_at < ?'
- )
- .bind(thresholdMs)
- .first();
-
- const count = (countResult?.count as number) || 0;
-
- if (count === 0) {
- logger.info('No soft-deleted tasks to clean up');
- return 0;
- }
-
- // Permanently delete the tasks
- await env.DB.prepare(
- 'DELETE FROM encrypted_tasks WHERE deleted_at IS NOT NULL AND deleted_at < ?'
- )
- .bind(thresholdMs)
- .run();
-
- logger.info('Soft-deleted tasks cleaned up', {
- deletedCount: count,
- thresholdDate: new Date(thresholdMs).toISOString(),
- });
-
- return count;
- } catch (error) {
- logger.error('Failed to clean up deleted tasks', error as Error, {
- threshold: thresholdMs,
- });
- throw error;
- }
-}
-
-/**
- * Clean up old conflict logs
- */
-async function cleanupConflictLogs(env: Env): Promise {
- const thresholdMs = Date.now() - RETENTION.CONFLICT_LOGS * 24 * 60 * 60 * 1000;
-
- logger.info('Cleaning up old conflict logs', {
- thresholdDate: new Date(thresholdMs).toISOString(),
- retentionDays: RETENTION.CONFLICT_LOGS,
- });
-
- try {
- const countResult = await env.DB.prepare(
- 'SELECT COUNT(*) as count FROM conflict_log WHERE resolved_at < ?'
- )
- .bind(thresholdMs)
- .first();
-
- const count = (countResult?.count as number) || 0;
-
- if (count === 0) {
- logger.info('No old conflict logs to clean up');
- return 0;
- }
-
- await env.DB.prepare('DELETE FROM conflict_log WHERE resolved_at < ?')
- .bind(thresholdMs)
- .run();
-
- logger.info('Old conflict logs cleaned up', {
- deletedCount: count,
- thresholdDate: new Date(thresholdMs).toISOString(),
- });
-
- return count;
- } catch (error) {
- logger.error('Failed to clean up conflict logs', error as Error, {
- threshold: thresholdMs,
- });
- throw error;
- }
-}
-
-/**
- * Clean up inactive devices
- */
-async function cleanupInactiveDevices(env: Env): Promise {
- const thresholdMs = Date.now() - RETENTION.INACTIVE_DEVICES * 24 * 60 * 60 * 1000;
-
- logger.info('Cleaning up inactive devices', {
- thresholdDate: new Date(thresholdMs).toISOString(),
- retentionDays: RETENTION.INACTIVE_DEVICES,
- });
-
- try {
- const countResult = await env.DB.prepare(
- 'SELECT COUNT(*) as count FROM devices WHERE last_seen_at < ? AND is_active = 0'
- )
- .bind(thresholdMs)
- .first();
-
- const count = (countResult?.count as number) || 0;
-
- if (count === 0) {
- logger.info('No inactive devices to clean up');
- return 0;
- }
-
- await env.DB.prepare('DELETE FROM devices WHERE last_seen_at < ? AND is_active = 0')
- .bind(thresholdMs)
- .run();
-
- logger.info('Inactive devices cleaned up', {
- deletedCount: count,
- thresholdDate: new Date(thresholdMs).toISOString(),
- });
-
- return count;
- } catch (error) {
- logger.error('Failed to clean up inactive devices', error as Error, {
- threshold: thresholdMs,
- });
- throw error;
- }
-}
diff --git a/worker/src/handlers/oidc.ts b/worker/src/handlers/oidc.ts
deleted file mode 100644
index fadc20f2..00000000
--- a/worker/src/handlers/oidc.ts
+++ /dev/null
@@ -1,18 +0,0 @@
-/**
- * OIDC OAuth Handler - Re-export for backward compatibility
- *
- * This file maintains backward compatibility by re-exporting all OIDC functions
- * from the modular implementation in the oidc/ directory.
- *
- * Modular structure (v0.5.0):
- * - oidc/helpers.ts - Utility functions for PKCE, Apple JWT, etc.
- * - oidc/token-exchange.ts - Code-to-token exchange logic
- * - oidc/id-verification.ts - ID token verification with JWKS
- * - oidc/initiate.ts - OAuth flow initiation
- * - oidc/callback.ts - OAuth callback handler (main orchestration)
- * - oidc/result.ts - OAuth result retrieval
- */
-
-export { initiateOAuth } from './oidc/initiate';
-export { handleOAuthCallback } from './oidc/callback';
-export { getOAuthResult } from './oidc/result';
diff --git a/worker/src/handlers/oidc/callback.ts b/worker/src/handlers/oidc/callback.ts
deleted file mode 100644
index e2941ada..00000000
--- a/worker/src/handlers/oidc/callback.ts
+++ /dev/null
@@ -1,145 +0,0 @@
-import type { Env } from '../../types';
-import { errorResponse } from '../../middleware/cors';
-import { createLogger } from '../../utils/logger';
-import { exchangeCodeForTokens } from './token-exchange';
-import { verifyIdToken } from './id-verification';
-import { parseOAuthRequest } from './request-parser';
-import { validateOAuthState, deleteOAuthState } from './state-validator';
-import { findOrCreateUser } from './user-manager';
-import { createDevice, createSession, buildAuthData, storeOAuthResult } from './session-manager';
-import {
- buildSuccessRedirect,
- buildSuccessJson,
- buildErrorRedirect,
- buildErrorJson,
- getErrorContext,
- storeErrorResult,
- buildStateExpiredRedirect,
-} from './response-builder';
-import { getAppOriginFromRequest } from './helpers';
-
-const logger = createLogger('OIDC:Callback');
-
-/**
- * Handle OAuth callback
- * POST /api/auth/oauth/callback
- */
-export async function handleOAuthCallback(request: Request, env: Env): Promise {
- const origin = request.headers.get('Origin');
-
- try {
- // Parse request to extract code and state
- const { code, state } = await parseOAuthRequest(request);
-
- if (!code || !state) {
- logger.warn('Invalid callback parameters', {
- hasCode: !!code,
- hasState: !!state,
- url: request.url,
- });
- return errorResponse('Invalid callback parameters', 400, origin);
- }
-
- // Validate OAuth state from KV
- const stateResult = await validateOAuthState(state, env, request, origin);
- if (!stateResult.success) {
- // For state-not-found errors, redirect back to app with friendly error
- // This handles cases like: expired states, PWA lifecycle issues, cached OAuth URLs
- const appOrigin = getAppOriginFromRequest(request, env);
-
- logger.warn('OAuth state validation failed, redirecting to app', {
- error: stateResult.error,
- statusCode: stateResult.statusCode,
- appOrigin,
- statePrefix: state.substring(0, 8) + '...',
- });
-
- if (appOrigin) {
- return buildStateExpiredRedirect(appOrigin, stateResult.error);
- }
-
- // Fallback to JSON error if no app origin can be determined
- return errorResponse(stateResult.error, stateResult.statusCode, origin);
- }
-
- const { stateData } = stateResult;
- const { codeVerifier, provider, redirectUri, appOrigin, sessionId } = stateData;
-
- // Delete used state
- await deleteOAuthState(state, env);
-
- // Exchange code for tokens
- const tokens = await exchangeCodeForTokens(provider, code, codeVerifier, redirectUri, env);
-
- // Verify ID token and extract user info
- const { email, providerUserId } = await verifyIdToken(provider, tokens.id_token!, env);
-
- // Find or create user
- const userResult = await findOrCreateUser(provider, providerUserId, email, env);
- if (!userResult.success) {
- return errorResponse(userResult.error, userResult.statusCode, origin);
- }
-
- const { user, isNewUser } = userResult;
-
- // Create device and session
- const deviceId = await createDevice(user.id, provider, env);
- const session = await createSession(user.id, user.email, deviceId, env);
-
- // Build auth data and store result
- const authData = buildAuthData(user.id, user.email, session, user.encryption_salt, provider);
- await storeOAuthResult(state, authData, appOrigin, sessionId, env);
-
- logger.info('OAuth callback successful', {
- userId: user.id,
- deviceId,
- provider,
- isNewUser,
- state,
- appOrigin,
- });
-
- // Return redirect or JSON response
- if (appOrigin) {
- return buildSuccessRedirect(appOrigin, state);
- }
-
- return buildSuccessJson(state, origin);
- } catch (error: unknown) {
- return handleCallbackError(error, request, env, origin);
- }
-}
-
-/**
- * Handle errors during OAuth callback
- */
-async function handleCallbackError(
- error: unknown,
- request: Request,
- env: Env,
- origin: string | null
-): Promise {
- logger.error('OAuth callback failed', error as Error, { provider: 'unknown' });
-
- const url = new URL(request.url);
- const state = url.searchParams.get('state');
-
- if (!state) {
- return buildErrorJson(error, origin, env);
- }
-
- // Try to get context from state
- const { appOrigin, sessionId } = await getErrorContext(state, env);
- const message = error instanceof Error ? error.message : 'OAuth callback failed';
-
- // Store error result for later retrieval
- await storeErrorResult(state, message, appOrigin || origin, sessionId, env);
-
- const redirectTarget = appOrigin || origin;
-
- if (redirectTarget) {
- return buildErrorRedirect(redirectTarget, state, message);
- }
-
- return buildErrorJson(error, origin, env);
-}
diff --git a/worker/src/handlers/oidc/helpers.ts b/worker/src/handlers/oidc/helpers.ts
deleted file mode 100644
index 09e53e69..00000000
--- a/worker/src/handlers/oidc/helpers.ts
+++ /dev/null
@@ -1,155 +0,0 @@
-import type { Env } from '../../types';
-import { JWT_CONFIG } from '../../constants/security';
-import { ALLOWED_ORIGINS } from '../../config';
-
-/** Named origin constants to avoid fragile positional array access */
-const PRODUCTION_ORIGIN = ALLOWED_ORIGINS[0]; // https://gsd.vinny.dev
-const STAGING_ORIGIN = ALLOWED_ORIGINS[1]; // https://gsd-dev.vinny.dev
-
-/**
- * Determine the app origin from request context
- * Used when OAuth state is not available (expired/invalid)
- * Returns the most likely app origin based on request headers and environment
- */
-export function getAppOriginFromRequest(request: Request, env: Env): string | null {
- // Priority 1: Use OAUTH_CALLBACK_BASE if set (explicit configuration)
- if (env.OAUTH_CALLBACK_BASE) {
- return env.OAUTH_CALLBACK_BASE;
- }
-
- // Priority 2: Check Referer header (might contain app origin)
- const referer = request.headers.get('Referer');
- if (referer) {
- try {
- const refererUrl = new URL(referer);
- const refererOrigin = refererUrl.origin;
- if (ALLOWED_ORIGINS.includes(refererOrigin)) {
- return refererOrigin;
- }
- } catch {
- // Invalid referer URL, continue to next option
- }
- }
-
- // Priority 3: Check Origin header
- const origin = request.headers.get('Origin');
- if (origin && ALLOWED_ORIGINS.includes(origin)) {
- return origin;
- }
-
- // Priority 4: Derive from environment
- if (env.ENVIRONMENT === 'production') {
- return PRODUCTION_ORIGIN;
- }
- if (env.ENVIRONMENT === 'staging' || env.ENVIRONMENT === 'development') {
- return STAGING_ORIGIN;
- }
-
- // Priority 5: Default to production
- return PRODUCTION_ORIGIN;
-}
-
-/**
- * Generate random string for state and code verifier
- */
-export function generateRandomString(length: number): string {
- const array = new Uint8Array(length);
- crypto.getRandomValues(array);
- return Array.from(array, (byte) => byte.toString(16).padStart(2, '0')).join('');
-}
-
-/**
- * Generate PKCE code challenge from verifier
- */
-export async function generateCodeChallenge(verifier: string): Promise {
- const encoder = new TextEncoder();
- const data = encoder.encode(verifier);
- const hash = await crypto.subtle.digest('SHA-256', data);
-
- // Base64URL encode
- return btoa(String.fromCharCode(...new Uint8Array(hash)))
- .replace(/\+/g, '-')
- .replace(/\//g, '_')
- .replace(/=+$/, '');
-}
-
-/**
- * Base64URL encode
- */
-export function base64UrlEncode(input: string | ArrayBuffer): string {
- let str: string;
-
- if (typeof input === 'string') {
- str = btoa(input);
- } else {
- str = btoa(String.fromCharCode(...new Uint8Array(input)));
- }
-
- return str.replace(/\+/g, '-').replace(/\//g, '_').replace(/=+$/, '');
-}
-
-/**
- * Import Apple private key for signing
- */
-export async function importApplePrivateKey(pem: string): Promise {
- // Remove PEM headers and decode
- const pemContents = pem
- .replace(/-----BEGIN PRIVATE KEY-----/, '')
- .replace(/-----END PRIVATE KEY-----/, '')
- .replace(/\s/g, '');
-
- const binaryDer = Uint8Array.from(atob(pemContents), (c) => c.charCodeAt(0));
-
- return crypto.subtle.importKey(
- 'pkcs8',
- binaryDer,
- {
- name: 'ECDSA',
- namedCurve: 'P-256',
- },
- false,
- ['sign']
- );
-}
-
-/**
- * Generate Apple client secret JWT
- * Required for Apple Sign In token exchange
- */
-export async function generateAppleClientSecret(env: Env): Promise {
- const now = Math.floor(Date.now() / 1000);
-
- // JWT header
- const header = {
- alg: 'ES256',
- kid: env.APPLE_KEY_ID,
- };
-
- // JWT payload
- const payload = {
- iss: env.APPLE_TEAM_ID,
- iat: now,
- exp: now + JWT_CONFIG.APPLE_JWT_EXP_SECONDS,
- aud: 'https://appleid.apple.com',
- sub: env.APPLE_CLIENT_ID,
- };
-
- // Encode header and payload
- const encodedHeader = base64UrlEncode(JSON.stringify(header));
- const encodedPayload = base64UrlEncode(JSON.stringify(payload));
- const message = `${encodedHeader}.${encodedPayload}`;
-
- // Sign with Apple private key
- const privateKeyPem = env.APPLE_PRIVATE_KEY;
- const privateKey = await importApplePrivateKey(privateKeyPem);
-
- const signature = await crypto.subtle.sign(
- { name: 'ECDSA', hash: 'SHA-256' },
- privateKey,
- new TextEncoder().encode(message)
- );
-
- const encodedSignature = base64UrlEncode(signature);
-
- return `${message}.${encodedSignature}`;
-}
diff --git a/worker/src/handlers/oidc/id-verification.ts b/worker/src/handlers/oidc/id-verification.ts
deleted file mode 100644
index bc11eca9..00000000
--- a/worker/src/handlers/oidc/id-verification.ts
+++ /dev/null
@@ -1,56 +0,0 @@
-import { jwtVerify, createRemoteJWKSet } from 'jose';
-import type { Env } from '../../types';
-import { GOOGLE_CONFIG, APPLE_CONFIG } from '../../config';
-import { createLogger } from '../../utils/logger';
-
-const logger = createLogger('OIDC:IDVerification');
-
-export interface VerifiedUserInfo {
- email: string;
- providerUserId: string;
- emailVerified: boolean;
-}
-
-/**
- * Verify ID token and extract user information
- * Uses JWKS for signature verification
- */
-export async function verifyIdToken(
- provider: 'google' | 'apple',
- idToken: string,
- env: Env
-): Promise {
- const config = provider === 'google' ? GOOGLE_CONFIG : APPLE_CONFIG;
- const clientId = provider === 'google' ? env.GOOGLE_CLIENT_ID : env.APPLE_CLIENT_ID;
-
- logger.info('Verifying ID token', { provider });
-
- // Verify ID token using JWKS
- const JWKS = createRemoteJWKSet(new URL(config.jwks_uri));
- const { payload } = await jwtVerify(idToken, JWKS, {
- issuer: config.issuer,
- audience: clientId,
- });
-
- // Extract user info
- const email = payload.email as string;
- const providerUserId = payload.sub as string;
- const emailVerified = payload.email_verified as boolean;
-
- if (!email || !emailVerified) {
- logger.error('Email not verified', new Error('Email verification failed'), {
- provider,
- email,
- emailVerified,
- });
- throw new Error('Email not verified');
- }
-
- logger.info('ID token verified successfully', { provider, email });
-
- return {
- email,
- providerUserId,
- emailVerified,
- };
-}
diff --git a/worker/src/handlers/oidc/initiate.ts b/worker/src/handlers/oidc/initiate.ts
deleted file mode 100644
index 544d9f03..00000000
--- a/worker/src/handlers/oidc/initiate.ts
+++ /dev/null
@@ -1,107 +0,0 @@
-import type { Env } from '../../types';
-import { jsonResponse, errorResponse } from '../../middleware/cors';
-import { GOOGLE_CONFIG, APPLE_CONFIG, TTL, OAUTH_COOKIE, isOriginAllowed } from '../../config';
-import { createLogger } from '../../utils/logger';
-import { generateRandomString, generateCodeChallenge } from './helpers';
-import { createCookie } from '../../utils/cookies';
-
-const logger = createLogger('OIDC:Initiate');
-
-/**
- * Initiate OAuth flow
- * GET /api/auth/oauth/:provider/start
- */
-export async function initiateOAuth(
- request: Request,
- env: Env,
- provider: 'google' | 'apple'
-): Promise {
- const requestOrigin = request.headers.get('Origin');
- const allowedOrigin = requestOrigin && isOriginAllowed(requestOrigin, env.ENVIRONMENT) ? requestOrigin : null;
-
- try {
- const config = provider === 'google' ? GOOGLE_CONFIG : APPLE_CONFIG;
- const clientId = provider === 'google' ? env.GOOGLE_CLIENT_ID : env.APPLE_CLIENT_ID;
-
- if (!clientId) {
- return errorResponse(`${provider} OAuth not configured`, 500, allowedOrigin || undefined);
- }
-
- // Determine the worker's callback URI (where OAuth provider redirects)
- // IMPORTANT: Use OAUTH_CALLBACK_BASE to ensure callback domain matches cookie domain.
- // When behind CloudFront proxy, request.host might be the worker's direct domain,
- // but cookies are set for the frontend domain. Using OAUTH_CALLBACK_BASE ensures
- // Google redirects to the same domain where the session cookie was set.
- const requestUrl = new URL(request.url);
- const callbackBase = env.OAUTH_CALLBACK_BASE || `${requestUrl.protocol}//${requestUrl.host}`;
- const workerCallbackUri = `${callbackBase}/api/auth/oauth/callback`;
-
- // Determine the app origin (where we'll redirect after processing)
- // Use OAUTH_CALLBACK_BASE if set, otherwise use Origin header
- const trustedAppOrigin =
- env.OAUTH_CALLBACK_BASE ||
- allowedOrigin ||
- env.OAUTH_REDIRECT_URI.replace('/oauth-callback.html', '');
-
- // Generate state and PKCE verifier
- const state = generateRandomString(32);
- const codeVerifier = generateRandomString(64);
- const codeChallenge = await generateCodeChallenge(codeVerifier);
- const sessionId = crypto.randomUUID();
-
- // Store state, verifier, and app origin in KV (short-lived)
- await env.KV.put(
- `oauth_state:${state}`,
- JSON.stringify({
- codeVerifier,
- provider,
- redirectUri: workerCallbackUri,
- appOrigin: trustedAppOrigin,
- sessionId,
- createdAt: Date.now(),
- }),
- { expirationTtl: TTL.OAUTH_STATE }
- );
-
- // Build authorization URL
- const authUrl = new URL(config.authorization_endpoint);
- authUrl.searchParams.set('client_id', clientId);
- authUrl.searchParams.set('redirect_uri', workerCallbackUri);
- authUrl.searchParams.set('response_type', 'code');
- authUrl.searchParams.set('scope', config.scope);
-
- authUrl.searchParams.set('state', state);
- authUrl.searchParams.set('code_challenge', codeChallenge);
- authUrl.searchParams.set('code_challenge_method', 'S256');
-
- if (provider === 'apple') {
- authUrl.searchParams.set('response_mode', 'form_post');
- }
-
- logger.info('OAuth flow initiated', {
- provider,
- statePrefix: state.substring(0, 8) + '...',
- workerCallbackUri,
- appOrigin: trustedAppOrigin,
- origin: allowedOrigin || 'default',
- });
-
- const response = jsonResponse({
- authUrl: authUrl.toString(),
- state,
- }, 200, allowedOrigin || undefined);
-
- const cookie = createCookie(OAUTH_COOKIE.name, sessionId, {
- httpOnly: true,
- sameSite: 'Lax',
- secure: requestUrl.protocol === 'https:',
- maxAge: OAUTH_COOKIE.maxAge,
- });
- response.headers.append('Set-Cookie', cookie);
-
- return response;
- } catch (error: unknown) {
- logger.error('OAuth initiation failed', error as Error, { provider });
- return errorResponse('Failed to initiate OAuth', 500, allowedOrigin || undefined);
- }
-}
diff --git a/worker/src/handlers/oidc/request-parser.ts b/worker/src/handlers/oidc/request-parser.ts
deleted file mode 100644
index 076f1863..00000000
--- a/worker/src/handlers/oidc/request-parser.ts
+++ /dev/null
@@ -1,35 +0,0 @@
-export interface ParsedOAuthRequest {
- code: string | null;
- state: string | null;
-}
-
-/**
- * Parse OAuth callback request to extract code and state
- * Handles multiple request formats: JSON POST, form POST (Apple), GET query params (Google)
- */
-export async function parseOAuthRequest(request: Request): Promise {
- const url = new URL(request.url);
- const contentType = request.headers.get('content-type');
-
- if (request.method === 'POST' && contentType?.includes('application/json')) {
- const body = (await request.json()) as { code?: string; state?: string };
- return {
- code: body.code ?? null,
- state: body.state ?? null,
- };
- }
-
- if (request.method === 'POST' && contentType?.includes('application/x-www-form-urlencoded')) {
- const formData = await request.formData();
- return {
- code: formData.get('code') as string,
- state: formData.get('state') as string,
- };
- }
-
- // GET request with query params (Google redirect)
- return {
- code: url.searchParams.get('code'),
- state: url.searchParams.get('state'),
- };
-}
diff --git a/worker/src/handlers/oidc/response-builder.ts b/worker/src/handlers/oidc/response-builder.ts
deleted file mode 100644
index 4c7456f3..00000000
--- a/worker/src/handlers/oidc/response-builder.ts
+++ /dev/null
@@ -1,145 +0,0 @@
-import type { Env } from '../../types';
-import { jsonResponse } from '../../middleware/cors';
-import { TTL } from '../../config';
-import { createLogger } from '../../utils/logger';
-
-const logger = createLogger('OIDC:RESPONSE');
-
-/**
- * Build redirect response for successful OAuth callback
- */
-export function buildSuccessRedirect(appOrigin: string, state: string): Response {
- const redirectUrl = new URL('/oauth-callback.html', appOrigin);
- redirectUrl.searchParams.set('success', 'true');
- redirectUrl.searchParams.set('state', state);
-
- return new Response(null, {
- status: 302,
- headers: {
- Location: redirectUrl.toString(),
- 'Cache-Control': 'no-store, no-cache, must-revalidate, private',
- },
- });
-}
-
-/**
- * Build JSON response for successful OAuth callback (fallback when no appOrigin)
- */
-export function buildSuccessJson(state: string, origin: string | null): Response {
- return jsonResponse({ status: 'success', state }, 200, origin);
-}
-
-/**
- * Build redirect response for failed OAuth callback
- */
-export function buildErrorRedirect(redirectTarget: string, state: string, message: string): Response {
- const redirectUrl = new URL('/oauth-callback.html', redirectTarget);
- redirectUrl.searchParams.set('success', 'false');
- redirectUrl.searchParams.set('state', state);
- redirectUrl.searchParams.set('error', message);
-
- return new Response(null, {
- status: 302,
- headers: {
- Location: redirectUrl.toString(),
- 'Cache-Control': 'no-store, no-cache, must-revalidate, private',
- },
- });
-}
-
-/**
- * Build redirect response for state-expired/not-found errors
- * Redirects directly to the app with a friendly error message
- * Used when we can't use oauth-callback.html because state is invalid
- */
-export function buildStateExpiredRedirect(appOrigin: string, message: string): Response {
- // Redirect to app root with error parameters
- // The app will show a toast with the error message
- const redirectUrl = new URL('/', appOrigin);
- redirectUrl.searchParams.set('oauth_error', 'session_expired');
- redirectUrl.searchParams.set('oauth_message', message);
-
- return new Response(null, {
- status: 302,
- headers: {
- Location: redirectUrl.toString(),
- 'Cache-Control': 'no-store, no-cache, must-revalidate, private',
- },
- });
-}
-
-/**
- * Build JSON response for failed OAuth callback
- */
-export function buildErrorJson(
- error: unknown,
- origin: string | null,
- env: Env
-): Response {
- const message = error instanceof Error ? error.message : 'OAuth callback failed';
-
- return jsonResponse(
- {
- error: 'OAuth callback failed',
- message: env.ENVIRONMENT === 'development' ? message : 'OAuth authentication failed',
- ...(env.ENVIRONMENT === 'development' &&
- error instanceof Error && {
- stack: error.stack?.split('\n').slice(0, 3).join('\n'),
- }),
- },
- 500,
- origin
- );
-}
-
-interface ErrorContext {
- appOrigin: string | null;
- sessionId: string | null;
-}
-
-/**
- * Retrieve error context from OAuth state in KV
- */
-export async function getErrorContext(state: string, env: Env): Promise {
- try {
- const stateDataStr = await env.KV.get(`oauth_state:${state}`);
- if (stateDataStr) {
- const stateData = JSON.parse(stateDataStr);
- return {
- appOrigin: stateData.appOrigin || null,
- sessionId: stateData.sessionId || null,
- };
- }
- } catch (error) {
- // KV lookup failed - proceed with null context (non-critical, best-effort)
- logger.warn('Failed to retrieve OAuth state context', {
- state: state.substring(0, 8) + '...',
- error: String(error),
- });
- }
-
- return { appOrigin: null, sessionId: null };
-}
-
-/**
- * Store error result in KV for later retrieval
- */
-export async function storeErrorResult(
- state: string,
- message: string,
- appOrigin: string | null,
- sessionId: string | null,
- env: Env
-): Promise {
- await env.KV.put(
- `oauth_result:${state}`,
- JSON.stringify({
- status: 'error',
- error: message,
- appOrigin,
- sessionId,
- createdAt: Date.now(),
- }),
- { expirationTtl: TTL.OAUTH_STATE }
- );
-}
diff --git a/worker/src/handlers/oidc/result.ts b/worker/src/handlers/oidc/result.ts
deleted file mode 100644
index 2cea7ed2..00000000
--- a/worker/src/handlers/oidc/result.ts
+++ /dev/null
@@ -1,83 +0,0 @@
-import type { Env } from '../../types';
-import { jsonResponse, errorResponse } from '../../middleware/cors';
-import { OAUTH_COOKIE } from '../../config';
-import { createCookie, getCookie } from '../../utils/cookies';
-
-/**
- * Retrieve OAuth result using state token
- * GET /api/auth/oauth/result?state=...
- */
-export async function getOAuthResult(request: Request, env: Env): Promise {
- const origin = request.headers.get('Origin');
- const url = new URL(request.url);
- const state = url.searchParams.get('state');
-
- if (!state) {
- return errorResponse('Missing state parameter', 400, origin);
- }
-
- const oauthSession = getCookie(request.headers.get('Cookie'), OAUTH_COOKIE.name);
-
- if (!oauthSession) {
- return errorResponse('Missing OAuth session cookie', 401, origin);
- }
-
- const resultKey = `oauth_result:${state}`;
- const resultStr = await env.KV.get(resultKey);
-
- if (!resultStr) {
- return jsonResponse(
- {
- status: 'expired',
- message: 'OAuth result not found or expired',
- },
- 410,
- origin
- );
- }
-
- const result = JSON.parse(resultStr) as {
- status: 'success' | 'error';
- authData?: Record;
- error?: string;
- sessionId?: string | null;
- };
-
- if (!result.sessionId || result.sessionId !== oauthSession) {
- return errorResponse('OAuth session validation failed', 401, origin);
- }
-
- await env.KV.delete(resultKey);
-
- const clearCookie = createCookie(OAUTH_COOKIE.name, '', {
- httpOnly: true,
- sameSite: 'Lax',
- secure: url.protocol === 'https:',
- maxAge: 0,
- expires: new Date(0),
- });
-
- if (result.status === 'error') {
- const response = jsonResponse(
- {
- status: 'error',
- error: result.error || 'OAuth failed',
- },
- 200,
- origin
- );
- response.headers.append('Set-Cookie', clearCookie);
- return response;
- }
-
- const response = jsonResponse(
- {
- status: 'success',
- authData: result.authData,
- },
- 200,
- origin
- );
- response.headers.append('Set-Cookie', clearCookie);
- return response;
-}
diff --git a/worker/src/handlers/oidc/session-manager.ts b/worker/src/handlers/oidc/session-manager.ts
deleted file mode 100644
index 44e415b1..00000000
--- a/worker/src/handlers/oidc/session-manager.ts
+++ /dev/null
@@ -1,114 +0,0 @@
-import type { Env } from '../../types';
-import { generateId } from '../../utils/crypto';
-import { createToken } from '../../utils/jwt';
-import { TTL } from '../../config';
-
-export interface SessionData {
- deviceId: string;
- token: string;
- expiresAt: number;
-}
-
-/**
- * Create a new device entry for the authenticated user
- */
-export async function createDevice(
- userId: string,
- provider: string,
- env: Env
-): Promise {
- const deviceId = generateId();
- const deviceName = `${provider === 'google' ? 'Google' : 'Apple'} Device`;
- const now = Date.now();
-
- await env.DB.prepare(
- `INSERT INTO devices (id, user_id, device_name, last_seen_at, created_at, is_active)
- VALUES (?, ?, ?, ?, ?, 1)`
- )
- .bind(deviceId, userId, deviceName, now, now)
- .run();
-
- return deviceId;
-}
-
-/**
- * Generate JWT token and store session in KV
- */
-export async function createSession(
- userId: string,
- email: string,
- deviceId: string,
- env: Env
-): Promise {
- const { token, jti, expiresAt } = await createToken(userId, email, deviceId, env.JWT_SECRET);
- const now = Date.now();
-
- await env.KV.put(
- `session:${userId}:${jti}`,
- JSON.stringify({
- deviceId,
- issuedAt: now,
- expiresAt,
- lastActivity: now,
- }),
- { expirationTtl: TTL.SESSION }
- );
-
- return { deviceId, token, expiresAt };
-}
-
-export interface AuthData {
- userId: string;
- deviceId: string;
- email: string;
- token: string;
- expiresAt: number;
- requiresEncryptionSetup: boolean;
- encryptionSalt?: string;
- provider: string;
-}
-
-/**
- * Build the authentication data response object
- */
-export function buildAuthData(
- userId: string,
- email: string,
- session: SessionData,
- encryptionSalt: string | null,
- provider: string
-): AuthData {
- return {
- userId,
- deviceId: session.deviceId,
- email,
- token: session.token,
- expiresAt: session.expiresAt,
- requiresEncryptionSetup: !encryptionSalt,
- encryptionSalt: encryptionSalt || undefined,
- provider,
- };
-}
-
-/**
- * Store OAuth result in KV for later retrieval by the app
- */
-export async function storeOAuthResult(
- state: string,
- authData: AuthData,
- appOrigin: string | null,
- sessionId: string,
- env: Env
-): Promise {
- await env.KV.put(
- `oauth_result:${state}`,
- JSON.stringify({
- status: 'success',
- authData,
- appOrigin,
- sessionId,
- createdAt: Date.now(),
- }),
- { expirationTtl: TTL.OAUTH_STATE }
- );
-}
diff --git a/worker/src/handlers/oidc/state-validator.ts b/worker/src/handlers/oidc/state-validator.ts
deleted file mode 100644
index da51cd51..00000000
--- a/worker/src/handlers/oidc/state-validator.ts
+++ /dev/null
@@ -1,151 +0,0 @@
-import type { Env } from '../../types';
-import { createLogger } from '../../utils/logger';
-import { getCookie } from '../../utils/cookies';
-import { OAUTH_COOKIE } from '../../config';
-
-const logger = createLogger('OIDC:StateValidator');
-
-export type OAuthProvider = 'google' | 'apple';
-
-export interface OAuthStateData {
- codeVerifier: string;
- provider: OAuthProvider;
- redirectUri: string;
- appOrigin: string | null;
- createdAt: number;
- sessionId: string;
-}
-
-export interface StateValidationResult {
- success: true;
- stateData: OAuthStateData;
-}
-
-export interface StateValidationError {
- success: false;
- error: string;
- statusCode: number;
-}
-
-export type StateValidationOutcome = StateValidationResult | StateValidationError;
-
-/**
- * Validate OAuth state from KV and return parsed state data
- */
-export async function validateOAuthState(
- state: string,
- env: Env,
- request: Request,
- origin: string | null
-): Promise {
- const stateKey = `oauth_state:${state}`;
- const stateDataStr = await env.KV.get(stateKey);
-
- if (!stateDataStr) {
- // Check if this state was already processed (OAuth result exists)
- // This handles duplicate callbacks or page refreshes
- const resultKey = `oauth_result:${state}`;
- const existingResult = await env.KV.get(resultKey);
-
- if (existingResult) {
- logger.info('OAuth state not found but result exists - likely duplicate callback', {
- statePrefix: state.substring(0, 8) + '...',
- hasResult: true,
- });
-
- return {
- success: false,
- error:
- 'This sign-in link has already been used. If you just signed in, please return to the app.',
- statusCode: 400,
- };
- }
-
- logger.warn('OAuth state not found in KV', {
- statePrefix: state.substring(0, 8) + '...',
- stateLength: state.length,
- stateKey,
- url: request.url,
- timeNow: new Date().toISOString(),
- userAgent: request.headers.get('User-Agent'),
- origin,
- });
-
- return {
- success: false,
- error:
- 'Sign-in session expired. Please try signing in again.',
- statusCode: 400,
- };
- }
-
- const stateData = JSON.parse(stateDataStr) as OAuthStateData;
-
- if (!stateData.sessionId) {
- logger.warn('OAuth state missing session binding', {
- statePrefix: state.substring(0, 8) + '...',
- });
-
- return {
- success: false,
- error: 'OAuth session invalid or expired. Please retry sign in.',
- statusCode: 400,
- };
- }
-
- // Verify session cookie matches stored sessionId (cryptographic binding)
- // This prevents OAuth CSRF attacks where an attacker tricks a victim into using a pre-generated state
- const cookieHeader = request.headers.get('Cookie');
- const sessionCookie = getCookie(cookieHeader, OAUTH_COOKIE.name);
-
- if (!sessionCookie) {
- logger.warn('OAuth session cookie missing - possible CSRF attempt', {
- statePrefix: state.substring(0, 8) + '...',
- hasCookieHeader: !!cookieHeader,
- userAgent: request.headers.get('User-Agent'),
- });
-
- return {
- success: false,
- error: 'Session verification failed. Please ensure cookies are enabled and try again.',
- statusCode: 400,
- };
- }
-
- if (sessionCookie !== stateData.sessionId) {
- logger.warn('OAuth session cookie mismatch - possible CSRF attempt', {
- statePrefix: state.substring(0, 8) + '...',
- storedSessionPrefix: stateData.sessionId.substring(0, 8) + '...',
- cookieSessionPrefix: sessionCookie.substring(0, 8) + '...',
- userAgent: request.headers.get('User-Agent'),
- });
-
- return {
- success: false,
- error: 'Session verification failed. Please try signing in again.',
- statusCode: 400,
- };
- }
-
- // Log timing information for diagnostics
- const now = Date.now();
- const flowDuration = stateData.createdAt ? now - stateData.createdAt : null;
-
- logger.info('OAuth callback received - state valid', {
- provider: stateData.provider,
- statePrefix: state.substring(0, 8) + '...',
- flowDurationMs: flowDuration,
- flowDurationSec: flowDuration ? Math.round(flowDuration / 1000) : null,
- userAgent: request.headers.get('User-Agent'),
- });
-
- return { success: true, stateData };
-}
-
-/**
- * Delete the OAuth state from KV after successful validation
- */
-export async function deleteOAuthState(state: string, env: Env): Promise {
- const stateKey = `oauth_state:${state}`;
- await env.KV.delete(stateKey);
-}
diff --git a/worker/src/handlers/oidc/token-exchange.ts b/worker/src/handlers/oidc/token-exchange.ts
deleted file mode 100644
index 9cb1a595..00000000
--- a/worker/src/handlers/oidc/token-exchange.ts
+++ /dev/null
@@ -1,76 +0,0 @@
-import type { Env } from '../../types';
-import { GOOGLE_CONFIG, APPLE_CONFIG } from '../../config';
-import { createLogger } from '../../utils/logger';
-import { generateAppleClientSecret } from './helpers';
-
-const logger = createLogger('OIDC:TokenExchange');
-
-export interface TokenExchangeResult {
- access_token?: string;
- id_token?: string;
-}
-
-/**
- * Exchange authorization code for tokens
- * Handles both Google and Apple token endpoints
- */
-export async function exchangeCodeForTokens(
- provider: 'google' | 'apple',
- code: string,
- codeVerifier: string,
- redirectUri: string,
- env: Env
-): Promise {
- const config = provider === 'google' ? GOOGLE_CONFIG : APPLE_CONFIG;
- const clientId = provider === 'google' ? env.GOOGLE_CLIENT_ID : env.APPLE_CLIENT_ID;
-
- // Build token request parameters
- const tokenParams = new URLSearchParams({
- client_id: clientId,
- code,
- redirect_uri: redirectUri,
- grant_type: 'authorization_code',
- code_verifier: codeVerifier,
- });
-
- // Add client_secret for both providers
- if (provider === 'google') {
- tokenParams.set('client_secret', env.GOOGLE_CLIENT_SECRET);
- } else if (provider === 'apple') {
- const clientSecret = await generateAppleClientSecret(env);
- tokenParams.set('client_secret', clientSecret);
- }
-
- logger.info('Token exchange request', {
- provider,
- redirect_uri: tokenParams.get('redirect_uri'),
- client_id: tokenParams.get('client_id'),
- token_endpoint: config.token_endpoint,
- });
-
- const tokenResponse = await fetch(config.token_endpoint, {
- method: 'POST',
- headers: { 'Content-Type': 'application/x-www-form-urlencoded' },
- body: tokenParams,
- });
-
- if (!tokenResponse.ok) {
- const errorText = await tokenResponse.text();
- logger.error('Token exchange failed', new Error(errorText), {
- provider,
- redirect_uri: tokenParams.get('redirect_uri'),
- error: errorText,
- });
- throw new Error('Token exchange failed');
- }
-
- const tokens = await tokenResponse.json() as TokenExchangeResult;
-
- if (!tokens.id_token) {
- logger.error('No ID token received', new Error('Missing id_token'), { provider });
- throw new Error('No ID token received');
- }
-
- logger.info('Token exchange successful', { provider });
- return tokens;
-}
diff --git a/worker/src/handlers/oidc/user-manager.ts b/worker/src/handlers/oidc/user-manager.ts
deleted file mode 100644
index 998696fb..00000000
--- a/worker/src/handlers/oidc/user-manager.ts
+++ /dev/null
@@ -1,178 +0,0 @@
-import type { Env } from '../../types';
-import { generateId } from '../../utils/crypto';
-import { createLogger } from '../../utils/logger';
-
-const logger = createLogger('OIDC:UserManager');
-
-export interface UserData {
- id: string;
- email: string;
- account_status: string;
- encryption_salt: string | null;
-}
-
-export interface UserLookupResult {
- success: true;
- user: UserData;
- isNewUser: boolean;
-}
-
-export interface UserLookupError {
- success: false;
- error: string;
- statusCode: number;
-}
-
-export type UserLookupOutcome = UserLookupResult | UserLookupError;
-
-/**
- * Find existing user or create a new one
- * Handles race conditions for concurrent user creation
- */
-export async function findOrCreateUser(
- provider: string,
- providerUserId: string,
- email: string,
- env: Env
-): Promise {
- const now = Date.now();
-
- // Look up existing user by provider
- const existingUser = await env.DB.prepare(
- 'SELECT id, email, account_status, encryption_salt FROM users WHERE auth_provider = ? AND provider_user_id = ?'
- )
- .bind(provider, providerUserId)
- .first();
-
- if (existingUser) {
- return handleExistingUser(existingUser, now, env);
- }
-
- // User doesn't exist, create new one
- return createNewUser(provider, providerUserId, email, now, env);
-}
-
-async function handleExistingUser(
- user: Record,
- now: number,
- env: Env
-): Promise {
- if (user.account_status !== 'active') {
- return {
- success: false,
- error: 'Account is suspended or deleted',
- statusCode: 403,
- };
- }
-
- // Update last login
- await env.DB.prepare('UPDATE users SET last_login_at = ?, updated_at = ? WHERE id = ?')
- .bind(now, now, user.id)
- .run();
-
- return {
- success: true,
- user: {
- id: user.id as string,
- email: user.email as string,
- account_status: user.account_status as string,
- encryption_salt: (user.encryption_salt as string) || null,
- },
- isNewUser: false,
- };
-}
-
-async function createNewUser(
- provider: string,
- providerUserId: string,
- email: string,
- now: number,
- env: Env
-): Promise {
- // Check if email is already registered with a different provider
- const emailCollision = await checkEmailCollision(email, env);
- if (emailCollision) {
- return emailCollision;
- }
-
- try {
- const userId = generateId();
- await env.DB.prepare(
- `INSERT INTO users (id, email, auth_provider, provider_user_id, created_at, updated_at, account_status)
- VALUES (?, ?, ?, ?, ?, ?, 'active')`
- )
- .bind(userId, email, provider, providerUserId, now, now)
- .run();
-
- return {
- success: true,
- user: {
- id: userId,
- email,
- account_status: 'active',
- encryption_salt: null,
- },
- isNewUser: true,
- };
- } catch (error: unknown) {
- return handleUserCreationError(error, email, provider, env);
- }
-}
-
-async function checkEmailCollision(
- email: string,
- env: Env
-): Promise