Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
25 changes: 25 additions & 0 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
# Continuous integration: typecheck + unit tests across supported Node.js lines.
name: CI

on:
  push:
    branches: [main, master, 'liz/**']
  pull_request:
    branches: [main, master]

jobs:
  test:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        # Currently maintained Node.js release lines.
        node-version: [18, 20, 22]
    steps:
      - uses: actions/checkout@v4
      # pnpm must be installed before setup-node so its store can be cached.
      - uses: pnpm/action-setup@v3
        with:
          version: 9
      - uses: actions/setup-node@v4
        with:
          node-version: ${{ matrix.node-version }}
          # Cache the pnpm store between runs to speed up installs.
          cache: 'pnpm'
      # --frozen-lockfile makes CI installs reproducible against pnpm-lock.yaml
      # and fails fast if the lockfile is out of date.
      - run: pnpm install --frozen-lockfile
      - run: pnpm run typecheck
      - run: pnpm test -- run
15 changes: 9 additions & 6 deletions examples/blob-crud/README.md
Original file line number Diff line number Diff line change
@@ -1,20 +1,23 @@
# Dexie Cloud SDK Example
# Dexie Cloud SDK — Blob CRUD Example

A Node.js example showing how to use the Dexie Cloud SDK for server-side data operations with blob support.
Server-side data operations with blob support using client credentials.

## What It Shows

- Authenticating with OTP
- Authenticating with client credentials (`clientId`/`clientSecret`)
- CRUD operations via REST API
- Uploading and downloading blobs
- Auto vs lazy blob handling modes
- Auto blob handling mode

## Run

```bash
npm install
# Set your database URL:

# Credentials from your dexie-cloud.key file:
export DEXIE_CLOUD_DB_URL=https://xxxxxxxx.dexie.cloud
export DEXIE_CLOUD_EMAIL=your@email.com
export DEXIE_CLOUD_CLIENT_ID=your-client-id
export DEXIE_CLOUD_CLIENT_SECRET=your-client-secret

npm start
```
38 changes: 15 additions & 23 deletions examples/blob-crud/src/main.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,51 +2,45 @@
* Dexie Cloud SDK — Blob CRUD Example
*
* Demonstrates server-side data operations with blob offloading.
* Uses client credentials (clientId/clientSecret from dexie-cloud.key).
*/

import { DexieCloudClient } from 'dexie-cloud-sdk';
import * as fs from 'fs';
import * as readline from 'readline/promises';

// Read credentials from dexie-cloud.key or environment
const DB_URL = process.env.DEXIE_CLOUD_DB_URL;
const EMAIL = process.env.DEXIE_CLOUD_EMAIL;
const CLIENT_ID = process.env.DEXIE_CLOUD_CLIENT_ID;
const CLIENT_SECRET = process.env.DEXIE_CLOUD_CLIENT_SECRET;

if (!DB_URL || !EMAIL) {
console.error('Set DEXIE_CLOUD_DB_URL and DEXIE_CLOUD_EMAIL environment variables');
if (!DB_URL || !CLIENT_ID || !CLIENT_SECRET) {
console.error(`Set environment variables:
DEXIE_CLOUD_DB_URL — Your database URL (from dexie-cloud.key)
DEXIE_CLOUD_CLIENT_ID — Client ID (from dexie-cloud.key)
DEXIE_CLOUD_CLIENT_SECRET — Client secret (from dexie-cloud.key)

Or source your dexie-cloud.key file directly.`);
process.exit(1);
}

async function main() {
const rl = readline.createInterface({ input: process.stdin, output: process.stdout });

// --- Initialize SDK ---

const client = new DexieCloudClient({
serviceUrl: 'https://dexie.cloud',
dbUrl: DB_URL,
clientId: CLIENT_ID,
clientSecret: CLIENT_SECRET,
blobHandling: 'auto' // Binary data handled transparently
});

console.log('🔑 Authenticating...');

// --- Authenticate ---

const { accessToken } = await client.auth.authenticateWithOTP(
EMAIL,
async () => {
const otp = await rl.question('Enter OTP from email: ');
return otp.trim();
},
['ACCESS_DB']
);

console.log('🔑 Authenticating with client credentials...');
const accessToken = await client.auth.getToken(['ACCESS_DB', 'GLOBAL_WRITE']);
console.log('✅ Authenticated!\n');

// --- Create item with binary data ---

console.log('📝 Creating item with binary data...');

// Create a sample image (or read from file)
const imageData = new Uint8Array(1024);
crypto.getRandomValues(imageData); // Random bytes for demo

Expand Down Expand Up @@ -92,8 +86,6 @@ async function main() {
console.log('🗑️ Cleaning up...');
await client.data.delete('files', item.id, accessToken);
console.log('✅ Done!\n');

rl.close();
}

main().catch(err => {
Expand Down
124 changes: 93 additions & 31 deletions src/blob.ts
Original file line number Diff line number Diff line change
Expand Up @@ -9,26 +9,55 @@ import type { HttpAdapter } from './adapters.js';
import type { BlobHandling, BlobRef } from './types.js';
import { DexieCloudError } from './types.js';

/**
 * Minimum byte size (4 KiB) for offloading a binary to blob storage.
 * Binaries smaller than this threshold are kept inline (as base64).
 * Must match the server-side threshold.
 */
export const BLOB_THRESHOLD = 4096;

/**
 * Maximum number of concurrent blob downloads in _walkForRead
 * (concurrency cap used by _parallelMap).
 * Mirrors the client-side MAX_CONCURRENT pattern.
 */
const MAX_CONCURRENT_DOWNLOADS = 6;

/** Generate a unique blob ID */
function generateBlobId(): string {
if (typeof crypto !== 'undefined' && typeof crypto.randomUUID === 'function') {
return crypto.randomUUID().replace(/-/g, '');
}
// Fallback: timestamp + random hex
return Date.now().toString(16) + Math.random().toString(16).slice(2);
// Fallback: use getRandomValues for strong entropy
if (typeof crypto !== 'undefined' && typeof crypto.getRandomValues === 'function') {
const bytes = new Uint8Array(16);
crypto.getRandomValues(bytes);
return Array.from(bytes)
.map((b) => b.toString(16).padStart(2, '0'))
.join('');
}
// Last resort (non-browser, non-Node env): still better than Math.random alone
const ts = Date.now().toString(16);
const rand = Math.floor(Math.random() * 0xffffffff).toString(16).padStart(8, '0');
return ts + rand;
}

/** Convert Blob/ArrayBuffer/TypedArray to Uint8Array */
async function toUint8Array(data: Uint8Array | Blob | ArrayBuffer): Promise<Uint8Array> {
/**
* Convert Blob/ArrayBuffer/TypedArray/DataView to Uint8Array.
* Accepts any ArrayBufferView (TypedArrays + DataView) as well as
* Uint8Array, ArrayBuffer, and Blob.
*/
async function toUint8Array(
data: Uint8Array | Blob | ArrayBuffer | ArrayBufferView
): Promise<Uint8Array> {
if (data instanceof Uint8Array) return data;
if (data instanceof ArrayBuffer) return new Uint8Array(data);
if (typeof Blob !== 'undefined' && data instanceof Blob) {
const buf = await data.arrayBuffer();
return new Uint8Array(buf);
}
// TypedArray (e.g. Int8Array, etc.)
// Handles all TypedArrays (Int8Array, Float32Array, etc.) and DataView
if (ArrayBuffer.isView(data)) {
return new Uint8Array((data as ArrayBufferView).buffer);
return new Uint8Array(data.buffer, data.byteOffset, data.byteLength);
}
throw new TypeError('Unsupported data type for blob upload');
}
Expand Down Expand Up @@ -92,7 +121,7 @@ export class BlobManager {
* Returns the blob ref (e.g. "1:abc123...").
*/
async upload(
data: Uint8Array | Blob | ArrayBuffer,
data: Uint8Array | Blob | ArrayBuffer | ArrayBufferView,
token: string,
contentType = 'application/octet-stream'
): Promise<string> {
Expand Down Expand Up @@ -125,14 +154,20 @@ export class BlobManager {
const parsed = JSON.parse(text);
if (parsed?.ref) return parsed.ref as string;
} catch {
// ignore parse errors, construct ref ourselves
// ignore parse errors, fall through
}
// If server returned "version:blobId" directly
if (text.includes(':')) return text.trim();
}

// Fallback: assume version 1
return `1:${blobId}`;
// Server response was unparseable — we cannot safely construct a ref
// because we don't know the server-assigned version.
throw new DexieCloudError(
`Blob upload succeeded (HTTP ${response.status}) but server returned no parseable ref. ` +
`Cannot construct a safe blob reference without the server-assigned version.`,
response.status,
text
);
}

/**
Expand Down Expand Up @@ -160,8 +195,9 @@ export class BlobManager {
}

/**
* Process an object before uploading: find inline blobs, upload them,
* replace with BlobRefs. Only active in 'auto' mode.
* Process an object before uploading: find inline blobs large enough to
* offload (≥ BLOB_THRESHOLD bytes), upload them, replace with BlobRefs.
* Small binaries are left inline. Only active in 'auto' mode.
*/
async processForUpload(obj: any, token: string): Promise<any> {
if (this.mode !== 'auto') return obj;
Expand All @@ -179,8 +215,13 @@ export class BlobManager {

private async _walkForUpload(val: any, token: string): Promise<any> {
if (isInlineBlob(val)) {
// Upload inline blob, replace with BlobRef
const bytes = base64ToUint8Array(val.v);
// Only offload to blob storage if the binary meets the size threshold.
// Small binaries are cheaper to keep inline than to round-trip through
// the blob endpoint.
if (bytes.length < BLOB_THRESHOLD) {
return val; // keep as-is
}
const contentType = val.ct ?? 'application/octet-stream';
const ref = await this.upload(bytes, token, contentType);
const blobRef: BlobRef = {
Expand All @@ -193,27 +234,21 @@ export class BlobManager {
}

if (Array.isArray(val)) {
const results: any[] = [];
for (const item of val) {
results.push(await this._walkForUpload(item, token));
}
return results;
return Promise.all(val.map((item) => this._walkForUpload(item, token)));
}

if (val !== null && typeof val === 'object') {
const result: Record<string, any> = {};
for (const [k, v] of Object.entries(val)) {
result[k] = await this._walkForUpload(v, token);
}
return result;
const entries = await Promise.all(
Object.entries(val).map(async ([k, v]) => [k, await this._walkForUpload(v, token)] as const)
);
return Object.fromEntries(entries);
}

return val;
}

private async _walkForRead(val: any, token: string): Promise<any> {
if (isBlobRef(val)) {
// Download and replace with inline
const { data, contentType } = await this.download(val.ref, token);
return {
_bt: val._bt,
Expand All @@ -223,21 +258,48 @@ export class BlobManager {
}

if (Array.isArray(val)) {
const results: any[] = [];
for (const item of val) {
results.push(await this._walkForRead(item, token));
}
return results;
// Download up to MAX_CONCURRENT_DOWNLOADS blobs in parallel
return this._parallelMap(val, (item) => this._walkForRead(item, token));
}

if (val !== null && typeof val === 'object') {
const keys = Object.keys(val);
const resolvedValues = await this._parallelMap(
keys,
(k) => this._walkForRead(val[k], token)
);
const result: Record<string, any> = {};
for (const [k, v] of Object.entries(val)) {
result[k] = await this._walkForRead(v, token);
for (let i = 0; i < keys.length; i++) {
result[keys[i]!] = resolvedValues[i];
}
return result;
}

return val;
}

/**
 * Map `fn` over `items` with a bounded number of in-flight promises
 * (at most MAX_CONCURRENT_DOWNLOADS). Results are returned in input order,
 * like Promise.all, regardless of completion order.
 */
private async _parallelMap<T, R>(
  items: T[],
  fn: (item: T) => Promise<R>
): Promise<R[]> {
  const out: R[] = new Array(items.length);
  let cursor = 0;

  // Each drain loop claims the next unprocessed index until none remain.
  const drain = async (): Promise<void> => {
    for (let i = cursor++; i < items.length; i = cursor++) {
      out[i] = await fn(items[i]!);
    }
  };

  const poolSize = Math.min(MAX_CONCURRENT_DOWNLOADS, items.length);
  await Promise.all(Array.from({ length: poolSize }, () => drain()));
  return out;
}
}
3 changes: 2 additions & 1 deletion src/client.ts
Original file line number Diff line number Diff line change
Expand Up @@ -43,8 +43,9 @@ export class DexieCloudClient {

// Use dbUrl if provided, otherwise fall back to serviceUrl
const dbUrl = fullConfig.dbUrl ?? fullConfig.serviceUrl;
this.data = new DataManager(dbUrl, this.http);
this.blobs = new BlobManager(dbUrl, this.http, fullConfig.blobHandling ?? 'auto');
// Pass BlobManager to DataManager so create/get/list auto-process blobs
this.data = new DataManager(dbUrl, this.http, this.blobs);
}

/**
Expand Down
Loading