Skip to content
Merged
Show file tree
Hide file tree
Changes from 6 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions .changeset/seven-fireants-boil.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
---
'@powersync/node': minor
---

Pre-package all the PowerSync Rust extension binaries for all supported platforms in the NPM package. Install scripts are no longer required to download the PowerSync core.
7 changes: 4 additions & 3 deletions demos/example-electron-node/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -9,8 +9,9 @@ In particular:
queries. This worker is part of the `@powersync/node` package and wouldn't be copied into the resulting Electron
app by default. For this reason, this example has its own `src/main/worker.ts` loaded with `new URL('./worker.ts', import.meta.url)`.
2. In addition to the worker, PowerSync requires access to a SQLite extension providing sync functionality.
This file is also part of the `@powersync/node` package and called `powersync.dll`, `libpowersync.dylib` or
`libpowersync.so` depending on the operating system.
This file is also part of the `@powersync/node` package; the prebuilt release asset to load (for example
`powersync_x64.dll`, `libpowersync_x64.dylib` or `libpowersync_x64.so`) depends on the operating system and
architecture.
We use the `copy-webpack-plugin` package to make sure a copy of that file is available to the main process,
and load it in the custom `src/main/worker.ts`.
3. The `get()` and `getAll()` methods are exposed to the renderer process with an IPC channel.
Expand All @@ -21,7 +22,7 @@ To see it in action:
2. Copy `.env.local.template` to `.env.local`, and complete the environment variables. You can generate a [temporary development token](https://docs.powersync.com/usage/installation/authentication-setup/development-tokens), or leave blank to test with local-only data.
The example works with the schema from the [PowerSync + Supabase tutorial](https://docs.powersync.com/integration-guides/supabase-+-powersync#supabase-powersync).
3. `cd` into this directory. In this mono-repo, you'll have to run `./node_modules/.bin/electron-rebuild` once to make sure `@powersync/better-sqlite3` was compiled with Electron's toolchain.
3. Finally, run `pnpm start`.
4. Finally, run `pnpm start`.

Apart from the build setup, this example is purposefully kept simple.
To make sure PowerSync is working, you can run `await powersync.get('SELECT powersync_rs_version()');` in the DevTools
Expand Down
29 changes: 9 additions & 20 deletions demos/example-electron-node/config.ts
Original file line number Diff line number Diff line change
@@ -1,18 +1,17 @@
import OS from 'node:os';
import path from 'node:path';

import type { ForgeConfig } from '@electron-forge/shared-types';
import { MakerSquirrel } from '@electron-forge/maker-squirrel';
import { MakerZIP } from '@electron-forge/maker-zip';
import { MakerDeb } from '@electron-forge/maker-deb';
import { MakerRpm } from '@electron-forge/maker-rpm';
import { MakerSquirrel } from '@electron-forge/maker-squirrel';
import { MakerZIP } from '@electron-forge/maker-zip';
import { AutoUnpackNativesPlugin } from '@electron-forge/plugin-auto-unpack-natives';
import { WebpackPlugin } from '@electron-forge/plugin-webpack';
import { type Configuration, type ModuleOptions, type DefinePlugin } from 'webpack';
import type { ForgeConfig } from '@electron-forge/shared-types';
import { getPowerSyncExtensionFilename } from '@powersync/node/worker.js';
import type ICopyPlugin from 'copy-webpack-plugin';
import * as dotenv from 'dotenv';
import type IForkTsCheckerWebpackPlugin from 'fork-ts-checker-webpack-plugin';
import type ICopyPlugin from 'copy-webpack-plugin';

import { type Configuration, type DefinePlugin, type ModuleOptions } from 'webpack';
dotenv.config({ path: '.env.local' });

const ForkTsCheckerWebpackPlugin: typeof IForkTsCheckerWebpackPlugin = require('fork-ts-checker-webpack-plugin');
Expand Down Expand Up @@ -57,17 +56,7 @@ const defaultWebpackRules: () => Required<ModuleOptions>['rules'] = () => {
];
};

const platform = OS.platform();
let extensionPath: string;
if (platform === 'win32') {
extensionPath = 'powersync.dll';
} else if (platform === 'linux') {
extensionPath = 'libpowersync.so';
} else if (platform === 'darwin') {
extensionPath = 'libpowersync.dylib';
} else {
throw 'Unknown platform, PowerSync for Node.js currently supports Windows, Linux and macOS.';
}
let extensionFilename = getPowerSyncExtensionFilename();

const mainConfig: Configuration = {
/**
Expand All @@ -84,8 +73,8 @@ const mainConfig: Configuration = {
new CopyPlugin({
patterns: [
{
from: path.resolve(require.resolve('@powersync/node/package.json'), `../lib/${extensionPath}`),
to: path.join('powersync', extensionPath)
from: path.resolve(require.resolve('@powersync/node/package.json'), `../lib/${extensionFilename}`),
to: path.join('powersync', extensionFilename)
}
]
}),
Expand Down
19 changes: 4 additions & 15 deletions demos/example-electron-node/src/main/worker.ts
Original file line number Diff line number Diff line change
@@ -1,25 +1,14 @@
import * as path from 'node:path';
import OS from 'node:os';
import Database from 'better-sqlite3';
import * as path from 'node:path';

import { startPowerSyncWorker } from '@powersync/node/worker.js';
import { getPowerSyncExtensionFilename, startPowerSyncWorker } from '@powersync/node/worker.js';

function resolvePowerSyncCoreExtension() {
const platform = OS.platform();
let extensionPath: string;
if (platform === 'win32') {
extensionPath = 'powersync.dll';
} else if (platform === 'linux') {
extensionPath = 'libpowersync.so';
} else if (platform === 'darwin') {
extensionPath = 'libpowersync.dylib';
} else {
throw 'Unknown platform, PowerSync for Node.js currently supports Windows, Linux and macOS.';
}
const extensionFilename = getPowerSyncExtensionFilename();

// This example uses copy-webpack-plugin to copy the prebuilt library over. This ensures that it is
// available in packaged release builds.
let libraryPath = path.resolve(__dirname, 'powersync', extensionPath);
let libraryPath = path.resolve(__dirname, 'powersync', extensionFilename);

if (__dirname.indexOf('app.asar') != -1) {
// Our build configuration ensures the extension is always available outside of the archive too.
Expand Down
102 changes: 70 additions & 32 deletions packages/node/download_core.js
Original file line number Diff line number Diff line change
@@ -1,11 +1,8 @@
// TODO: Make this a pre-publish hook and just bundle everything
import { createHash } from 'node:crypto';
import * as OS from 'node:os';
import * as fs from 'node:fs/promises';
import * as path from 'node:path';
import { Readable } from 'node:stream';
import { finished } from 'node:stream/promises';
import { exit } from 'node:process';

// When changing this version, run node download_core.js update_hashes
const version = '0.4.6';
Expand All @@ -17,33 +14,17 @@ const versionHashes = {
'libpowersync_aarch64.dylib': 'bfb4f1ec207b298aff560f1825f8123d24316edaa27b6df3a17dd49466576b92'
};

const platform = OS.platform();
let destination;
let asset;

if (platform === 'win32') {
asset = 'powersync_x64.dll';
destination = 'powersync.dll';
} else if (platform === 'linux') {
asset = OS.arch() === 'x64' ? 'libpowersync_x64.so' : 'libpowersync_aarch64.so';
destination = 'libpowersync.so';
} else if (platform === 'darwin') {
asset = OS.arch() === 'x64' ? 'libpowersync_x64.dylib' : 'libpowersync_aarch64.dylib';
destination = 'libpowersync.dylib';
}

const expectedHash = versionHashes[asset];
const destinationPath = path.resolve('lib', destination);
const assets = Object.keys(versionHashes);

// Computes the hex-encoded SHA-256 digest of a readable stream.
// Resolves with the digest string; resolves with undefined if the
// hash stream yields no output.
const hashStream = async (input) => {
  const digest = input.pipe(createHash('sha256')).setEncoding('hex');
  for await (const hex of digest) {
    return hex;
  }
};

const hashLocal = async () => {
const hashLocal = async (filePath) => {
try {
const handle = await fs.open(destinationPath, 'r');
const handle = await fs.open(filePath, 'r');
const input = handle.createReadStream();

const result = await hashStream(input);
Expand All @@ -54,31 +35,88 @@ const hashLocal = async () => {
}
};

const download = async () => {
if ((await hashLocal()) == expectedHash) {
console.debug('Local copy is up-to-date, skipping download');
exit(0);
// Downloads a single prebuilt core extension binary into lib/, skipping the
// network round-trip when the local copy already matches the pinned hash.
// Throws when the HTTP request fails or the downloaded file's SHA-256 digest
// does not match the hash pinned in `versionHashes` for this `version`.
const downloadAsset = async (asset) => {
  const destinationPath = path.resolve('lib', asset);
  const expectedHash = versionHashes[asset];

  // Check if file exists and has correct hash
  const currentHash = await hashLocal(destinationPath);
  if (currentHash == expectedHash) {
    console.debug(`${asset} is up-to-date, skipping download`);
    return;
  }

  const url = `https://github.com/powersync-ja/powersync-sqlite-core/releases/download/v${version}/${asset}`;
  console.log(`Downloading ${url}`);
  const response = await fetch(url);
  if (response.status != 200) {
    throw `Could not download ${url}`;
  }

  // Close the handle even if piping the response body fails midway, so a
  // failed download doesn't leak the file descriptor.
  const file = await fs.open(destinationPath, 'w');
  try {
    await finished(Readable.fromWeb(response.body).pipe(file.createWriteStream()));
  } finally {
    await file.close();
  }

  // Re-hash the file on disk to verify the download was not corrupted.
  const hashAfterDownloading = await hashLocal(destinationPath);
  if (hashAfterDownloading != expectedHash) {
    throw `Unexpected hash after downloading ${asset} (got ${hashAfterDownloading}, expected ${expectedHash})`;
  }
  console.log(`Successfully downloaded ${asset}`);
};

// Describes the on-disk state of one prebuilt binary without downloading:
// the asset name, its resolved path under lib/, the pinned hash, the hash
// of the local copy (if any), plus existence and validity flags.
const checkAsset = async (asset) => {
  const localPath = path.resolve('lib', asset);
  const pinnedHash = versionHashes[asset];
  const localHash = await hashLocal(localPath);

  return {
    asset,
    destinationPath: localPath,
    expectedHash: pinnedHash,
    currentHash: localHash,
    exists: localHash !== null,
    isValid: localHash == pinnedHash
  };
};

// Entry point: ensures lib/ exists, audits every pinned asset, prints a
// summary, and downloads (in parallel) only the assets that are missing or
// whose on-disk hash does not match the pinned version.
const download = async () => {
  // mkdir with recursive:true is a no-op when the directory already exists,
  // avoiding the racy access-then-mkdir pattern.
  await fs.mkdir('lib', { recursive: true });

  // First check all assets so we can report a full summary before downloading.
  console.log('Checking existing files...');
  const checks = await Promise.all(assets.map((asset) => checkAsset(asset)));

  const toDownload = checks.filter((check) => !check.isValid);
  const upToDate = checks.filter((check) => check.isValid);

  // Print summary
  if (upToDate.length > 0) {
    console.log('\nUp-to-date files:');
    for (const check of upToDate) {
      console.log(`  ✓ ${check.asset}`);
    }
  }

  if (toDownload.length > 0) {
    console.log('\nFiles to download:');
    for (const check of toDownload) {
      console.log(`  • ${check.asset} (${check.exists ? 'hash mismatch' : 'missing'})`);
    }

    console.log('\nStarting downloads...');
    await Promise.all(toDownload.map((check) => downloadAsset(check.asset)));

    console.log('\nAll downloads completed successfully!');
  } else {
    console.log('\nAll files are up-to-date, nothing to download.');
  }
};

Expand Down
8 changes: 4 additions & 4 deletions packages/node/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -13,12 +13,12 @@
"download_core.js"
],
"scripts": {
"install": "node download_core.js",
"build": "tsc -b && rollup --config",
"build:prod": "tsc -b --sourceMap false && rollup --config",
"prepare:core": "node download_core.js",
"build": " pnpm prepare:core && tsc -b && rollup --config",
"build:prod": "pnpm prepare:core && tsc -b --sourceMap false && rollup --config",
"clean": "rm -rf lib dist tsconfig.tsbuildinfo",
"watch": "tsc -b -w",
"test": "vitest",
"test": " pnpm prepare:core && vitest",
"test:exports": "attw --pack . --ignore-rules no-resolution"
},
"type": "module",
Expand Down
6 changes: 3 additions & 3 deletions packages/node/src/db/NodeSqliteWorker.ts
Original file line number Diff line number Diff line change
@@ -1,9 +1,9 @@
import { threadId } from 'node:worker_threads';
import type { DatabaseSync } from 'node:sqlite';
import { threadId } from 'node:worker_threads';

import { dynamicImport } from '../utils/modules.js';
import { AsyncDatabase, AsyncDatabaseOpenOptions } from './AsyncDatabase.js';
import { PowerSyncWorkerOptions } from './SqliteWorker.js';
import { dynamicImport } from '../utils/modules.js';

class BlockingNodeDatabase implements AsyncDatabase {
private readonly db: DatabaseSync;
Expand Down Expand Up @@ -57,7 +57,7 @@ export async function openDatabase(worker: PowerSyncWorkerOptions, options: Asyn
const { DatabaseSync } = await dynamicImport('node:sqlite');

const baseDB = new DatabaseSync(options.path, { allowExtension: true });
baseDB.loadExtension(worker.extensionPath());
baseDB.loadExtension(worker.extensionPath(), 'sqlite3_powersync_init');

return new BlockingNodeDatabase(baseDB, options.isWriter);
}
66 changes: 46 additions & 20 deletions packages/node/src/db/SqliteWorker.ts
Original file line number Diff line number Diff line change
@@ -1,13 +1,12 @@
import * as path from 'node:path';
import * as Comlink from 'comlink';
import { parentPort } from 'node:worker_threads';
import OS from 'node:os';
import * as path from 'node:path';
import url from 'node:url';
import { parentPort } from 'node:worker_threads';
import { dynamicImport, isBundledToCommonJs } from '../utils/modules.js';
import { AsyncDatabase, AsyncDatabaseOpener, AsyncDatabaseOpenOptions } from './AsyncDatabase.js';
import { openDatabase as openBetterSqliteDatabase } from './BetterSqliteWorker.js';
import { openDatabase as openNodeDatabase } from './NodeSqliteWorker.js';
import { AsyncDatabase, AsyncDatabaseOpener, AsyncDatabaseOpenOptions } from './AsyncDatabase.js';
import { isBundledToCommonJs } from '../utils/modules.js';
import { dynamicImport } from '../utils/modules.js';

export interface PowerSyncWorkerOptions {
/**
Expand All @@ -23,28 +22,55 @@ export interface PowerSyncWorkerOptions {
loadBetterSqlite3: () => Promise<any>;
}

/**
* @returns The relevant PowerSync extension binary filename for the current platform and architecture
*/
/**
 * Resolves the PowerSync core extension binary that matches the current
 * platform and architecture.
 *
 * @returns The relevant PowerSync extension binary filename for the current platform and architecture
 * @throws {Error} When the platform, or the architecture on that platform, is unsupported.
 */
export function getPowerSyncExtensionFilename() {
  const platform = OS.platform();
  const arch = OS.arch();
  let extensionFile: string;

  // Strict equality throughout; each platform enumerates only the
  // architectures for which a prebuilt binary is shipped.
  if (platform === 'win32') {
    if (arch === 'x64') {
      extensionFile = 'powersync_x64.dll';
    } else {
      throw new Error('Windows platform only supports x64 architecture.');
    }
  } else if (platform === 'linux') {
    if (arch === 'x64') {
      extensionFile = 'libpowersync_x64.so';
    } else if (arch === 'arm64') {
      extensionFile = 'libpowersync_aarch64.so';
    } else {
      throw new Error('Linux platform only supports x64 and arm64 architectures.');
    }
  } else if (platform === 'darwin') {
    if (arch === 'x64') {
      extensionFile = 'libpowersync_x64.dylib';
    } else if (arch === 'arm64') {
      extensionFile = 'libpowersync_aarch64.dylib';
    } else {
      throw new Error('macOS platform only supports x64 and arm64 architectures.');
    }
  } else {
    throw new Error(
      `Unknown platform: ${platform}, PowerSync for Node.js currently supports Windows, Linux and macOS.`
    );
  }

  return extensionFile;
}

export function startPowerSyncWorker(options?: Partial<PowerSyncWorkerOptions>) {
const resolvedOptions: PowerSyncWorkerOptions = {
extensionPath() {
const isCommonJsModule = isBundledToCommonJs;

const platform = OS.platform();
let extensionPath: string;
if (platform === 'win32') {
extensionPath = 'powersync.dll';
} else if (platform === 'linux') {
extensionPath = 'libpowersync.so';
} else if (platform === 'darwin') {
extensionPath = 'libpowersync.dylib';
} else {
throw 'Unknown platform, PowerSync for Node.js currently supports Windows, Linux and macOS.';
}

const extensionFilename = getPowerSyncExtensionFilename();
let resolved: string;
if (isCommonJsModule) {
resolved = path.resolve(__dirname, '../lib/', extensionPath);
resolved = path.resolve(__dirname, '../lib/', extensionFilename);
} else {
resolved = url.fileURLToPath(new URL(`../${extensionPath}`, import.meta.url));
resolved = url.fileURLToPath(new URL(`../${extensionFilename}`, import.meta.url));
}

return resolved;
Expand Down
Loading
Loading