Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
29 changes: 29 additions & 0 deletions .changeset/seven-fireants-boil.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
---
'@powersync/node': minor
---

Pre-package all the PowerSync Rust extension binaries for all supported platforms and architectures in the NPM package `lib` folder. Install scripts are no longer required to download the PowerSync core.

The binary files relevant to a specific architecture now have updated filenames. Custom code which previously referenced binary filenames requires updating. A helper function is available to automatically provide the correct filename.

```diff
+ import { getPowerSyncExtensionFilename } from '@powersync/node/worker.js';

function resolvePowerSyncCoreExtension() {
- const platform = OS.platform();
- let extensionPath: string;
- if (platform === 'win32') {
- extensionPath = 'powersync.dll';
- } else if (platform === 'linux') {
- extensionPath = 'libpowersync.so';
- } else if (platform === 'darwin') {
- extensionPath = 'libpowersync.dylib';
- } else {
- throw 'Unknown platform, PowerSync for Node.js currently supports Windows, Linux and macOS.';
- }
+ const extensionPath = getPowerSyncExtensionFilename();

// This example uses copy-webpack-plugin to copy the prebuilt library over. This ensures that it is
// available in packaged release builds.
let libraryPath = path.resolve(__dirname, 'powersync', extensionPath);
```
7 changes: 4 additions & 3 deletions demos/example-electron-node/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -9,8 +9,9 @@ In particular:
queries. This worker is part of the `@powersync/node` package and wouldn't be copied into the resulting Electron
app by default. For this reason, this example has its own `src/main/worker.ts` loaded with `new URL('./worker.ts', import.meta.url)`.
2. In addition to the worker, PowerSync requires access to a SQLite extension providing sync functionality.
This file is also part of the `@powersync/node` package and is the prebuilt release asset (for example
`powersync_x64.dll`, `libpowersync_x64.dylib` or `libpowersync_x64.so`) depending on the operating system and
architecture.
We use the `copy-webpack-plugin` package to make sure a copy of that file is available to the main process,
and load it in the custom `src/main/worker.ts`.
3. The `get()` and `getAll()` methods are exposed to the renderer process with an IPC channel.
Expand All @@ -21,7 +22,7 @@ To see it in action:
2. Copy `.env.local.template` to `.env.local`, and complete the environment variables. You can generate a [temporary development token](https://docs.powersync.com/usage/installation/authentication-setup/development-tokens), or leave blank to test with local-only data.
The example works with the schema from the [PowerSync + Supabase tutorial](https://docs.powersync.com/integration-guides/supabase-+-powersync#supabase-powersync).
3. `cd` into this directory. In this mono-repo, you'll have to run `./node_modules/.bin/electron-rebuild` once to make sure `@powersync/better-sqlite3` was compiled with Electron's toolchain.
4. Finally, run `pnpm start`.

Apart from the build setup, this example is purposefully kept simple.
To make sure PowerSync is working, you can run `await powersync.get('SELECT powersync_rs_version()');` in the DevTools
Expand Down
29 changes: 9 additions & 20 deletions demos/example-electron-node/config.ts
Original file line number Diff line number Diff line change
@@ -1,18 +1,17 @@
import OS from 'node:os';
import path from 'node:path';

import type { ForgeConfig } from '@electron-forge/shared-types';
import { MakerSquirrel } from '@electron-forge/maker-squirrel';
import { MakerZIP } from '@electron-forge/maker-zip';
import { MakerDeb } from '@electron-forge/maker-deb';
import { MakerRpm } from '@electron-forge/maker-rpm';
import { MakerSquirrel } from '@electron-forge/maker-squirrel';
import { MakerZIP } from '@electron-forge/maker-zip';
import { AutoUnpackNativesPlugin } from '@electron-forge/plugin-auto-unpack-natives';
import { WebpackPlugin } from '@electron-forge/plugin-webpack';
import { type Configuration, type ModuleOptions, type DefinePlugin } from 'webpack';
import type { ForgeConfig } from '@electron-forge/shared-types';
import { getPowerSyncExtensionFilename } from '@powersync/node/worker.js';
import type ICopyPlugin from 'copy-webpack-plugin';
import * as dotenv from 'dotenv';
import type IForkTsCheckerWebpackPlugin from 'fork-ts-checker-webpack-plugin';
import type ICopyPlugin from 'copy-webpack-plugin';

import { type Configuration, type DefinePlugin, type ModuleOptions } from 'webpack';
dotenv.config({ path: '.env.local' });

const ForkTsCheckerWebpackPlugin: typeof IForkTsCheckerWebpackPlugin = require('fork-ts-checker-webpack-plugin');
Expand Down Expand Up @@ -57,17 +56,7 @@ const defaultWebpackRules: () => Required<ModuleOptions>['rules'] = () => {
];
};

const platform = OS.platform();
let extensionPath: string;
if (platform === 'win32') {
extensionPath = 'powersync.dll';
} else if (platform === 'linux') {
extensionPath = 'libpowersync.so';
} else if (platform === 'darwin') {
extensionPath = 'libpowersync.dylib';
} else {
throw 'Unknown platform, PowerSync for Node.js currently supports Windows, Linux and macOS.';
}
let extensionFilename = getPowerSyncExtensionFilename();

const mainConfig: Configuration = {
/**
Expand All @@ -84,8 +73,8 @@ const mainConfig: Configuration = {
new CopyPlugin({
patterns: [
{
from: path.resolve(require.resolve('@powersync/node/package.json'), `../lib/${extensionPath}`),
to: path.join('powersync', extensionPath)
from: path.resolve(require.resolve('@powersync/node/package.json'), `../lib/${extensionFilename}`),
to: path.join('powersync', extensionFilename)
}
]
}),
Expand Down
19 changes: 4 additions & 15 deletions demos/example-electron-node/src/main/worker.ts
Original file line number Diff line number Diff line change
@@ -1,25 +1,14 @@
import * as path from 'node:path';
import OS from 'node:os';
import Database from 'better-sqlite3';
import * as path from 'node:path';

import { startPowerSyncWorker } from '@powersync/node/worker.js';
import { getPowerSyncExtensionFilename, startPowerSyncWorker } from '@powersync/node/worker.js';

function resolvePowerSyncCoreExtension() {
const platform = OS.platform();
let extensionPath: string;
if (platform === 'win32') {
extensionPath = 'powersync.dll';
} else if (platform === 'linux') {
extensionPath = 'libpowersync.so';
} else if (platform === 'darwin') {
extensionPath = 'libpowersync.dylib';
} else {
throw 'Unknown platform, PowerSync for Node.js currently supports Windows, Linux and macOS.';
}
const extensionFilename = getPowerSyncExtensionFilename();

// This example uses copy-webpack-plugin to copy the prebuilt library over. This ensures that it is
// available in packaged release builds.
let libraryPath = path.resolve(__dirname, 'powersync', extensionPath);
let libraryPath = path.resolve(__dirname, 'powersync', extensionFilename);

if (__dirname.indexOf('app.asar') != -1) {
// Our build configuration ensures the extension is always available outside of the archive too.
Expand Down
107 changes: 75 additions & 32 deletions packages/node/download_core.js
Original file line number Diff line number Diff line change
@@ -1,49 +1,35 @@
// TODO: Make this a pre-publish hook and just bundle everything
import { createHash } from 'node:crypto';
import * as OS from 'node:os';
import * as fs from 'node:fs/promises';
import * as path from 'node:path';
import { Readable } from 'node:stream';
import { finished } from 'node:stream/promises';
import { exit } from 'node:process';

// When changing this version, run node download_core.js update_hashes
const version = '0.4.6';

// SHA-256 hashes of every prebuilt core-extension binary published for this
// release, keyed by the release-asset filename. Used both to skip files that
// are already up-to-date and to verify the integrity of fresh downloads.
const versionHashes = {
  'powersync_x64.dll': '5efaa9ad4975094912a36843cb7b503376cacd233d21ae0956f0f4b42dcb457b',
  'powersync_x86.dll': '4151ba8aa6f024b50b7aebe52ba59f2c5be54e3fed26f7f3f48e1127dcda027d',
  'powersync_aarch64.dll': '3abe46074432593ff5cfc2098b186c592f020c5cfa81285f8e49962732a94bf5',
  'libpowersync_x86.so': '1321a7de13fda0b2de7d2bc231a68cb5691f84010f3858e5cf02e47f88ba6f4a',
  'libpowersync_x64.so': 'e9d78620d69d3cf7d57353891fe0bf85b79d326b42c4669b9500b9e610388f76',
  'libpowersync_aarch64.so': '0d84c0dc0134fc89af65724d11e2c45e3c15569c575ecda52d0ec2fa2aeec495',
  'libpowersync_armv7.so': 'c7887181ce9c524b68a7ac284ab447b8584511c87527ca26186e5874bf9ba3d6',
  'libpowersync_riscv64gc.so': 'a89f3a71f22f707707d97517e9310e42e2a57dc5343cee08d09002a8cea048d5',
  'libpowersync_x64.dylib': '9b484eaf361451f7758ca6ad53190a73563be930a8f8a39ccefd29390046ef6c',
  'libpowersync_aarch64.dylib': 'bfb4f1ec207b298aff560f1825f8123d24316edaa27b6df3a17dd49466576b92'
};

// All assets are fetched for every platform/architecture; the per-platform
// selection happens at load time (getPowerSyncExtensionFilename), not here.
const assets = Object.keys(versionHashes);

// Computes the SHA-256 digest of a readable stream and resolves with it as a
// lowercase hex string (or undefined if the stream produces no digest chunk).
const hashStream = async (input) => {
  const hexDigest = input.pipe(createHash('sha256')).setEncoding('hex');
  for await (const hex of hexDigest) {
    // The hash stream emits a single hex chunk once the input ends.
    return hex;
  }
};

const hashLocal = async () => {
const hashLocal = async (filePath) => {
try {
const handle = await fs.open(destinationPath, 'r');
const handle = await fs.open(filePath, 'r');
const input = handle.createReadStream();

const result = await hashStream(input);
Expand All @@ -54,31 +40,88 @@ const hashLocal = async () => {
}
};

const download = async () => {
if ((await hashLocal()) == expectedHash) {
console.debug('Local copy is up-to-date, skipping download');
exit(0);
// Downloads a single prebuilt core-extension binary into lib/ unless a local
// copy already matches the expected SHA-256 hash. Throws (a string, matching
// this script's existing error style) on HTTP failure or hash mismatch.
const downloadAsset = async (asset) => {
  const destinationPath = path.resolve('lib', asset);
  const expectedHash = versionHashes[asset];

  // Check if file exists and has correct hash; hashLocal is expected to
  // return a non-matching value (e.g. null) when the file is absent.
  const currentHash = await hashLocal(destinationPath);
  if (currentHash == expectedHash) {
    console.debug(`${asset} is up-to-date, skipping download`);
    return;
  }

  const url = `https://github.com/powersync-ja/powersync-sqlite-core/releases/download/v${version}/${asset}`;
  console.log(`Downloading ${url}`);
  const response = await fetch(url);
  if (response.status != 200) {
    throw `Could not download ${url}`;
  }

  // Stream the response body straight to disk to avoid buffering the whole
  // binary in memory.
  const file = await fs.open(destinationPath, 'w');
  await finished(Readable.fromWeb(response.body).pipe(file.createWriteStream()));
  await file.close();

  // Re-hash what actually landed on disk to verify download integrity.
  const hashAfterDownloading = await hashLocal(destinationPath);
  if (hashAfterDownloading != expectedHash) {
    throw `Unexpected hash after downloading ${asset} (got ${hashAfterDownloading}, expected ${expectedHash})`;
  }
  console.log(`Successfully downloaded ${asset}`);
};

// Inspects the local copy of one release asset and reports its state:
// target path, expected hash, actual hash, whether the file exists, and
// whether it already matches the expected hash.
const checkAsset = async (asset) => {
  const wantedHash = versionHashes[asset];
  const localPath = path.resolve('lib', asset);
  const actualHash = await hashLocal(localPath);

  return {
    asset,
    destinationPath: localPath,
    expectedHash: wantedHash,
    currentHash: actualHash,
    exists: actualHash !== null,
    isValid: actualHash == wantedHash
  };
};

// Ensures every prebuilt core-extension binary listed in versionHashes is
// present in lib/ with its expected hash: checks all assets first, prints a
// summary, then downloads only the missing or stale ones in parallel.
const download = async () => {
  // Make sure the output directory exists before hashing/writing into it.
  try {
    await fs.access('lib');
  } catch {
    await fs.mkdir('lib');
  }

  // First check all assets
  console.log('Checking existing files...');
  // checkAsset takes a single argument; the stray duplicate argument was a bug.
  const checks = await Promise.all(assets.map((asset) => checkAsset(asset)));

  const toDownload = checks.filter((check) => !check.isValid);
  const upToDate = checks.filter((check) => check.isValid);

  // Print summary
  if (upToDate.length > 0) {
    console.log('\nUp-to-date files:');
    for (const check of upToDate) {
      console.log(`  ✓ ${check.asset}`);
    }
  }

  if (toDownload.length > 0) {
    console.log('\nFiles to download:');
    for (const check of toDownload) {
      if (!check.exists) {
        console.log(`  • ${check.asset} (missing)`);
      } else {
        console.log(`  • ${check.asset} (hash mismatch)`);
      }
    }

    console.log('\nStarting downloads...');
    // downloadAsset re-verifies each file's hash after writing it.
    await Promise.all(toDownload.map((check) => downloadAsset(check.asset)));

    console.log('\nAll downloads completed successfully!');
  } else {
    console.log('\nAll files are up-to-date, nothing to download.');
  }
};

Expand Down
8 changes: 4 additions & 4 deletions packages/node/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -13,12 +13,12 @@
"download_core.js"
],
"scripts": {
"install": "node download_core.js",
"build": "tsc -b && rollup --config",
"build:prod": "tsc -b --sourceMap false && rollup --config",
"prepare:core": "node download_core.js",
"build": "pnpm prepare:core && tsc -b && rollup --config",
"build:prod": "pnpm prepare:core && tsc -b --sourceMap false && rollup --config",
"clean": "rm -rf lib dist tsconfig.tsbuildinfo",
"watch": "tsc -b -w",
"test": "vitest",
"test": "pnpm prepare:core && vitest",
"test:exports": "attw --pack . --ignore-rules no-resolution"
},
"type": "module",
Expand Down
6 changes: 3 additions & 3 deletions packages/node/src/db/NodeSqliteWorker.ts
Original file line number Diff line number Diff line change
@@ -1,9 +1,9 @@
import { threadId } from 'node:worker_threads';
import type { DatabaseSync } from 'node:sqlite';
import { threadId } from 'node:worker_threads';

import { dynamicImport } from '../utils/modules.js';
import { AsyncDatabase, AsyncDatabaseOpenOptions } from './AsyncDatabase.js';
import { PowerSyncWorkerOptions } from './SqliteWorker.js';
import { dynamicImport } from '../utils/modules.js';

class BlockingNodeDatabase implements AsyncDatabase {
private readonly db: DatabaseSync;
Expand Down Expand Up @@ -57,7 +57,7 @@ export async function openDatabase(worker: PowerSyncWorkerOptions, options: Asyn
const { DatabaseSync } = await dynamicImport('node:sqlite');

const baseDB = new DatabaseSync(options.path, { allowExtension: true });
baseDB.loadExtension(worker.extensionPath());
baseDB.loadExtension(worker.extensionPath(), 'sqlite3_powersync_init');

return new BlockingNodeDatabase(baseDB, options.isWriter);
}
Loading