diff --git a/.changeset/seven-fireants-boil.md b/.changeset/seven-fireants-boil.md new file mode 100644 index 000000000..f85d04de0 --- /dev/null +++ b/.changeset/seven-fireants-boil.md @@ -0,0 +1,29 @@ +--- +'@powersync/node': minor +--- + +Pre-package all the PowerSync Rust extension binaries for all supported platforms and architectures in the NPM package `lib` folder. Install scripts are no longer required to download the PowerSync core. + +The binary files relevant to a specific architecture now have updated filenames. Custom code which previously referenced binary filenames requires updating. A helper function is available to automatically provide the correct filename. + +```diff ++ import { getPowerSyncExtensionFilename } from '@powersync/node/worker.js'; + +function resolvePowerSyncCoreExtension() { +- const platform = OS.platform(); +- let extensionPath: string; +- if (platform === 'win32') { +- extensionPath = 'powersync.dll'; +- } else if (platform === 'linux') { +- extensionPath = 'libpowersync.so'; +- } else if (platform === 'darwin') { +- extensionPath = 'libpowersync.dylib'; +- } else { +- throw 'Unknown platform, PowerSync for Node.js currently supports Windows, Linux and macOS.'; +- } ++ const extensionPath = getPowerSyncExtensionFilename(); + + // This example uses copy-webpack-plugin to copy the prebuilt library over. This ensures that it is + // available in packaged release builds. + let libraryPath = path.resolve(__dirname, 'powersync', extensionPath); +``` diff --git a/demos/example-electron-node/README.md b/demos/example-electron-node/README.md index d8d0d710e..0caa5c16a 100644 --- a/demos/example-electron-node/README.md +++ b/demos/example-electron-node/README.md @@ -9,8 +9,9 @@ In particular: queries. This worker is part of the `@powersync/node` package and wouldn't be copied into the resulting Electron app by default. For this reason, this example has its own `src/main/worker.ts` loaded with `new URL('./worker.ts', import.meta.url)`. 2. 
In addition to the worker, PowerSync requires access to a SQLite extension providing sync functionality. - This file is also part of the `@powersync/node` package and called `powersync.dll`, `libpowersync.dylib` or - `libpowersync.so` depending on the operating system. + This file is also part of the `@powersync/node` package and is the prebuilt release asset (for example + `powersync_x64.dll`, `libpowersync_x64.dylib` or `libpowersync_x64.so`) depending on the operating system and + architecture. We use the `copy-webpack-plugin` package to make sure a copy of that file is available to the main process, and load it in the custom `src/main/worker.ts`. 3. The `get()` and `getAll()` methods are exposed to the renderer process with an IPC channel. @@ -21,7 +22,7 @@ To see it in action: 2. Copy `.env.local.template` to `.env.local`, and complete the environment variables. You can generate a [temporary development token](https://docs.powersync.com/usage/installation/authentication-setup/development-tokens), or leave blank to test with local-only data. The example works with the schema from the [PowerSync + Supabase tutorial](https://docs.powersync.com/integration-guides/supabase-+-powersync#supabase-powersync). 3. `cd` into this directory. In this mono-repo, you'll have to run `./node_modules/.bin/electron-rebuild` once to make sure `@powersync/better-sqlite3` was compiled with Electron's toolchain. -3. Finally, run `pnpm start`. +4. Finally, run `pnpm start`. Apart from the build setup, this example is purposefully kept simple. 
To make sure PowerSync is working, you can run `await powersync.get('SELECT powersync_rs_version()');` in the DevTools diff --git a/demos/example-electron-node/config.ts b/demos/example-electron-node/config.ts index 05a8877d6..6259e1ac7 100644 --- a/demos/example-electron-node/config.ts +++ b/demos/example-electron-node/config.ts @@ -1,18 +1,17 @@ -import OS from 'node:os'; import path from 'node:path'; -import type { ForgeConfig } from '@electron-forge/shared-types'; -import { MakerSquirrel } from '@electron-forge/maker-squirrel'; -import { MakerZIP } from '@electron-forge/maker-zip'; import { MakerDeb } from '@electron-forge/maker-deb'; import { MakerRpm } from '@electron-forge/maker-rpm'; +import { MakerSquirrel } from '@electron-forge/maker-squirrel'; +import { MakerZIP } from '@electron-forge/maker-zip'; import { AutoUnpackNativesPlugin } from '@electron-forge/plugin-auto-unpack-natives'; import { WebpackPlugin } from '@electron-forge/plugin-webpack'; -import { type Configuration, type ModuleOptions, type DefinePlugin } from 'webpack'; +import type { ForgeConfig } from '@electron-forge/shared-types'; +import { getPowerSyncExtensionFilename } from '@powersync/node/worker.js'; +import type ICopyPlugin from 'copy-webpack-plugin'; import * as dotenv from 'dotenv'; import type IForkTsCheckerWebpackPlugin from 'fork-ts-checker-webpack-plugin'; -import type ICopyPlugin from 'copy-webpack-plugin'; - +import { type Configuration, type DefinePlugin, type ModuleOptions } from 'webpack'; dotenv.config({ path: '.env.local' }); const ForkTsCheckerWebpackPlugin: typeof IForkTsCheckerWebpackPlugin = require('fork-ts-checker-webpack-plugin'); @@ -57,17 +56,7 @@ const defaultWebpackRules: () => Required['rules'] = () => { ]; }; -const platform = OS.platform(); -let extensionPath: string; -if (platform === 'win32') { - extensionPath = 'powersync.dll'; -} else if (platform === 'linux') { - extensionPath = 'libpowersync.so'; -} else if (platform === 'darwin') { - extensionPath = 
'libpowersync.dylib'; -} else { - throw 'Unknown platform, PowerSync for Node.js currently supports Windows, Linux and macOS.'; -} +let extensionFilename = getPowerSyncExtensionFilename(); const mainConfig: Configuration = { /** @@ -84,8 +73,8 @@ const mainConfig: Configuration = { new CopyPlugin({ patterns: [ { - from: path.resolve(require.resolve('@powersync/node/package.json'), `../lib/${extensionPath}`), - to: path.join('powersync', extensionPath) + from: path.resolve(require.resolve('@powersync/node/package.json'), `../lib/${extensionFilename}`), + to: path.join('powersync', extensionFilename) } ] }), diff --git a/demos/example-electron-node/src/main/worker.ts b/demos/example-electron-node/src/main/worker.ts index 7c4167200..30c7f21c9 100644 --- a/demos/example-electron-node/src/main/worker.ts +++ b/demos/example-electron-node/src/main/worker.ts @@ -1,25 +1,14 @@ -import * as path from 'node:path'; -import OS from 'node:os'; import Database from 'better-sqlite3'; +import * as path from 'node:path'; -import { startPowerSyncWorker } from '@powersync/node/worker.js'; +import { getPowerSyncExtensionFilename, startPowerSyncWorker } from '@powersync/node/worker.js'; function resolvePowerSyncCoreExtension() { - const platform = OS.platform(); - let extensionPath: string; - if (platform === 'win32') { - extensionPath = 'powersync.dll'; - } else if (platform === 'linux') { - extensionPath = 'libpowersync.so'; - } else if (platform === 'darwin') { - extensionPath = 'libpowersync.dylib'; - } else { - throw 'Unknown platform, PowerSync for Node.js currently supports Windows, Linux and macOS.'; - } + const extensionFilename = getPowerSyncExtensionFilename(); // This example uses copy-webpack-plugin to copy the prebuilt library over. This ensures that it is // available in packaged release builds. 
- let libraryPath = path.resolve(__dirname, 'powersync', extensionPath); + let libraryPath = path.resolve(__dirname, 'powersync', extensionFilename); if (__dirname.indexOf('app.asar') != -1) { // Our build configuration ensures the extension is always available outside of the archive too. diff --git a/packages/node/download_core.js b/packages/node/download_core.js index 703295df2..67c1784fa 100644 --- a/packages/node/download_core.js +++ b/packages/node/download_core.js @@ -1,39 +1,25 @@ -// TODO: Make this a pre-publish hook and just bundle everything import { createHash } from 'node:crypto'; -import * as OS from 'node:os'; import * as fs from 'node:fs/promises'; import * as path from 'node:path'; import { Readable } from 'node:stream'; import { finished } from 'node:stream/promises'; -import { exit } from 'node:process'; // When changing this version, run node download_core.js update_hashes const version = '0.4.6'; const versionHashes = { 'powersync_x64.dll': '5efaa9ad4975094912a36843cb7b503376cacd233d21ae0956f0f4b42dcb457b', + 'powersync_x86.dll': '4151ba8aa6f024b50b7aebe52ba59f2c5be54e3fed26f7f3f48e1127dcda027d', + 'powersync_aarch64.dll': '3abe46074432593ff5cfc2098b186c592f020c5cfa81285f8e49962732a94bf5', + 'libpowersync_x86.so': '1321a7de13fda0b2de7d2bc231a68cb5691f84010f3858e5cf02e47f88ba6f4a', 'libpowersync_x64.so': 'e9d78620d69d3cf7d57353891fe0bf85b79d326b42c4669b9500b9e610388f76', 'libpowersync_aarch64.so': '0d84c0dc0134fc89af65724d11e2c45e3c15569c575ecda52d0ec2fa2aeec495', + 'libpowersync_armv7.so': 'c7887181ce9c524b68a7ac284ab447b8584511c87527ca26186e5874bf9ba3d6', + 'libpowersync_riscv64gc.so': 'a89f3a71f22f707707d97517e9310e42e2a57dc5343cee08d09002a8cea048d5', 'libpowersync_x64.dylib': '9b484eaf361451f7758ca6ad53190a73563be930a8f8a39ccefd29390046ef6c', 'libpowersync_aarch64.dylib': 'bfb4f1ec207b298aff560f1825f8123d24316edaa27b6df3a17dd49466576b92' }; -const platform = OS.platform(); -let destination; -let asset; - -if (platform === 'win32') { - asset 
= 'powersync_x64.dll'; - destination = 'powersync.dll'; -} else if (platform === 'linux') { - asset = OS.arch() === 'x64' ? 'libpowersync_x64.so' : 'libpowersync_aarch64.so'; - destination = 'libpowersync.so'; -} else if (platform === 'darwin') { - asset = OS.arch() === 'x64' ? 'libpowersync_x64.dylib' : 'libpowersync_aarch64.dylib'; - destination = 'libpowersync.dylib'; -} - -const expectedHash = versionHashes[asset]; -const destinationPath = path.resolve('lib', destination); +const assets = Object.keys(versionHashes); const hashStream = async (input) => { for await (const chunk of input.pipe(createHash('sha256')).setEncoding('hex')) { @@ -41,9 +27,9 @@ const hashStream = async (input) => { } }; -const hashLocal = async () => { +const hashLocal = async (filePath) => { try { - const handle = await fs.open(destinationPath, 'r'); + const handle = await fs.open(filePath, 'r'); const input = handle.createReadStream(); const result = await hashStream(input); @@ -54,31 +40,88 @@ const hashLocal = async () => { } }; -const download = async () => { - if ((await hashLocal()) == expectedHash) { - console.debug('Local copy is up-to-date, skipping download'); - exit(0); +const downloadAsset = async (asset) => { + const destinationPath = path.resolve('lib', asset); + const expectedHash = versionHashes[asset]; + + // Check if file exists and has correct hash + const currentHash = await hashLocal(destinationPath); + if (currentHash == expectedHash) { + console.debug(`${asset} is up-to-date, skipping download`); + return; } const url = `https://github.com/powersync-ja/powersync-sqlite-core/releases/download/v${version}/${asset}`; + console.log(`Downloading ${url}`); const response = await fetch(url); if (response.status != 200) { throw `Could not download ${url}`; } + const file = await fs.open(destinationPath, 'w'); + await finished(Readable.fromWeb(response.body).pipe(file.createWriteStream())); + await file.close(); + + const hashAfterDownloading = await 
hashLocal(destinationPath); + if (hashAfterDownloading != expectedHash) { + throw `Unexpected hash after downloading ${asset} (got ${hashAfterDownloading}, expected ${expectedHash})`; + } + console.log(`Successfully downloaded ${asset}`); +}; + +const checkAsset = async (asset) => { + const destinationPath = path.resolve('lib', asset); + const expectedHash = versionHashes[asset]; + const currentHash = await hashLocal(destinationPath); + + return { + asset, + destinationPath, + expectedHash, + currentHash, + exists: currentHash !== null, + isValid: currentHash == expectedHash + }; +}; + +const download = async () => { try { await fs.access('lib'); } catch { await fs.mkdir('lib'); } - const file = await fs.open(destinationPath, 'w'); - await finished(Readable.fromWeb(response.body).pipe(file.createWriteStream())); - await file.close(); + // First check all assets + console.log('Checking existing files...'); + const checks = await Promise.all(assets.map((asset) => checkAsset(asset))); - const hashAfterDownloading = await hashLocal(); - if (hashAfterDownloading != expectedHash) { - throw `Unexpected hash after downloading (got ${hashAfterDownloading}, expected ${expectedHash})`; + const toDownload = checks.filter((check) => !check.isValid); + const upToDate = checks.filter((check) => check.isValid); + + // Print summary + if (upToDate.length > 0) { + console.log('\nUp-to-date files:'); + for (const check of upToDate) { + console.log(` ✓ ${check.asset}`); + } + } + + if (toDownload.length > 0) { + console.log('\nFiles to download:'); + for (const check of toDownload) { + if (!check.exists) { + console.log(` • ${check.asset} (missing)`); + } else { + console.log(` • ${check.asset} (hash mismatch)`); + } + } + + console.log('\nStarting downloads...'); + await Promise.all(toDownload.map((check) => downloadAsset(check.asset))); + + console.log('\nAll downloads completed successfully!'); + } else { + console.log('\nAll files are up-to-date, nothing to download.'); } 
}; diff --git a/packages/node/package.json b/packages/node/package.json index 94a36790a..6dd428445 100644 --- a/packages/node/package.json +++ b/packages/node/package.json @@ -13,12 +13,12 @@ "download_core.js" ], "scripts": { - "install": "node download_core.js", - "build": "tsc -b && rollup --config", - "build:prod": "tsc -b --sourceMap false && rollup --config", + "prepare:core": "node download_core.js", + "build": "pnpm prepare:core && tsc -b && rollup --config", + "build:prod": "pnpm prepare:core && tsc -b --sourceMap false && rollup --config", "clean": "rm -rf lib dist tsconfig.tsbuildinfo", "watch": "tsc -b -w", - "test": "vitest", + "test": "pnpm prepare:core && vitest", "test:exports": "attw --pack . --ignore-rules no-resolution" }, "type": "module", diff --git a/packages/node/src/db/NodeSqliteWorker.ts b/packages/node/src/db/NodeSqliteWorker.ts index c822fa4f2..9ed7b278c 100644 --- a/packages/node/src/db/NodeSqliteWorker.ts +++ b/packages/node/src/db/NodeSqliteWorker.ts @@ -1,9 +1,9 @@ -import { threadId } from 'node:worker_threads'; import type { DatabaseSync } from 'node:sqlite'; +import { threadId } from 'node:worker_threads'; +import { dynamicImport } from '../utils/modules.js'; import { AsyncDatabase, AsyncDatabaseOpenOptions } from './AsyncDatabase.js'; import { PowerSyncWorkerOptions } from './SqliteWorker.js'; -import { dynamicImport } from '../utils/modules.js'; class BlockingNodeDatabase implements AsyncDatabase { private readonly db: DatabaseSync; @@ -57,7 +57,7 @@ export async function openDatabase(worker: PowerSyncWorkerOptions, options: Asyn const { DatabaseSync } = await dynamicImport('node:sqlite'); const baseDB = new DatabaseSync(options.path, { allowExtension: true }); - baseDB.loadExtension(worker.extensionPath()); + baseDB.loadExtension(worker.extensionPath(), 'sqlite3_powersync_init'); return new BlockingNodeDatabase(baseDB, options.isWriter); } diff --git a/packages/node/src/db/SqliteWorker.ts b/packages/node/src/db/SqliteWorker.ts 
index e721c65f5..db1b7f937 100644 --- a/packages/node/src/db/SqliteWorker.ts +++ b/packages/node/src/db/SqliteWorker.ts @@ -1,13 +1,12 @@ -import * as path from 'node:path'; import * as Comlink from 'comlink'; -import { parentPort } from 'node:worker_threads'; import OS from 'node:os'; +import * as path from 'node:path'; import url from 'node:url'; +import { parentPort } from 'node:worker_threads'; +import { dynamicImport, isBundledToCommonJs } from '../utils/modules.js'; +import { AsyncDatabase, AsyncDatabaseOpener, AsyncDatabaseOpenOptions } from './AsyncDatabase.js'; import { openDatabase as openBetterSqliteDatabase } from './BetterSqliteWorker.js'; import { openDatabase as openNodeDatabase } from './NodeSqliteWorker.js'; -import { AsyncDatabase, AsyncDatabaseOpener, AsyncDatabaseOpenOptions } from './AsyncDatabase.js'; -import { isBundledToCommonJs } from '../utils/modules.js'; -import { dynamicImport } from '../utils/modules.js'; export interface PowerSyncWorkerOptions { /** @@ -23,28 +22,62 @@ export interface PowerSyncWorkerOptions { loadBetterSqlite3: () => Promise; } +/** + * @returns The relevant PowerSync extension binary filename for the current platform and architecture + */ +export function getPowerSyncExtensionFilename() { + const platform = OS.platform(); + const arch = OS.arch(); + let extensionFile: string; + + if (platform == 'win32') { + if (arch == 'x64') { + extensionFile = 'powersync_x64.dll'; + } else if (arch == 'ia32') { + extensionFile = 'powersync_x86.dll'; + } else if (arch == 'arm64') { + extensionFile = 'powersync_aarch64.dll'; + } else { + throw new Error('Windows platform only supports arm64, ia32 and x64 architecture.'); + } + } else if (platform == 'linux') { + if (arch == 'x64') { + extensionFile = 'libpowersync_x64.so'; + } else if (arch == 'arm64') { + // TODO detect armv7 as an option + extensionFile = 'libpowersync_aarch64.so'; + } else if (arch == 'riscv64') { + extensionFile = 'libpowersync_riscv64gc.so'; + } else { + throw 
new Error('Linux platform only supports x64, arm64 and riscv64 architectures.'); + } + } else if (platform == 'darwin') { + if (arch == 'x64') { + extensionFile = 'libpowersync_x64.dylib'; + } else if (arch == 'arm64') { + extensionFile = 'libpowersync_aarch64.dylib'; + } else { + throw new Error('macOS platform only supports x64 and arm64 architectures.'); + } + } else { + throw new Error( + `Unknown platform: ${platform}, PowerSync for Node.js currently supports Windows, Linux and macOS.` + ); + } + + return extensionFile; +} + export function startPowerSyncWorker(options?: Partial) { const resolvedOptions: PowerSyncWorkerOptions = { extensionPath() { const isCommonJsModule = isBundledToCommonJs; - - const platform = OS.platform(); - let extensionPath: string; - if (platform === 'win32') { - extensionPath = 'powersync.dll'; - } else if (platform === 'linux') { - extensionPath = 'libpowersync.so'; - } else if (platform === 'darwin') { - extensionPath = 'libpowersync.dylib'; - } else { - throw 'Unknown platform, PowerSync for Node.js currently supports Windows, Linux and macOS.'; - } - + const extensionFilename = getPowerSyncExtensionFilename(); let resolved: string; if (isCommonJsModule) { - resolved = path.resolve(__dirname, '../lib/', extensionPath); + resolved = path.resolve(__dirname, '../lib/', extensionFilename); } else { - resolved = url.fileURLToPath(new URL(`../${extensionPath}`, import.meta.url)); + resolved = url.fileURLToPath(new URL(`../${extensionFilename}`, import.meta.url)); } return resolved; diff --git a/packages/node/tests/utils.ts b/packages/node/tests/utils.ts index 36021726b..f9fad7090 100644 --- a/packages/node/tests/utils.ts +++ b/packages/node/tests/utils.ts @@ -1,8 +1,10 @@ -import os from 'node:os'; import fs from 'node:fs/promises'; +import os from 'node:os'; import path from 'node:path'; import { ReadableStream, TransformStream } from 'node:stream/web'; +import { createLogger } from '@powersync/common'; +import Logger from 'js-logger'; 
import { onTestFinished, test } from 'vitest'; import { AbstractPowerSyncDatabase, @@ -18,8 +20,6 @@ import { SyncStatus, Table } from '../lib'; -import { createLogger } from '@powersync/common'; -import Logger from 'js-logger'; export async function createTempDir() { const ostmpdir = os.tmpdir();