diff --git a/.changeset/calm-pans-worry.md b/.changeset/calm-pans-worry.md
new file mode 100644
index 000000000..088d5091e
--- /dev/null
+++ b/.changeset/calm-pans-worry.md
@@ -0,0 +1,5 @@
+---
+'@powersync/diagnostics-app': minor
+---
+
+Improve diagnostics app performance during bulk downloads: while a download is in progress, show per-bucket progress (including a downloaded operations count) using a fast query instead of running the expensive stats queries on every change.
diff --git a/tools/diagnostics-app/src/app/views/sync-diagnostics.tsx b/tools/diagnostics-app/src/app/views/sync-diagnostics.tsx
index 1b3e3deca..1aa3f90d5 100644
--- a/tools/diagnostics-app/src/app/views/sync-diagnostics.tsx
+++ b/tools/diagnostics-app/src/app/views/sync-diagnostics.tsx
@@ -1,5 +1,5 @@
import { NavigationPage } from '@/components/navigation/NavigationPage';
-import { clearData, db, syncErrorTracker } from '@/library/powersync/ConnectionManager';
+import { clearData, db, sync, syncErrorTracker } from '@/library/powersync/ConnectionManager';
import {
Box,
Button,
@@ -46,6 +46,7 @@ SELECT
stats.metadata_size,
IFNULL(stats.row_count, 0) as row_count,
local.download_size,
+ local.downloaded_operations,
local.total_operations,
local.downloading
FROM local_bucket_data local
@@ -64,6 +65,7 @@ SELECT
0 as metadata_size,
0 as row_count,
local.download_size,
+ local.downloaded_operations,
local.total_operations,
local.downloading
FROM local_bucket_data local`;
@@ -81,14 +83,23 @@ export default function SyncDiagnosticsPage() {
// Similar to db.currentStatus.hasSynced, but synchronized to the onChange events
const { synced_at } = await db.get<{ synced_at: string | null }>('SELECT powersync_last_synced_at() as synced_at');
setlastSyncedAt(synced_at ? new Date(synced_at + 'Z') : null);
- if (synced_at != null) {
+ if (synced_at != null && !sync.syncStatus.dataFlowStatus.downloading) {
// These are potentially expensive queries - do not run during initial sync
const bucketRows = await db.getAll(BUCKETS_QUERY);
const tableRows = await db.getAll(TABLES_QUERY);
setBucketRows(bucketRows);
setTableRows(tableRows);
+ } else if (synced_at != null) {
+ // Busy downloading, but have already synced once
+ const bucketRows = await db.getAll(BUCKETS_QUERY_FAST);
+ setBucketRows(bucketRows);
+ // Load tables if we haven't yet
+ if (tableRows == null) {
+ const tableRows = await db.getAll(TABLES_QUERY);
+ setTableRows(tableRows);
+ }
} else {
- // Fast query to show progress during initial sync
+ // Fast query to show progress during initial sync / while downloading bulk data
const bucketRows = await db.getAll(BUCKETS_QUERY_FAST);
setBucketRows(bucketRows);
setTableRows(null);
@@ -127,6 +138,7 @@ export default function SyncDiagnosticsPage() {
{ field: 'name', headerName: 'Name', flex: 2 },
{ field: 'tables', headerName: 'Table(s)', flex: 1, type: 'text' },
{ field: 'row_count', headerName: 'Row Count', flex: 1, type: 'number' },
+ { field: 'downloaded_operations', headerName: 'Downloaded Operations', flex: 1, type: 'number' },
{ field: 'total_operations', headerName: 'Total Operations', flex: 1, type: 'number' },
{
field: 'data_size',
@@ -163,6 +175,7 @@ export default function SyncDiagnosticsPage() {
name: r.name,
tables: JSON.parse(r.tables ?? '[]').join(', '),
row_count: r.row_count,
+ downloaded_operations: r.downloaded_operations,
total_operations: r.total_operations,
data_size: r.data_size,
metadata_size: r.metadata_size,
@@ -174,6 +187,7 @@ export default function SyncDiagnosticsPage() {
const totals = {
buckets: rows.length,
row_count: rows.reduce((total, row) => total + row.row_count, 0),
+ downloaded_operations: rows.reduce((total, row) => total + row.downloaded_operations, 0),
total_operations: rows.reduce((total, row) => total + row.total_operations, 0),
data_size: rows.reduce((total, row) => total + row.data_size, 0),
metadata_size: rows.reduce((total, row) => total + row.metadata_size, 0),
@@ -208,6 +222,7 @@ export default function SyncDiagnosticsPage() {
Number of buckets
Total Rows
+ Downloaded Operations
Total Operations
Total Data Size
Total Metadata Size
@@ -217,6 +232,7 @@ export default function SyncDiagnosticsPage() {
{totals.buckets}
{totals.row_count}
+ {totals.downloaded_operations}
{totals.total_operations}
{formatBytes(totals.data_size)}
{formatBytes(totals.metadata_size)}
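For orientation, the hunks above gate the expensive bucket/table stats behind the sync state: the full `BUCKETS_QUERY` only runs once a first sync has completed and no download is in progress; otherwise the cheap `BUCKETS_QUERY_FAST` is used. A minimal sketch of that branching, assuming `syncedAt` comes from `powersync_last_synced_at()` and `downloading` mirrors `sync.syncStatus.dataFlowStatus.downloading` (the helper and its types are illustrative, not code from this diff):

```ts
// Sketch only: the refresh decision made in SyncDiagnosticsPage, as a pure function.
type RefreshPlan =
  | { buckets: 'full'; tables: 'reload' }       // idle after a full sync
  | { buckets: 'fast'; tables: 'keep-or-load' } // bulk download after an earlier sync
  | { buckets: 'fast'; tables: 'clear' };       // still in initial sync

function planRefresh(syncedAt: string | null, downloading: boolean): RefreshPlan {
  if (syncedAt != null && !downloading) {
    // Safe to run the expensive stats queries.
    return { buckets: 'full', tables: 'reload' };
  }
  if (syncedAt != null) {
    // Busy downloading, but synced at least once before: cheap progress query,
    // and only load table stats if none are cached yet.
    return { buckets: 'fast', tables: 'keep-or-load' };
  }
  // Initial sync: cheap progress query only.
  return { buckets: 'fast', tables: 'clear' };
}
```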
diff --git a/tools/diagnostics-app/src/library/powersync/AppSchema.ts b/tools/diagnostics-app/src/library/powersync/AppSchema.ts
index 34492081c..da4fcd6de 100644
--- a/tools/diagnostics-app/src/library/powersync/AppSchema.ts
+++ b/tools/diagnostics-app/src/library/powersync/AppSchema.ts
@@ -5,6 +5,7 @@ export const local_bucket_data = new Table(
total_operations: column.integer,
last_op: column.text,
download_size: column.integer,
+ downloaded_operations: column.integer,
downloading: column.integer
},
{ localOnly: true }
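The new `downloaded_operations` column in the local-only `local_bucket_data` table counts how many operations have been received per bucket, alongside the existing `total_operations`. A short illustrative sketch of how the two columns can be combined into a progress fraction (the interface and helper below are assumptions for illustration, not part of the diff):

```ts
// Sketch only: deriving per-bucket download progress from the two counters.
interface LocalBucketProgress {
  downloaded_operations: number | null;
  total_operations: number | null;
}

function bucketProgressFraction(row: LocalBucketProgress): number {
  const done = row.downloaded_operations ?? 0;
  const total = row.total_operations ?? 0;
  if (total <= 0) {
    // No known total yet; treat any downloaded data as "done" for display purposes.
    return done > 0 ? 1 : 0;
  }
  return Math.min(done / total, 1);
}
```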
diff --git a/tools/diagnostics-app/src/library/powersync/RecordingStorageAdapter.ts b/tools/diagnostics-app/src/library/powersync/RecordingStorageAdapter.ts
index 6c025c05a..2fcd28622 100644
--- a/tools/diagnostics-app/src/library/powersync/RecordingStorageAdapter.ts
+++ b/tools/diagnostics-app/src/library/powersync/RecordingStorageAdapter.ts
@@ -26,15 +26,16 @@ export class RecordingStorageAdapter extends SqliteBucketStorage {
await this.rdb.writeTransaction(async (tx) => {
for (const bucket of checkpoint.buckets) {
await tx.execute(
- `INSERT OR REPLACE INTO local_bucket_data(id, total_operations, last_op, download_size, downloading)
+ `INSERT OR REPLACE INTO local_bucket_data(id, total_operations, last_op, download_size, downloading, downloaded_operations)
VALUES (
?,
?,
IFNULL((SELECT last_op FROM local_bucket_data WHERE id = ?), '0'),
IFNULL((SELECT download_size FROM local_bucket_data WHERE id = ?), 0),
- IFNULL((SELECT downloading FROM local_bucket_data WHERE id = ?), TRUE)
+ IFNULL((SELECT downloading FROM local_bucket_data WHERE id = ?), TRUE),
+ IFNULL((SELECT downloaded_operations FROM local_bucket_data WHERE id = ?), 0)
)`,
- [bucket.bucket, bucket.count, bucket.bucket, bucket.bucket, bucket.bucket]
+ [bucket.bucket, bucket.count, bucket.bucket, bucket.bucket, bucket.bucket, bucket.bucket]
);
}
});
@@ -61,8 +62,13 @@ export class RecordingStorageAdapter extends SqliteBucketStorage {
// Record metrics
const size = JSON.stringify(bucket.data).length;
await tx.execute(
- 'UPDATE local_bucket_data SET download_size = IFNULL(download_size, 0) + ?, last_op = ?, downloading = ? WHERE id = ?',
- [size, bucket.next_after, bucket.has_more, bucket.bucket]
+ `UPDATE local_bucket_data SET
+ download_size = IFNULL(download_size, 0) + ?,
+ last_op = ?,
+ downloading = ?,
+ downloaded_operations = IFNULL(downloaded_operations, 0) + ?
+ WHERE id = ?`,
+ [size, bucket.next_after, bucket.has_more, bucket.data.length, bucket.bucket]
);
}
});
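In `RecordingStorageAdapter`, the counter is seeded when a checkpoint lists the bucket (defaulting to 0) and then incremented by `bucket.data.length` for every batch that is saved, in the same way `download_size` accumulates `JSON.stringify(bucket.data).length`. Expressed as a pure function, purely to illustrate the bookkeeping (the types and helper are not part of the diff):

```ts
// Sketch only: the per-batch accumulation performed by the UPDATE above.
// `batchBytes` corresponds to JSON.stringify(bucket.data).length and
// `batchOperations` to bucket.data.length.
interface BucketCounters {
  download_size: number;
  downloaded_operations: number;
}

function applyBatch(
  prev: BucketCounters | null,
  batchBytes: number,
  batchOperations: number
): BucketCounters {
  return {
    download_size: (prev?.download_size ?? 0) + batchBytes,
    downloaded_operations: (prev?.downloaded_operations ?? 0) + batchOperations
  };
}
```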