
Commit 357477f

Merge remote-tracking branch 'origin/main' into optimize-bucket-lookups-2

2 parents 1f73456 + 7348ea0 · commit 357477f

File tree

8 files changed: +35 −15 lines

.changeset/cyan-otters-sleep.md

Lines changed: 5 additions & 0 deletions
@@ -0,0 +1,5 @@
+---
+'@powersync/service-core': patch
+---
+
+Use slot_name_prefix from the replication connection again.

.changeset/fast-flowers-scream.md

Lines changed: 7 additions & 0 deletions
@@ -0,0 +1,7 @@
+---
+'@powersync/service-module-mongodb-storage': patch
+'@powersync/lib-service-mongodb': patch
+'@powersync/service-image': patch
+---
+
+Skip large rows, rather than causing hard replication errors

libs/lib-mongodb/package.json

Lines changed: 1 addition & 1 deletion
@@ -30,7 +30,7 @@
   "dependencies": {
     "@powersync/lib-services-framework": "workspace:*",
     "bson": "^6.10.3",
-    "mongodb": "^6.13.0",
+    "mongodb": "^6.14.1",
     "ts-codec": "^1.3.0",
     "uri-js": "^4.4.1"
   },

modules/module-mongodb-storage/src/storage/implementation/MongoBucketBatch.ts

Lines changed: 1 addition & 1 deletion
@@ -24,7 +24,7 @@ import { idPrefixFilter } from './util.js';
 /**
  * 15MB
  */
-const MAX_ROW_SIZE = 15 * 1024 * 1024;
+export const MAX_ROW_SIZE = 15 * 1024 * 1024;
 
 // Currently, we can only have a single flush() at a time, since it locks the op_id sequence.
 // While the MongoDB transaction retry mechanism handles this okay, using an in-process Mutex

modules/module-mongodb-storage/src/storage/implementation/PersistedBatch.ts

Lines changed: 12 additions & 3 deletions
@@ -4,8 +4,8 @@ import { EvaluatedParameters, EvaluatedRow } from '@powersync/service-sync-rules';
 import * as bson from 'bson';
 
 import { logger } from '@powersync/lib-services-framework';
-import { BucketState, InternalOpId, storage, utils } from '@powersync/service-core';
-import { currentBucketKey } from './MongoBucketBatch.js';
+import { InternalOpId, storage, utils } from '@powersync/service-core';
+import { currentBucketKey, MAX_ROW_SIZE } from './MongoBucketBatch.js';
 import { MongoIdSequence } from './MongoIdSequence.js';
 import { PowerSyncMongo } from './db.js';
 import {
@@ -98,11 +98,20 @@ export class PersistedBatch {
 
     for (const k of options.evaluated) {
       const key = currentBucketKey(k);
-      remaining_buckets.delete(key);
 
       // INSERT
       const recordData = JSONBig.stringify(k.data);
       const checksum = utils.hashData(k.table, k.id, recordData);
+      if (recordData.length > MAX_ROW_SIZE) {
+        // In many cases, the raw data size would have been too large already. But there are cases where
+        // the BSON size is small enough, but the JSON size is too large.
+        // In these cases, we can't store the data, so we skip it, or generate a REMOVE operation if the row
+        // was synced previously.
+        logger.error(`powersync_${this.group_id} Row ${key} too large: ${recordData.length} bytes. Removing.`);
+        continue;
+      }
+
+      remaining_buckets.delete(key);
       this.currentSize += recordData.length + 200;
 
       const op_id = options.op_seq.next();
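Note on the PersistedBatch change above: moving remaining_buckets.delete(key) below the size check is what turns an oversized row into a skip-plus-cleanup rather than a hard replication error. The sketch below is illustrative only, not the service's actual code (MAX_ROW_SIZE is mirrored from MongoBucketBatch.ts; EvaluatedRowLike, partitionRows, and remainingBuckets are hypothetical names): a row whose serialized form is too large is skipped and its key stays in the remaining-buckets set, so a later pass can emit a REMOVE operation if the row was synced previously.

// Illustrative TypeScript sketch of the skip-large-row behavior; not the real PersistedBatch implementation.
const MAX_ROW_SIZE = 15 * 1024 * 1024; // 15MB, mirroring MongoBucketBatch.ts

interface EvaluatedRowLike {
  key: string; // bucket/row key, e.g. as produced by currentBucketKey()
  serialized: string; // JSON-serialized row data
}

// Splits rows into those small enough to persist and those that must be skipped.
// Skipped rows are NOT deleted from remainingBuckets, so a later cleanup pass can
// generate a REMOVE operation for rows that were synced previously.
function partitionRows(rows: EvaluatedRowLike[], remainingBuckets: Set<string>) {
  const toPersist: EvaluatedRowLike[] = [];
  const skipped: EvaluatedRowLike[] = [];
  for (const row of rows) {
    if (row.serialized.length > MAX_ROW_SIZE) {
      // Even when the BSON form fits, the JSON form may exceed the limit; skip instead of failing.
      skipped.push(row);
      continue;
    }
    remainingBuckets.delete(row.key);
    toPersist.push(row);
  }
  return { toPersist, skipped };
}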

packages/service-core/src/util/config/compound-config-collector.ts

Lines changed: 1 addition & 2 deletions
@@ -170,8 +170,7 @@ export class CompoundConfigCollector {
         baseConfig.api?.parameters?.max_data_fetch_concurrency ?? DEFAULT_MAX_DATA_FETCH_CONCURRENCY
       },
       // TODO maybe move this out of the connection or something
-      // slot_name_prefix: connections[0]?.slot_name_prefix ?? 'powersync_'
-      slot_name_prefix: 'powersync_',
+      slot_name_prefix: baseConfig.replication?.connections?.[0]?.slot_name_prefix ?? 'powersync_',
       parameters: baseConfig.parameters ?? {}
     };
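This hunk restores reading slot_name_prefix from the first replication connection, falling back to 'powersync_' when none is configured. A minimal sketch of that resolution logic follows; it is illustrative only, with a simplified config shape, and resolveSlotNamePrefix is a hypothetical helper rather than part of the service.

// Illustrative TypeScript sketch; the real collector builds a much larger resolved config object.
interface BaseConfigLike {
  replication?: {
    connections?: Array<{ slot_name_prefix?: string }>;
  };
}

const DEFAULT_SLOT_NAME_PREFIX = 'powersync_';

// Prefer the prefix configured on the first replication connection, else use the default.
function resolveSlotNamePrefix(baseConfig: BaseConfigLike): string {
  return baseConfig.replication?.connections?.[0]?.slot_name_prefix ?? DEFAULT_SLOT_NAME_PREFIX;
}

// Example usage: a connection configured with 'tenant_a_' resolves to 'tenant_a_',
// while an empty config falls back to 'powersync_'.
resolveSlotNamePrefix({ replication: { connections: [{ slot_name_prefix: 'tenant_a_' }] } });
resolveSlotNamePrefix({});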

pnpm-lock.yaml

Lines changed: 7 additions & 7 deletions
Some generated files are not rendered by default.

service/package.json

Lines changed: 1 addition & 1 deletion
@@ -36,7 +36,7 @@
     "ix": "^5.0.0",
     "jose": "^4.15.1",
     "lru-cache": "^10.0.1",
-    "mongodb": "^6.13.0",
+    "mongodb": "^6.14.1",
     "node-fetch": "^3.3.2",
     "pgwire": "github:kagis/pgwire#f1cb95f9a0f42a612bb5a6b67bb2eb793fc5fc87",
     "ts-codec": "^1.3.0",
