
Commit 8728315

Merge branch 'main' into feature/sdk-reporting
2 parents 4b5dc44 + 7852eb7 commit 8728315


44 files changed (+912 / -144 lines)

.changeset/olive-bags-wave.md

Lines changed: 6 additions & 0 deletions
@@ -0,0 +1,6 @@
+---
+'@powersync/service-sync-rules': minor
+'@powersync/service-image': minor
+---
+
+Introduce the `config` option on sync rules, which can be used to opt in to new features and backwards-incompatible fixes of historical issues with the PowerSync service.

.changeset/popular-zoos-hang.md

Lines changed: 6 additions & 0 deletions
@@ -0,0 +1,6 @@
+---
+'@powersync/service-sync-rules': minor
+'@powersync/service-image': minor
+---
+
+Add the `timestamps_iso8601` option in the `config:` block for sync rules. When enabled, timestamps are consistently formatted using the ISO 8601 format.
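
Taken together, the two changesets above introduce a top-level `config:` block in sync rules and its first option, `timestamps_iso8601`. As a rough sketch of what opting in could look like (the bucket definition and query are illustrative placeholders, not part of this commit, and the YAML is embedded in a TypeScript string the way the existing tests embed sync rules):

```ts
// Illustrative sync rules document; only the `config:` block and the
// `timestamps_iso8601` key are described by the changesets above.
const syncRulesYaml = `
config:
  # Opt in to ISO 8601 timestamps ('2023-03-06T13:47:00.000Z') instead of
  # the legacy space-separated format ('2023-03-06 13:47:00.000Z').
  timestamps_iso8601: true

bucket_definitions:
  global:
    data:
      - SELECT _id AS id, date FROM test_data
`;
```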

.github/workflows/test.yml

Lines changed: 20 additions & 0 deletions
@@ -53,6 +53,11 @@ jobs:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}

+      # The mongodb-github-action below doesn't use the Docker credentials for the pull.
+      # We pre-pull, so that the image is cached.
+      - name: Pre-pull Mongo image
+        run: docker pull mongo:8.0
+
       - name: Start MongoDB
         uses: supercharge/[email protected]
         with:
@@ -134,6 +139,11 @@ jobs:
            -p 5431:5432 \
            -d postgres:${{ matrix.postgres-version }}

+      # The mongodb-github-action below doesn't use the Docker credentials for the pull.
+      # We pre-pull, so that the image is cached.
+      - name: Pre-pull Mongo image
+        run: docker pull mongo:8.0
+
       - name: Start MongoDB
         uses: supercharge/[email protected]
         with:
@@ -206,6 +216,11 @@ jobs:
            --enforce_gtid_consistency=ON \
            --server-id=1

+      # The mongodb-github-action below doesn't use the Docker credentials for the pull.
+      # We pre-pull, so that the image is cached.
+      - name: Pre-pull Mongo image
+        run: docker pull mongo:8.0
+
       - name: Start MongoDB
         uses: supercharge/[email protected]
         with:
@@ -274,6 +289,11 @@ jobs:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}

+      # The mongodb-github-action below doesn't use the Docker credentials for the pull.
+      # We pre-pull, so that the image is cached.
+      - name: Pre-pull Mongo image
+        run: docker pull mongo:${{ matrix.mongodb-version }}
+
       - name: Start MongoDB
         uses: supercharge/[email protected]
         with:

libs/lib-services/package.json

Lines changed: 1 addition & 0 deletions
@@ -21,6 +21,7 @@
   "keywords": [],
   "dependencies": {
     "@powersync/service-errors": "workspace:*",
+    "@powersync/service-sync-rules": "workspace:*",
     "ajv": "^8.12.0",
     "better-ajv-errors": "^1.2.0",
     "bson": "^6.10.3",

libs/lib-services/src/codec/codecs.ts

Lines changed: 5 additions & 2 deletions
@@ -1,5 +1,6 @@
 import * as t from 'ts-codec';
 import * as bson from 'bson';
+import { DateTimeValue } from '@powersync/service-sync-rules';

 export const buffer = t.codec<Buffer, string>(
   'Buffer',
@@ -12,7 +13,7 @@ export const buffer = t.codec<Buffer, string>(
   (buffer) => Buffer.from(buffer, 'base64')
 );

-export const date = t.codec<Date, string>(
+export const date = t.codec<Date, string | DateTimeValue>(
   'Date',
   (date) => {
     if (!(date instanceof Date)) {
@@ -21,7 +22,9 @@ export const date = t.codec<Date, string>(
     return date.toISOString();
   },
   (date) => {
-    const parsed = new Date(date);
+    // In our jpgwire wrapper, we patch the row decoding logic to map timestamps into TimeValue instances, so we need to
+    // support those here.
+    const parsed = new Date(date instanceof DateTimeValue ? date.iso8601Representation : date);
     if (isNaN(parsed.getTime())) {
       throw new t.TransformError([`Invalid date`]);
     }
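
The decode side of this codec now accepts either a plain string or a `DateTimeValue` wrapper. A minimal standalone sketch of that branch, with a simplified stand-in for `DateTimeValue` since only its `iso8601Representation` field is used here:

```ts
// Simplified stand-in for DateTimeValue from @powersync/service-sync-rules;
// only the field the codec reads is modeled.
class DateTimeValueLike {
  constructor(public iso8601Representation: string) {}
}

// Mirrors the added decode branch: unwrap DateTimeValue-like inputs before
// handing them to the Date constructor, and reject anything unparseable.
function decodeDate(input: string | DateTimeValueLike): Date {
  const raw = input instanceof DateTimeValueLike ? input.iso8601Representation : input;
  const parsed = new Date(raw);
  if (isNaN(parsed.getTime())) {
    throw new Error('Invalid date');
  }
  return parsed;
}

// Both forms decode to the same instant:
decodeDate('2023-03-06T13:47:00.000Z');
decodeDate(new DateTimeValueLike('2023-03-06T13:47:00.000Z'));
```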

modules/module-mongodb/src/replication/ChangeStream.ts

Lines changed: 2 additions & 2 deletions
@@ -17,7 +17,7 @@ import {
   SourceTable,
   storage
 } from '@powersync/service-core';
-import { DatabaseInputRow, SqliteRow, SqlSyncRules, TablePattern } from '@powersync/service-sync-rules';
+import { DatabaseInputRow, SqliteInputRow, SqliteRow, SqlSyncRules, TablePattern } from '@powersync/service-sync-rules';
 import { ReplicationMetric } from '@powersync/service-types';
 import { MongoLSN } from '../common/MongoLSN.js';
 import { PostImagesOption } from '../types/types.js';
@@ -439,7 +439,7 @@ export class ChangeStream {
     return { $match: { ns: { $in: $inFilters } }, multipleDatabases };
   }

-  static *getQueryData(results: Iterable<DatabaseInputRow>): Generator<SqliteRow> {
+  static *getQueryData(results: Iterable<DatabaseInputRow>): Generator<SqliteInputRow> {
     for (let row of results) {
       yield constructAfterRecord(row);
     }
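
The `SqliteRow` → `SqliteInputRow` change in the generator's return type reflects the split the sync-rules package now draws: an input row may still contain unrendered wrapper values, while a plain `SqliteRow` holds final SQLite-compatible values. A rough, type-level sketch of that relationship (illustrative only; the real definitions in `@powersync/service-sync-rules` are richer than this):

```ts
// Illustrative shapes only, not the actual library definitions.
type SqliteValue = null | string | number | bigint | Uint8Array;

// Wrapper values defer their final form until a compatibility context is
// known (for example, DateTimeValue choosing 'T' vs. ' ' as the separator).
interface CustomSqliteValueLike {
  toSqliteValue(context: unknown): SqliteValue;
}

type SqliteInputValue = SqliteValue | CustomSqliteValueLike;

type SqliteRow = Record<string, SqliteValue>;
type SqliteInputRow = Record<string, SqliteInputValue>;
```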

modules/module-mongodb/src/replication/MongoRelation.ts

Lines changed: 26 additions & 16 deletions
@@ -1,7 +1,18 @@
 import { mongo } from '@powersync/lib-service-mongodb';
 import { storage } from '@powersync/service-core';
 import { JSONBig, JsonContainer } from '@powersync/service-jsonbig';
-import { SqliteRow, SqliteValue } from '@powersync/service-sync-rules';
+import {
+  CompatibilityContext,
+  CustomArray,
+  CustomObject,
+  CustomSqliteValue,
+  DatabaseInputValue,
+  SqliteInputRow,
+  SqliteInputValue,
+  SqliteRow,
+  SqliteValue,
+  DateTimeValue
+} from '@powersync/service-sync-rules';

 import { ErrorCode, ServiceError } from '@powersync/lib-services-framework';
 import { MongoLSN } from '../common/MongoLSN.js';
@@ -27,15 +38,15 @@ export function getCacheIdentifier(source: storage.SourceEntityDescriptor | stor
   return `${source.schema}.${source.name}`;
 }

-export function constructAfterRecord(document: mongo.Document): SqliteRow {
-  let record: SqliteRow = {};
+export function constructAfterRecord(document: mongo.Document): SqliteInputRow {
+  let record: SqliteInputRow = {};
   for (let key of Object.keys(document)) {
     record[key] = toMongoSyncRulesValue(document[key]);
   }
   return record;
 }

-export function toMongoSyncRulesValue(data: any): SqliteValue {
+export function toMongoSyncRulesValue(data: any): SqliteInputValue {
   const autoBigNum = true;
   if (data === null) {
     return null;
@@ -60,7 +71,8 @@ export function toMongoSyncRulesValue(data: any): SqliteValue {
   } else if (data instanceof mongo.UUID) {
     return data.toHexString();
   } else if (data instanceof Date) {
-    return data.toISOString().replace('T', ' ');
+    const isoString = data.toISOString();
+    return new DateTimeValue(isoString);
   } else if (data instanceof mongo.Binary) {
     return new Uint8Array(data.buffer);
   } else if (data instanceof mongo.Long) {
@@ -72,26 +84,21 @@ export function toMongoSyncRulesValue(data: any): SqliteValue {
   } else if (data instanceof RegExp) {
     return JSON.stringify({ pattern: data.source, options: data.flags });
   } else if (Array.isArray(data)) {
-    // We may be able to avoid some parse + stringify cycles here for JsonSqliteContainer.
-    return JSONBig.stringify(data.map((element) => filterJsonData(element)));
+    return new CustomArray(data, filterJsonData);
   } else if (data instanceof Uint8Array) {
     return data;
   } else if (data instanceof JsonContainer) {
     return data.toString();
   } else if (typeof data == 'object') {
-    let record: Record<string, any> = {};
-    for (let key of Object.keys(data)) {
-      record[key] = filterJsonData(data[key]);
-    }
-    return JSONBig.stringify(record);
+    return new CustomObject(data, filterJsonData);
   } else {
     return null;
   }
 }

 const DEPTH_LIMIT = 20;

-function filterJsonData(data: any, depth = 0): any {
+function filterJsonData(data: any, context: CompatibilityContext, depth = 0): any {
   const autoBigNum = true;
   if (depth > DEPTH_LIMIT) {
     // This is primarily to prevent infinite recursion
@@ -117,7 +124,8 @@ function filterJsonData(data: any, depth = 0): any {
   } else if (typeof data == 'bigint') {
     return data;
   } else if (data instanceof Date) {
-    return data.toISOString().replace('T', ' ');
+    const isoString = data.toISOString();
+    return new DateTimeValue(isoString).toSqliteValue(context);
   } else if (data instanceof mongo.ObjectId) {
     return data.toHexString();
   } else if (data instanceof mongo.UUID) {
@@ -133,16 +141,18 @@
   } else if (data instanceof RegExp) {
     return { pattern: data.source, options: data.flags };
   } else if (Array.isArray(data)) {
-    return data.map((element) => filterJsonData(element, depth + 1));
+    return data.map((element) => filterJsonData(element, context, depth + 1));
   } else if (ArrayBuffer.isView(data)) {
     return undefined;
+  } else if (data instanceof CustomSqliteValue) {
+    return data.toSqliteValue(context);
   } else if (data instanceof JsonContainer) {
     // Can be stringified directly when using our JSONBig implementation
     return data;
   } else if (typeof data == 'object') {
     let record: Record<string, any> = {};
     for (let key of Object.keys(data)) {
-      record[key] = filterJsonData(data[key], depth + 1);
+      record[key] = filterJsonData(data[key], context, depth + 1);
     }
     return record;
   } else {
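
The pattern in this file shifts from eagerly formatting dates and nested documents to strings, to wrapping them in values that are only rendered once a compatibility context is known. A small self-contained sketch of that idea (class and field names are simplified; the real `CompatibilityContext` and `DateTimeValue` come from `@powersync/service-sync-rules`):

```ts
// Illustrative model of the deferred-formatting pattern used above.
interface CompatContext {
  isoTimestamps: boolean;
}

abstract class DeferredSqliteValue {
  abstract toSqliteValue(context: CompatContext): string;
}

class DeferredDateTime extends DeferredSqliteValue {
  constructor(private iso8601: string) {
    super();
  }

  toSqliteValue(context: CompatContext): string {
    // The new behavior keeps the ISO 8601 'T' separator; the legacy behavior
    // replaces it with a space, matching the old toISOString().replace('T', ' ').
    return context.isoTimestamps ? this.iso8601 : this.iso8601.replace('T', ' ');
  }
}

const value = new DeferredDateTime(new Date('2023-03-06T13:47:00Z').toISOString());
value.toSqliteValue({ isoTimestamps: false }); // '2023-03-06 13:47:00.000Z'
value.toSqliteValue({ isoTimestamps: true });  // '2023-03-06T13:47:00.000Z'
```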

modules/module-mongodb/test/src/mongo_test.test.ts

Lines changed: 58 additions & 16 deletions
@@ -1,5 +1,11 @@
 import { mongo } from '@powersync/lib-service-mongodb';
-import { SqliteRow, SqlSyncRules } from '@powersync/service-sync-rules';
+import {
+  applyRowContext,
+  CompatibilityContext,
+  CompatibilityEdition,
+  SqliteInputRow,
+  SqlSyncRules
+} from '@powersync/service-sync-rules';
 import { describe, expect, test } from 'vitest';

 import { MongoRouteAPIAdapter } from '@module/api/MongoRouteAPIAdapter.js';
@@ -138,8 +144,10 @@ describe('mongo data types', () => {
   ]);
 }

-function checkResults(transformed: Record<string, any>[]) {
-  expect(transformed[0]).toMatchObject({
+function checkResults(transformed: SqliteInputRow[]) {
+  const sqliteValue = transformed.map((e) => applyRowContext(e, CompatibilityContext.FULL_BACKWARDS_COMPATIBILITY));
+
+  expect(sqliteValue[0]).toMatchObject({
     _id: 1n,
     text: 'text',
     uuid: 'baeb2514-4c57-436d-b3cc-c1256211656d',
@@ -152,17 +160,17 @@
     null: null,
     decimal: '3.14'
   });
-  expect(transformed[1]).toMatchObject({
+  expect(sqliteValue[1]).toMatchObject({
     _id: 2n,
     nested: '{"test":"thing"}'
   });

-  expect(transformed[2]).toMatchObject({
+  expect(sqliteValue[2]).toMatchObject({
     _id: 3n,
     date: '2023-03-06 13:47:00.000Z'
   });

-  expect(transformed[3]).toMatchObject({
+  expect(sqliteValue[3]).toMatchObject({
     _id: 4n,
     objectId: '66e834cc91d805df11fa0ecb',
     timestamp: 1958505087099n,
@@ -177,9 +185,9 @@
   });

   // This must specifically be null, and not undefined.
-  expect(transformed[4].undefined).toBeNull();
+  expect(sqliteValue[4].undefined).toBeNull();

-  expect(transformed[5]).toMatchObject({
+  expect(sqliteValue[5]).toMatchObject({
     _id: 6n,
     int4: -1n,
     int8: -9007199254740993n,
@@ -188,8 +196,10 @@
   });
 }

-function checkResultsNested(transformed: Record<string, any>[]) {
-  expect(transformed[0]).toMatchObject({
+function checkResultsNested(transformed: SqliteInputRow[]) {
+  const sqliteValue = transformed.map((e) => applyRowContext(e, CompatibilityContext.FULL_BACKWARDS_COMPATIBILITY));
+
+  expect(sqliteValue[0]).toMatchObject({
     _id: 1n,
     text: `["text"]`,
     uuid: '["baeb2514-4c57-436d-b3cc-c1256211656d"]',
@@ -204,30 +214,30 @@

   // Note: Depending on to what extent we use the original postgres value, the whitespace may change, and order may change.
   // We do expect that decimals and big numbers are preserved.
-  expect(transformed[1]).toMatchObject({
+  expect(sqliteValue[1]).toMatchObject({
     _id: 2n,
     nested: '[{"test":"thing"}]'
   });

-  expect(transformed[2]).toMatchObject({
+  expect(sqliteValue[2]).toMatchObject({
     _id: 3n,
     date: '["2023-03-06 13:47:00.000Z"]'
   });

-  expect(transformed[3]).toMatchObject({
+  expect(sqliteValue[3]).toMatchObject({
     _id: 5n,
     undefined: '[null]'
   });

-  expect(transformed[4]).toMatchObject({
+  expect(sqliteValue[4]).toMatchObject({
     _id: 6n,
     int4: '[-1]',
     int8: '[-9007199254740993]',
     float: '[-3.14]',
     decimal: '["-3.14"]'
   });

-  expect(transformed[5]).toMatchObject({
+  expect(sqliteValue[5]).toMatchObject({
     _id: 10n,
     objectId: '["66e834cc91d805df11fa0ecb"]',
     timestamp: '[1958505087099]',
@@ -522,13 +532,45 @@ bucket_definitions:
       errors: []
     });
   });
+
+  test('date format', async () => {
+    const { db, client } = await connectMongoData();
+    const collection = db.collection('test_data');
+    try {
+      await setupTable(db);
+      await collection.insertOne({
+        fraction: new Date('2023-03-06 15:47:01.123+02'),
+        noFraction: new Date('2023-03-06 15:47:01+02')
+      });
+
+      const rawResults = await db
+        .collection('test_data')
+        .find({}, { sort: { _id: 1 } })
+        .toArray();
+      const [row] = [...ChangeStream.getQueryData(rawResults)];
+
+      const oldFormat = applyRowContext(row, CompatibilityContext.FULL_BACKWARDS_COMPATIBILITY);
+      expect(oldFormat).toMatchObject({
+        fraction: '2023-03-06 13:47:01.123Z',
+        noFraction: '2023-03-06 13:47:01.000Z'
+      });
+
+      const newFormat = applyRowContext(row, new CompatibilityContext(CompatibilityEdition.SYNC_STREAMS));
+      expect(newFormat).toMatchObject({
+        fraction: '2023-03-06T13:47:01.123Z',
+        noFraction: '2023-03-06T13:47:01.000Z'
+      });
+    } finally {
+      await client.close();
+    }
+  });
 });

 /**
  * Return all the inserts from the first transaction in the replication stream.
  */
 async function getReplicationTx(replicationStream: mongo.ChangeStream, count: number) {
-  let transformed: SqliteRow[] = [];
+  let transformed: SqliteInputRow[] = [];
   for await (const doc of replicationStream) {
     // Specifically filter out map_input / map_output collections
     if (!(doc as any)?.ns?.coll?.startsWith('test_data')) {
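
The new `date format` test captures the flow connectors now follow: build a `SqliteInputRow` once, then render it per compatibility level. A condensed sketch of just the rendering step, assuming a `row` obtained the same way as in the test above:

```ts
import {
  applyRowContext,
  CompatibilityContext,
  CompatibilityEdition,
  SqliteInputRow
} from '@powersync/service-sync-rules';

// `row` stands in for the SqliteInputRow produced via ChangeStream.getQueryData
// in the test above; how it is built is unchanged here.
declare const row: SqliteInputRow;

// Legacy rendering: timestamps keep the space-separated format.
const legacy = applyRowContext(row, CompatibilityContext.FULL_BACKWARDS_COMPATIBILITY);

// Opt-in rendering: timestamps use the ISO 8601 'T' separator.
const iso = applyRowContext(row, new CompatibilityContext(CompatibilityEdition.SYNC_STREAMS));
```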

modules/module-mysql/src/api/MySQLRouteAPIAdapter.ts

Lines changed: 4 additions & 1 deletion
@@ -102,7 +102,10 @@ export class MySQLRouteAPIAdapter implements api.RouteAPI {
      */
     return fields.map((c) => {
       const value = row[c.name];
-      const sqlValue = sync_rules.toSyncRulesValue(value);
+      const sqlValue = sync_rules.applyValueContext(
+        sync_rules.toSyncRulesValue(value),
+        sync_rules.CompatibilityContext.FULL_BACKWARDS_COMPATIBILITY
+      );
       if (typeof sqlValue == 'bigint') {
         return Number(value);
       } else if (value instanceof Date) {
