@@ -1,11 +1,4 @@
-import {
-  BucketDataBatchOptions,
-  getUuidReplicaIdentityBson,
-  OplogEntry,
-  SaveOptions,
-  storage
-} from '@powersync/service-core';
-import { DateTimeValue } from '@powersync/service-sync-rules';
+import { BucketDataBatchOptions, getUuidReplicaIdentityBson, OplogEntry, storage } from '@powersync/service-core';
 import { describe, expect, test } from 'vitest';
 import * as test_utils from '../test-utils/test-utils-index.js';
 import { TEST_TABLE } from './util.js';
@@ -1137,72 +1130,6 @@ bucket_definitions:
     expect(checkpoint2).toBeGreaterThan(checkpoint1);
   });
 
-  test('data with custom types', async () => {
-    await using factory = await generateStorageFactory();
-    const testValue = {
-      sourceTable: TEST_TABLE,
-      tag: storage.SaveOperationTag.INSERT,
-      after: {
-        id: 't1',
-        description: new DateTimeValue('2025-08-28T11:30:00')
-      },
-      afterReplicaId: test_utils.rid('t1')
-    } satisfies SaveOptions;
-
-    {
-      // First, deploy old sync rules and row with date time value
-      const syncRules = await factory.updateSyncRules({
-        content: `
-bucket_definitions:
-  global:
-    data:
-      - SELECT id, description FROM test
-      `
-      });
-      const bucketStorage = factory.getInstance(syncRules);
-      await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
-        await batch.save(testValue);
-        await batch.commit('1/1');
-      });
-
-      const { checkpoint } = await bucketStorage.getCheckpoint();
-      const batch = await test_utils.fromAsync(
-        bucketStorage.getBucketDataBatch(checkpoint, new Map([['global[]', 0n]]))
-      );
-      expect(batch[0].chunkData.data).toMatchObject([
-        {
-          data: '{"id":"t1","description":"2025-08-28 11:30:00"}'
-        }
-      ]);
-    }
-
-    const syncRules = await factory.updateSyncRules({
-      content: `
-bucket_definitions:
-  global:
-    data:
-      - SELECT id, description FROM test
-
-config:
-  edition: 2
-    `
-    });
-    const bucketStorage = factory.getInstance(syncRules);
-    await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
-      await batch.save(testValue);
-      await batch.commit('1/2');
-    });
-    const { checkpoint } = await bucketStorage.getCheckpoint();
-    const batch = await test_utils.fromAsync(
-      bucketStorage.getBucketDataBatch(checkpoint, new Map([['2#global[]', 0n]]))
-    );
-    expect(batch[0].chunkData.data).toMatchObject([
-      {
-        data: '{"id":"t1","description":"2025-08-28T11:30:00"}'
-      }
-    ]);
-  });
-
   test('unchanged checksums', async () => {
     await using factory = await generateStorageFactory();
     const syncRules = await factory.updateSyncRules({