From ad3781260475e4e758946cd292510f8f50d3b7cb Mon Sep 17 00:00:00 2001 From: Christiaan Landman Date: Tue, 20 May 2025 10:42:50 +0200 Subject: [PATCH 01/12] WIP --- .../Sync/Bucket/BucketStorageAdapter.cs | 5 + .../Client/Sync/Bucket/SqliteBucketStorage.cs | 10 ++ .../Client/Sync/Stream/Remote.cs | 34 +++-- .../Stream/StreamingSyncImplementation.cs | 125 ++++++++++++------ 4 files changed, 119 insertions(+), 55 deletions(-) diff --git a/PowerSync/PowerSync.Common/Client/Sync/Bucket/BucketStorageAdapter.cs b/PowerSync/PowerSync.Common/Client/Sync/Bucket/BucketStorageAdapter.cs index aa76d58..d47c755 100644 --- a/PowerSync/PowerSync.Common/Client/Sync/Bucket/BucketStorageAdapter.cs +++ b/PowerSync/PowerSync.Common/Client/Sync/Bucket/BucketStorageAdapter.cs @@ -113,4 +113,9 @@ public interface IBucketStorageAdapter : IEventStream /// Get a unique client ID. /// Task GetClientId(); + + /// + /// Invokes the `powersync_control` function for the sync client. + /// + Task Control(string op, object? payload); } \ No newline at end of file diff --git a/PowerSync/PowerSync.Common/Client/Sync/Bucket/SqliteBucketStorage.cs b/PowerSync/PowerSync.Common/Client/Sync/Bucket/SqliteBucketStorage.cs index 495f6e0..754156d 100644 --- a/PowerSync/PowerSync.Common/Client/Sync/Bucket/SqliteBucketStorage.cs +++ b/PowerSync/PowerSync.Common/Client/Sync/Bucket/SqliteBucketStorage.cs @@ -434,4 +434,14 @@ public async Task SetTargetCheckpoint(Checkpoint checkpoint) // No Op await Task.CompletedTask; } + + record ControlResult(string? value); + public async Task Control(string op, object? 
payload) + { + return await db.WriteTransaction(async tx => + { + var result = await tx.Get("SELECT powersync_control(?, ?)", [op, payload]); + return "5"; + }); + } } diff --git a/PowerSync/PowerSync.Common/Client/Sync/Stream/Remote.cs b/PowerSync/PowerSync.Common/Client/Sync/Stream/Remote.cs index 3b6d8dc..058d436 100644 --- a/PowerSync/PowerSync.Common/Client/Sync/Stream/Remote.cs +++ b/PowerSync/PowerSync.Common/Client/Sync/Stream/Remote.cs @@ -121,7 +121,25 @@ public async Task Get(string path, Dictionary? headers = n return JsonConvert.DeserializeObject(responseData)!; } + /// + /// Posts to the stream endpoint and returns an async enumerable of parsed NDJSON lines. + /// public async IAsyncEnumerable PostStream(SyncStreamOptions options) + { + using var stream = await PostStreamRaw(options); + using var reader = new StreamReader(stream, Encoding.UTF8); + string? line; + + while ((line = await reader.ReadLineAsync()) != null) + { + yield return ParseStreamingSyncLine(JObject.Parse(line)); + } + } + + /// + /// Posts to the stream endpoint and returns a raw stream that can be read line by line. + /// + public async Task PostStreamRaw(SyncStreamOptions options) { using var requestMessage = await BuildRequest(HttpMethod.Post, options.Path, options.Data, options.Headers); using var response = await httpClient.SendAsync(requestMessage, HttpCompletionOption.ResponseHeadersRead, options.CancellationToken); @@ -130,9 +148,8 @@ public async Task Get(string path, Dictionary? headers = n { throw new HttpRequestException($"HTTP {response.StatusCode}: No content"); } - - if (response.StatusCode == System.Net.HttpStatusCode.Unauthorized) - { + + if (response.StatusCode == System.Net.HttpStatusCode.Unauthorized) { InvalidateCredentials(); } @@ -142,16 +159,7 @@ public async Task Get(string path, Dictionary? 
headers = n throw new HttpRequestException($"HTTP {response.StatusCode}: {errorText}"); } - var stream = await response.Content.ReadAsStreamAsync(); - - // Read NDJSON stream - using var reader = new StreamReader(stream, Encoding.UTF8); - string? line; - - while ((line = await reader.ReadLineAsync()) != null) - { - yield return ParseStreamingSyncLine(JObject.Parse(line)); - } + return await response.Content.ReadAsStreamAsync(); } public static StreamingSyncLine? ParseStreamingSyncLine(JObject json) diff --git a/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncImplementation.cs b/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncImplementation.cs index d4cb85a..5f16438 100644 --- a/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncImplementation.cs +++ b/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncImplementation.cs @@ -2,9 +2,7 @@ namespace PowerSync.Common.Client.Sync.Stream; using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging.Abstractions; - using Newtonsoft.Json; - using PowerSync.Common.Client.Sync.Bucket; using PowerSync.Common.DB.Crud; using PowerSync.Common.Utils; @@ -47,8 +45,37 @@ public class StreamingSyncImplementationOptions : AdditionalConnectionOptions public ILogger? Logger { get; init; } } -public class BaseConnectionOptions(Dictionary? parameters = null) +/// +/// Indicates which sync client implementation to use. +/// +public enum SyncClientImplementation { + /// + /// Decodes and handles sync lines received from the sync service in C#. + /// This is the default option. + /// + C_SHARP, + + /// + /// This implementation offloads the sync line decoding and handling into the PowerSync + /// core extension. + /// + /// While this implementation is more performant than , + /// it has seen less real-world testing and is marked as experimental at the moment. + /// + RUST +} + +public class BaseConnectionOptions(Dictionary? parameters = null, SyncClientImplementation? 
clientImplementation = null ) +{ + /// + /// Whether to use a C# implementation to handle received sync lines from the sync + /// service, or whether this work should be offloaded to the PowerSync core extension. + /// This defaults to the JavaScript implementation SyncClientImplementation.C_SHARP + /// since the SyncClientImplementation.RUST implementation is experimental at the moment. + /// + public SyncClientImplementation? ClientImplementation { get; set; } = clientImplementation; + /// /// These parameters are passed to the sync rules and will be available under the `user_parameters` object. /// @@ -57,6 +84,7 @@ public class BaseConnectionOptions(Dictionary? parameters = null public class RequiredPowerSyncConnectionOptions : BaseConnectionOptions { + public new SyncClientImplementation ClientImplementation { get; set; } = new(); public new Dictionary Params { get; set; } = new(); } @@ -94,6 +122,7 @@ public class StreamingSyncImplementation : EventStream StreamingSyncIteration(CancellationToken signal, PowerSyncConnectionOptions? options) + protected async Task StreamingSyncIteration(CancellationToken signal, + PowerSyncConnectionOptions? options) { return await locks.ObtainLock(new LockOptions { @@ -433,6 +464,7 @@ protected async Task StreamingSyncIteration(Cancel newBuckets.Add(checksum.Bucket); bucketsToDelete.Remove(checksum.Bucket); } + if (bucketsToDelete.Count > 0) { logger.LogDebug("Removing buckets: {message}", string.Join(", ", bucketsToDelete)); @@ -478,7 +510,6 @@ protected async Task StreamingSyncIteration(Cancel { ClearDownloadError = true }); - } validatedCheckpoint = targetCheckpoint; @@ -509,7 +540,8 @@ protected async Task StreamingSyncIteration(Cancel newBuckets.Remove(bucket); } - var newWriteCheckpoint = !string.IsNullOrEmpty(diff.WriteCheckpoint) ? diff.WriteCheckpoint : null; + var newWriteCheckpoint = + !string.IsNullOrEmpty(diff.WriteCheckpoint) ? 
diff.WriteCheckpoint : null; var newCheckpoint = new Checkpoint { LastOpId = diff.LastOpId, @@ -564,6 +596,7 @@ protected async Task StreamingSyncIteration(Cancel Options.Remote.InvalidateCredentials(); return new StreamingSyncIterationResult { Retry = true }; } + TriggerCrudUpload(); } else @@ -573,14 +606,14 @@ protected async Task StreamingSyncIteration(Cancel if (targetCheckpoint == appliedCheckpoint) { UpdateSyncStatus(new SyncStatusOptions - { - Connected = true, - LastSyncedAt = DateTime.Now, - }, - new UpdateSyncStatusOptions - { - ClearDownloadError = true - } + { + Connected = true, + LastSyncedAt = DateTime.Now, + }, + new UpdateSyncStatusOptions + { + ClearDownloadError = true + } ); } else if (validatedCheckpoint == targetCheckpoint) @@ -602,18 +635,18 @@ protected async Task StreamingSyncIteration(Cancel { appliedCheckpoint = targetCheckpoint; UpdateSyncStatus(new SyncStatusOptions - { - Connected = true, - LastSyncedAt = DateTime.Now, - DataFlow = new SyncDataFlowStatus { - Downloading = false, - } - }, - new UpdateSyncStatusOptions - { - ClearDownloadError = true - }); + Connected = true, + LastSyncedAt = DateTime.Now, + DataFlow = new SyncDataFlowStatus + { + Downloading = false, + } + }, + new UpdateSyncStatusOptions + { + ClearDownloadError = true + }); } } } @@ -624,7 +657,6 @@ protected async Task StreamingSyncIteration(Cancel return new StreamingSyncIterationResult { Retry = true }; } }); - } public new void Close() @@ -635,12 +667,14 @@ protected async Task StreamingSyncIteration(Cancel } public record ResponseData( - [property: JsonProperty("write_checkpoint")] string WriteCheckpoint + [property: JsonProperty("write_checkpoint")] + string WriteCheckpoint ); public record ApiResponse( [property: JsonProperty("data")] ResponseData Data ); + public async Task GetWriteCheckpoint() { var clientId = await Options.Adapter.GetClientId(); @@ -652,7 +686,6 @@ public async Task GetWriteCheckpoint() protected async Task InternalUploadAllCrud() { - await 
locks.ObtainLock(new LockOptions { Type = LockType.CRUD, @@ -683,13 +716,12 @@ await locks.ObtainLock(new LockOptions checkedCrudItem = nextCrudItem; await Options.UploadCrud(); UpdateSyncStatus(new SyncStatusOptions - { - }, - new UpdateSyncStatusOptions - { - ClearUploadError = true - }); - + { + }, + new UpdateSyncStatusOptions + { + ClearUploadError = true + }); } else { @@ -718,11 +750,14 @@ await locks.ObtainLock(new LockOptions break; } - logger.LogDebug("Caught exception when uploading. Upload will retry after a delay. Exception: {message}", ex.Message); + logger.LogDebug( + "Caught exception when uploading. Upload will retry after a delay. Exception: {message}", + ex.Message); } finally { - UpdateSyncStatus(new SyncStatusOptions { DataFlow = new SyncDataFlowStatus { Uploading = false } }); + UpdateSyncStatus(new SyncStatusOptions + { DataFlow = new SyncDataFlowStatus { Uploading = false } }); } } @@ -743,8 +778,10 @@ public async Task WaitForReady() } protected record UpdateSyncStatusOptions( - bool? ClearDownloadError = null, bool? ClearUploadError = null + bool? ClearDownloadError = null, + bool? ClearUploadError = null ); + protected void UpdateSyncStatus(SyncStatusOptions options, UpdateSyncStatusOptions? updateOptions = null) { var updatedStatus = new SyncStatus(new SyncStatusOptions @@ -756,8 +793,12 @@ protected void UpdateSyncStatus(SyncStatusOptions options, UpdateSyncStatusOptio { Uploading = options.DataFlow?.Uploading ?? SyncStatus.DataFlowStatus.Uploading, Downloading = options.DataFlow?.Downloading ?? SyncStatus.DataFlowStatus.Downloading, - DownloadError = updateOptions?.ClearDownloadError == true ? null : options.DataFlow?.DownloadError ?? SyncStatus.DataFlowStatus.DownloadError, - UploadError = updateOptions?.ClearUploadError == true ? null : options.DataFlow?.UploadError ?? SyncStatus.DataFlowStatus.UploadError, + DownloadError = updateOptions?.ClearDownloadError == true + ? null + : options.DataFlow?.DownloadError ?? 
SyncStatus.DataFlowStatus.DownloadError, + UploadError = updateOptions?.ClearUploadError == true + ? null + : options.DataFlow?.UploadError ?? SyncStatus.DataFlowStatus.UploadError, } }); From 1013a45958039704c785698cfb62458960f0ce4c Mon Sep 17 00:00:00 2001 From: Christiaan Landman Date: Tue, 20 May 2025 15:06:15 +0200 Subject: [PATCH 02/12] Started implementing rust sync stream. --- .../Sync/Bucket/BucketStorageAdapter.cs | 2 +- .../Client/Sync/Stream/CoreInstructions.cs | 120 ++ .../Stream/StreamingSyncImplementation.cs | 137 +- .../Client/Sync/BucketStorageTests.cs | 1786 ++++++++--------- 4 files changed, 1119 insertions(+), 926 deletions(-) create mode 100644 PowerSync/PowerSync.Common/Client/Sync/Stream/CoreInstructions.cs diff --git a/PowerSync/PowerSync.Common/Client/Sync/Bucket/BucketStorageAdapter.cs b/PowerSync/PowerSync.Common/Client/Sync/Bucket/BucketStorageAdapter.cs index d47c755..5cf7ba3 100644 --- a/PowerSync/PowerSync.Common/Client/Sync/Bucket/BucketStorageAdapter.cs +++ b/PowerSync/PowerSync.Common/Client/Sync/Bucket/BucketStorageAdapter.cs @@ -4,9 +4,9 @@ namespace PowerSync.Common.Client.Sync.Bucket; using System; using System.Threading.Tasks; +using Newtonsoft.Json; using PowerSync.Common.DB.Crud; using PowerSync.Common.Utils; -using Newtonsoft.Json; public class Checkpoint { diff --git a/PowerSync/PowerSync.Common/Client/Sync/Stream/CoreInstructions.cs b/PowerSync/PowerSync.Common/Client/Sync/Stream/CoreInstructions.cs new file mode 100644 index 0000000..245e81b --- /dev/null +++ b/PowerSync/PowerSync.Common/Client/Sync/Stream/CoreInstructions.cs @@ -0,0 +1,120 @@ +using Newtonsoft.Json.Linq; + +namespace PowerSync.Common.Client.Sync.Stream; + +using Newtonsoft.Json; +using PowerSync.Common.Client.Sync.Stream; + +[JsonConverter(typeof(InstructionConverter))] +public abstract class Instruction +{ +} + +public class LogLine: Instruction +{ + [JsonProperty("severity")] + public string Severity { get; set; } = null!; // "DEBUG", "INFO", 
"WARNING" + + [JsonProperty("line")] + public string Line { get; set; } = null!; +} + +public class EstablishSyncStream: Instruction +{ + [JsonProperty("request")] + public StreamingSyncRequest Request { get; set; } = null!; +} + +public class UpdateSyncStatus: Instruction +{ + [JsonProperty("status")] + public CoreSyncStatus Status { get; set; } = null!; +} + +public class CoreSyncStatus +{ + [JsonProperty("connected")] + public bool Connected { get; set; } + + [JsonProperty("connecting")] + public bool Connecting { get; set; } + + [JsonProperty("priority_status")] + public List PriorityStatus { get; set; } = null!; + + [JsonProperty("downloading")] + public DownloadProgress? Downloading { get; set; } +} + +public class SyncPriorityStatus +{ + [JsonProperty("priority")] + public int Priority { get; set; } + + [JsonProperty("last_synced_at")] + public long LastSyncedAt { get; set; } + + [JsonProperty("has_synced")] + public bool? HasSynced { get; set; } +} + +public class DownloadProgress +{ + [JsonProperty("buckets")] + public Dictionary Buckets { get; set; } = null!; +} + +public class BucketProgress +{ + [JsonProperty("priority")] + public int Priority { get; set; } + + [JsonProperty("at_last")] + public int AtLast { get; set; } + + [JsonProperty("since_last")] + public int SinceLast { get; set; } + + [JsonProperty("target_count")] + public int TargetCount { get; set; } +} + +public class FetchCredentials: Instruction +{ + [JsonProperty("did_expire")] + public bool DidExpire { get; set; } +} + +public class CloseSyncStream : Instruction { } +public class FlushFileSystem : Instruction { } +public class DidCompleteSync : Instruction { } + +public class InstructionConverter : JsonConverter +{ + public override Instruction ReadJson(JsonReader reader, Type objectType, Instruction? 
existingValue, bool hasExistingValue, JsonSerializer serializer) + { + var jsonObject = JObject.Load(reader); + + if (jsonObject.ContainsKey("LogLine")) + return jsonObject["LogLine"]!.ToObject(serializer)!; + if (jsonObject.ContainsKey("UpdateSyncStatus")) + return jsonObject["UpdateSyncStatus"]!.ToObject(serializer)!; + if (jsonObject.ContainsKey("EstablishSyncStream")) + return jsonObject["EstablishSyncStream"]!.ToObject(serializer)!; + if (jsonObject.ContainsKey("FetchCredentials")) + return jsonObject["FetchCredentials"]!.ToObject(serializer)!; + if (jsonObject.ContainsKey("CloseSyncStream")) + return new CloseSyncStream(); + if (jsonObject.ContainsKey("FlushFileSystem")) + return new FlushFileSystem(); + if (jsonObject.ContainsKey("DidCompleteSync")) + return new DidCompleteSync(); + + throw new JsonSerializationException("Unknown Instruction type."); + } + + public override void WriteJson(JsonWriter writer, Instruction? value, JsonSerializer serializer) + { + throw new NotImplementedException("Writing not implemented."); + } +} \ No newline at end of file diff --git a/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncImplementation.cs b/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncImplementation.cs index 5f16438..4fda521 100644 --- a/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncImplementation.cs +++ b/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncImplementation.cs @@ -45,37 +45,8 @@ public class StreamingSyncImplementationOptions : AdditionalConnectionOptions public ILogger? Logger { get; init; } } -/// -/// Indicates which sync client implementation to use. -/// -public enum SyncClientImplementation +public class BaseConnectionOptions(Dictionary? parameters = null) { - /// - /// Decodes and handles sync lines received from the sync service in C#. - /// This is the default option. 
- /// - C_SHARP, - - /// - /// This implementation offloads the sync line decoding and handling into the PowerSync - /// core extension. - /// - /// While this implementation is more performant than , - /// it has seen less real-world testing and is marked as experimental at the moment. - /// - RUST -} - -public class BaseConnectionOptions(Dictionary? parameters = null, SyncClientImplementation? clientImplementation = null ) -{ - /// - /// Whether to use a C# implementation to handle received sync lines from the sync - /// service, or whether this work should be offloaded to the PowerSync core extension. - /// This defaults to the JavaScript implementation SyncClientImplementation.C_SHARP - /// since the SyncClientImplementation.RUST implementation is experimental at the moment. - /// - public SyncClientImplementation? ClientImplementation { get; set; } = clientImplementation; - /// /// These parameters are passed to the sync rules and will be available under the `user_parameters` object. /// @@ -84,7 +55,6 @@ public class BaseConnectionOptions(Dictionary? parameters = null public class RequiredPowerSyncConnectionOptions : BaseConnectionOptions { - public new SyncClientImplementation ClientImplementation { get; set; } = new(); public new Dictionary Params { get; set; } = new(); } @@ -122,7 +92,6 @@ public class StreamingSyncImplementation : EventStream StreamingSyncIteration(Cancel }); } + // StreamingSync(CancellationToken? signal + private async Task RustSyncIteration(CancellationToken? signal, RequiredPowerSyncConnectionOptions resolvedOptions) + { + TaskCompletionSource? 
receivingLines; + // new TaskCompletionSource(); + + var nestedCts = new CancellationTokenSource(); + signal?.Register(() => { nestedCts.Cancel(); }); + + async Task Connect(EstablishSyncStream instruction) + { + var syncOptions = new SyncStreamOptions + { + Path = "/sync/stream", + CancellationToken = nestedCts.Token, + Data = instruction.Request + }; + + + } + + async Task Stop() + { + await Control("stop"); + } + + async Task Control(string op, object? payload = null) + { + var rawResponse = await Options.Adapter.Control(op, payload); + await HandleInstructions(JsonConvert.DeserializeObject(rawResponse)); + } + + async Task HandleInstructions(Instruction[] instructions) + { + foreach (var instruction in instructions) + { + await HandleInstruction(instruction); + } + } + + async Task HandleInstruction(Instruction instruction) + { + switch (instruction) + { + case LogLine logLine: + switch (logLine.Severity) + { + case "DEBUG": + logger.LogDebug("{message}", logLine.Line); + break; + case "INFO": + logger.LogInformation("{message}", logLine.Line); + break; + case "WARNING": + logger.LogWarning("{message}", logLine.Line); + break; + } + + break; + case UpdateSyncStatus syncStatus: + break; + case EstablishSyncStream: + // if (receivingLines != null) { + // // Already connected, this shouldn't happen during a single iteration. 
+ // throw 'Unexpected request to establish sync stream, already connected'; + // } + // + // receivingLines = connect(instruction.EstablishSyncStream); + break; + case FetchCredentials fetchCredentials when fetchCredentials.DidExpire: + Options.Remote.InvalidateCredentials(); + break; + case FetchCredentials fetchCredentials: + await Options.Remote.PrefetchCredentials(); + break; + case CloseSyncStream: + CancellationTokenSource?.Cancel(); + break; + case FlushFileSystem: + // ignore + break; + case DidCompleteSync: + UpdateSyncStatus(new SyncStatusOptions + { + }, new UpdateSyncStatusOptions { ClearDownloadError = true }); + break; + } + } + + try + { + // this.notifyCompletedUploads = () => { + // control('completed_upload'); + // }; + // + // await control('start', JSON.stringify(resolvedOptions.params)); + // await receivingLines; + } + catch (Exception ex) + { + await Stop(); + } + } + public new void Close() { crudUpdateCts?.Cancel(); diff --git a/Tests/PowerSync/PowerSync.Common.Tests/Client/Sync/BucketStorageTests.cs b/Tests/PowerSync/PowerSync.Common.Tests/Client/Sync/BucketStorageTests.cs index b19727f..f4b4762 100644 --- a/Tests/PowerSync/PowerSync.Common.Tests/Client/Sync/BucketStorageTests.cs +++ b/Tests/PowerSync/PowerSync.Common.Tests/Client/Sync/BucketStorageTests.cs @@ -123,897 +123,897 @@ private ILogger createLogger() return loggerFactory.CreateLogger("TestLogger"); } - [Fact] - public async Task BasicSetup() - { - await db.WaitForReady(); - var initialBucketStates = await bucketStorage.GetBucketStates(); - Assert.Empty(initialBucketStates); - - await bucketStorage.SaveSyncData(new SyncDataBatch([new SyncDataBucket("bucket1", [TestData.putAsset1_1, TestData.putAsset2_2, TestData.putAsset1_3], false)])); - - var bucketStates = await bucketStorage.GetBucketStates(); - - Assert.Collection(bucketStates, state => - { - Assert.Equal("bucket1", state.Bucket); - Assert.Equal("3", state.OpId); - }); - - await SyncLocalChecked(new Checkpoint - { - 
LastOpId = "3", - Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 6 }] - }); - - await ExpectAsset1_3(db); - } - - [Fact] - public async Task ShouldGetObjectFromMultipleBuckets() - { - await bucketStorage.SaveSyncData( - new SyncDataBatch( - [new SyncDataBucket("bucket1", [TestData.putAsset1_3], false), new SyncDataBucket("bucket2", [TestData.putAsset1_3], false)]) - ); - - await SyncLocalChecked(new Checkpoint - { - LastOpId = "3", - Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 3 }, new BucketChecksum { Bucket = "bucket2", Checksum = 3 }] - }); - - await ExpectAsset1_3(db); - } - - [Fact] - public async Task ShouldPrioritizeLaterUpdates() - { - // Test behavior when the same object is present in multiple buckets. - // In this case, there are two different versions in the different buckets. - // While we should not get this with our server implementation, the client still specifies this behavior: - // The largest op_id wins. - - await bucketStorage.SaveSyncData( - new SyncDataBatch( - [new SyncDataBucket("bucket1", [TestData.putAsset1_3], false), new SyncDataBucket("bucket2", [TestData.putAsset1_1], false)]) - ); - - await SyncLocalChecked(new Checkpoint - { - LastOpId = "3", - Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 3 }, new BucketChecksum { Bucket = "bucket2", Checksum = 1 }] - }); - - await ExpectAsset1_3(db); - } - - [Fact] - public async Task ShouldIgnoreRemoveFromOneBucket() - { - // When we have 1 PUT and 1 REMOVE, the object must be kept.); - await bucketStorage.SaveSyncData( - new SyncDataBatch([new SyncDataBucket("bucket1", [TestData.putAsset1_3], false), new SyncDataBucket("bucket2", [TestData.putAsset1_3, TestData.removeAsset1_4], false)]) - ); - - await SyncLocalChecked(new Checkpoint - { - LastOpId = "4", - Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 3 }, new BucketChecksum { Bucket = "bucket2", Checksum = 7 }] - }); - - await ExpectAsset1_3(db); - } - - [Fact] - public async 
Task ShouldRemoveWhenRemovedFromAllBuckets() - { - // When we only have REMOVE left for an object, it must be deleted. - await bucketStorage.SaveSyncData( - new SyncDataBatch([new SyncDataBucket("bucket1", [TestData.putAsset1_3, TestData.removeAsset1_5], false), new SyncDataBucket("bucket2", [TestData.putAsset1_3, TestData.removeAsset1_4], false)]) - ); - - await SyncLocalChecked(new Checkpoint - { - LastOpId = "5", - Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 8 }, new BucketChecksum { Bucket = "bucket2", Checksum = 7 }] - }); - - await ExpectNoAssets(db); - } - - [Fact] - public async Task ShouldUseSubkeys() - { - // Subkeys cause this to be treated as a separate entity in the oplog, - // but the same entity in the local database. - - var put4 = OplogEntry.FromRow(new OplogEntryJSON - { - OpId = "4", - Op = new OpType(OpTypeEnum.PUT).ToJSON(), - Subkey = "b", - ObjectType = "assets", - ObjectId = "O1", - Data = new { description = "B" }, - Checksum = 4 - }); - - var remove5 = OplogEntry.FromRow(new OplogEntryJSON - { - OpId = "5", - Op = new OpType(OpTypeEnum.REMOVE).ToJSON(), - Subkey = "b", - ObjectType = "assets", - ObjectId = "O1", - Checksum = 5 - }); - - await bucketStorage.SaveSyncData( - new SyncDataBatch([new SyncDataBucket("bucket1", [TestData.putAsset1_1, TestData.putAsset1_3, put4], false)]) - ); - - await SyncLocalChecked(new Checkpoint - { - LastOpId = "4", - Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 8 }] - }); - - var result = await db.GetAll("SELECT id, description, make FROM assets WHERE id = 'O1'"); - Assert.Equal(new AssetResult("O1", "B", null), result[0]); - - await bucketStorage.SaveSyncData(new SyncDataBatch([new SyncDataBucket("bucket1", [remove5], false)])); - - await SyncLocalChecked(new Checkpoint - { - LastOpId = "5", - Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 13 }] - }); - - await ExpectAsset1_3(db); - } - - [Fact] - public async Task ShouldFailChecksumValidation() - { - // 
Simple checksum validation - await bucketStorage.SaveSyncData( - new SyncDataBatch([new SyncDataBucket("bucket1", [TestData.putAsset1_1, TestData.putAsset2_2, TestData.putAsset1_3], false)]) - ); - - var result = await bucketStorage.SyncLocalDatabase(new Checkpoint - { - LastOpId = "3", - Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 10 }, new BucketChecksum { Bucket = "bucket2", Checksum = 1 }] - }); - - var expected = new SyncLocalDatabaseResult - { - Ready = false, - CheckpointValid = false, - CheckpointFailures = ["bucket1", "bucket2"] - }; - - Assert.Equal(expected, result); - - await ExpectNoAssets(db); - } - - [Fact] - public async Task ShouldDeleteBuckets() - { - await bucketStorage.SaveSyncData( - new SyncDataBatch( - [new SyncDataBucket("bucket1", [TestData.putAsset1_3], false), new SyncDataBucket("bucket2", [TestData.putAsset1_3], false)]) - ); - - await bucketStorage.RemoveBuckets(["bucket2"]); - // The delete only takes effect after syncLocal. - - await SyncLocalChecked(new Checkpoint - { - LastOpId = "3", - Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 3 }] - }); - - // Bucket is deleted, but object is still present in other buckets. - await ExpectAsset1_3(db); - - await bucketStorage.RemoveBuckets(["bucket1"]); - await SyncLocalChecked(new Checkpoint - { - LastOpId = "3", - Buckets = [] - }); - - // Both buckets deleted - object removed. 
- await ExpectNoAssets(db); - } - - [Fact] - public async Task ShouldDeleteAndRecreateBuckets() - { - // Save some data - await bucketStorage.SaveSyncData( - new SyncDataBatch([new SyncDataBucket("bucket1", [TestData.putAsset1_1], false)]) - ); - - // Delete the bucket - await bucketStorage.RemoveBuckets(["bucket1"]); - - // Save some data again - await bucketStorage.SaveSyncData( - new SyncDataBatch([new SyncDataBucket("bucket1", [TestData.putAsset1_1, TestData.putAsset1_3], false)]) - ); - - // Delete again - await bucketStorage.RemoveBuckets(["bucket1"]); - - // Final save of data - await bucketStorage.SaveSyncData( - new SyncDataBatch([new SyncDataBucket("bucket1", [TestData.putAsset1_1, TestData.putAsset1_3], false)]) - ); - - // Check that the data is there - await SyncLocalChecked(new Checkpoint - { - LastOpId = "3", - Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 4 }] - }); - - await ExpectAsset1_3(db); - - // Now final delete - await bucketStorage.RemoveBuckets(["bucket1"]); - await SyncLocalChecked(new Checkpoint - { - LastOpId = "3", - Buckets = [] - }); - - await ExpectNoAssets(db); - } - - [Fact] - public async Task ShouldHandleMove() - { - await bucketStorage.SaveSyncData( - new SyncDataBatch( - [ - new SyncDataBucket("bucket1", - [ - OplogEntry.FromRow(new OplogEntryJSON - { - OpId = "1", - Op = new OpType(OpTypeEnum.MOVE).ToJSON(), - Checksum = 1 - }) - ], false) - ]) - ); - - await bucketStorage.SaveSyncData( - new SyncDataBatch([new SyncDataBucket("bucket1", [TestData.putAsset1_3], false)]) - ); - - await SyncLocalChecked(new Checkpoint - { - LastOpId = "3", - Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 4 }] - }); - - await ExpectAsset1_3(db); - } - - [Fact] - public async Task ShouldHandleClear() - { - // Save some data - await bucketStorage.SaveSyncData( - new SyncDataBatch( - [ - new SyncDataBucket("bucket1", [TestData.putAsset1_1], false) - ]) - ); - - await SyncLocalChecked(new Checkpoint - { - LastOpId = 
"1", - Buckets = - [ - new BucketChecksum { Bucket = "bucket1", Checksum = 1 } - ] - }); - - // CLEAR, then save new data - await bucketStorage.SaveSyncData( - new SyncDataBatch( - [ - new SyncDataBucket("bucket1", - [ - OplogEntry.FromRow(new OplogEntryJSON - { - OpId = "2", - Op = new OpType(OpTypeEnum.CLEAR).ToJSON(), - Checksum = 2 - }), - OplogEntry.FromRow(new OplogEntryJSON - { - OpId = "3", - Op = new OpType(OpTypeEnum.PUT).ToJSON(), - Checksum = 3, - Data = TestData.putAsset2_2.Data, - ObjectId = TestData.putAsset2_2.ObjectId, - ObjectType = TestData.putAsset2_2.ObjectType - }) - ], false) - ]) - ); - - await SyncLocalChecked(new Checkpoint - { - LastOpId = "3", - // 2 + 3. 1 is replaced with 2. - Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 5 }] - }); - - await ExpectNoAsset1(db); - - var result = await db.Get("SELECT id, description FROM assets WHERE id = 'O2'"); - - Assert.Equal(new AssetResult("O2", "bar"), result); - } - - [Fact] - public async Task UpdateWithNewTypes() - { - var dbName = "test-bucket-storage-new-types.db"; - var powersync = new PowerSyncDatabase(new PowerSyncDatabaseOptions - { - Database = new SQLOpenOptions { DbFilename = dbName }, - Schema = new Schema([]), - }); - await powersync.Init(); - bucketStorage = new SqliteBucketStorage(powersync.Database); - - await bucketStorage.SaveSyncData( - new SyncDataBatch( - [new SyncDataBucket("bucket1", [TestData.putAsset1_1, TestData.putAsset2_2, TestData.putAsset1_3], false)]) - ); - - await SyncLocalChecked(new Checkpoint - { - LastOpId = "4", - Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 6 }] - }); - - // Ensure an exception is thrown due to missing table - await Assert.ThrowsAsync(async () => - await powersync.GetAll("SELECT * FROM assets")); - - await powersync.Close(); - - powersync = new PowerSyncDatabase(new PowerSyncDatabaseOptions - { - Database = new SQLOpenOptions { DbFilename = dbName }, - Schema = TestSchema.AppSchema, - }); - await 
powersync.Init(); - - await ExpectAsset1_3(powersync); - - await powersync.DisconnectAndClear(); - await powersync.Close(); - } - - [Fact] - public async Task ShouldRemoveTypes() - { - var dbName = "test-bucket-storage-remove-types.db"; - - // Create database with initial schema - var powersync = new PowerSyncDatabase(new PowerSyncDatabaseOptions - { - Database = new SQLOpenOptions { DbFilename = dbName }, - Schema = TestSchema.AppSchema, - }); - - await powersync.Init(); - bucketStorage = new SqliteBucketStorage(powersync.Database); - - await bucketStorage.SaveSyncData( - new SyncDataBatch( - [ - new SyncDataBucket("bucket1", [TestData.putAsset1_1, TestData.putAsset2_2, TestData.putAsset1_3], false) - ]) - ); - - await SyncLocalChecked(new Checkpoint - { - LastOpId = "3", - Buckets = - [ - new BucketChecksum { Bucket = "bucket1", Checksum = 6 } - ] - }); - - await ExpectAsset1_3(powersync); - await powersync.Close(); - - // Now open another instance with an empty schema - powersync = new PowerSyncDatabase(new PowerSyncDatabaseOptions - { - Database = new SQLOpenOptions { DbFilename = dbName }, - Schema = new Schema([]), - }); - await powersync.Init(); - - await Assert.ThrowsAsync(async () => - await powersync.Execute("SELECT * FROM assets")); - - await powersync.Close(); - - // Reopen database with the original schema - powersync = new PowerSyncDatabase(new PowerSyncDatabaseOptions - { - Database = new SQLOpenOptions { DbFilename = dbName }, - Schema = TestSchema.AppSchema, - }); - await powersync.Init(); - - await ExpectAsset1_3(powersync); - - await powersync.DisconnectAndClear(); - await powersync.Close(); - } - - private record OplogStats(string Type, string Id, int Count); - - [Fact] - public async Task ShouldCompact() - { - // Test compacting behavior. - // This test relies heavily on internals and will have to be updated when the compact implementation is updated. 
- - await bucketStorage.SaveSyncData( - new SyncDataBatch( - [ - new SyncDataBucket("bucket1", [TestData.putAsset1_1, TestData.putAsset2_2, TestData.removeAsset1_4], false) - ]) - ); - - await SyncLocalChecked(new Checkpoint - { - LastOpId = "4", - WriteCheckpoint = "4", - Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 7 }] - }); - - await bucketStorage.ForceCompact(); - - await SyncLocalChecked(new Checkpoint - { - LastOpId = "4", - WriteCheckpoint = "4", - Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 7 }] - }); - - var stats = await db.GetAll( - "SELECT row_type as Type, row_id as Id, count(*) as Count FROM ps_oplog GROUP BY row_type, row_id ORDER BY row_type, row_id" - ); - - var expectedStats = new List { new("assets", "O2", 1) }; - - Assert.Equal(expectedStats, stats); - } - - [Fact] - public async Task ShouldNotSyncLocalDbWithPendingCrud_ServerRemoved() - { - await bucketStorage.SaveSyncData( - new SyncDataBatch( - [ - new SyncDataBucket("bucket1", [TestData.putAsset1_1, TestData.putAsset2_2, TestData.putAsset1_3], false) - ]) - ); - - await SyncLocalChecked(new Checkpoint - { - LastOpId = "3", - Buckets = - [ - new BucketChecksum { Bucket = "bucket1", Checksum = 6 } - ] - }); - - // Local save - await db.Execute("INSERT INTO assets(id) VALUES(?)", ["O3"]); - - var insertedResult = await db.GetAll("SELECT id FROM assets WHERE id = 'O3'"); - Assert.Equal(new IdResult("O3"), insertedResult[0]); - - // At this point, we have data in the CRUD table and are not able to sync the local DB. 
- var result = await bucketStorage.SyncLocalDatabase(new Checkpoint - { - LastOpId = "3", - WriteCheckpoint = "3", - Buckets = - [ - new BucketChecksum { Bucket = "bucket1", Checksum = 6 } - ] - }); - - var expectedResult = new SyncLocalDatabaseResult - { - Ready = false, - CheckpointValid = true - }; - - Assert.Equal(expectedResult, result); - - var batch = await bucketStorage.GetCrudBatch(); - if (batch != null) - { - await batch.Complete(""); - } - - await bucketStorage.UpdateLocalTarget(() => Task.FromResult("4")); - - // At this point, the data has been uploaded but not synced back yet. - var result3 = await bucketStorage.SyncLocalDatabase(new Checkpoint - { - LastOpId = "3", - WriteCheckpoint = "3", - Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 6 }] - }); - - Assert.Equal(expectedResult, result3); - - // The data must still be present locally. - var stillPresentResult = await db.GetAll("SELECT id FROM assets WHERE id = 'O3'"); - Assert.Equal(new IdResult("O3"), stillPresentResult[0]); - - await bucketStorage.SaveSyncData( - new SyncDataBatch( - [ - new SyncDataBucket("bucket1", Array.Empty(), false) - ]) - ); - - // Now we have synced the data back (or lack of data in this case), - // so we can do a local sync. - await SyncLocalChecked(new Checkpoint - { - LastOpId = "5", - WriteCheckpoint = "5", - Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 6 }] - }); - - // Since the object was not in the sync response, it is deleted. 
- var deletedResult = await db.GetAll("SELECT id FROM assets WHERE id = 'O3'"); - Assert.Empty(deletedResult); - } - - [Fact] - public async Task ShouldNotSyncLocalDbWithPendingCrud_WhenMoreCrudIsAdded_1() - { - await bucketStorage.SaveSyncData( - new SyncDataBatch( - [ - new SyncDataBucket("bucket1", [TestData.putAsset1_1, TestData.putAsset2_2, TestData.putAsset1_3], false) - ]) - ); - - await SyncLocalChecked(new Checkpoint - { - LastOpId = "3", - WriteCheckpoint = "3", - Buckets = - [ - new BucketChecksum { Bucket = "bucket1", Checksum = 6 } - ] - }); - - // Local save - await db.Execute("INSERT INTO assets(id) VALUES(?)", ["O3"]); - - var batch = await bucketStorage.GetCrudBatch(); - if (batch != null) - { - await batch.Complete(""); - } - - await bucketStorage.UpdateLocalTarget(() => Task.FromResult("4")); - - var result3 = await bucketStorage.SyncLocalDatabase(new Checkpoint - { - LastOpId = "3", - WriteCheckpoint = "3", - Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 6 }] - }); - - var expectedResult = new SyncLocalDatabaseResult - { - Ready = false, - CheckpointValid = true - }; - - Assert.Equal(expectedResult, result3); - - await bucketStorage.SaveSyncData( - new SyncDataBatch( - [ - new SyncDataBucket("bucket1", Array.Empty(), false) - ]) - ); - - // Add more data before SyncLocalDatabase. 
- await db.Execute("INSERT INTO assets(id) VALUES(?)", ["O4"]); - - var result4 = await bucketStorage.SyncLocalDatabase(new Checkpoint - { - LastOpId = "5", - WriteCheckpoint = "5", - Buckets = - [ - new BucketChecksum { Bucket = "bucket1", Checksum = 6 } - ] - }); - - Assert.Equal(expectedResult, result4); - } - - [Fact] - public async Task ShouldNotSyncLocalDbWithPendingCrud_WhenMoreCrudIsAdded_2() - { - await bucketStorage.SaveSyncData( - new SyncDataBatch( - [ - new SyncDataBucket("bucket1", [TestData.putAsset1_1, TestData.putAsset2_2, TestData.putAsset1_3], false) - ]) - ); - - await SyncLocalChecked(new Checkpoint - { - LastOpId = "3", - WriteCheckpoint = "3", - Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 6 }] - }); - - // Local save - await db.Execute("INSERT INTO assets(id) VALUES(?)", ["O3"]); - - var batch = await bucketStorage.GetCrudBatch(); - - // Add more data before calling complete() - await db.Execute("INSERT INTO assets(id) VALUES(?)", ["O4"]); - if (batch != null) - { - await batch.Complete(""); - } - - await bucketStorage.UpdateLocalTarget(() => Task.FromResult("4")); - - await bucketStorage.SaveSyncData( - new SyncDataBatch( - [ - new SyncDataBucket("bucket1", [], false) - ]) - ); - - var result4 = await bucketStorage.SyncLocalDatabase(new Checkpoint - { - LastOpId = "5", - WriteCheckpoint = "5", - Buckets = - [ - new BucketChecksum { Bucket = "bucket1", Checksum = 6 } - ] - }); - - var expected = new SyncLocalDatabaseResult - { - Ready = false, - CheckpointValid = true - }; - - Assert.Equal(expected, result4); - } - - [Fact] - public async Task ShouldNotSyncLocalDbWithPendingCrud_UpdateOnServer() - { - await bucketStorage.SaveSyncData( - new SyncDataBatch( - [ - new SyncDataBucket("bucket1", [TestData.putAsset1_1, TestData.putAsset2_2, TestData.putAsset1_3], false) - ]) - ); - - await SyncLocalChecked(new Checkpoint - { - LastOpId = "3", - WriteCheckpoint = "3", - Buckets = - [ - new BucketChecksum { Bucket = "bucket1", 
Checksum = 6 } - ] - }); - - // Local save - await db.Execute("INSERT INTO assets(id) VALUES(?)", ["O3"]); - - var batch = await bucketStorage.GetCrudBatch(); - if (batch != null) - { - await batch.Complete(""); - } - - await bucketStorage.UpdateLocalTarget(() => Task.FromResult("4")); - - await bucketStorage.SaveSyncData( - new SyncDataBatch( - [ - new SyncDataBucket("bucket1", - [ - OplogEntry.FromRow(new OplogEntryJSON - { - OpId = "5", - Op = new OpType(OpTypeEnum.PUT).ToJSON(), - ObjectType = "assets", - ObjectId = "O3", - Checksum = 5, - Data = new { description = "server updated" } - }) - ], false) - ]) - ); - - await SyncLocalChecked(new Checkpoint - { - LastOpId = "5", - WriteCheckpoint = "5", - Buckets = - [ - new BucketChecksum { Bucket = "bucket1", Checksum = 11 } - ] - }); - - var updatedResult = await db.GetAll("SELECT description FROM assets WHERE id = 'O3'"); - Assert.Equal(new DescriptionResult("server updated"), updatedResult[0]); - } - - [Fact] - public async Task ShouldRevertAFailingInsert() - { - await bucketStorage.SaveSyncData( - new SyncDataBatch( - [ - new SyncDataBucket("bucket1", [TestData.putAsset1_1, TestData.putAsset2_2, TestData.putAsset1_3], false) - ]) - ); - - await SyncLocalChecked(new Checkpoint - { - LastOpId = "3", - WriteCheckpoint = "3", - Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 6 }] - }); - - // Local insert, later rejected by server - await db.Execute("INSERT INTO assets(id, description) VALUES(?, ?)", ["O3", "inserted"]); - - var batch = await bucketStorage.GetCrudBatch(); - if (batch != null) - { - await batch.Complete(""); - } - - await bucketStorage.UpdateLocalTarget(() => Task.FromResult("4")); - - var insertedResult = await db.GetAll("SELECT description FROM assets WHERE id = 'O3'"); - Assert.Equal(new DescriptionResult("inserted"), insertedResult[0]); - - await SyncLocalChecked(new Checkpoint - { - LastOpId = "3", - WriteCheckpoint = "4", - Buckets = [new BucketChecksum { Bucket = "bucket1", 
Checksum = 6 }] - }); - - var revertedResult = await db.GetAll("SELECT description FROM assets WHERE id = 'O3'"); - Assert.Empty(revertedResult); - } - - [Fact] - public async Task ShouldRevertAFailingDelete() - { - await bucketStorage.SaveSyncData( - new SyncDataBatch( - [ - new SyncDataBucket("bucket1", [TestData.putAsset1_1, TestData.putAsset2_2, TestData.putAsset1_3], false) - ]) - ); - - await SyncLocalChecked(new Checkpoint - { - LastOpId = "3", - WriteCheckpoint = "3", - Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 6 }] - }); - - // Local delete, later rejected by server - await db.Execute("DELETE FROM assets WHERE id = ?", ["O2"]); - - var deletedResult = await db.GetAll("SELECT description FROM assets WHERE id = 'O2'"); - Assert.Empty(deletedResult); // Ensure the record is deleted locally - - // Simulate a permissions error when uploading - data should be preserved - var batch = await bucketStorage.GetCrudBatch(); - if (batch != null) - { - await batch.Complete(""); - } - - await bucketStorage.UpdateLocalTarget(() => Task.FromResult("4")); - - await SyncLocalChecked(new Checkpoint - { - LastOpId = "3", - WriteCheckpoint = "4", - Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 6 }] - }); - - var revertedResult = await db.GetAll("SELECT description FROM assets WHERE id = 'O2'"); - Assert.Equal(new DescriptionResult("bar"), revertedResult[0]); - } - - [Fact] - public async Task ShouldRevertAFailingUpdate() - { - await bucketStorage.SaveSyncData( - new SyncDataBatch( - [ - new SyncDataBucket("bucket1", [TestData.putAsset1_1, TestData.putAsset2_2, TestData.putAsset1_3], false) - ]) - ); - - await SyncLocalChecked(new Checkpoint - { - LastOpId = "3", - WriteCheckpoint = "3", - Buckets = - [ - new BucketChecksum { Bucket = "bucket1", Checksum = 6 } - ] - }); - - // Local update, later rejected by server - await db.Execute("UPDATE assets SET description = ? 
WHERE id = ?", ["updated", "O2"]); - - var updatedResult = await db.GetAll("SELECT description FROM assets WHERE id = 'O2'"); - Assert.Equal(new DescriptionResult("updated"), updatedResult[0]); - - // Simulate a permissions error when uploading - data should be preserved - var batch = await bucketStorage.GetCrudBatch(); - if (batch != null) - { - await batch.Complete(""); - } - - await bucketStorage.UpdateLocalTarget(async () => await Task.FromResult("4")); - - await SyncLocalChecked(new Checkpoint - { - LastOpId = "3", - WriteCheckpoint = "4", - Buckets = - [ - new BucketChecksum { Bucket = "bucket1", Checksum = 6 } - ] - }); - - var revertedResult = await db.GetAll("SELECT description FROM assets WHERE id = 'O2'"); - Assert.Equal(new DescriptionResult("bar"), revertedResult[0]); - } + // [Fact] + // public async Task BasicSetup() + // { + // await db.WaitForReady(); + // var initialBucketStates = await bucketStorage.GetBucketStates(); + // Assert.Empty(initialBucketStates); + // + // await bucketStorage.SaveSyncData(new SyncDataBatch([new SyncDataBucket("bucket1", [TestData.putAsset1_1, TestData.putAsset2_2, TestData.putAsset1_3], false)])); + // + // var bucketStates = await bucketStorage.GetBucketStates(); + // + // Assert.Collection(bucketStates, state => + // { + // Assert.Equal("bucket1", state.Bucket); + // Assert.Equal("3", state.OpId); + // }); + // + // await SyncLocalChecked(new Checkpoint + // { + // LastOpId = "3", + // Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 6 }] + // }); + // + // await ExpectAsset1_3(db); + // } + // + // [Fact] + // public async Task ShouldGetObjectFromMultipleBuckets() + // { + // await bucketStorage.SaveSyncData( + // new SyncDataBatch( + // [new SyncDataBucket("bucket1", [TestData.putAsset1_3], false), new SyncDataBucket("bucket2", [TestData.putAsset1_3], false)]) + // ); + // + // await SyncLocalChecked(new Checkpoint + // { + // LastOpId = "3", + // Buckets = [new BucketChecksum { Bucket = "bucket1", 
Checksum = 3 }, new BucketChecksum { Bucket = "bucket2", Checksum = 3 }] + // }); + // + // await ExpectAsset1_3(db); + // } + // + // [Fact] + // public async Task ShouldPrioritizeLaterUpdates() + // { + // // Test behavior when the same object is present in multiple buckets. + // // In this case, there are two different versions in the different buckets. + // // While we should not get this with our server implementation, the client still specifies this behavior: + // // The largest op_id wins. + // + // await bucketStorage.SaveSyncData( + // new SyncDataBatch( + // [new SyncDataBucket("bucket1", [TestData.putAsset1_3], false), new SyncDataBucket("bucket2", [TestData.putAsset1_1], false)]) + // ); + // + // await SyncLocalChecked(new Checkpoint + // { + // LastOpId = "3", + // Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 3 }, new BucketChecksum { Bucket = "bucket2", Checksum = 1 }] + // }); + // + // await ExpectAsset1_3(db); + // } + // + // [Fact] + // public async Task ShouldIgnoreRemoveFromOneBucket() + // { + // // When we have 1 PUT and 1 REMOVE, the object must be kept.); + // await bucketStorage.SaveSyncData( + // new SyncDataBatch([new SyncDataBucket("bucket1", [TestData.putAsset1_3], false), new SyncDataBucket("bucket2", [TestData.putAsset1_3, TestData.removeAsset1_4], false)]) + // ); + // + // await SyncLocalChecked(new Checkpoint + // { + // LastOpId = "4", + // Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 3 }, new BucketChecksum { Bucket = "bucket2", Checksum = 7 }] + // }); + // + // await ExpectAsset1_3(db); + // } + // + // [Fact] + // public async Task ShouldRemoveWhenRemovedFromAllBuckets() + // { + // // When we only have REMOVE left for an object, it must be deleted. 
+ // await bucketStorage.SaveSyncData( + // new SyncDataBatch([new SyncDataBucket("bucket1", [TestData.putAsset1_3, TestData.removeAsset1_5], false), new SyncDataBucket("bucket2", [TestData.putAsset1_3, TestData.removeAsset1_4], false)]) + // ); + // + // await SyncLocalChecked(new Checkpoint + // { + // LastOpId = "5", + // Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 8 }, new BucketChecksum { Bucket = "bucket2", Checksum = 7 }] + // }); + // + // await ExpectNoAssets(db); + // } + // + // [Fact] + // public async Task ShouldUseSubkeys() + // { + // // Subkeys cause this to be treated as a separate entity in the oplog, + // // but the same entity in the local database. + // + // var put4 = OplogEntry.FromRow(new OplogEntryJSON + // { + // OpId = "4", + // Op = new OpType(OpTypeEnum.PUT).ToJSON(), + // Subkey = "b", + // ObjectType = "assets", + // ObjectId = "O1", + // Data = new { description = "B" }, + // Checksum = 4 + // }); + // + // var remove5 = OplogEntry.FromRow(new OplogEntryJSON + // { + // OpId = "5", + // Op = new OpType(OpTypeEnum.REMOVE).ToJSON(), + // Subkey = "b", + // ObjectType = "assets", + // ObjectId = "O1", + // Checksum = 5 + // }); + // + // await bucketStorage.SaveSyncData( + // new SyncDataBatch([new SyncDataBucket("bucket1", [TestData.putAsset1_1, TestData.putAsset1_3, put4], false)]) + // ); + // + // await SyncLocalChecked(new Checkpoint + // { + // LastOpId = "4", + // Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 8 }] + // }); + // + // var result = await db.GetAll("SELECT id, description, make FROM assets WHERE id = 'O1'"); + // Assert.Equal(new AssetResult("O1", "B", null), result[0]); + // + // await bucketStorage.SaveSyncData(new SyncDataBatch([new SyncDataBucket("bucket1", [remove5], false)])); + // + // await SyncLocalChecked(new Checkpoint + // { + // LastOpId = "5", + // Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 13 }] + // }); + // + // await ExpectAsset1_3(db); + // } 
+ // + // [Fact] + // public async Task ShouldFailChecksumValidation() + // { + // // Simple checksum validation + // await bucketStorage.SaveSyncData( + // new SyncDataBatch([new SyncDataBucket("bucket1", [TestData.putAsset1_1, TestData.putAsset2_2, TestData.putAsset1_3], false)]) + // ); + // + // var result = await bucketStorage.SyncLocalDatabase(new Checkpoint + // { + // LastOpId = "3", + // Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 10 }, new BucketChecksum { Bucket = "bucket2", Checksum = 1 }] + // }); + // + // var expected = new SyncLocalDatabaseResult + // { + // Ready = false, + // CheckpointValid = false, + // CheckpointFailures = ["bucket1", "bucket2"] + // }; + // + // Assert.Equal(expected, result); + // + // await ExpectNoAssets(db); + // } + // + // [Fact] + // public async Task ShouldDeleteBuckets() + // { + // await bucketStorage.SaveSyncData( + // new SyncDataBatch( + // [new SyncDataBucket("bucket1", [TestData.putAsset1_3], false), new SyncDataBucket("bucket2", [TestData.putAsset1_3], false)]) + // ); + // + // await bucketStorage.RemoveBuckets(["bucket2"]); + // // The delete only takes effect after syncLocal. + // + // await SyncLocalChecked(new Checkpoint + // { + // LastOpId = "3", + // Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 3 }] + // }); + // + // // Bucket is deleted, but object is still present in other buckets. + // await ExpectAsset1_3(db); + // + // await bucketStorage.RemoveBuckets(["bucket1"]); + // await SyncLocalChecked(new Checkpoint + // { + // LastOpId = "3", + // Buckets = [] + // }); + // + // // Both buckets deleted - object removed. 
+ // await ExpectNoAssets(db); + // } + // + // [Fact] + // public async Task ShouldDeleteAndRecreateBuckets() + // { + // // Save some data + // await bucketStorage.SaveSyncData( + // new SyncDataBatch([new SyncDataBucket("bucket1", [TestData.putAsset1_1], false)]) + // ); + // + // // Delete the bucket + // await bucketStorage.RemoveBuckets(["bucket1"]); + // + // // Save some data again + // await bucketStorage.SaveSyncData( + // new SyncDataBatch([new SyncDataBucket("bucket1", [TestData.putAsset1_1, TestData.putAsset1_3], false)]) + // ); + // + // // Delete again + // await bucketStorage.RemoveBuckets(["bucket1"]); + // + // // Final save of data + // await bucketStorage.SaveSyncData( + // new SyncDataBatch([new SyncDataBucket("bucket1", [TestData.putAsset1_1, TestData.putAsset1_3], false)]) + // ); + // + // // Check that the data is there + // await SyncLocalChecked(new Checkpoint + // { + // LastOpId = "3", + // Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 4 }] + // }); + // + // await ExpectAsset1_3(db); + // + // // Now final delete + // await bucketStorage.RemoveBuckets(["bucket1"]); + // await SyncLocalChecked(new Checkpoint + // { + // LastOpId = "3", + // Buckets = [] + // }); + // + // await ExpectNoAssets(db); + // } + // + // [Fact] + // public async Task ShouldHandleMove() + // { + // await bucketStorage.SaveSyncData( + // new SyncDataBatch( + // [ + // new SyncDataBucket("bucket1", + // [ + // OplogEntry.FromRow(new OplogEntryJSON + // { + // OpId = "1", + // Op = new OpType(OpTypeEnum.MOVE).ToJSON(), + // Checksum = 1 + // }) + // ], false) + // ]) + // ); + // + // await bucketStorage.SaveSyncData( + // new SyncDataBatch([new SyncDataBucket("bucket1", [TestData.putAsset1_3], false)]) + // ); + // + // await SyncLocalChecked(new Checkpoint + // { + // LastOpId = "3", + // Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 4 }] + // }); + // + // await ExpectAsset1_3(db); + // } + // + // [Fact] + // public async 
Task ShouldHandleClear() + // { + // // Save some data + // await bucketStorage.SaveSyncData( + // new SyncDataBatch( + // [ + // new SyncDataBucket("bucket1", [TestData.putAsset1_1], false) + // ]) + // ); + // + // await SyncLocalChecked(new Checkpoint + // { + // LastOpId = "1", + // Buckets = + // [ + // new BucketChecksum { Bucket = "bucket1", Checksum = 1 } + // ] + // }); + // + // // CLEAR, then save new data + // await bucketStorage.SaveSyncData( + // new SyncDataBatch( + // [ + // new SyncDataBucket("bucket1", + // [ + // OplogEntry.FromRow(new OplogEntryJSON + // { + // OpId = "2", + // Op = new OpType(OpTypeEnum.CLEAR).ToJSON(), + // Checksum = 2 + // }), + // OplogEntry.FromRow(new OplogEntryJSON + // { + // OpId = "3", + // Op = new OpType(OpTypeEnum.PUT).ToJSON(), + // Checksum = 3, + // Data = TestData.putAsset2_2.Data, + // ObjectId = TestData.putAsset2_2.ObjectId, + // ObjectType = TestData.putAsset2_2.ObjectType + // }) + // ], false) + // ]) + // ); + // + // await SyncLocalChecked(new Checkpoint + // { + // LastOpId = "3", + // // 2 + 3. 1 is replaced with 2. 
+ // Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 5 }] + // }); + // + // await ExpectNoAsset1(db); + // + // var result = await db.Get("SELECT id, description FROM assets WHERE id = 'O2'"); + // + // Assert.Equal(new AssetResult("O2", "bar"), result); + // } + // + // [Fact] + // public async Task UpdateWithNewTypes() + // { + // var dbName = "test-bucket-storage-new-types.db"; + // var powersync = new PowerSyncDatabase(new PowerSyncDatabaseOptions + // { + // Database = new SQLOpenOptions { DbFilename = dbName }, + // Schema = new Schema([]), + // }); + // await powersync.Init(); + // bucketStorage = new SqliteBucketStorage(powersync.Database); + // + // await bucketStorage.SaveSyncData( + // new SyncDataBatch( + // [new SyncDataBucket("bucket1", [TestData.putAsset1_1, TestData.putAsset2_2, TestData.putAsset1_3], false)]) + // ); + // + // await SyncLocalChecked(new Checkpoint + // { + // LastOpId = "4", + // Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 6 }] + // }); + // + // // Ensure an exception is thrown due to missing table + // await Assert.ThrowsAsync(async () => + // await powersync.GetAll("SELECT * FROM assets")); + // + // await powersync.Close(); + // + // powersync = new PowerSyncDatabase(new PowerSyncDatabaseOptions + // { + // Database = new SQLOpenOptions { DbFilename = dbName }, + // Schema = TestSchema.AppSchema, + // }); + // await powersync.Init(); + // + // await ExpectAsset1_3(powersync); + // + // await powersync.DisconnectAndClear(); + // await powersync.Close(); + // } + // + // [Fact] + // public async Task ShouldRemoveTypes() + // { + // var dbName = "test-bucket-storage-remove-types.db"; + // + // // Create database with initial schema + // var powersync = new PowerSyncDatabase(new PowerSyncDatabaseOptions + // { + // Database = new SQLOpenOptions { DbFilename = dbName }, + // Schema = TestSchema.AppSchema, + // }); + // + // await powersync.Init(); + // bucketStorage = new 
SqliteBucketStorage(powersync.Database); + // + // await bucketStorage.SaveSyncData( + // new SyncDataBatch( + // [ + // new SyncDataBucket("bucket1", [TestData.putAsset1_1, TestData.putAsset2_2, TestData.putAsset1_3], false) + // ]) + // ); + // + // await SyncLocalChecked(new Checkpoint + // { + // LastOpId = "3", + // Buckets = + // [ + // new BucketChecksum { Bucket = "bucket1", Checksum = 6 } + // ] + // }); + // + // await ExpectAsset1_3(powersync); + // await powersync.Close(); + // + // // Now open another instance with an empty schema + // powersync = new PowerSyncDatabase(new PowerSyncDatabaseOptions + // { + // Database = new SQLOpenOptions { DbFilename = dbName }, + // Schema = new Schema([]), + // }); + // await powersync.Init(); + // + // await Assert.ThrowsAsync(async () => + // await powersync.Execute("SELECT * FROM assets")); + // + // await powersync.Close(); + // + // // Reopen database with the original schema + // powersync = new PowerSyncDatabase(new PowerSyncDatabaseOptions + // { + // Database = new SQLOpenOptions { DbFilename = dbName }, + // Schema = TestSchema.AppSchema, + // }); + // await powersync.Init(); + // + // await ExpectAsset1_3(powersync); + // + // await powersync.DisconnectAndClear(); + // await powersync.Close(); + // } + // + // private record OplogStats(string Type, string Id, int Count); + // + // [Fact] + // public async Task ShouldCompact() + // { + // // Test compacting behavior. + // // This test relies heavily on internals and will have to be updated when the compact implementation is updated. 
+ // + // await bucketStorage.SaveSyncData( + // new SyncDataBatch( + // [ + // new SyncDataBucket("bucket1", [TestData.putAsset1_1, TestData.putAsset2_2, TestData.removeAsset1_4], false) + // ]) + // ); + // + // await SyncLocalChecked(new Checkpoint + // { + // LastOpId = "4", + // WriteCheckpoint = "4", + // Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 7 }] + // }); + // + // await bucketStorage.ForceCompact(); + // + // await SyncLocalChecked(new Checkpoint + // { + // LastOpId = "4", + // WriteCheckpoint = "4", + // Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 7 }] + // }); + // + // var stats = await db.GetAll( + // "SELECT row_type as Type, row_id as Id, count(*) as Count FROM ps_oplog GROUP BY row_type, row_id ORDER BY row_type, row_id" + // ); + // + // var expectedStats = new List { new("assets", "O2", 1) }; + // + // Assert.Equal(expectedStats, stats); + // } + // + // [Fact] + // public async Task ShouldNotSyncLocalDbWithPendingCrud_ServerRemoved() + // { + // await bucketStorage.SaveSyncData( + // new SyncDataBatch( + // [ + // new SyncDataBucket("bucket1", [TestData.putAsset1_1, TestData.putAsset2_2, TestData.putAsset1_3], false) + // ]) + // ); + // + // await SyncLocalChecked(new Checkpoint + // { + // LastOpId = "3", + // Buckets = + // [ + // new BucketChecksum { Bucket = "bucket1", Checksum = 6 } + // ] + // }); + // + // // Local save + // await db.Execute("INSERT INTO assets(id) VALUES(?)", ["O3"]); + // + // var insertedResult = await db.GetAll("SELECT id FROM assets WHERE id = 'O3'"); + // Assert.Equal(new IdResult("O3"), insertedResult[0]); + // + // // At this point, we have data in the CRUD table and are not able to sync the local DB. 
+ // var result = await bucketStorage.SyncLocalDatabase(new Checkpoint + // { + // LastOpId = "3", + // WriteCheckpoint = "3", + // Buckets = + // [ + // new BucketChecksum { Bucket = "bucket1", Checksum = 6 } + // ] + // }); + // + // var expectedResult = new SyncLocalDatabaseResult + // { + // Ready = false, + // CheckpointValid = true + // }; + // + // Assert.Equal(expectedResult, result); + // + // var batch = await bucketStorage.GetCrudBatch(); + // if (batch != null) + // { + // await batch.Complete(""); + // } + // + // await bucketStorage.UpdateLocalTarget(() => Task.FromResult("4")); + // + // // At this point, the data has been uploaded but not synced back yet. + // var result3 = await bucketStorage.SyncLocalDatabase(new Checkpoint + // { + // LastOpId = "3", + // WriteCheckpoint = "3", + // Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 6 }] + // }); + // + // Assert.Equal(expectedResult, result3); + // + // // The data must still be present locally. + // var stillPresentResult = await db.GetAll("SELECT id FROM assets WHERE id = 'O3'"); + // Assert.Equal(new IdResult("O3"), stillPresentResult[0]); + // + // await bucketStorage.SaveSyncData( + // new SyncDataBatch( + // [ + // new SyncDataBucket("bucket1", Array.Empty(), false) + // ]) + // ); + // + // // Now we have synced the data back (or lack of data in this case), + // // so we can do a local sync. + // await SyncLocalChecked(new Checkpoint + // { + // LastOpId = "5", + // WriteCheckpoint = "5", + // Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 6 }] + // }); + // + // // Since the object was not in the sync response, it is deleted. 
+ // var deletedResult = await db.GetAll("SELECT id FROM assets WHERE id = 'O3'"); + // Assert.Empty(deletedResult); + // } + // + // [Fact] + // public async Task ShouldNotSyncLocalDbWithPendingCrud_WhenMoreCrudIsAdded_1() + // { + // await bucketStorage.SaveSyncData( + // new SyncDataBatch( + // [ + // new SyncDataBucket("bucket1", [TestData.putAsset1_1, TestData.putAsset2_2, TestData.putAsset1_3], false) + // ]) + // ); + // + // await SyncLocalChecked(new Checkpoint + // { + // LastOpId = "3", + // WriteCheckpoint = "3", + // Buckets = + // [ + // new BucketChecksum { Bucket = "bucket1", Checksum = 6 } + // ] + // }); + // + // // Local save + // await db.Execute("INSERT INTO assets(id) VALUES(?)", ["O3"]); + // + // var batch = await bucketStorage.GetCrudBatch(); + // if (batch != null) + // { + // await batch.Complete(""); + // } + // + // await bucketStorage.UpdateLocalTarget(() => Task.FromResult("4")); + // + // var result3 = await bucketStorage.SyncLocalDatabase(new Checkpoint + // { + // LastOpId = "3", + // WriteCheckpoint = "3", + // Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 6 }] + // }); + // + // var expectedResult = new SyncLocalDatabaseResult + // { + // Ready = false, + // CheckpointValid = true + // }; + // + // Assert.Equal(expectedResult, result3); + // + // await bucketStorage.SaveSyncData( + // new SyncDataBatch( + // [ + // new SyncDataBucket("bucket1", Array.Empty(), false) + // ]) + // ); + // + // // Add more data before SyncLocalDatabase. 
+ // await db.Execute("INSERT INTO assets(id) VALUES(?)", ["O4"]); + // + // var result4 = await bucketStorage.SyncLocalDatabase(new Checkpoint + // { + // LastOpId = "5", + // WriteCheckpoint = "5", + // Buckets = + // [ + // new BucketChecksum { Bucket = "bucket1", Checksum = 6 } + // ] + // }); + // + // Assert.Equal(expectedResult, result4); + // } + // + // [Fact] + // public async Task ShouldNotSyncLocalDbWithPendingCrud_WhenMoreCrudIsAdded_2() + // { + // await bucketStorage.SaveSyncData( + // new SyncDataBatch( + // [ + // new SyncDataBucket("bucket1", [TestData.putAsset1_1, TestData.putAsset2_2, TestData.putAsset1_3], false) + // ]) + // ); + // + // await SyncLocalChecked(new Checkpoint + // { + // LastOpId = "3", + // WriteCheckpoint = "3", + // Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 6 }] + // }); + // + // // Local save + // await db.Execute("INSERT INTO assets(id) VALUES(?)", ["O3"]); + // + // var batch = await bucketStorage.GetCrudBatch(); + // + // // Add more data before calling complete() + // await db.Execute("INSERT INTO assets(id) VALUES(?)", ["O4"]); + // if (batch != null) + // { + // await batch.Complete(""); + // } + // + // await bucketStorage.UpdateLocalTarget(() => Task.FromResult("4")); + // + // await bucketStorage.SaveSyncData( + // new SyncDataBatch( + // [ + // new SyncDataBucket("bucket1", [], false) + // ]) + // ); + // + // var result4 = await bucketStorage.SyncLocalDatabase(new Checkpoint + // { + // LastOpId = "5", + // WriteCheckpoint = "5", + // Buckets = + // [ + // new BucketChecksum { Bucket = "bucket1", Checksum = 6 } + // ] + // }); + // + // var expected = new SyncLocalDatabaseResult + // { + // Ready = false, + // CheckpointValid = true + // }; + // + // Assert.Equal(expected, result4); + // } + // + // [Fact] + // public async Task ShouldNotSyncLocalDbWithPendingCrud_UpdateOnServer() + // { + // await bucketStorage.SaveSyncData( + // new SyncDataBatch( + // [ + // new SyncDataBucket("bucket1", 
[TestData.putAsset1_1, TestData.putAsset2_2, TestData.putAsset1_3], false) + // ]) + // ); + // + // await SyncLocalChecked(new Checkpoint + // { + // LastOpId = "3", + // WriteCheckpoint = "3", + // Buckets = + // [ + // new BucketChecksum { Bucket = "bucket1", Checksum = 6 } + // ] + // }); + // + // // Local save + // await db.Execute("INSERT INTO assets(id) VALUES(?)", ["O3"]); + // + // var batch = await bucketStorage.GetCrudBatch(); + // if (batch != null) + // { + // await batch.Complete(""); + // } + // + // await bucketStorage.UpdateLocalTarget(() => Task.FromResult("4")); + // + // await bucketStorage.SaveSyncData( + // new SyncDataBatch( + // [ + // new SyncDataBucket("bucket1", + // [ + // OplogEntry.FromRow(new OplogEntryJSON + // { + // OpId = "5", + // Op = new OpType(OpTypeEnum.PUT).ToJSON(), + // ObjectType = "assets", + // ObjectId = "O3", + // Checksum = 5, + // Data = new { description = "server updated" } + // }) + // ], false) + // ]) + // ); + // + // await SyncLocalChecked(new Checkpoint + // { + // LastOpId = "5", + // WriteCheckpoint = "5", + // Buckets = + // [ + // new BucketChecksum { Bucket = "bucket1", Checksum = 11 } + // ] + // }); + // + // var updatedResult = await db.GetAll("SELECT description FROM assets WHERE id = 'O3'"); + // Assert.Equal(new DescriptionResult("server updated"), updatedResult[0]); + // } + // + // [Fact] + // public async Task ShouldRevertAFailingInsert() + // { + // await bucketStorage.SaveSyncData( + // new SyncDataBatch( + // [ + // new SyncDataBucket("bucket1", [TestData.putAsset1_1, TestData.putAsset2_2, TestData.putAsset1_3], false) + // ]) + // ); + // + // await SyncLocalChecked(new Checkpoint + // { + // LastOpId = "3", + // WriteCheckpoint = "3", + // Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 6 }] + // }); + // + // // Local insert, later rejected by server + // await db.Execute("INSERT INTO assets(id, description) VALUES(?, ?)", ["O3", "inserted"]); + // + // var batch = await 
bucketStorage.GetCrudBatch(); + // if (batch != null) + // { + // await batch.Complete(""); + // } + // + // await bucketStorage.UpdateLocalTarget(() => Task.FromResult("4")); + // + // var insertedResult = await db.GetAll("SELECT description FROM assets WHERE id = 'O3'"); + // Assert.Equal(new DescriptionResult("inserted"), insertedResult[0]); + // + // await SyncLocalChecked(new Checkpoint + // { + // LastOpId = "3", + // WriteCheckpoint = "4", + // Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 6 }] + // }); + // + // var revertedResult = await db.GetAll("SELECT description FROM assets WHERE id = 'O3'"); + // Assert.Empty(revertedResult); + // } + // + // [Fact] + // public async Task ShouldRevertAFailingDelete() + // { + // await bucketStorage.SaveSyncData( + // new SyncDataBatch( + // [ + // new SyncDataBucket("bucket1", [TestData.putAsset1_1, TestData.putAsset2_2, TestData.putAsset1_3], false) + // ]) + // ); + // + // await SyncLocalChecked(new Checkpoint + // { + // LastOpId = "3", + // WriteCheckpoint = "3", + // Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 6 }] + // }); + // + // // Local delete, later rejected by server + // await db.Execute("DELETE FROM assets WHERE id = ?", ["O2"]); + // + // var deletedResult = await db.GetAll("SELECT description FROM assets WHERE id = 'O2'"); + // Assert.Empty(deletedResult); // Ensure the record is deleted locally + // + // // Simulate a permissions error when uploading - data should be preserved + // var batch = await bucketStorage.GetCrudBatch(); + // if (batch != null) + // { + // await batch.Complete(""); + // } + // + // await bucketStorage.UpdateLocalTarget(() => Task.FromResult("4")); + // + // await SyncLocalChecked(new Checkpoint + // { + // LastOpId = "3", + // WriteCheckpoint = "4", + // Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 6 }] + // }); + // + // var revertedResult = await db.GetAll("SELECT description FROM assets WHERE id = 'O2'"); + // 
Assert.Equal(new DescriptionResult("bar"), revertedResult[0]); + // } + // + // [Fact] + // public async Task ShouldRevertAFailingUpdate() + // { + // await bucketStorage.SaveSyncData( + // new SyncDataBatch( + // [ + // new SyncDataBucket("bucket1", [TestData.putAsset1_1, TestData.putAsset2_2, TestData.putAsset1_3], false) + // ]) + // ); + // + // await SyncLocalChecked(new Checkpoint + // { + // LastOpId = "3", + // WriteCheckpoint = "3", + // Buckets = + // [ + // new BucketChecksum { Bucket = "bucket1", Checksum = 6 } + // ] + // }); + // + // // Local update, later rejected by server + // await db.Execute("UPDATE assets SET description = ? WHERE id = ?", ["updated", "O2"]); + // + // var updatedResult = await db.GetAll("SELECT description FROM assets WHERE id = 'O2'"); + // Assert.Equal(new DescriptionResult("updated"), updatedResult[0]); + // + // // Simulate a permissions error when uploading - data should be preserved + // var batch = await bucketStorage.GetCrudBatch(); + // if (batch != null) + // { + // await batch.Complete(""); + // } + // + // await bucketStorage.UpdateLocalTarget(async () => await Task.FromResult("4")); + // + // await SyncLocalChecked(new Checkpoint + // { + // LastOpId = "3", + // WriteCheckpoint = "4", + // Buckets = + // [ + // new BucketChecksum { Bucket = "bucket1", Checksum = 6 } + // ] + // }); + // + // var revertedResult = await db.GetAll("SELECT description FROM assets WHERE id = 'O2'"); + // Assert.Equal(new DescriptionResult("bar"), revertedResult[0]); + // } } \ No newline at end of file From ca8f2171b702a565291f743626ab7ecf636d42ce Mon Sep 17 00:00:00 2001 From: Christiaan Landman Date: Wed, 21 May 2025 11:45:16 +0200 Subject: [PATCH 03/12] Switching over and testing rust sync stream. Wip. 
--- .../Client/PowerSyncDatabase.cs | 4 +- .../Client/Sync/Bucket/SqliteBucketStorage.cs | 7 +- .../Client/Sync/Stream/CoreInstructions.cs | 4 +- .../Stream/StreamingSyncImplementation.cs | 705 ++++++++++-------- .../PowerSync.Common/DB/Crud/SyncProgress.cs | 6 + .../PowerSync.Common/DB/Crud/SyncStatus.cs | 90 ++- 6 files changed, 468 insertions(+), 348 deletions(-) create mode 100644 PowerSync/PowerSync.Common/DB/Crud/SyncProgress.cs diff --git a/PowerSync/PowerSync.Common/Client/PowerSyncDatabase.cs b/PowerSync/PowerSync.Common/Client/PowerSyncDatabase.cs index e79388d..6bab7dc 100644 --- a/PowerSync/PowerSync.Common/Client/PowerSyncDatabase.cs +++ b/PowerSync/PowerSync.Common/Client/PowerSyncDatabase.cs @@ -85,8 +85,6 @@ public interface IPowerSyncDatabase : IEventStream public class PowerSyncDatabase : EventStream, IPowerSyncDatabase { - private static readonly int FULL_SYNC_PRIORITY = 2147483647; - public IDBAdapter Database; private Schema schema; @@ -246,7 +244,7 @@ protected async Task UpdateHasSynced() { var parsedDate = DateTime.Parse(result.last_synced_at + "Z"); - if (result.priority == FULL_SYNC_PRIORITY) + if (result.priority == SyncProgress.FULL_SYNC_PRIORITY) { // This lowest-possible priority represents a complete sync. lastCompleteSync = parsedDate; diff --git a/PowerSync/PowerSync.Common/Client/Sync/Bucket/SqliteBucketStorage.cs b/PowerSync/PowerSync.Common/Client/Sync/Bucket/SqliteBucketStorage.cs index 754156d..6d5e810 100644 --- a/PowerSync/PowerSync.Common/Client/Sync/Bucket/SqliteBucketStorage.cs +++ b/PowerSync/PowerSync.Common/Client/Sync/Bucket/SqliteBucketStorage.cs @@ -435,13 +435,14 @@ public async Task SetTargetCheckpoint(Checkpoint checkpoint) await Task.CompletedTask; } - record ControlResult(string? value); + record ControlResult(string? r); public async Task Control(string op, object? 
payload) { return await db.WriteTransaction(async tx => { - var result = await tx.Get("SELECT powersync_control(?, ?)", [op, payload]); - return "5"; + var result = await tx.Get("SELECT powersync_control(?, ?) AS r", [op, payload]); + Console.WriteLine(result.r); + return result.r; }); } } diff --git a/PowerSync/PowerSync.Common/Client/Sync/Stream/CoreInstructions.cs b/PowerSync/PowerSync.Common/Client/Sync/Stream/CoreInstructions.cs index 245e81b..6b2d1c2 100644 --- a/PowerSync/PowerSync.Common/Client/Sync/Stream/CoreInstructions.cs +++ b/PowerSync/PowerSync.Common/Client/Sync/Stream/CoreInstructions.cs @@ -94,7 +94,7 @@ public class InstructionConverter : JsonConverter public override Instruction ReadJson(JsonReader reader, Type objectType, Instruction? existingValue, bool hasExistingValue, JsonSerializer serializer) { var jsonObject = JObject.Load(reader); - + Console.WriteLine("Meep" + jsonObject.ToString()); if (jsonObject.ContainsKey("LogLine")) return jsonObject["LogLine"]!.ToObject(serializer)!; if (jsonObject.ContainsKey("UpdateSyncStatus")) @@ -109,7 +109,7 @@ public override Instruction ReadJson(JsonReader reader, Type objectType, Instruc return new FlushFileSystem(); if (jsonObject.ContainsKey("DidCompleteSync")) return new DidCompleteSync(); - + Console.WriteLine("Throwing on" + jsonObject.ToString()); throw new JsonSerializationException("Unknown Instruction type."); } diff --git a/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncImplementation.cs b/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncImplementation.cs index 4fda521..ab5860d 100644 --- a/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncImplementation.cs +++ b/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncImplementation.cs @@ -1,3 +1,5 @@ +using System.Text; + namespace PowerSync.Common.Client.Sync.Stream; using Microsoft.Extensions.Logging; @@ -104,10 +106,13 @@ public class StreamingSyncImplementation : EventStream await 
InternalUploadAllCrud()); }; } @@ -234,7 +241,7 @@ protected async Task StreamingSync(CancellationToken? signal, PowerSyncConnectio } crudUpdateCts = new CancellationTokenSource(); - var _ = Task.Run(() => + _ = Task.Run(() => { foreach (var _ in Options.Adapter.Listen(crudUpdateCts.Token)) { @@ -258,10 +265,10 @@ protected async Task StreamingSync(CancellationToken? signal, PowerSyncConnectio }); }); - /// This loops runs until [retry] is false or the abort signal is set to aborted. - /// Aborting the nestedCts will: - /// - Abort any pending fetch requests - /// - Close any sync stream ReadableStreams (which will also close any established network requests) + // This loops runs until [retry] is false or the abort signal is set to aborted. + // Aborting the nestedCts will: + // - Abort any pending fetch requests + // - Close any sync stream ReadableStreams (which will also close any established network requests) while (true) { UpdateSyncStatus(new SyncStatusOptions { Connecting = true }); @@ -273,14 +280,7 @@ protected async Task StreamingSync(CancellationToken? signal, PowerSyncConnectio break; } - var iterationResult = await StreamingSyncIteration(nestedCts.Token, options); - if (!iterationResult.Retry) - { - // A sync error ocurred that we cannot recover from here. - // This loop must terminate. - // The nestedCts will close any open network requests and streams below. - break; - } + await StreamingSyncIteration(nestedCts.Token, options); // Continue immediately } catch (Exception ex) @@ -331,15 +331,10 @@ protected async Task StreamingSync(CancellationToken? signal, PowerSyncConnectio }); } - protected record StreamingSyncIterationResult - { - public bool Retry { get; init; } - } - - protected async Task StreamingSyncIteration(CancellationToken signal, + protected async Task StreamingSyncIteration(CancellationToken signal, PowerSyncConnectionOptions? 
options) { - return await locks.ObtainLock(new LockOptions + return await locks.ObtainLock(new LockOptions { Type = LockType.SYNC, Token = signal, @@ -349,294 +344,320 @@ protected async Task StreamingSyncIteration(Cancel { Params = options?.Params ?? DEFAULT_STREAM_CONNECTION_OPTIONS.Params }; - - logger.LogDebug("Streaming sync iteration started"); - Options.Adapter.StartSession(); - var bucketEntries = await Options.Adapter.GetBucketStates(); - var initialBuckets = new Dictionary(); - - foreach (var entry in bucketEntries) - { - initialBuckets[entry.Bucket] = entry.OpId; - } - - var req = initialBuckets - .Select(kvp => new BucketRequest - { - Name = kvp.Key, - After = kvp.Value - }) - .ToList(); - - var targetCheckpoint = (Checkpoint?)null; - var validatedCheckpoint = (Checkpoint?)null; - var appliedCheckpoint = (Checkpoint?)null; - - var bucketSet = new HashSet(initialBuckets.Keys); - - var clientId = await Options.Adapter.GetClientId(); - - logger.LogDebug("Requesting stream from server"); - - var syncOptions = new SyncStreamOptions - { - Path = "/sync/stream", - CancellationToken = signal, - Data = new StreamingSyncRequest - { - Buckets = req, - IncludeChecksum = true, - RawData = true, - Parameters = resolvedOptions.Params, // Replace with actual params - ClientId = clientId - } - }; - - var stream = Options.Remote.PostStream(syncOptions); - var first = true; - await foreach (var line in stream) - { - if (first) - { - first = false; - logger.LogDebug("Stream established. 
Processing events"); - } - - if (line == null) - { - logger.LogDebug("Stream has closed while waiting"); - // The stream has closed while waiting - return new StreamingSyncIterationResult { Retry = true }; - } - - // A connection is active and messages are being received - if (!SyncStatus.Connected) - { - // There is a connection now - UpdateSyncStatus(new SyncStatusOptions - { - Connected = true - }); - TriggerCrudUpload(); - } - - if (line is StreamingSyncCheckpoint syncCheckpoint) - { - logger.LogDebug("Sync checkpoint: {message}", syncCheckpoint); - - targetCheckpoint = syncCheckpoint.Checkpoint; - var bucketsToDelete = new HashSet(bucketSet); - var newBuckets = new HashSet(); - - foreach (var checksum in syncCheckpoint.Checkpoint.Buckets) - { - newBuckets.Add(checksum.Bucket); - bucketsToDelete.Remove(checksum.Bucket); - } - - if (bucketsToDelete.Count > 0) - { - logger.LogDebug("Removing buckets: {message}", string.Join(", ", bucketsToDelete)); - } - - bucketSet = newBuckets; - await Options.Adapter.RemoveBuckets([.. bucketsToDelete]); - await Options.Adapter.SetTargetCheckpoint(targetCheckpoint); - } - else if (line is StreamingSyncCheckpointComplete checkpointComplete) - { - logger.LogDebug("Checkpoint complete: {message}", targetCheckpoint); - - var result = await Options.Adapter.SyncLocalDatabase(targetCheckpoint!); - - if (!result.CheckpointValid) - { - // This means checksums failed. Start again with a new checkpoint. - // TODO: better back-off - await Task.Delay(50); - return new StreamingSyncIterationResult { Retry = true }; - } - else if (!result.Ready) - { - // Checksums valid, but need more data for a consistent checkpoint. - // Continue waiting. 
- // Landing here the whole time - } - else - { - appliedCheckpoint = targetCheckpoint; - logger.LogDebug("Validated checkpoint: {message}", appliedCheckpoint); - - UpdateSyncStatus(new SyncStatusOptions - { - Connected = true, - LastSyncedAt = DateTime.Now, - DataFlow = new SyncDataFlowStatus - { - Downloading = false - } - }, new UpdateSyncStatusOptions - { - ClearDownloadError = true - }); - } - - validatedCheckpoint = targetCheckpoint; - } - else if (line is StreamingSyncCheckpointDiff checkpointDiff) - { - // TODO: It may be faster to just keep track of the diff, instead of the entire checkpoint - if (targetCheckpoint == null) - { - throw new Exception("Checkpoint diff without previous checkpoint"); - } - - var diff = checkpointDiff.CheckpointDiff; - var newBuckets = new Dictionary(); - - foreach (var checksum in targetCheckpoint.Buckets) - { - newBuckets[checksum.Bucket] = checksum; - } - - foreach (var checksum in diff.UpdatedBuckets) - { - newBuckets[checksum.Bucket] = checksum; - } - - foreach (var bucket in diff.RemovedBuckets) - { - newBuckets.Remove(bucket); - } - - var newWriteCheckpoint = - !string.IsNullOrEmpty(diff.WriteCheckpoint) ? diff.WriteCheckpoint : null; - var newCheckpoint = new Checkpoint - { - LastOpId = diff.LastOpId, - Buckets = [.. newBuckets.Values], - WriteCheckpoint = newWriteCheckpoint - }; - - targetCheckpoint = newCheckpoint; - - bucketSet = [.. 
newBuckets.Keys]; - - var bucketsToDelete = diff.RemovedBuckets.ToArray(); - if (bucketsToDelete.Length > 0) - { - logger.LogDebug("Remove buckets: {message}", string.Join(", ", bucketsToDelete)); - } - - // Perform async operations - await Options.Adapter.RemoveBuckets(bucketsToDelete); - await Options.Adapter.SetTargetCheckpoint(targetCheckpoint); - } - else if (line is StreamingSyncDataJSON dataJSON) - { - UpdateSyncStatus(new SyncStatusOptions - { - DataFlow = new SyncDataFlowStatus - { - Downloading = true - } - }); - await Options.Adapter.SaveSyncData(new SyncDataBatch([SyncDataBucket.FromRow(dataJSON.Data)])); - } - else if (line is StreamingSyncKeepalive keepalive) - { - var remainingSeconds = keepalive.TokenExpiresIn; - if (remainingSeconds == 0) - { - // Connection would be closed automatically right after this - logger.LogDebug("Token expiring; reconnect"); - Options.Remote.InvalidateCredentials(); - - // For a rare case where the backend connector does not update the token - // (uses the same one), this should have some delay. - // - await DelayRetry(); - return new StreamingSyncIterationResult { Retry = true }; - } - else if (remainingSeconds < 30) - { - logger.LogDebug("Token will expire soon; reconnect"); - // Pre-emptively refresh the token - Options.Remote.InvalidateCredentials(); - return new StreamingSyncIterationResult { Retry = true }; - } - - TriggerCrudUpload(); - } - else - { - logger.LogDebug("Sync complete"); - - if (targetCheckpoint == appliedCheckpoint) - { - UpdateSyncStatus(new SyncStatusOptions - { - Connected = true, - LastSyncedAt = DateTime.Now, - }, - new UpdateSyncStatusOptions - { - ClearDownloadError = true - } - ); - } - else if (validatedCheckpoint == targetCheckpoint) - { - var result = await Options.Adapter.SyncLocalDatabase(targetCheckpoint!); - if (!result.CheckpointValid) - { - // This means checksums failed. Start again with a new checkpoint. 
- // TODO: better back-off - await Task.Delay(50); - return new StreamingSyncIterationResult { Retry = false }; - } - else if (!result.Ready) - { - // Checksums valid, but need more data for a consistent checkpoint. - // Continue waiting. - } - else - { - appliedCheckpoint = targetCheckpoint; - UpdateSyncStatus(new SyncStatusOptions - { - Connected = true, - LastSyncedAt = DateTime.Now, - DataFlow = new SyncDataFlowStatus - { - Downloading = false, - } - }, - new UpdateSyncStatusOptions - { - ClearDownloadError = true - }); - } - } - } - } - - logger.LogDebug("Stream input empty"); - // Connection closed. Likely due to auth issue. - return new StreamingSyncIterationResult { Retry = true }; + + await RustSyncIteration(signal, resolvedOptions); + + return true; } }); } + // private async Task legacySyncIteration(CancellationToken? signal, RequiredPowerSyncConnectionOptions resolvedOptions) + // { + // logger.LogDebug("Streaming sync iteration started"); + // Options.Adapter.StartSession(); + // var bucketEntries = await Options.Adapter.GetBucketStates(); + // var initialBuckets = new Dictionary(); + // + // foreach (var entry in bucketEntries) + // { + // initialBuckets[entry.Bucket] = entry.OpId; + // } + // + // var req = initialBuckets + // .Select(kvp => new BucketRequest + // { + // Name = kvp.Key, + // After = kvp.Value + // }) + // .ToList(); + // + // var targetCheckpoint = (Checkpoint?)null; + // var validatedCheckpoint = (Checkpoint?)null; + // var appliedCheckpoint = (Checkpoint?)null; + // + // var bucketSet = new HashSet(initialBuckets.Keys); + // + // var clientId = await Options.Adapter.GetClientId(); + // + // logger.LogDebug("Requesting stream from server"); + // + // var syncOptions = new SyncStreamOptions + // { + // Path = "/sync/stream", + // CancellationToken = signal, + // Data = new StreamingSyncRequest + // { + // Buckets = req, + // IncludeChecksum = true, + // RawData = true, + // Parameters = resolvedOptions.Params, // Replace with actual 
params + // ClientId = clientId + // } + // }; + // + // var stream = Options.Remote.PostStream(syncOptions); + // var first = true; + // await foreach (var line in stream) + // { + // if (first) + // { + // first = false; + // logger.LogDebug("Stream established. Processing events"); + // } + // + // if (line == null) + // { + // logger.LogDebug("Stream has closed while waiting"); + // // The stream has closed while waiting + // return new StreamingSyncIterationResult { Retry = true }; + // } + // + // // A connection is active and messages are being received + // if (!SyncStatus.Connected) + // { + // // There is a connection now + // UpdateSyncStatus(new SyncStatusOptions + // { + // Connected = true + // }); + // TriggerCrudUpload(); + // } + // + // if (line is StreamingSyncCheckpoint syncCheckpoint) + // { + // logger.LogDebug("Sync checkpoint: {message}", syncCheckpoint); + // + // targetCheckpoint = syncCheckpoint.Checkpoint; + // var bucketsToDelete = new HashSet(bucketSet); + // var newBuckets = new HashSet(); + // + // foreach (var checksum in syncCheckpoint.Checkpoint.Buckets) + // { + // newBuckets.Add(checksum.Bucket); + // bucketsToDelete.Remove(checksum.Bucket); + // } + // + // if (bucketsToDelete.Count > 0) + // { + // logger.LogDebug("Removing buckets: {message}", string.Join(", ", bucketsToDelete)); + // } + // + // bucketSet = newBuckets; + // await Options.Adapter.RemoveBuckets([.. bucketsToDelete]); + // await Options.Adapter.SetTargetCheckpoint(targetCheckpoint); + // } + // else if (line is StreamingSyncCheckpointComplete checkpointComplete) + // { + // logger.LogDebug("Checkpoint complete: {message}", targetCheckpoint); + // + // var result = await Options.Adapter.SyncLocalDatabase(targetCheckpoint!); + // + // if (!result.CheckpointValid) + // { + // // This means checksums failed. Start again with a new checkpoint. 
+ // // TODO: better back-off + // await Task.Delay(50); + // return new StreamingSyncIterationResult { Retry = true }; + // } + // else if (!result.Ready) + // { + // // Checksums valid, but need more data for a consistent checkpoint. + // // Continue waiting. + // // Landing here the whole time + // } + // else + // { + // appliedCheckpoint = targetCheckpoint; + // logger.LogDebug("Validated checkpoint: {message}", appliedCheckpoint); + // + // UpdateSyncStatus(new SyncStatusOptions + // { + // Connected = true, + // LastSyncedAt = DateTime.Now, + // DataFlow = new SyncDataFlowStatus + // { + // Downloading = false + // } + // }, new UpdateSyncStatusOptions + // { + // ClearDownloadError = true + // }); + // } + // + // validatedCheckpoint = targetCheckpoint; + // } + // else if (line is StreamingSyncCheckpointDiff checkpointDiff) + // { + // // TODO: It may be faster to just keep track of the diff, instead of the entire checkpoint + // if (targetCheckpoint == null) + // { + // throw new Exception("Checkpoint diff without previous checkpoint"); + // } + // + // var diff = checkpointDiff.CheckpointDiff; + // var newBuckets = new Dictionary(); + // + // foreach (var checksum in targetCheckpoint.Buckets) + // { + // newBuckets[checksum.Bucket] = checksum; + // } + // + // foreach (var checksum in diff.UpdatedBuckets) + // { + // newBuckets[checksum.Bucket] = checksum; + // } + // + // foreach (var bucket in diff.RemovedBuckets) + // { + // newBuckets.Remove(bucket); + // } + // + // var newWriteCheckpoint = + // !string.IsNullOrEmpty(diff.WriteCheckpoint) ? diff.WriteCheckpoint : null; + // var newCheckpoint = new Checkpoint + // { + // LastOpId = diff.LastOpId, + // Buckets = [.. newBuckets.Values], + // WriteCheckpoint = newWriteCheckpoint + // }; + // + // targetCheckpoint = newCheckpoint; + // + // bucketSet = [.. 
newBuckets.Keys]; + // + // var bucketsToDelete = diff.RemovedBuckets.ToArray(); + // if (bucketsToDelete.Length > 0) + // { + // logger.LogDebug("Remove buckets: {message}", string.Join(", ", bucketsToDelete)); + // } + // + // // Perform async operations + // await Options.Adapter.RemoveBuckets(bucketsToDelete); + // await Options.Adapter.SetTargetCheckpoint(targetCheckpoint); + // } + // else if (line is StreamingSyncDataJSON dataJSON) + // { + // UpdateSyncStatus(new SyncStatusOptions + // { + // DataFlow = new SyncDataFlowStatus + // { + // Downloading = true + // } + // }); + // await Options.Adapter.SaveSyncData(new SyncDataBatch([SyncDataBucket.FromRow(dataJSON.Data)])); + // } + // else if (line is StreamingSyncKeepalive keepalive) + // { + // var remainingSeconds = keepalive.TokenExpiresIn; + // if (remainingSeconds == 0) + // { + // // Connection would be closed automatically right after this + // logger.LogDebug("Token expiring; reconnect"); + // Options.Remote.InvalidateCredentials(); + // + // // For a rare case where the backend connector does not update the token + // // (uses the same one), this should have some delay. 
+ // // + // await DelayRetry(); + // return new StreamingSyncIterationResult { Retry = true }; + // } + // else if (remainingSeconds < 30) + // { + // logger.LogDebug("Token will expire soon; reconnect"); + // // Pre-emptively refresh the token + // Options.Remote.InvalidateCredentials(); + // return new StreamingSyncIterationResult { Retry = true }; + // } + // + // TriggerCrudUpload(); + // } + // else + // { + // logger.LogDebug("Sync complete"); + // + // if (targetCheckpoint == appliedCheckpoint) + // { + // UpdateSyncStatus(new SyncStatusOptions + // { + // Connected = true, + // LastSyncedAt = DateTime.Now, + // }, + // new UpdateSyncStatusOptions + // { + // ClearDownloadError = true + // } + // ); + // } + // else if (validatedCheckpoint == targetCheckpoint) + // { + // var result = await Options.Adapter.SyncLocalDatabase(targetCheckpoint!); + // if (!result.CheckpointValid) + // { + // // This means checksums failed. Start again with a new checkpoint. + // // TODO: better back-off + // await Task.Delay(50); + // return new StreamingSyncIterationResult { Retry = false }; + // } + // else if (!result.Ready) + // { + // // Checksums valid, but need more data for a consistent checkpoint. + // // Continue waiting. + // } + // else + // { + // appliedCheckpoint = targetCheckpoint; + // UpdateSyncStatus(new SyncStatusOptions + // { + // Connected = true, + // LastSyncedAt = DateTime.Now, + // DataFlow = new SyncDataFlowStatus + // { + // Downloading = false, + // } + // }, + // new UpdateSyncStatusOptions + // { + // ClearDownloadError = true + // }); + // } + // } + // } + // } + // + // logger.LogDebug("Stream input empty"); + // // Connection closed. Likely due to auth issue. + // return new StreamingSyncIterationResult { Retry = true }; + // } + // StreamingSync(CancellationToken? signal private async Task RustSyncIteration(CancellationToken? signal, RequiredPowerSyncConnectionOptions resolvedOptions) { - TaskCompletionSource? 
receivingLines; - // new TaskCompletionSource(); + Task? receivingLines = null; var nestedCts = new CancellationTokenSource(); signal?.Register(() => { nestedCts.Cancel(); }); + + try + { + notifyCompletedUploads = () => { + Task.Run(async () => await Control("completed_upload")); + }; + + await Control("start", JsonConvert.SerializeObject(resolvedOptions.Params)); + if (receivingLines != null) { + await receivingLines; + } + } + finally + { + notifyCompletedUploads = null; + await Stop(); + } + + return; + async Task Connect(EstablishSyncStream instruction) { var syncOptions = new SyncStreamOptions @@ -646,6 +667,15 @@ async Task Connect(EstablishSyncStream instruction) Data = instruction.Request }; + using var stream = await Options.Remote.PostStreamRaw(syncOptions); + using var reader = new StreamReader(stream, Encoding.UTF8); + string? line; + + while ((line = await reader.ReadLineAsync()) != null) + { + logger.LogDebug("Parsing line for rust sync stream {message}", line); + await Control("line_binary", line); + } } @@ -656,6 +686,8 @@ async Task Stop() async Task Control(string op, object? payload = null) { + logger.LogDebug("Control call {message}", op); + var rawResponse = await Options.Adapter.Control(op, payload); await HandleInstructions(JsonConvert.DeserializeObject(rawResponse)); } @@ -667,6 +699,18 @@ async Task HandleInstructions(Instruction[] instructions) await HandleInstruction(instruction); } } + + DB.Crud.SyncPriorityStatus CoreStatusToSyncStatus(SyncPriorityStatus status) + { + logger.LogWarning("Sync status {status}", status?.LastSyncedAt != null ? new DateTime(status!.LastSyncedAt) : null); + return new DB.Crud.SyncPriorityStatus + { + Priority = status.Priority, + HasSynced = status.HasSynced ?? null, + // TODO check this value + LastSyncedAt = status?.LastSyncedAt != null ? 
new DateTime(status!.LastSyncedAt) : null + }; + } async Task HandleInstruction(Instruction instruction) { @@ -688,19 +732,45 @@ async Task HandleInstruction(Instruction instruction) break; case UpdateSyncStatus syncStatus: + var info = syncStatus.Status; + var coreCompleteSync = info.PriorityStatus.FirstOrDefault(s => s.Priority == SyncProgress.FULL_SYNC_PRIORITY); + var completeSync = coreCompleteSync != null ? CoreStatusToSyncStatus(coreCompleteSync) : null; + + UpdateSyncStatus(new SyncStatusOptions + { + Connected = info.Connected, + Connecting = info.Connecting, + LastSyncedAt = completeSync?.LastSyncedAt, + HasSynced = completeSync?.HasSynced, + PriorityStatusEntries = info.PriorityStatus.Select(CoreStatusToSyncStatus).ToArray(), + DataFlow = new SyncDataFlowStatus + { + Downloading = info.Downloading != null, + // TODO CL + // DownloadProgress = info.Downloading?.Buckets + } + }, + // TODO handle errors later? + new UpdateSyncStatusOptions + { + ClearDownloadError = true, + ClearUploadError = true + } + ); + break; - case EstablishSyncStream: - // if (receivingLines != null) { - // // Already connected, this shouldn't happen during a single iteration. - // throw 'Unexpected request to establish sync stream, already connected'; - // } - // - // receivingLines = connect(instruction.EstablishSyncStream); + case EstablishSyncStream establishSyncStream: + if (receivingLines != null) { + // Already connected, this shouldn't happen during a single iteration. 
+ throw new Exception("Unexpected request to establish sync stream, already connected"); + } + + receivingLines = Connect(establishSyncStream); break; - case FetchCredentials fetchCredentials when fetchCredentials.DidExpire: + case FetchCredentials { DidExpire: true, }: Options.Remote.InvalidateCredentials(); break; - case FetchCredentials fetchCredentials: + case FetchCredentials: await Options.Remote.PrefetchCredentials(); break; case CloseSyncStream: @@ -710,26 +780,12 @@ async Task HandleInstruction(Instruction instruction) // ignore break; case DidCompleteSync: - UpdateSyncStatus(new SyncStatusOptions - { - }, new UpdateSyncStatusOptions { ClearDownloadError = true }); + UpdateSyncStatus( + new SyncStatusOptions{}, + new UpdateSyncStatusOptions { ClearDownloadError = true }); break; } } - - try - { - // this.notifyCompletedUploads = () => { - // control('completed_upload'); - // }; - // - // await control('start', JSON.stringify(resolvedOptions.params)); - // await receivingLines; - } - catch (Exception ex) - { - await Stop(); - } } public new void Close() @@ -872,7 +928,8 @@ protected void UpdateSyncStatus(SyncStatusOptions options, UpdateSyncStatusOptio UploadError = updateOptions?.ClearUploadError == true ? null : options.DataFlow?.UploadError ?? SyncStatus.DataFlowStatus.UploadError, - } + }, + PriorityStatusEntries = options.PriorityStatusEntries ?? 
SyncStatus.PriorityStatusEntries }); if (!SyncStatus.Equals(updatedStatus)) diff --git a/PowerSync/PowerSync.Common/DB/Crud/SyncProgress.cs b/PowerSync/PowerSync.Common/DB/Crud/SyncProgress.cs new file mode 100644 index 0000000..03e2d31 --- /dev/null +++ b/PowerSync/PowerSync.Common/DB/Crud/SyncProgress.cs @@ -0,0 +1,6 @@ +namespace PowerSync.Common.DB.Crud; + +public class SyncProgress +{ + public static readonly int FULL_SYNC_PRIORITY = 2147483647; +} \ No newline at end of file diff --git a/PowerSync/PowerSync.Common/DB/Crud/SyncStatus.cs b/PowerSync/PowerSync.Common/DB/Crud/SyncStatus.cs index 070e3b1..b9663ba 100644 --- a/PowerSync/PowerSync.Common/DB/Crud/SyncStatus.cs +++ b/PowerSync/PowerSync.Common/DB/Crud/SyncStatus.cs @@ -4,11 +4,9 @@ namespace PowerSync.Common.DB.Crud; public class SyncDataFlowStatus { - [JsonProperty("downloading")] - public bool Downloading { get; set; } = false; + [JsonProperty("downloading")] public bool Downloading { get; set; } = false; - [JsonProperty("uploading")] - public bool Uploading { get; set; } = false; + [JsonProperty("uploading")] public bool Uploading { get; set; } = false; /// /// Error during downloading (including connecting). @@ -25,9 +23,21 @@ public class SyncDataFlowStatus public Exception? UploadError { get; set; } = null; } +public class SyncPriorityStatus +{ + [JsonProperty("uploading")] public int Priority { get; set; } + + + [JsonProperty("lastSyncedAt")] public DateTime? LastSyncedAt { get; set; } + + [JsonProperty("hasSynced")] public bool? HasSynced { get; set; } +} + public class SyncStatusOptions { - public SyncStatusOptions() { } + public SyncStatusOptions() + { + } public SyncStatusOptions(SyncStatusOptions options) { @@ -36,22 +46,21 @@ public SyncStatusOptions(SyncStatusOptions options) DataFlow = options.DataFlow; LastSyncedAt = options.LastSyncedAt; HasSynced = options.HasSynced; + PriorityStatusEntries = options.PriorityStatusEntries; } - [JsonProperty("connected")] - public bool? 
Connected { get; set; } + [JsonProperty("connected")] public bool? Connected { get; set; } - [JsonProperty("connecting")] - public bool? Connecting { get; set; } + [JsonProperty("connecting")] public bool? Connecting { get; set; } - [JsonProperty("dataFlow")] - public SyncDataFlowStatus? DataFlow { get; set; } + [JsonProperty("dataFlow")] public SyncDataFlowStatus? DataFlow { get; set; } - [JsonProperty("lastSyncedAt")] - public DateTime? LastSyncedAt { get; set; } + [JsonProperty("lastSyncedAt")] public DateTime? LastSyncedAt { get; set; } - [JsonProperty("hasSynced")] - public bool? HasSynced { get; set; } + [JsonProperty("hasSynced")] public bool? HasSynced { get; set; } + + [JsonProperty("priorityStatusEntries")] + public SyncPriorityStatus[]? PriorityStatusEntries { get; set; } } public class SyncStatus(SyncStatusOptions options) @@ -79,6 +88,48 @@ public class SyncStatus(SyncStatusOptions options) /// public SyncDataFlowStatus DataFlowStatus => Options.DataFlow ?? new SyncDataFlowStatus(); + /// + /// Provides sync status information for all bucket priorities, sorted by priority (highest first). + /// + public SyncPriorityStatus[] PriorityStatusEntries => + (Options.PriorityStatusEntries ?? []) + .OrderBy(entry => entry.Priority) + .ToArray(); + + /// + /// Reports the sync status (a pair of HasSynced and LastSyncedAt fields) + /// for a specific bucket priority level. + /// + /// When buckets with different priorities are declared, PowerSync may choose to synchronize higher-priority + /// buckets first. When a consistent view over all buckets for all priorities up until the given priority is + /// reached, PowerSync makes data from those buckets available before lower-priority buckets have finished + /// syncing. + /// + /// This method returns the status for the requested priority or the next higher priority level that has + /// status information available. 
This is because when PowerSync makes data for a given priority available, + /// all buckets in higher-priorities are guaranteed to be consistent with that checkpoint. + /// For example, if PowerSync just finished synchronizing buckets in priority level 3, calling this method + /// with a priority of 1 may return information for priority level 3. + /// + public SyncPriorityStatus StatusForPriority(int priority) + { + foreach (var known in PriorityStatusEntries) + { + if (known.Priority >= priority) + { + return known; + } + } + + // Fallback if no matching or higher-priority entry is found + return new SyncPriorityStatus + { + Priority = priority, + LastSyncedAt = LastSyncedAt, + HasSynced = HasSynced + }; + } + public bool IsEqual(SyncStatus status) { return JsonConvert.SerializeObject(Options) == JsonConvert.SerializeObject(status.Options); @@ -87,11 +138,18 @@ public bool IsEqual(SyncStatus status) public string GetMessage() { var dataFlow = DataFlowStatus; - return $"SyncStatus"; + return + $"SyncStatus"; } public string ToJSON() { return JsonConvert.SerializeObject(this); } + + private static int ComparePriorities(SyncPriorityStatus a, SyncPriorityStatus b) + { + // Lower numbers = higher priority + return a.Priority.CompareTo(b.Priority); + } } \ No newline at end of file From ca0152dedaa781fc2251136cad2d886b304e6ae8 Mon Sep 17 00:00:00 2001 From: Christiaan Landman Date: Wed, 21 May 2025 16:02:03 +0200 Subject: [PATCH 04/12] Fix instruction parsing. 
--- .../Client/Sync/Bucket/SqliteBucketStorage.cs | 2 +- .../Client/Sync/Stream/CoreInstructions.cs | 77 +++++++++++-------- .../Client/Sync/Stream/Remote.cs | 31 +++++--- .../Stream/StreamingSyncImplementation.cs | 60 ++++++++------- 4 files changed, 97 insertions(+), 73 deletions(-) diff --git a/PowerSync/PowerSync.Common/Client/Sync/Bucket/SqliteBucketStorage.cs b/PowerSync/PowerSync.Common/Client/Sync/Bucket/SqliteBucketStorage.cs index 6d5e810..b5b86fc 100644 --- a/PowerSync/PowerSync.Common/Client/Sync/Bucket/SqliteBucketStorage.cs +++ b/PowerSync/PowerSync.Common/Client/Sync/Bucket/SqliteBucketStorage.cs @@ -441,7 +441,7 @@ public async Task Control(string op, object? payload) return await db.WriteTransaction(async tx => { var result = await tx.Get("SELECT powersync_control(?, ?) AS r", [op, payload]); - Console.WriteLine(result.r); + Console.WriteLine("Control Response: " + result.r); return result.r; }); } diff --git a/PowerSync/PowerSync.Common/Client/Sync/Stream/CoreInstructions.cs b/PowerSync/PowerSync.Common/Client/Sync/Stream/CoreInstructions.cs index 6b2d1c2..7cc18d4 100644 --- a/PowerSync/PowerSync.Common/Client/Sync/Stream/CoreInstructions.cs +++ b/PowerSync/PowerSync.Common/Client/Sync/Stream/CoreInstructions.cs @@ -3,11 +3,50 @@ namespace PowerSync.Common.Client.Sync.Stream; using Newtonsoft.Json; -using PowerSync.Common.Client.Sync.Stream; -[JsonConverter(typeof(InstructionConverter))] +/// +/// An internal instruction emitted by the sync client in the core extension in response to the +/// SDK passing sync data into the extension. 
+/// public abstract class Instruction { + + public static Instruction[] ParseInstructions(string rawResponse) + { + var jsonArray = JArray.Parse(rawResponse); + List instructions = []; + + Console.WriteLine("Scanning instructions: "+ jsonArray.Count); + foreach (JObject item in jsonArray) + { + instructions.Add(ParseInstruction(item)); + Console.WriteLine("Parsed instruction: " + JsonConvert.SerializeObject(ParseInstruction(item))); + } + + + + return instructions.ToArray(); + } + + public static Instruction? ParseInstruction(JObject json) + { + if (json.ContainsKey("LogLine")) + return json["LogLine"]!.ToObject(); + if (json.ContainsKey("UpdateSyncStatus")) + return json["UpdateSyncStatus"]!.ToObject(); + if (json.ContainsKey("EstablishSyncStream")) + return json["EstablishSyncStream"]!.ToObject(); + if (json.ContainsKey("FetchCredentials")) + return json["FetchCredentials"]!.ToObject(); + if (json.ContainsKey("CloseSyncStream")) + return new CloseSyncStream(); + if (json.ContainsKey("FlushFileSystem")) + return new FlushFileSystem(); + if (json.ContainsKey("DidCompleteSync")) + return new DidCompleteSync(); + + throw new JsonSerializationException("Unknown Instruction type."); + } } public class LogLine: Instruction @@ -40,7 +79,7 @@ public class CoreSyncStatus public bool Connecting { get; set; } [JsonProperty("priority_status")] - public List PriorityStatus { get; set; } = null!; + public List PriorityStatus { get; set; } = []; [JsonProperty("downloading")] public DownloadProgress? Downloading { get; set; } @@ -87,34 +126,4 @@ public class FetchCredentials: Instruction public class CloseSyncStream : Instruction { } public class FlushFileSystem : Instruction { } -public class DidCompleteSync : Instruction { } - -public class InstructionConverter : JsonConverter -{ - public override Instruction ReadJson(JsonReader reader, Type objectType, Instruction? 
existingValue, bool hasExistingValue, JsonSerializer serializer) - { - var jsonObject = JObject.Load(reader); - Console.WriteLine("Meep" + jsonObject.ToString()); - if (jsonObject.ContainsKey("LogLine")) - return jsonObject["LogLine"]!.ToObject(serializer)!; - if (jsonObject.ContainsKey("UpdateSyncStatus")) - return jsonObject["UpdateSyncStatus"]!.ToObject(serializer)!; - if (jsonObject.ContainsKey("EstablishSyncStream")) - return jsonObject["EstablishSyncStream"]!.ToObject(serializer)!; - if (jsonObject.ContainsKey("FetchCredentials")) - return jsonObject["FetchCredentials"]!.ToObject(serializer)!; - if (jsonObject.ContainsKey("CloseSyncStream")) - return new CloseSyncStream(); - if (jsonObject.ContainsKey("FlushFileSystem")) - return new FlushFileSystem(); - if (jsonObject.ContainsKey("DidCompleteSync")) - return new DidCompleteSync(); - Console.WriteLine("Throwing on" + jsonObject.ToString()); - throw new JsonSerializationException("Unknown Instruction type."); - } - - public override void WriteJson(JsonWriter writer, Instruction? value, JsonSerializer serializer) - { - throw new NotImplementedException("Writing not implemented."); - } -} \ No newline at end of file +public class DidCompleteSync : Instruction { } \ No newline at end of file diff --git a/PowerSync/PowerSync.Common/Client/Sync/Stream/Remote.cs b/PowerSync/PowerSync.Common/Client/Sync/Stream/Remote.cs index 058d436..f359974 100644 --- a/PowerSync/PowerSync.Common/Client/Sync/Stream/Remote.cs +++ b/PowerSync/PowerSync.Common/Client/Sync/Stream/Remote.cs @@ -121,12 +121,25 @@ public async Task Get(string path, Dictionary? headers = n return JsonConvert.DeserializeObject(responseData)!; } - /// - /// Posts to the stream endpoint and returns an async enumerable of parsed NDJSON lines. 
- /// - public async IAsyncEnumerable PostStream(SyncStreamOptions options) + public async IAsyncEnumerable OldPostStream(SyncStreamOptions options) { - using var stream = await PostStreamRaw(options); + using var requestMessage = await BuildRequest(HttpMethod.Post, options.Path, options.Data, options.Headers); + using var response = await httpClient.SendAsync(requestMessage, HttpCompletionOption.ResponseHeadersRead, options.CancellationToken); + + if (response.Content == null) + { + throw new HttpRequestException($"HTTP {response.StatusCode}: No content"); + } + else + if (!response.IsSuccessStatusCode) + { + var errorText = await response.Content.ReadAsStringAsync(); + throw new HttpRequestException($"HTTP {response.StatusCode}: {errorText}"); + } + + var stream = await response.Content.ReadAsStreamAsync(); + + // Read NDJSON stream using var reader = new StreamReader(stream, Encoding.UTF8); string? line; @@ -135,14 +148,14 @@ public async Task Get(string path, Dictionary? headers = n yield return ParseStreamingSyncLine(JObject.Parse(line)); } } - + /// - /// Posts to the stream endpoint and returns a raw stream that can be read line by line. + /// Posts to the stream endpoint and returns a raw NDJSON stream that can be read line by line. 
/// public async Task PostStreamRaw(SyncStreamOptions options) { - using var requestMessage = await BuildRequest(HttpMethod.Post, options.Path, options.Data, options.Headers); - using var response = await httpClient.SendAsync(requestMessage, HttpCompletionOption.ResponseHeadersRead, options.CancellationToken); + var requestMessage = await BuildRequest(HttpMethod.Post, options.Path, options.Data, options.Headers); + var response = await httpClient.SendAsync(requestMessage, HttpCompletionOption.ResponseHeadersRead, options.CancellationToken); if (response.Content == null) { diff --git a/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncImplementation.cs b/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncImplementation.cs index ab5860d..df91ff5 100644 --- a/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncImplementation.cs +++ b/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncImplementation.cs @@ -1,4 +1,5 @@ using System.Text; +using Newtonsoft.Json.Linq; namespace PowerSync.Common.Client.Sync.Stream; @@ -107,12 +108,12 @@ public class StreamingSyncImplementation : EventStream StreamingSyncIteration(CancellationToken signal, { Params = options?.Params ?? DEFAULT_STREAM_CONNECTION_OPTIONS.Params }; - + await RustSyncIteration(signal, resolvedOptions); - + return true; } }); @@ -638,15 +639,14 @@ private async Task RustSyncIteration(CancellationToken? signal, RequiredPowerSyn var nestedCts = new CancellationTokenSource(); signal?.Register(() => { nestedCts.Cancel(); }); - + try { - notifyCompletedUploads = () => { - Task.Run(async () => await Control("completed_upload")); - }; - + notifyCompletedUploads = () => { Task.Run(async () => await Control("completed_upload")); }; + await Control("start", JsonConvert.SerializeObject(resolvedOptions.Params)); - if (receivingLines != null) { + if (receivingLines != null) + { await receivingLines; } } @@ -657,7 +657,7 @@ private async Task RustSyncIteration(CancellationToken? 
signal, RequiredPowerSyn } return; - + async Task Connect(EstablishSyncStream instruction) { var syncOptions = new SyncStreamOptions @@ -666,17 +666,16 @@ async Task Connect(EstablishSyncStream instruction) CancellationToken = nestedCts.Token, Data = instruction.Request }; - - using var stream = await Options.Remote.PostStreamRaw(syncOptions); + + var stream = await Options.Remote.PostStreamRaw(syncOptions); using var reader = new StreamReader(stream, Encoding.UTF8); string? line; while ((line = await reader.ReadLineAsync()) != null) { logger.LogDebug("Parsing line for rust sync stream {message}", line); - await Control("line_binary", line); + await Control("line_text", line); } - } async Task Stop() @@ -689,7 +688,7 @@ async Task Control(string op, object? payload = null) logger.LogDebug("Control call {message}", op); var rawResponse = await Options.Adapter.Control(op, payload); - await HandleInstructions(JsonConvert.DeserializeObject(rawResponse)); + await HandleInstructions(Instruction.ParseInstructions(rawResponse)); } async Task HandleInstructions(Instruction[] instructions) @@ -699,10 +698,11 @@ async Task HandleInstructions(Instruction[] instructions) await HandleInstruction(instruction); } } - + DB.Crud.SyncPriorityStatus CoreStatusToSyncStatus(SyncPriorityStatus status) { - logger.LogWarning("Sync status {status}", status?.LastSyncedAt != null ? new DateTime(status!.LastSyncedAt) : null); + logger.LogWarning("Sync status {status}", + status?.LastSyncedAt != null ? 
new DateTime(status!.LastSyncedAt) : null); return new DB.Crud.SyncPriorityStatus { Priority = status.Priority, @@ -733,9 +733,10 @@ async Task HandleInstruction(Instruction instruction) break; case UpdateSyncStatus syncStatus: var info = syncStatus.Status; - var coreCompleteSync = info.PriorityStatus.FirstOrDefault(s => s.Priority == SyncProgress.FULL_SYNC_PRIORITY); + var coreCompleteSync = + info.PriorityStatus.FirstOrDefault(s => s.Priority == SyncProgress.FULL_SYNC_PRIORITY); var completeSync = coreCompleteSync != null ? CoreStatusToSyncStatus(coreCompleteSync) : null; - + UpdateSyncStatus(new SyncStatusOptions { Connected = info.Connected, @@ -753,21 +754,22 @@ async Task HandleInstruction(Instruction instruction) // TODO handle errors later? new UpdateSyncStatusOptions { - ClearDownloadError = true, - ClearUploadError = true + // ClearDownloadError = true, + // ClearUploadError = true } ); - + break; case EstablishSyncStream establishSyncStream: - if (receivingLines != null) { + if (receivingLines != null) + { // Already connected, this shouldn't happen during a single iteration. throw new Exception("Unexpected request to establish sync stream, already connected"); } - + receivingLines = Connect(establishSyncStream); break; - case FetchCredentials { DidExpire: true, }: + case FetchCredentials { DidExpire: true, }: Options.Remote.InvalidateCredentials(); break; case FetchCredentials: @@ -781,7 +783,7 @@ async Task HandleInstruction(Instruction instruction) break; case DidCompleteSync: UpdateSyncStatus( - new SyncStatusOptions{}, + new SyncStatusOptions { }, new UpdateSyncStatusOptions { ClearDownloadError = true }); break; } @@ -929,7 +931,7 @@ protected void UpdateSyncStatus(SyncStatusOptions options, UpdateSyncStatusOptio ? null : options.DataFlow?.UploadError ?? SyncStatus.DataFlowStatus.UploadError, }, - PriorityStatusEntries = options.PriorityStatusEntries ?? SyncStatus.PriorityStatusEntries + PriorityStatusEntries = options.PriorityStatusEntries ?? 
SyncStatus.PriorityStatusEntries }); if (!SyncStatus.Equals(updatedStatus)) From 24bcebad13610a19a7533abac070c77639b71209 Mon Sep 17 00:00:00 2001 From: Christiaan Landman Date: Wed, 21 May 2025 17:07:24 +0200 Subject: [PATCH 05/12] Cleanup. --- .../Client/Sync/Bucket/SqliteBucketStorage.cs | 103 ++++--- .../Client/Sync/Stream/CoreInstructions.cs | 4 - .../Stream/StreamingSyncImplementation.cs | 282 +----------------- 3 files changed, 58 insertions(+), 331 deletions(-) diff --git a/PowerSync/PowerSync.Common/Client/Sync/Bucket/SqliteBucketStorage.cs b/PowerSync/PowerSync.Common/Client/Sync/Bucket/SqliteBucketStorage.cs index b5b86fc..0b9d0f7 100644 --- a/PowerSync/PowerSync.Common/Client/Sync/Bucket/SqliteBucketStorage.cs +++ b/PowerSync/PowerSync.Common/Client/Sync/Bucket/SqliteBucketStorage.cs @@ -4,19 +4,15 @@ namespace PowerSync.Common.Client.Sync.Bucket; using System.Collections.Generic; using System.Linq; using System.Threading.Tasks; - using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging.Abstractions; - using Newtonsoft.Json; - using PowerSync.Common.DB; using PowerSync.Common.DB.Crud; using PowerSync.Common.Utils; public class SqliteBucketStorage : EventStream, IBucketStorageAdapter { - public static readonly string MAX_OP_ID = "9223372036854775807"; private readonly IDBAdapter db; @@ -37,7 +33,8 @@ private record ExistingTableRowsResult(string name); public SqliteBucketStorage(IDBAdapter db, ILogger? logger = null) { this.db = db; - this.logger = logger ?? NullLogger.Instance; ; + this.logger = logger ?? NullLogger.Instance; + ; hasCompletedSync = false; pendingBucketDeletes = true; tableNames = []; @@ -62,9 +59,10 @@ public SqliteBucketStorage(IDBAdapter db, ILogger? 
logger = null) public async Task Init() { - hasCompletedSync = false; - var existingTableRows = await db.GetAll("SELECT name FROM sqlite_master WHERE type='table' AND name GLOB 'ps_data_*'"); + var existingTableRows = + await db.GetAll( + "SELECT name FROM sqlite_master WHERE type='table' AND name GLOB 'ps_data_*'"); foreach (var row in existingTableRows) { @@ -79,6 +77,7 @@ public async Task Init() } private record ClientIdResult(string? client_id); + public async Task GetClientId() { if (clientId == null) @@ -95,12 +94,15 @@ public string GetMaxOpId() return MAX_OP_ID; } - public void StartSession() { } + public void StartSession() + { + } public async Task GetBucketStates() { return - await db.GetAll("SELECT name as bucket, cast(last_op as TEXT) as op_id FROM ps_buckets WHERE pending_delete = 0 AND name != '$local'"); + await db.GetAll( + "SELECT name as bucket, cast(last_op as TEXT) as op_id FROM ps_buckets WHERE pending_delete = 0 AND name != '$local'"); } public async Task SaveSyncData(SyncDataBatch batch) @@ -115,6 +117,7 @@ await db.WriteTransaction(async tx => logger.LogDebug("saveSyncData {message}", JsonConvert.SerializeObject(result)); count += b.Data.Length; } + compactCounter += count; }); } @@ -140,6 +143,7 @@ await tx.Execute("INSERT INTO powersync_operations(op, data) VALUES(?, ?)", } private record LastSyncedResult(string? synced_at); + public async Task HasCompletedSync() { if (hasCompletedSync) return true; @@ -155,11 +159,13 @@ public async Task SyncLocalDatabase(Checkpoint checkpoi var validation = await ValidateChecksums(checkpoint); if (!validation.CheckpointValid) { - logger.LogError("Checksums failed for {failures}", JsonConvert.SerializeObject(validation.CheckpointFailures)); + logger.LogError("Checksums failed for {failures}", + JsonConvert.SerializeObject(validation.CheckpointFailures)); foreach (var failedBucket in validation.CheckpointFailures ?? 
[]) { await DeleteBucket(failedBucket); } + return new SyncLocalDatabaseResult { Ready = false, @@ -210,7 +216,7 @@ private async Task UpdateObjectsFromBuckets(Checkpoint checkpoint) return await db.WriteTransaction(async tx => { var result = await tx.Execute("INSERT INTO powersync_operations(op, data) VALUES(?, ?)", - ["sync_local", ""]); + ["sync_local", ""]); return result.InsertId == 1; }); @@ -220,18 +226,16 @@ private record ResultResult(object result); public class ResultDetail { - [JsonProperty("valid")] - public bool Valid { get; set; } + [JsonProperty("valid")] public bool Valid { get; set; } - [JsonProperty("failed_buckets")] - public List? FailedBuckets { get; set; } + [JsonProperty("failed_buckets")] public List? FailedBuckets { get; set; } } public async Task ValidateChecksums( Checkpoint checkpoint) { var result = await db.Get("SELECT powersync_validate_checkpoint(?) as result", - [JsonConvert.SerializeObject(checkpoint)]); + [JsonConvert.SerializeObject(checkpoint)]); logger.LogDebug("validateChecksums result item {message}", JsonConvert.SerializeObject(result)); @@ -298,6 +302,7 @@ await tx.Execute("INSERT INTO powersync_operations(op, data) VALUES (?, ?)", } private record TargetOpResult(string target_op); + private record SequenceResult(int seq); public async Task UpdateLocalTarget(Func> callback) @@ -351,16 +356,18 @@ public async Task UpdateLocalTarget(Func> callback) if (seqAfter != seqBefore) { - logger.LogDebug("[updateLocalTarget] seqAfter ({seqAfter}) != seqBefore ({seqBefore})", seqAfter, seqBefore); + logger.LogDebug("[updateLocalTarget] seqAfter ({seqAfter}) != seqBefore ({seqBefore})", seqAfter, + seqBefore); return false; } var response = await tx.Execute( - "UPDATE ps_buckets SET target_op = CAST(? as INTEGER) WHERE name='$local'", - [opId] - ); + "UPDATE ps_buckets SET target_op = CAST(? 
as INTEGER) WHERE name='$local'", + [opId] + ); - logger.LogDebug("[updateLocalTarget] Response from updating target_op: {response}", JsonConvert.SerializeObject(response)); + logger.LogDebug("[updateLocalTarget] Response from updating target_op: {response}", + JsonConvert.SerializeObject(response)); return true; }); } @@ -388,33 +395,33 @@ public async Task UpdateLocalTarget(Func> callback) var last = all[all.Length - 1]; return new CrudBatch( - Crud: all, - HaveMore: true, - CompleteCallback: async (string? writeCheckpoint) => - { - await db.WriteTransaction(async tx => + Crud: all, + HaveMore: true, + CompleteCallback: async (string? writeCheckpoint) => { - await tx.Execute("DELETE FROM ps_crud WHERE id <= ?", [last.ClientId]); - - if (!string.IsNullOrEmpty(writeCheckpoint)) + await db.WriteTransaction(async tx => { - var crudResult = await tx.GetAll("SELECT 1 FROM ps_crud LIMIT 1"); - if (crudResult?.Length > 0) + await tx.Execute("DELETE FROM ps_crud WHERE id <= ?", [last.ClientId]); + + if (!string.IsNullOrEmpty(writeCheckpoint)) + { + var crudResult = await tx.GetAll("SELECT 1 FROM ps_crud LIMIT 1"); + if (crudResult?.Length > 0) + { + await tx.Execute( + "UPDATE ps_buckets SET target_op = CAST(? as INTEGER) WHERE name='$local'", + [writeCheckpoint]); + } + } + else { await tx.Execute( "UPDATE ps_buckets SET target_op = CAST(? as INTEGER) WHERE name='$local'", - [writeCheckpoint]); + [GetMaxOpId()]); } - } - else - { - await tx.Execute( - "UPDATE ps_buckets SET target_op = CAST(? as INTEGER) WHERE name='$local'", - [GetMaxOpId()]); - } - }); - } - ); + }); + } + ); } public async Task NextCrudItem() @@ -436,13 +443,15 @@ public async Task SetTargetCheckpoint(Checkpoint checkpoint) } record ControlResult(string? r); + public async Task Control(string op, object? payload) { return await db.WriteTransaction(async tx => { - var result = await tx.Get("SELECT powersync_control(?, ?) 
AS r", [op, payload]); - Console.WriteLine("Control Response: " + result.r); - return result.r; - }); + var result = await tx.Get("SELECT powersync_control(?, ?) AS r", [op, payload]); + + + return result.r; + }); } -} +} \ No newline at end of file diff --git a/PowerSync/PowerSync.Common/Client/Sync/Stream/CoreInstructions.cs b/PowerSync/PowerSync.Common/Client/Sync/Stream/CoreInstructions.cs index 7cc18d4..833da24 100644 --- a/PowerSync/PowerSync.Common/Client/Sync/Stream/CoreInstructions.cs +++ b/PowerSync/PowerSync.Common/Client/Sync/Stream/CoreInstructions.cs @@ -16,15 +16,11 @@ public static Instruction[] ParseInstructions(string rawResponse) var jsonArray = JArray.Parse(rawResponse); List instructions = []; - Console.WriteLine("Scanning instructions: "+ jsonArray.Count); foreach (JObject item in jsonArray) { instructions.Add(ParseInstruction(item)); - Console.WriteLine("Parsed instruction: " + JsonConvert.SerializeObject(ParseInstruction(item))); } - - return instructions.ToArray(); } diff --git a/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncImplementation.cs b/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncImplementation.cs index df91ff5..808dcc7 100644 --- a/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncImplementation.cs +++ b/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncImplementation.cs @@ -352,286 +352,7 @@ protected async Task StreamingSyncIteration(CancellationToken signal, } }); } - - // private async Task legacySyncIteration(CancellationToken? 
signal, RequiredPowerSyncConnectionOptions resolvedOptions) - // { - // logger.LogDebug("Streaming sync iteration started"); - // Options.Adapter.StartSession(); - // var bucketEntries = await Options.Adapter.GetBucketStates(); - // var initialBuckets = new Dictionary(); - // - // foreach (var entry in bucketEntries) - // { - // initialBuckets[entry.Bucket] = entry.OpId; - // } - // - // var req = initialBuckets - // .Select(kvp => new BucketRequest - // { - // Name = kvp.Key, - // After = kvp.Value - // }) - // .ToList(); - // - // var targetCheckpoint = (Checkpoint?)null; - // var validatedCheckpoint = (Checkpoint?)null; - // var appliedCheckpoint = (Checkpoint?)null; - // - // var bucketSet = new HashSet(initialBuckets.Keys); - // - // var clientId = await Options.Adapter.GetClientId(); - // - // logger.LogDebug("Requesting stream from server"); - // - // var syncOptions = new SyncStreamOptions - // { - // Path = "/sync/stream", - // CancellationToken = signal, - // Data = new StreamingSyncRequest - // { - // Buckets = req, - // IncludeChecksum = true, - // RawData = true, - // Parameters = resolvedOptions.Params, // Replace with actual params - // ClientId = clientId - // } - // }; - // - // var stream = Options.Remote.PostStream(syncOptions); - // var first = true; - // await foreach (var line in stream) - // { - // if (first) - // { - // first = false; - // logger.LogDebug("Stream established. 
Processing events"); - // } - // - // if (line == null) - // { - // logger.LogDebug("Stream has closed while waiting"); - // // The stream has closed while waiting - // return new StreamingSyncIterationResult { Retry = true }; - // } - // - // // A connection is active and messages are being received - // if (!SyncStatus.Connected) - // { - // // There is a connection now - // UpdateSyncStatus(new SyncStatusOptions - // { - // Connected = true - // }); - // TriggerCrudUpload(); - // } - // - // if (line is StreamingSyncCheckpoint syncCheckpoint) - // { - // logger.LogDebug("Sync checkpoint: {message}", syncCheckpoint); - // - // targetCheckpoint = syncCheckpoint.Checkpoint; - // var bucketsToDelete = new HashSet(bucketSet); - // var newBuckets = new HashSet(); - // - // foreach (var checksum in syncCheckpoint.Checkpoint.Buckets) - // { - // newBuckets.Add(checksum.Bucket); - // bucketsToDelete.Remove(checksum.Bucket); - // } - // - // if (bucketsToDelete.Count > 0) - // { - // logger.LogDebug("Removing buckets: {message}", string.Join(", ", bucketsToDelete)); - // } - // - // bucketSet = newBuckets; - // await Options.Adapter.RemoveBuckets([.. bucketsToDelete]); - // await Options.Adapter.SetTargetCheckpoint(targetCheckpoint); - // } - // else if (line is StreamingSyncCheckpointComplete checkpointComplete) - // { - // logger.LogDebug("Checkpoint complete: {message}", targetCheckpoint); - // - // var result = await Options.Adapter.SyncLocalDatabase(targetCheckpoint!); - // - // if (!result.CheckpointValid) - // { - // // This means checksums failed. Start again with a new checkpoint. - // // TODO: better back-off - // await Task.Delay(50); - // return new StreamingSyncIterationResult { Retry = true }; - // } - // else if (!result.Ready) - // { - // // Checksums valid, but need more data for a consistent checkpoint. - // // Continue waiting. 
- // // Landing here the whole time - // } - // else - // { - // appliedCheckpoint = targetCheckpoint; - // logger.LogDebug("Validated checkpoint: {message}", appliedCheckpoint); - // - // UpdateSyncStatus(new SyncStatusOptions - // { - // Connected = true, - // LastSyncedAt = DateTime.Now, - // DataFlow = new SyncDataFlowStatus - // { - // Downloading = false - // } - // }, new UpdateSyncStatusOptions - // { - // ClearDownloadError = true - // }); - // } - // - // validatedCheckpoint = targetCheckpoint; - // } - // else if (line is StreamingSyncCheckpointDiff checkpointDiff) - // { - // // TODO: It may be faster to just keep track of the diff, instead of the entire checkpoint - // if (targetCheckpoint == null) - // { - // throw new Exception("Checkpoint diff without previous checkpoint"); - // } - // - // var diff = checkpointDiff.CheckpointDiff; - // var newBuckets = new Dictionary(); - // - // foreach (var checksum in targetCheckpoint.Buckets) - // { - // newBuckets[checksum.Bucket] = checksum; - // } - // - // foreach (var checksum in diff.UpdatedBuckets) - // { - // newBuckets[checksum.Bucket] = checksum; - // } - // - // foreach (var bucket in diff.RemovedBuckets) - // { - // newBuckets.Remove(bucket); - // } - // - // var newWriteCheckpoint = - // !string.IsNullOrEmpty(diff.WriteCheckpoint) ? diff.WriteCheckpoint : null; - // var newCheckpoint = new Checkpoint - // { - // LastOpId = diff.LastOpId, - // Buckets = [.. newBuckets.Values], - // WriteCheckpoint = newWriteCheckpoint - // }; - // - // targetCheckpoint = newCheckpoint; - // - // bucketSet = [.. 
newBuckets.Keys]; - // - // var bucketsToDelete = diff.RemovedBuckets.ToArray(); - // if (bucketsToDelete.Length > 0) - // { - // logger.LogDebug("Remove buckets: {message}", string.Join(", ", bucketsToDelete)); - // } - // - // // Perform async operations - // await Options.Adapter.RemoveBuckets(bucketsToDelete); - // await Options.Adapter.SetTargetCheckpoint(targetCheckpoint); - // } - // else if (line is StreamingSyncDataJSON dataJSON) - // { - // UpdateSyncStatus(new SyncStatusOptions - // { - // DataFlow = new SyncDataFlowStatus - // { - // Downloading = true - // } - // }); - // await Options.Adapter.SaveSyncData(new SyncDataBatch([SyncDataBucket.FromRow(dataJSON.Data)])); - // } - // else if (line is StreamingSyncKeepalive keepalive) - // { - // var remainingSeconds = keepalive.TokenExpiresIn; - // if (remainingSeconds == 0) - // { - // // Connection would be closed automatically right after this - // logger.LogDebug("Token expiring; reconnect"); - // Options.Remote.InvalidateCredentials(); - // - // // For a rare case where the backend connector does not update the token - // // (uses the same one), this should have some delay. 
- // // - // await DelayRetry(); - // return new StreamingSyncIterationResult { Retry = true }; - // } - // else if (remainingSeconds < 30) - // { - // logger.LogDebug("Token will expire soon; reconnect"); - // // Pre-emptively refresh the token - // Options.Remote.InvalidateCredentials(); - // return new StreamingSyncIterationResult { Retry = true }; - // } - // - // TriggerCrudUpload(); - // } - // else - // { - // logger.LogDebug("Sync complete"); - // - // if (targetCheckpoint == appliedCheckpoint) - // { - // UpdateSyncStatus(new SyncStatusOptions - // { - // Connected = true, - // LastSyncedAt = DateTime.Now, - // }, - // new UpdateSyncStatusOptions - // { - // ClearDownloadError = true - // } - // ); - // } - // else if (validatedCheckpoint == targetCheckpoint) - // { - // var result = await Options.Adapter.SyncLocalDatabase(targetCheckpoint!); - // if (!result.CheckpointValid) - // { - // // This means checksums failed. Start again with a new checkpoint. - // // TODO: better back-off - // await Task.Delay(50); - // return new StreamingSyncIterationResult { Retry = false }; - // } - // else if (!result.Ready) - // { - // // Checksums valid, but need more data for a consistent checkpoint. - // // Continue waiting. - // } - // else - // { - // appliedCheckpoint = targetCheckpoint; - // UpdateSyncStatus(new SyncStatusOptions - // { - // Connected = true, - // LastSyncedAt = DateTime.Now, - // DataFlow = new SyncDataFlowStatus - // { - // Downloading = false, - // } - // }, - // new UpdateSyncStatusOptions - // { - // ClearDownloadError = true - // }); - // } - // } - // } - // } - // - // logger.LogDebug("Stream input empty"); - // // Connection closed. Likely due to auth issue. - // return new StreamingSyncIterationResult { Retry = true }; - // } - - // StreamingSync(CancellationToken? signal + private async Task RustSyncIteration(CancellationToken? signal, RequiredPowerSyncConnectionOptions resolvedOptions) { Task? 
receivingLines = null; @@ -675,6 +396,7 @@ async Task Connect(EstablishSyncStream instruction) { logger.LogDebug("Parsing line for rust sync stream {message}", line); await Control("line_text", line); + } } From e359ca9200c77f38c940444789cc7525f14d0d88 Mon Sep 17 00:00:00 2001 From: Christiaan Landman Date: Thu, 22 May 2025 13:19:44 +0200 Subject: [PATCH 06/12] Cleaning out BucketStorage interface. --- .../Sync/Bucket/BucketStorageAdapter.cs | 44 +- .../Client/Sync/Bucket/SqliteBucketStorage.cs | 177 +-- .../Client/Sync/Stream/CoreInstructions.cs | 2 +- .../Stream/StreamingSyncImplementation.cs | 5 - .../PowerSync.Common/DB/Crud/SyncStatus.cs | 9 +- .../Client/Sync/BucketStorageTests.cs | 1019 ----------------- 6 files changed, 7 insertions(+), 1249 deletions(-) delete mode 100644 Tests/PowerSync/PowerSync.Common.Tests/Client/Sync/BucketStorageTests.cs diff --git a/PowerSync/PowerSync.Common/Client/Sync/Bucket/BucketStorageAdapter.cs b/PowerSync/PowerSync.Common/Client/Sync/Bucket/BucketStorageAdapter.cs index 5cf7ba3..3026071 100644 --- a/PowerSync/PowerSync.Common/Client/Sync/Bucket/BucketStorageAdapter.cs +++ b/PowerSync/PowerSync.Common/Client/Sync/Bucket/BucketStorageAdapter.cs @@ -20,38 +20,6 @@ public class Checkpoint public string? WriteCheckpoint { get; set; } = null; } -public class BucketState -{ - [JsonProperty("bucket")] - public string Bucket { get; set; } = null!; - - [JsonProperty("op_id")] - public string OpId { get; set; } = null!; -} - -public class SyncLocalDatabaseResult -{ - [JsonProperty("ready")] - public bool Ready { get; set; } - - [JsonProperty("checkpointValid")] - public bool CheckpointValid { get; set; } - - [JsonProperty("checkpointFailures")] - public string[]? CheckpointFailures { get; set; } - - public override bool Equals(object? 
obj) - { - if (obj is not SyncLocalDatabaseResult other) return false; - return JsonConvert.SerializeObject(this) == JsonConvert.SerializeObject(other); - } - - public override int GetHashCode() - { - return JsonConvert.SerializeObject(this).GetHashCode(); - } -} - public class BucketChecksum { [JsonProperty("bucket")] @@ -84,21 +52,11 @@ public class BucketStorageEvent public interface IBucketStorageAdapter : IEventStream { Task Init(); - Task SaveSyncData(SyncDataBatch batch); - Task RemoveBuckets(string[] buckets); - Task SetTargetCheckpoint(Checkpoint checkpoint); - - void StartSession(); - - Task GetBucketStates(); - - Task SyncLocalDatabase(Checkpoint checkpoint); - + Task NextCrudItem(); Task HasCrud(); Task GetCrudBatch(int limit = 100); - Task HasCompletedSync(); Task UpdateLocalTarget(Func> callback); /// diff --git a/PowerSync/PowerSync.Common/Client/Sync/Bucket/SqliteBucketStorage.cs b/PowerSync/PowerSync.Common/Client/Sync/Bucket/SqliteBucketStorage.cs index 0b9d0f7..3c25cdc 100644 --- a/PowerSync/PowerSync.Common/Client/Sync/Bucket/SqliteBucketStorage.cs +++ b/PowerSync/PowerSync.Common/Client/Sync/Bucket/SqliteBucketStorage.cs @@ -16,7 +16,6 @@ public class SqliteBucketStorage : EventStream, IBucketStora public static readonly string MAX_OP_ID = "9223372036854775807"; private readonly IDBAdapter db; - private bool hasCompletedSync; private bool pendingBucketDeletes; private readonly HashSet tableNames; private string? clientId; @@ -34,8 +33,6 @@ public SqliteBucketStorage(IDBAdapter db, ILogger? logger = null) { this.db = db; this.logger = logger ?? NullLogger.Instance; - ; - hasCompletedSync = false; pendingBucketDeletes = true; tableNames = []; @@ -59,7 +56,6 @@ public SqliteBucketStorage(IDBAdapter db, ILogger? 
logger = null) public async Task Init() { - hasCompletedSync = false; var existingTableRows = await db.GetAll( "SELECT name FROM sqlite_master WHERE type='table' AND name GLOB 'ps_data_*'"); @@ -93,171 +89,7 @@ public string GetMaxOpId() { return MAX_OP_ID; } - - public void StartSession() - { - } - - public async Task GetBucketStates() - { - return - await db.GetAll( - "SELECT name as bucket, cast(last_op as TEXT) as op_id FROM ps_buckets WHERE pending_delete = 0 AND name != '$local'"); - } - - public async Task SaveSyncData(SyncDataBatch batch) - { - await db.WriteTransaction(async tx => - { - int count = 0; - foreach (var b in batch.Buckets) - { - var result = await tx.Execute("INSERT INTO powersync_operations(op, data) VALUES(?, ?)", - ["save", JsonConvert.SerializeObject(new { buckets = new[] { b.ToJSON() } })]); - logger.LogDebug("saveSyncData {message}", JsonConvert.SerializeObject(result)); - count += b.Data.Length; - } - - compactCounter += count; - }); - } - - public async Task RemoveBuckets(string[] buckets) - { - foreach (var bucket in buckets) - { - await DeleteBucket(bucket); - } - } - - private async Task DeleteBucket(string bucket) - { - await db.WriteTransaction(async tx => - { - await tx.Execute("INSERT INTO powersync_operations(op, data) VALUES(?, ?)", - ["delete_bucket", bucket]); - }); - - logger.LogDebug("Done deleting bucket"); - pendingBucketDeletes = true; - } - - private record LastSyncedResult(string? 
synced_at); - - public async Task HasCompletedSync() - { - if (hasCompletedSync) return true; - - var result = await db.Get("SELECT powersync_last_synced_at() as synced_at"); - - hasCompletedSync = result.synced_at != null; - return hasCompletedSync; - } - - public async Task SyncLocalDatabase(Checkpoint checkpoint) - { - var validation = await ValidateChecksums(checkpoint); - if (!validation.CheckpointValid) - { - logger.LogError("Checksums failed for {failures}", - JsonConvert.SerializeObject(validation.CheckpointFailures)); - foreach (var failedBucket in validation.CheckpointFailures ?? []) - { - await DeleteBucket(failedBucket); - } - - return new SyncLocalDatabaseResult - { - Ready = false, - CheckpointValid = false, - CheckpointFailures = validation.CheckpointFailures - }; - } - - var bucketNames = checkpoint.Buckets.Select(b => b.Bucket).ToArray(); - await db.WriteTransaction(async tx => - { - await tx.Execute( - "UPDATE ps_buckets SET last_op = ? WHERE name IN (SELECT json_each.value FROM json_each(?))", - [checkpoint.LastOpId, JsonConvert.SerializeObject(bucketNames)] - ); - - if (checkpoint.WriteCheckpoint != null) - { - await tx.Execute( - "UPDATE ps_buckets SET last_op = ? 
WHERE name = '$local'", - [checkpoint.WriteCheckpoint] - ); - } - }); - - var valid = await UpdateObjectsFromBuckets(checkpoint); - if (!valid) - { - logger.LogDebug("Not at a consistent checkpoint - cannot update local db"); - return new SyncLocalDatabaseResult - { - Ready = false, - CheckpointValid = true - }; - } - - await ForceCompact(); - - return new SyncLocalDatabaseResult - { - Ready = true, - CheckpointValid = true - }; - } - - private async Task UpdateObjectsFromBuckets(Checkpoint checkpoint) - { - return await db.WriteTransaction(async tx => - { - var result = await tx.Execute("INSERT INTO powersync_operations(op, data) VALUES(?, ?)", - ["sync_local", ""]); - - return result.InsertId == 1; - }); - } - - private record ResultResult(object result); - - public class ResultDetail - { - [JsonProperty("valid")] public bool Valid { get; set; } - - [JsonProperty("failed_buckets")] public List? FailedBuckets { get; set; } - } - - public async Task ValidateChecksums( - Checkpoint checkpoint) - { - var result = await db.Get("SELECT powersync_validate_checkpoint(?) as result", - [JsonConvert.SerializeObject(checkpoint)]); - - logger.LogDebug("validateChecksums result item {message}", JsonConvert.SerializeObject(result)); - - if (result == null) return new SyncLocalDatabaseResult { CheckpointValid = false, Ready = false }; - - var resultDetail = JsonConvert.DeserializeObject(result.result.ToString() ?? "{}"); - - if (resultDetail?.Valid == true) - { - return new SyncLocalDatabaseResult { Ready = true, CheckpointValid = true }; - } - else - { - return new SyncLocalDatabaseResult - { - CheckpointValid = false, - Ready = false, - CheckpointFailures = resultDetail?.FailedBuckets?.ToArray() ?? [] - }; - } - } - + /// /// Force a compact operation, primarily for testing purposes. 
/// @@ -435,12 +267,7 @@ public async Task HasCrud() { return await db.GetOptional("SELECT 1 as ignore FROM ps_crud LIMIT 1") != null; } - - public async Task SetTargetCheckpoint(Checkpoint checkpoint) - { - // No Op - await Task.CompletedTask; - } + record ControlResult(string? r); diff --git a/PowerSync/PowerSync.Common/Client/Sync/Stream/CoreInstructions.cs b/PowerSync/PowerSync.Common/Client/Sync/Stream/CoreInstructions.cs index 833da24..7842ff1 100644 --- a/PowerSync/PowerSync.Common/Client/Sync/Stream/CoreInstructions.cs +++ b/PowerSync/PowerSync.Common/Client/Sync/Stream/CoreInstructions.cs @@ -122,4 +122,4 @@ public class FetchCredentials: Instruction public class CloseSyncStream : Instruction { } public class FlushFileSystem : Instruction { } -public class DidCompleteSync : Instruction { } \ No newline at end of file +public class DidCompleteSync : Instruction { } diff --git a/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncImplementation.cs b/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncImplementation.cs index 808dcc7..0ff607c 100644 --- a/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncImplementation.cs +++ b/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncImplementation.cs @@ -619,11 +619,6 @@ await locks.ObtainLock(new LockOptions }); } - public async Task HasCompletedSync() - { - return await Options.Adapter.HasCompletedSync(); - } - public async Task WaitForReady() { // Do nothing diff --git a/PowerSync/PowerSync.Common/DB/Crud/SyncStatus.cs b/PowerSync/PowerSync.Common/DB/Crud/SyncStatus.cs index b9663ba..fcdd838 100644 --- a/PowerSync/PowerSync.Common/DB/Crud/SyncStatus.cs +++ b/PowerSync/PowerSync.Common/DB/Crud/SyncStatus.cs @@ -26,8 +26,7 @@ public class SyncDataFlowStatus public class SyncPriorityStatus { [JsonProperty("uploading")] public int Priority { get; set; } - - + [JsonProperty("lastSyncedAt")] public DateTime? LastSyncedAt { get; set; } [JsonProperty("hasSynced")] public bool? 
HasSynced { get; set; } @@ -35,9 +34,7 @@ public class SyncPriorityStatus public class SyncStatusOptions { - public SyncStatusOptions() - { - } + public SyncStatusOptions() {} public SyncStatusOptions(SyncStatusOptions options) { @@ -146,7 +143,7 @@ public string ToJSON() { return JsonConvert.SerializeObject(this); } - + private static int ComparePriorities(SyncPriorityStatus a, SyncPriorityStatus b) { // Lower numbers = higher priority diff --git a/Tests/PowerSync/PowerSync.Common.Tests/Client/Sync/BucketStorageTests.cs b/Tests/PowerSync/PowerSync.Common.Tests/Client/Sync/BucketStorageTests.cs deleted file mode 100644 index f4b4762..0000000 --- a/Tests/PowerSync/PowerSync.Common.Tests/Client/Sync/BucketStorageTests.cs +++ /dev/null @@ -1,1019 +0,0 @@ -namespace PowerSync.Common.Tests.Client.Sync; - -using System.Threading.Tasks; - -using Microsoft.Data.Sqlite; -using Microsoft.Extensions.Logging; - -using PowerSync.Common.Client; -using PowerSync.Common.Client.Sync.Bucket; -using PowerSync.Common.DB.Schema; - -class TestData -{ - public static OplogEntry putAsset1_1 = OplogEntry.FromRow(new OplogEntryJSON - { - OpId = "1", - Op = new OpType(OpTypeEnum.PUT).ToJSON(), - ObjectType = "assets", - ObjectId = "O1", - Data = new { description = "bar" }, - Checksum = 1 - }); - - public static OplogEntry putAsset2_2 = OplogEntry.FromRow(new OplogEntryJSON - { - OpId = "2", - Op = new OpType(OpTypeEnum.PUT).ToJSON(), - ObjectType = "assets", - ObjectId = "O2", - Data = new { description = "bar" }, - Checksum = 2 - }); - - public static OplogEntry putAsset1_3 = OplogEntry.FromRow(new OplogEntryJSON - { - OpId = "3", - Op = new OpType(OpTypeEnum.PUT).ToJSON(), - ObjectType = "assets", - ObjectId = "O1", - Data = new { description = "bard" }, - Checksum = 3 - }); - - public static OplogEntry removeAsset1_4 = OplogEntry.FromRow(new OplogEntryJSON - { - OpId = "4", - Op = new OpType(OpTypeEnum.REMOVE).ToJSON(), - ObjectType = "assets", - ObjectId = "O1", - Checksum = 4 - }); - - 
public static OplogEntry removeAsset1_5 = OplogEntry.FromRow(new OplogEntryJSON - { - OpId = "5", - Op = new OpType(OpTypeEnum.REMOVE).ToJSON(), - ObjectType = "assets", - ObjectId = "O1", - Checksum = 5 - }); -} - -public class BucketStorageTests : IAsyncLifetime -{ - private PowerSyncDatabase db = default!; - private IBucketStorageAdapter bucketStorage = default!; - - public async Task InitializeAsync() - { - db = new PowerSyncDatabase(new PowerSyncDatabaseOptions - { - Database = new SQLOpenOptions { DbFilename = "powersync.db" }, - Schema = TestSchema.AppSchema, - }); - await db.Init(); - bucketStorage = new SqliteBucketStorage(db.Database, createLogger()); - - } - - public async Task DisposeAsync() - { - await db.DisconnectAndClear(); - await db.Close(); - bucketStorage.Close(); - } - - private record IdResult(string id); - private record DescriptionResult(string description); - private record AssetResult(string id, string description, string? make = null); - - static async Task ExpectAsset1_3(PowerSyncDatabase database) - { - var result = await database.GetAll("SELECT id, description, make FROM assets WHERE id = 'O1'"); - Assert.Equal(new AssetResult("O1", "bard", null), result[0]); - } - - static async Task ExpectNoAsset1(PowerSyncDatabase database) - { - var result = await database.GetAll("SELECT id, description, make FROM assets WHERE id = 'O1'"); - Assert.Empty(result); - } - - static async Task ExpectNoAssets(PowerSyncDatabase database) - { - var result = await database.GetAll("SELECT id, description, make FROM assets"); - Assert.Empty(result); - } - - async Task SyncLocalChecked(Checkpoint checkpoint) - { - var result = await bucketStorage.SyncLocalDatabase(checkpoint); - Assert.Equal(new SyncLocalDatabaseResult { Ready = true, CheckpointValid = true }, result); - } - - private ILogger createLogger() - { - ILoggerFactory loggerFactory = LoggerFactory.Create(builder => - { - builder.AddConsole(); // Enable console logging - 
builder.SetMinimumLevel(LogLevel.Debug); - }); - - return loggerFactory.CreateLogger("TestLogger"); - } - - // [Fact] - // public async Task BasicSetup() - // { - // await db.WaitForReady(); - // var initialBucketStates = await bucketStorage.GetBucketStates(); - // Assert.Empty(initialBucketStates); - // - // await bucketStorage.SaveSyncData(new SyncDataBatch([new SyncDataBucket("bucket1", [TestData.putAsset1_1, TestData.putAsset2_2, TestData.putAsset1_3], false)])); - // - // var bucketStates = await bucketStorage.GetBucketStates(); - // - // Assert.Collection(bucketStates, state => - // { - // Assert.Equal("bucket1", state.Bucket); - // Assert.Equal("3", state.OpId); - // }); - // - // await SyncLocalChecked(new Checkpoint - // { - // LastOpId = "3", - // Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 6 }] - // }); - // - // await ExpectAsset1_3(db); - // } - // - // [Fact] - // public async Task ShouldGetObjectFromMultipleBuckets() - // { - // await bucketStorage.SaveSyncData( - // new SyncDataBatch( - // [new SyncDataBucket("bucket1", [TestData.putAsset1_3], false), new SyncDataBucket("bucket2", [TestData.putAsset1_3], false)]) - // ); - // - // await SyncLocalChecked(new Checkpoint - // { - // LastOpId = "3", - // Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 3 }, new BucketChecksum { Bucket = "bucket2", Checksum = 3 }] - // }); - // - // await ExpectAsset1_3(db); - // } - // - // [Fact] - // public async Task ShouldPrioritizeLaterUpdates() - // { - // // Test behavior when the same object is present in multiple buckets. - // // In this case, there are two different versions in the different buckets. - // // While we should not get this with our server implementation, the client still specifies this behavior: - // // The largest op_id wins. 
- // - // await bucketStorage.SaveSyncData( - // new SyncDataBatch( - // [new SyncDataBucket("bucket1", [TestData.putAsset1_3], false), new SyncDataBucket("bucket2", [TestData.putAsset1_1], false)]) - // ); - // - // await SyncLocalChecked(new Checkpoint - // { - // LastOpId = "3", - // Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 3 }, new BucketChecksum { Bucket = "bucket2", Checksum = 1 }] - // }); - // - // await ExpectAsset1_3(db); - // } - // - // [Fact] - // public async Task ShouldIgnoreRemoveFromOneBucket() - // { - // // When we have 1 PUT and 1 REMOVE, the object must be kept.); - // await bucketStorage.SaveSyncData( - // new SyncDataBatch([new SyncDataBucket("bucket1", [TestData.putAsset1_3], false), new SyncDataBucket("bucket2", [TestData.putAsset1_3, TestData.removeAsset1_4], false)]) - // ); - // - // await SyncLocalChecked(new Checkpoint - // { - // LastOpId = "4", - // Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 3 }, new BucketChecksum { Bucket = "bucket2", Checksum = 7 }] - // }); - // - // await ExpectAsset1_3(db); - // } - // - // [Fact] - // public async Task ShouldRemoveWhenRemovedFromAllBuckets() - // { - // // When we only have REMOVE left for an object, it must be deleted. - // await bucketStorage.SaveSyncData( - // new SyncDataBatch([new SyncDataBucket("bucket1", [TestData.putAsset1_3, TestData.removeAsset1_5], false), new SyncDataBucket("bucket2", [TestData.putAsset1_3, TestData.removeAsset1_4], false)]) - // ); - // - // await SyncLocalChecked(new Checkpoint - // { - // LastOpId = "5", - // Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 8 }, new BucketChecksum { Bucket = "bucket2", Checksum = 7 }] - // }); - // - // await ExpectNoAssets(db); - // } - // - // [Fact] - // public async Task ShouldUseSubkeys() - // { - // // Subkeys cause this to be treated as a separate entity in the oplog, - // // but the same entity in the local database. 
- // - // var put4 = OplogEntry.FromRow(new OplogEntryJSON - // { - // OpId = "4", - // Op = new OpType(OpTypeEnum.PUT).ToJSON(), - // Subkey = "b", - // ObjectType = "assets", - // ObjectId = "O1", - // Data = new { description = "B" }, - // Checksum = 4 - // }); - // - // var remove5 = OplogEntry.FromRow(new OplogEntryJSON - // { - // OpId = "5", - // Op = new OpType(OpTypeEnum.REMOVE).ToJSON(), - // Subkey = "b", - // ObjectType = "assets", - // ObjectId = "O1", - // Checksum = 5 - // }); - // - // await bucketStorage.SaveSyncData( - // new SyncDataBatch([new SyncDataBucket("bucket1", [TestData.putAsset1_1, TestData.putAsset1_3, put4], false)]) - // ); - // - // await SyncLocalChecked(new Checkpoint - // { - // LastOpId = "4", - // Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 8 }] - // }); - // - // var result = await db.GetAll("SELECT id, description, make FROM assets WHERE id = 'O1'"); - // Assert.Equal(new AssetResult("O1", "B", null), result[0]); - // - // await bucketStorage.SaveSyncData(new SyncDataBatch([new SyncDataBucket("bucket1", [remove5], false)])); - // - // await SyncLocalChecked(new Checkpoint - // { - // LastOpId = "5", - // Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 13 }] - // }); - // - // await ExpectAsset1_3(db); - // } - // - // [Fact] - // public async Task ShouldFailChecksumValidation() - // { - // // Simple checksum validation - // await bucketStorage.SaveSyncData( - // new SyncDataBatch([new SyncDataBucket("bucket1", [TestData.putAsset1_1, TestData.putAsset2_2, TestData.putAsset1_3], false)]) - // ); - // - // var result = await bucketStorage.SyncLocalDatabase(new Checkpoint - // { - // LastOpId = "3", - // Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 10 }, new BucketChecksum { Bucket = "bucket2", Checksum = 1 }] - // }); - // - // var expected = new SyncLocalDatabaseResult - // { - // Ready = false, - // CheckpointValid = false, - // CheckpointFailures = ["bucket1", "bucket2"] - 
// }; - // - // Assert.Equal(expected, result); - // - // await ExpectNoAssets(db); - // } - // - // [Fact] - // public async Task ShouldDeleteBuckets() - // { - // await bucketStorage.SaveSyncData( - // new SyncDataBatch( - // [new SyncDataBucket("bucket1", [TestData.putAsset1_3], false), new SyncDataBucket("bucket2", [TestData.putAsset1_3], false)]) - // ); - // - // await bucketStorage.RemoveBuckets(["bucket2"]); - // // The delete only takes effect after syncLocal. - // - // await SyncLocalChecked(new Checkpoint - // { - // LastOpId = "3", - // Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 3 }] - // }); - // - // // Bucket is deleted, but object is still present in other buckets. - // await ExpectAsset1_3(db); - // - // await bucketStorage.RemoveBuckets(["bucket1"]); - // await SyncLocalChecked(new Checkpoint - // { - // LastOpId = "3", - // Buckets = [] - // }); - // - // // Both buckets deleted - object removed. - // await ExpectNoAssets(db); - // } - // - // [Fact] - // public async Task ShouldDeleteAndRecreateBuckets() - // { - // // Save some data - // await bucketStorage.SaveSyncData( - // new SyncDataBatch([new SyncDataBucket("bucket1", [TestData.putAsset1_1], false)]) - // ); - // - // // Delete the bucket - // await bucketStorage.RemoveBuckets(["bucket1"]); - // - // // Save some data again - // await bucketStorage.SaveSyncData( - // new SyncDataBatch([new SyncDataBucket("bucket1", [TestData.putAsset1_1, TestData.putAsset1_3], false)]) - // ); - // - // // Delete again - // await bucketStorage.RemoveBuckets(["bucket1"]); - // - // // Final save of data - // await bucketStorage.SaveSyncData( - // new SyncDataBatch([new SyncDataBucket("bucket1", [TestData.putAsset1_1, TestData.putAsset1_3], false)]) - // ); - // - // // Check that the data is there - // await SyncLocalChecked(new Checkpoint - // { - // LastOpId = "3", - // Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 4 }] - // }); - // - // await ExpectAsset1_3(db); - 
// - // // Now final delete - // await bucketStorage.RemoveBuckets(["bucket1"]); - // await SyncLocalChecked(new Checkpoint - // { - // LastOpId = "3", - // Buckets = [] - // }); - // - // await ExpectNoAssets(db); - // } - // - // [Fact] - // public async Task ShouldHandleMove() - // { - // await bucketStorage.SaveSyncData( - // new SyncDataBatch( - // [ - // new SyncDataBucket("bucket1", - // [ - // OplogEntry.FromRow(new OplogEntryJSON - // { - // OpId = "1", - // Op = new OpType(OpTypeEnum.MOVE).ToJSON(), - // Checksum = 1 - // }) - // ], false) - // ]) - // ); - // - // await bucketStorage.SaveSyncData( - // new SyncDataBatch([new SyncDataBucket("bucket1", [TestData.putAsset1_3], false)]) - // ); - // - // await SyncLocalChecked(new Checkpoint - // { - // LastOpId = "3", - // Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 4 }] - // }); - // - // await ExpectAsset1_3(db); - // } - // - // [Fact] - // public async Task ShouldHandleClear() - // { - // // Save some data - // await bucketStorage.SaveSyncData( - // new SyncDataBatch( - // [ - // new SyncDataBucket("bucket1", [TestData.putAsset1_1], false) - // ]) - // ); - // - // await SyncLocalChecked(new Checkpoint - // { - // LastOpId = "1", - // Buckets = - // [ - // new BucketChecksum { Bucket = "bucket1", Checksum = 1 } - // ] - // }); - // - // // CLEAR, then save new data - // await bucketStorage.SaveSyncData( - // new SyncDataBatch( - // [ - // new SyncDataBucket("bucket1", - // [ - // OplogEntry.FromRow(new OplogEntryJSON - // { - // OpId = "2", - // Op = new OpType(OpTypeEnum.CLEAR).ToJSON(), - // Checksum = 2 - // }), - // OplogEntry.FromRow(new OplogEntryJSON - // { - // OpId = "3", - // Op = new OpType(OpTypeEnum.PUT).ToJSON(), - // Checksum = 3, - // Data = TestData.putAsset2_2.Data, - // ObjectId = TestData.putAsset2_2.ObjectId, - // ObjectType = TestData.putAsset2_2.ObjectType - // }) - // ], false) - // ]) - // ); - // - // await SyncLocalChecked(new Checkpoint - // { - // LastOpId 
= "3", - // // 2 + 3. 1 is replaced with 2. - // Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 5 }] - // }); - // - // await ExpectNoAsset1(db); - // - // var result = await db.Get("SELECT id, description FROM assets WHERE id = 'O2'"); - // - // Assert.Equal(new AssetResult("O2", "bar"), result); - // } - // - // [Fact] - // public async Task UpdateWithNewTypes() - // { - // var dbName = "test-bucket-storage-new-types.db"; - // var powersync = new PowerSyncDatabase(new PowerSyncDatabaseOptions - // { - // Database = new SQLOpenOptions { DbFilename = dbName }, - // Schema = new Schema([]), - // }); - // await powersync.Init(); - // bucketStorage = new SqliteBucketStorage(powersync.Database); - // - // await bucketStorage.SaveSyncData( - // new SyncDataBatch( - // [new SyncDataBucket("bucket1", [TestData.putAsset1_1, TestData.putAsset2_2, TestData.putAsset1_3], false)]) - // ); - // - // await SyncLocalChecked(new Checkpoint - // { - // LastOpId = "4", - // Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 6 }] - // }); - // - // // Ensure an exception is thrown due to missing table - // await Assert.ThrowsAsync(async () => - // await powersync.GetAll("SELECT * FROM assets")); - // - // await powersync.Close(); - // - // powersync = new PowerSyncDatabase(new PowerSyncDatabaseOptions - // { - // Database = new SQLOpenOptions { DbFilename = dbName }, - // Schema = TestSchema.AppSchema, - // }); - // await powersync.Init(); - // - // await ExpectAsset1_3(powersync); - // - // await powersync.DisconnectAndClear(); - // await powersync.Close(); - // } - // - // [Fact] - // public async Task ShouldRemoveTypes() - // { - // var dbName = "test-bucket-storage-remove-types.db"; - // - // // Create database with initial schema - // var powersync = new PowerSyncDatabase(new PowerSyncDatabaseOptions - // { - // Database = new SQLOpenOptions { DbFilename = dbName }, - // Schema = TestSchema.AppSchema, - // }); - // - // await powersync.Init(); - // 
bucketStorage = new SqliteBucketStorage(powersync.Database); - // - // await bucketStorage.SaveSyncData( - // new SyncDataBatch( - // [ - // new SyncDataBucket("bucket1", [TestData.putAsset1_1, TestData.putAsset2_2, TestData.putAsset1_3], false) - // ]) - // ); - // - // await SyncLocalChecked(new Checkpoint - // { - // LastOpId = "3", - // Buckets = - // [ - // new BucketChecksum { Bucket = "bucket1", Checksum = 6 } - // ] - // }); - // - // await ExpectAsset1_3(powersync); - // await powersync.Close(); - // - // // Now open another instance with an empty schema - // powersync = new PowerSyncDatabase(new PowerSyncDatabaseOptions - // { - // Database = new SQLOpenOptions { DbFilename = dbName }, - // Schema = new Schema([]), - // }); - // await powersync.Init(); - // - // await Assert.ThrowsAsync(async () => - // await powersync.Execute("SELECT * FROM assets")); - // - // await powersync.Close(); - // - // // Reopen database with the original schema - // powersync = new PowerSyncDatabase(new PowerSyncDatabaseOptions - // { - // Database = new SQLOpenOptions { DbFilename = dbName }, - // Schema = TestSchema.AppSchema, - // }); - // await powersync.Init(); - // - // await ExpectAsset1_3(powersync); - // - // await powersync.DisconnectAndClear(); - // await powersync.Close(); - // } - // - // private record OplogStats(string Type, string Id, int Count); - // - // [Fact] - // public async Task ShouldCompact() - // { - // // Test compacting behavior. - // // This test relies heavily on internals and will have to be updated when the compact implementation is updated. 
- // - // await bucketStorage.SaveSyncData( - // new SyncDataBatch( - // [ - // new SyncDataBucket("bucket1", [TestData.putAsset1_1, TestData.putAsset2_2, TestData.removeAsset1_4], false) - // ]) - // ); - // - // await SyncLocalChecked(new Checkpoint - // { - // LastOpId = "4", - // WriteCheckpoint = "4", - // Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 7 }] - // }); - // - // await bucketStorage.ForceCompact(); - // - // await SyncLocalChecked(new Checkpoint - // { - // LastOpId = "4", - // WriteCheckpoint = "4", - // Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 7 }] - // }); - // - // var stats = await db.GetAll( - // "SELECT row_type as Type, row_id as Id, count(*) as Count FROM ps_oplog GROUP BY row_type, row_id ORDER BY row_type, row_id" - // ); - // - // var expectedStats = new List { new("assets", "O2", 1) }; - // - // Assert.Equal(expectedStats, stats); - // } - // - // [Fact] - // public async Task ShouldNotSyncLocalDbWithPendingCrud_ServerRemoved() - // { - // await bucketStorage.SaveSyncData( - // new SyncDataBatch( - // [ - // new SyncDataBucket("bucket1", [TestData.putAsset1_1, TestData.putAsset2_2, TestData.putAsset1_3], false) - // ]) - // ); - // - // await SyncLocalChecked(new Checkpoint - // { - // LastOpId = "3", - // Buckets = - // [ - // new BucketChecksum { Bucket = "bucket1", Checksum = 6 } - // ] - // }); - // - // // Local save - // await db.Execute("INSERT INTO assets(id) VALUES(?)", ["O3"]); - // - // var insertedResult = await db.GetAll("SELECT id FROM assets WHERE id = 'O3'"); - // Assert.Equal(new IdResult("O3"), insertedResult[0]); - // - // // At this point, we have data in the CRUD table and are not able to sync the local DB. 
- // var result = await bucketStorage.SyncLocalDatabase(new Checkpoint - // { - // LastOpId = "3", - // WriteCheckpoint = "3", - // Buckets = - // [ - // new BucketChecksum { Bucket = "bucket1", Checksum = 6 } - // ] - // }); - // - // var expectedResult = new SyncLocalDatabaseResult - // { - // Ready = false, - // CheckpointValid = true - // }; - // - // Assert.Equal(expectedResult, result); - // - // var batch = await bucketStorage.GetCrudBatch(); - // if (batch != null) - // { - // await batch.Complete(""); - // } - // - // await bucketStorage.UpdateLocalTarget(() => Task.FromResult("4")); - // - // // At this point, the data has been uploaded but not synced back yet. - // var result3 = await bucketStorage.SyncLocalDatabase(new Checkpoint - // { - // LastOpId = "3", - // WriteCheckpoint = "3", - // Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 6 }] - // }); - // - // Assert.Equal(expectedResult, result3); - // - // // The data must still be present locally. - // var stillPresentResult = await db.GetAll("SELECT id FROM assets WHERE id = 'O3'"); - // Assert.Equal(new IdResult("O3"), stillPresentResult[0]); - // - // await bucketStorage.SaveSyncData( - // new SyncDataBatch( - // [ - // new SyncDataBucket("bucket1", Array.Empty(), false) - // ]) - // ); - // - // // Now we have synced the data back (or lack of data in this case), - // // so we can do a local sync. - // await SyncLocalChecked(new Checkpoint - // { - // LastOpId = "5", - // WriteCheckpoint = "5", - // Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 6 }] - // }); - // - // // Since the object was not in the sync response, it is deleted. 
- // var deletedResult = await db.GetAll("SELECT id FROM assets WHERE id = 'O3'"); - // Assert.Empty(deletedResult); - // } - // - // [Fact] - // public async Task ShouldNotSyncLocalDbWithPendingCrud_WhenMoreCrudIsAdded_1() - // { - // await bucketStorage.SaveSyncData( - // new SyncDataBatch( - // [ - // new SyncDataBucket("bucket1", [TestData.putAsset1_1, TestData.putAsset2_2, TestData.putAsset1_3], false) - // ]) - // ); - // - // await SyncLocalChecked(new Checkpoint - // { - // LastOpId = "3", - // WriteCheckpoint = "3", - // Buckets = - // [ - // new BucketChecksum { Bucket = "bucket1", Checksum = 6 } - // ] - // }); - // - // // Local save - // await db.Execute("INSERT INTO assets(id) VALUES(?)", ["O3"]); - // - // var batch = await bucketStorage.GetCrudBatch(); - // if (batch != null) - // { - // await batch.Complete(""); - // } - // - // await bucketStorage.UpdateLocalTarget(() => Task.FromResult("4")); - // - // var result3 = await bucketStorage.SyncLocalDatabase(new Checkpoint - // { - // LastOpId = "3", - // WriteCheckpoint = "3", - // Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 6 }] - // }); - // - // var expectedResult = new SyncLocalDatabaseResult - // { - // Ready = false, - // CheckpointValid = true - // }; - // - // Assert.Equal(expectedResult, result3); - // - // await bucketStorage.SaveSyncData( - // new SyncDataBatch( - // [ - // new SyncDataBucket("bucket1", Array.Empty(), false) - // ]) - // ); - // - // // Add more data before SyncLocalDatabase. 
- // await db.Execute("INSERT INTO assets(id) VALUES(?)", ["O4"]); - // - // var result4 = await bucketStorage.SyncLocalDatabase(new Checkpoint - // { - // LastOpId = "5", - // WriteCheckpoint = "5", - // Buckets = - // [ - // new BucketChecksum { Bucket = "bucket1", Checksum = 6 } - // ] - // }); - // - // Assert.Equal(expectedResult, result4); - // } - // - // [Fact] - // public async Task ShouldNotSyncLocalDbWithPendingCrud_WhenMoreCrudIsAdded_2() - // { - // await bucketStorage.SaveSyncData( - // new SyncDataBatch( - // [ - // new SyncDataBucket("bucket1", [TestData.putAsset1_1, TestData.putAsset2_2, TestData.putAsset1_3], false) - // ]) - // ); - // - // await SyncLocalChecked(new Checkpoint - // { - // LastOpId = "3", - // WriteCheckpoint = "3", - // Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 6 }] - // }); - // - // // Local save - // await db.Execute("INSERT INTO assets(id) VALUES(?)", ["O3"]); - // - // var batch = await bucketStorage.GetCrudBatch(); - // - // // Add more data before calling complete() - // await db.Execute("INSERT INTO assets(id) VALUES(?)", ["O4"]); - // if (batch != null) - // { - // await batch.Complete(""); - // } - // - // await bucketStorage.UpdateLocalTarget(() => Task.FromResult("4")); - // - // await bucketStorage.SaveSyncData( - // new SyncDataBatch( - // [ - // new SyncDataBucket("bucket1", [], false) - // ]) - // ); - // - // var result4 = await bucketStorage.SyncLocalDatabase(new Checkpoint - // { - // LastOpId = "5", - // WriteCheckpoint = "5", - // Buckets = - // [ - // new BucketChecksum { Bucket = "bucket1", Checksum = 6 } - // ] - // }); - // - // var expected = new SyncLocalDatabaseResult - // { - // Ready = false, - // CheckpointValid = true - // }; - // - // Assert.Equal(expected, result4); - // } - // - // [Fact] - // public async Task ShouldNotSyncLocalDbWithPendingCrud_UpdateOnServer() - // { - // await bucketStorage.SaveSyncData( - // new SyncDataBatch( - // [ - // new SyncDataBucket("bucket1", 
[TestData.putAsset1_1, TestData.putAsset2_2, TestData.putAsset1_3], false) - // ]) - // ); - // - // await SyncLocalChecked(new Checkpoint - // { - // LastOpId = "3", - // WriteCheckpoint = "3", - // Buckets = - // [ - // new BucketChecksum { Bucket = "bucket1", Checksum = 6 } - // ] - // }); - // - // // Local save - // await db.Execute("INSERT INTO assets(id) VALUES(?)", ["O3"]); - // - // var batch = await bucketStorage.GetCrudBatch(); - // if (batch != null) - // { - // await batch.Complete(""); - // } - // - // await bucketStorage.UpdateLocalTarget(() => Task.FromResult("4")); - // - // await bucketStorage.SaveSyncData( - // new SyncDataBatch( - // [ - // new SyncDataBucket("bucket1", - // [ - // OplogEntry.FromRow(new OplogEntryJSON - // { - // OpId = "5", - // Op = new OpType(OpTypeEnum.PUT).ToJSON(), - // ObjectType = "assets", - // ObjectId = "O3", - // Checksum = 5, - // Data = new { description = "server updated" } - // }) - // ], false) - // ]) - // ); - // - // await SyncLocalChecked(new Checkpoint - // { - // LastOpId = "5", - // WriteCheckpoint = "5", - // Buckets = - // [ - // new BucketChecksum { Bucket = "bucket1", Checksum = 11 } - // ] - // }); - // - // var updatedResult = await db.GetAll("SELECT description FROM assets WHERE id = 'O3'"); - // Assert.Equal(new DescriptionResult("server updated"), updatedResult[0]); - // } - // - // [Fact] - // public async Task ShouldRevertAFailingInsert() - // { - // await bucketStorage.SaveSyncData( - // new SyncDataBatch( - // [ - // new SyncDataBucket("bucket1", [TestData.putAsset1_1, TestData.putAsset2_2, TestData.putAsset1_3], false) - // ]) - // ); - // - // await SyncLocalChecked(new Checkpoint - // { - // LastOpId = "3", - // WriteCheckpoint = "3", - // Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 6 }] - // }); - // - // // Local insert, later rejected by server - // await db.Execute("INSERT INTO assets(id, description) VALUES(?, ?)", ["O3", "inserted"]); - // - // var batch = await 
bucketStorage.GetCrudBatch(); - // if (batch != null) - // { - // await batch.Complete(""); - // } - // - // await bucketStorage.UpdateLocalTarget(() => Task.FromResult("4")); - // - // var insertedResult = await db.GetAll("SELECT description FROM assets WHERE id = 'O3'"); - // Assert.Equal(new DescriptionResult("inserted"), insertedResult[0]); - // - // await SyncLocalChecked(new Checkpoint - // { - // LastOpId = "3", - // WriteCheckpoint = "4", - // Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 6 }] - // }); - // - // var revertedResult = await db.GetAll("SELECT description FROM assets WHERE id = 'O3'"); - // Assert.Empty(revertedResult); - // } - // - // [Fact] - // public async Task ShouldRevertAFailingDelete() - // { - // await bucketStorage.SaveSyncData( - // new SyncDataBatch( - // [ - // new SyncDataBucket("bucket1", [TestData.putAsset1_1, TestData.putAsset2_2, TestData.putAsset1_3], false) - // ]) - // ); - // - // await SyncLocalChecked(new Checkpoint - // { - // LastOpId = "3", - // WriteCheckpoint = "3", - // Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 6 }] - // }); - // - // // Local delete, later rejected by server - // await db.Execute("DELETE FROM assets WHERE id = ?", ["O2"]); - // - // var deletedResult = await db.GetAll("SELECT description FROM assets WHERE id = 'O2'"); - // Assert.Empty(deletedResult); // Ensure the record is deleted locally - // - // // Simulate a permissions error when uploading - data should be preserved - // var batch = await bucketStorage.GetCrudBatch(); - // if (batch != null) - // { - // await batch.Complete(""); - // } - // - // await bucketStorage.UpdateLocalTarget(() => Task.FromResult("4")); - // - // await SyncLocalChecked(new Checkpoint - // { - // LastOpId = "3", - // WriteCheckpoint = "4", - // Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 6 }] - // }); - // - // var revertedResult = await db.GetAll("SELECT description FROM assets WHERE id = 'O2'"); - // 
Assert.Equal(new DescriptionResult("bar"), revertedResult[0]); - // } - // - // [Fact] - // public async Task ShouldRevertAFailingUpdate() - // { - // await bucketStorage.SaveSyncData( - // new SyncDataBatch( - // [ - // new SyncDataBucket("bucket1", [TestData.putAsset1_1, TestData.putAsset2_2, TestData.putAsset1_3], false) - // ]) - // ); - // - // await SyncLocalChecked(new Checkpoint - // { - // LastOpId = "3", - // WriteCheckpoint = "3", - // Buckets = - // [ - // new BucketChecksum { Bucket = "bucket1", Checksum = 6 } - // ] - // }); - // - // // Local update, later rejected by server - // await db.Execute("UPDATE assets SET description = ? WHERE id = ?", ["updated", "O2"]); - // - // var updatedResult = await db.GetAll("SELECT description FROM assets WHERE id = 'O2'"); - // Assert.Equal(new DescriptionResult("updated"), updatedResult[0]); - // - // // Simulate a permissions error when uploading - data should be preserved - // var batch = await bucketStorage.GetCrudBatch(); - // if (batch != null) - // { - // await batch.Complete(""); - // } - // - // await bucketStorage.UpdateLocalTarget(async () => await Task.FromResult("4")); - // - // await SyncLocalChecked(new Checkpoint - // { - // LastOpId = "3", - // WriteCheckpoint = "4", - // Buckets = - // [ - // new BucketChecksum { Bucket = "bucket1", Checksum = 6 } - // ] - // }); - // - // var revertedResult = await db.GetAll("SELECT description FROM assets WHERE id = 'O2'"); - // Assert.Equal(new DescriptionResult("bar"), revertedResult[0]); - // } -} \ No newline at end of file From 80b7dd37c43d2a53d612d9a86988afd1e4f42b68 Mon Sep 17 00:00:00 2001 From: Christiaan Landman Date: Thu, 22 May 2025 14:47:54 +0200 Subject: [PATCH 07/12] Updated WaitForFirstSync to support priorities. Removing old sync from remote. 
--- .../Client/PowerSyncDatabase.cs | 65 ++++++++-- .../Client/Sync/Stream/Remote.cs | 65 +--------- .../Stream/StreamingSyncImplementation.cs | 8 +- .../Client/Sync/Stream/StreamingSyncTypes.cs | 115 +----------------- 4 files changed, 58 insertions(+), 195 deletions(-) diff --git a/PowerSync/PowerSync.Common/Client/PowerSyncDatabase.cs b/PowerSync/PowerSync.Common/Client/PowerSyncDatabase.cs index 6bab7dc..8348398 100644 --- a/PowerSync/PowerSync.Common/Client/PowerSyncDatabase.cs +++ b/PowerSync/PowerSync.Common/Client/PowerSyncDatabase.cs @@ -158,10 +158,37 @@ public async Task WaitForReady() await isReadyTask; } + + public class PrioritySyncRequest + { + public CancellationToken? Token { get; set; } + public int? Priority { get; set; } + } - public async Task WaitForFirstSync(CancellationToken? cancellationToken = null) + /// + /// Wait for the first sync operation to complete. + /// + /// + /// An object providing a cancellation token and a priority target. + /// When a priority target is set, the task may complete when all buckets with the given (or higher) + /// priorities have been synchronized. This can be earlier than a complete sync. + /// + /// A task which will complete once the first full sync has completed. + public async Task WaitForFirstSync(PrioritySyncRequest? request = null) { - if (CurrentStatus.HasSynced == true) + var priority = request?.Priority ?? null; + var cancellationToken = request?.Token ?? null; + + bool StatusMatches(SyncStatus status) + { + if (priority == null) + { + return status.HasSynced == true; + } + return status.StatusForPriority(priority.Value).HasSynced == true; + } + + if (StatusMatches(CurrentStatus)) { return; } @@ -169,11 +196,11 @@ public async Task WaitForFirstSync(CancellationToken? 
cancellationToken = null) var tcs = new TaskCompletionSource(); var cts = new CancellationTokenSource(); - var _ = Task.Run(() => + _ = Task.Run(() => { foreach (var update in Listen(cts.Token)) { - if (update.StatusChanged?.HasSynced == true) + if (update.StatusChanged != null && StatusMatches(update.StatusChanged!)) { cts.Cancel(); tcs.SetResult(true); @@ -230,7 +257,7 @@ private async Task LoadVersion() } } - private record LastSyncedResult(int? priority, string? last_synced_at); + private record LastSyncedResult(int priority, string? last_synced_at); protected async Task UpdateHasSynced() { @@ -239,6 +266,7 @@ protected async Task UpdateHasSynced() ); DateTime? lastCompleteSync = null; + List priorityStatuses = []; foreach (var result in results) { @@ -249,17 +277,28 @@ protected async Task UpdateHasSynced() // This lowest-possible priority represents a complete sync. lastCompleteSync = parsedDate; } + else + { + priorityStatuses.Add(new DB.Crud.SyncPriorityStatus + { + Priority = result.priority, + HasSynced = true, + LastSyncedAt = parsedDate + }); + } } var hasSynced = lastCompleteSync != null; - if (hasSynced != CurrentStatus.HasSynced) + var updatedStatus = new SyncStatus(new SyncStatusOptions(CurrentStatus.Options) { - CurrentStatus = new SyncStatus(new SyncStatusOptions(CurrentStatus.Options) - { - HasSynced = hasSynced, - LastSyncedAt = lastCompleteSync, - }); - + HasSynced = hasSynced, + PriorityStatusEntries = priorityStatuses.ToArray(), + LastSyncedAt = lastCompleteSync, + }); + + if (!updatedStatus.IsEqual(CurrentStatus)) + { + CurrentStatus = updatedStatus; Emit(new PowerSyncDBEvent { StatusChanged = CurrentStatus }); } } @@ -536,7 +575,7 @@ await tx.Execute( } /// - /// Get an unique client id for this database. + /// Get a unique client id for this database. /// /// The id is not reset when the database is cleared, only when the database is deleted. 
/// diff --git a/PowerSync/PowerSync.Common/Client/Sync/Stream/Remote.cs b/PowerSync/PowerSync.Common/Client/Sync/Stream/Remote.cs index f359974..589bc36 100644 --- a/PowerSync/PowerSync.Common/Client/Sync/Stream/Remote.cs +++ b/PowerSync/PowerSync.Common/Client/Sync/Stream/Remote.cs @@ -16,18 +16,13 @@ namespace PowerSync.Common.Client.Sync.Stream; public class SyncStreamOptions { public string Path { get; set; } = ""; + public StreamingSyncRequest Data { get; set; } = new(); public Dictionary Headers { get; set; } = new(); public CancellationToken CancellationToken { get; set; } = CancellationToken.None; } -public class RequestDetails -{ - public string Url { get; set; } = ""; - public Dictionary Headers { get; set; } = new(); -} - public class Remote { private readonly HttpClient httpClient; @@ -120,34 +115,6 @@ public async Task Get(string path, Dictionary? headers = n var responseData = await response.Content.ReadAsStringAsync(); return JsonConvert.DeserializeObject(responseData)!; } - - public async IAsyncEnumerable OldPostStream(SyncStreamOptions options) - { - using var requestMessage = await BuildRequest(HttpMethod.Post, options.Path, options.Data, options.Headers); - using var response = await httpClient.SendAsync(requestMessage, HttpCompletionOption.ResponseHeadersRead, options.CancellationToken); - - if (response.Content == null) - { - throw new HttpRequestException($"HTTP {response.StatusCode}: No content"); - } - else - if (!response.IsSuccessStatusCode) - { - var errorText = await response.Content.ReadAsStringAsync(); - throw new HttpRequestException($"HTTP {response.StatusCode}: {errorText}"); - } - - var stream = await response.Content.ReadAsStreamAsync(); - - // Read NDJSON stream - using var reader = new StreamReader(stream, Encoding.UTF8); - string? 
line; - - while ((line = await reader.ReadLineAsync()) != null) - { - yield return ParseStreamingSyncLine(JObject.Parse(line)); - } - } /// /// Posts to the stream endpoint and returns a raw NDJSON stream that can be read line by line. @@ -175,35 +142,7 @@ public async Task PostStreamRaw(SyncStreamOptions options) return await response.Content.ReadAsStreamAsync(); } - public static StreamingSyncLine? ParseStreamingSyncLine(JObject json) - { - // Determine the type based on available keys - if (json.ContainsKey("checkpoint")) - { - return json.ToObject(); - } - else if (json.ContainsKey("checkpoint_diff")) - { - return json.ToObject(); - } - else if (json.ContainsKey("checkpoint_complete")) - { - return json.ToObject(); - } - else if (json.ContainsKey("data")) - { - return json.ToObject(); - } - else if (json.ContainsKey("token_expires_in")) - { - return json.ToObject(); - } - else - { - return null; - } - } - + private async Task BuildRequest(HttpMethod method, string path, object? data = null, Dictionary? additionalHeaders = null) { var credentials = await GetCredentials(); diff --git a/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncImplementation.cs b/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncImplementation.cs index 0ff607c..16aaebc 100644 --- a/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncImplementation.cs +++ b/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncImplementation.cs @@ -332,10 +332,10 @@ protected async Task StreamingSync(CancellationToken? signal, PowerSyncConnectio }); } - protected async Task StreamingSyncIteration(CancellationToken signal, + protected async Task StreamingSyncIteration(CancellationToken signal, PowerSyncConnectionOptions? 
options) { - return await locks.ObtainLock(new LockOptions + await locks.ObtainLock(new LockOptions { Type = LockType.SYNC, Token = signal, @@ -473,11 +473,9 @@ async Task HandleInstruction(Instruction instruction) // DownloadProgress = info.Downloading?.Buckets } }, - // TODO handle errors later? new UpdateSyncStatusOptions { - // ClearDownloadError = true, - // ClearUploadError = true + ClearDownloadError = true, } ); diff --git a/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncTypes.cs b/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncTypes.cs index 6f78059..a7934fb 100644 --- a/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncTypes.cs +++ b/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncTypes.cs @@ -4,54 +4,6 @@ namespace PowerSync.Common.Client.Sync.Stream; using PowerSync.Common.DB.Crud; using Newtonsoft.Json; -public class ContinueCheckpointRequest -{ - [JsonProperty("buckets")] - public List Buckets { get; set; } = new(); - - [JsonProperty("checkpoint_token")] - public string CheckpointToken { get; set; } = ""; - - [JsonProperty("limit")] - public int? Limit { get; set; } -} - -public class SyncNewCheckpointRequest -{ - [JsonProperty("buckets")] - public List? Buckets { get; set; } - - [JsonProperty("request_checkpoint")] - public RequestCheckpoint RequestCheckpoint { get; set; } = new(); - - [JsonProperty("limit")] - public int? Limit { get; set; } -} - -public class RequestCheckpoint -{ - [JsonProperty("include_data")] - public bool IncludeData { get; set; } - - [JsonProperty("include_checksum")] - public bool IncludeChecksum { get; set; } -} - -public class SyncResponse -{ - [JsonProperty("data")] - public List? Data { get; set; } - - [JsonProperty("has_more")] - public bool HasMore { get; set; } - - [JsonProperty("checkpoint_token")] - public string? CheckpointToken { get; set; } - - [JsonProperty("checkpoint")] - public Checkpoint? 
Checkpoint { get; set; } -} - public class StreamingSyncRequest { [JsonProperty("buckets")] @@ -80,69 +32,4 @@ public class BucketRequest [JsonProperty("after")] public string After { get; set; } = ""; -} - -public abstract class StreamingSyncLine { } - -public class StreamingSyncCheckpoint : StreamingSyncLine -{ - [JsonProperty("checkpoint")] - public Checkpoint Checkpoint { get; set; } = new(); -} - -public class StreamingSyncCheckpointDiff : StreamingSyncLine -{ - [JsonProperty("checkpoint_diff")] - public CheckpointDiff CheckpointDiff { get; set; } = new(); -} - -public class CheckpointDiff -{ - [JsonProperty("last_op_id")] - public string LastOpId { get; set; } = ""; - - [JsonProperty("updated_buckets")] - public List UpdatedBuckets { get; set; } = new(); - - [JsonProperty("removed_buckets")] - public List RemovedBuckets { get; set; } = new(); - - [JsonProperty("write_checkpoint")] - public string WriteCheckpoint { get; set; } = ""; -} - -public class StreamingSyncDataJSON : StreamingSyncLine -{ - [JsonProperty("data")] - public SyncDataBucketJSON Data { get; set; } = new(); -} - -public class StreamingSyncCheckpointComplete : StreamingSyncLine -{ - [JsonProperty("checkpoint_complete")] - public CheckpointComplete CheckpointComplete { get; set; } = new(); -} - -public class CheckpointComplete -{ - [JsonProperty("last_op_id")] - public string LastOpId { get; set; } = ""; -} - -public class StreamingSyncKeepalive : StreamingSyncLine -{ - [JsonProperty("token_expires_in")] - public int? TokenExpiresIn { get; set; } -} - -public class CrudRequest -{ - [JsonProperty("data")] - public List Data { get; set; } = new(); -} - -public class CrudResponse -{ - [JsonProperty("checkpoint")] - public string? Checkpoint { get; set; } -} +} \ No newline at end of file From f7d6db0c388a144c4109fc6a820102aea0484517 Mon Sep 17 00:00:00 2001 From: Christiaan Landman Date: Thu, 22 May 2025 15:46:16 +0200 Subject: [PATCH 08/12] Added sync progress definition. 
--- .../Stream/StreamingSyncImplementation.cs | 2 +- .../PowerSync.Common/DB/Crud/SyncProgress.cs | 113 +++++++++++++++++- 2 files changed, 113 insertions(+), 2 deletions(-) diff --git a/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncImplementation.cs b/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncImplementation.cs index 16aaebc..b2a0415 100644 --- a/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncImplementation.cs +++ b/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncImplementation.cs @@ -493,7 +493,7 @@ async Task HandleInstruction(Instruction instruction) Options.Remote.InvalidateCredentials(); break; case FetchCredentials: - await Options.Remote.PrefetchCredentials(); + Options.Remote.InvalidateCredentials(); break; case CloseSyncStream: CancellationTokenSource?.Cancel(); diff --git a/PowerSync/PowerSync.Common/DB/Crud/SyncProgress.cs b/PowerSync/PowerSync.Common/DB/Crud/SyncProgress.cs index 03e2d31..b83c96b 100644 --- a/PowerSync/PowerSync.Common/DB/Crud/SyncProgress.cs +++ b/PowerSync/PowerSync.Common/DB/Crud/SyncProgress.cs @@ -1,6 +1,117 @@ namespace PowerSync.Common.DB.Crud; -public class SyncProgress +/// +/// Provides realtime progress on how PowerSync is downloading rows. +/// +/// The reported progress always reflects the status towards the end of a sync iteration (after +/// which a consistent snapshot of all buckets is available locally). +/// +/// In rare cases (in particular, when a [compacting](https://docs.powersync.com/usage/lifecycle-maintenance/compacting-buckets) +/// operation takes place between syncs), it's possible for the returned numbers to be slightly +/// inaccurate. For this reason, the sync progress should be seen as an approximation of progress. +/// The information returned is good enough to build progress bars, but not exact enough to track +/// individual download counts. 
+/// +/// Also note that data is downloaded in bulk, which means that individual counters are unlikely +/// to be updated one-by-one. +/// +public class SyncProgress : ProgressWithOperations { public static readonly int FULL_SYNC_PRIORITY = 2147483647; + + private InternalProgressInformation internalProgress; + + public SyncProgress(InternalProgressInformation progress) + { + this.internalProgress = progress; + var untilCompletion = UntilPriority(FULL_SYNC_PRIORITY); + + TotalOperations = untilCompletion.TotalOperations; + DownloadedOperations = untilCompletion.DownloadedOperations; + DownloadedFraction = untilCompletion.DownloadedFraction; + } + + private ProgressWithOperations UntilPriority(int priority) + { + var total = 0; + var downloaded = 0; + + foreach (var progress in internalProgress.Buckets.Values) + { + // Include higher-priority buckets, which are represented by lower numbers. + if (progress.Priority <= priority) + { + downloaded += progress.SinceLast; + total += progress.TargetCount - progress.AtLast; + } + } + + return new ProgressWithOperations + { + TotalOperations = total, + DownloadedOperations = downloaded, + DownloadedFraction = total == 0 ? 1.0 : (double)downloaded / total + }; + } +} + +/// +/// Represents progress information for sync operations. +/// +public class InternalProgressInformation +{ + /// + /// Dictionary mapping bucket names to their progress information. + /// + public Dictionary Buckets { get; set; } = new(); +} + +/// +/// Represents progress information for a single bucket. 
+/// +public class BucketProgressInfo +{ + /// + /// Priority of the associated buckets + /// + public int Priority { get; set; } + + /// + /// Total ops at last completed sync, or 0 + /// + public int AtLast { get; set; } + + /// + /// Total ops since the last completed sync + /// + public int SinceLast { get; set; } + + /// + /// Total opcount for next checkpoint as indicated by service + /// + public int TargetCount { get; set; } +} + +/// +/// Information about a progressing download made by the PowerSync SDK. +/// +/// +public class ProgressWithOperations +{ + /// + /// The total number of operations to download for the current sync iteration to complete. + /// + public int TotalOperations { get; set; } + + /// + /// The number of operations that have already been downloaded. + /// + public int DownloadedOperations { get; set; } + + /// + /// This will be a number between 0.0 and 1.0 (inclusive). + /// + /// When this number reaches 1.0, all changes have been received from the sync service. + /// + public double DownloadedFraction { get; set; } } \ No newline at end of file From a2dae0a81546a3451ce277ed758a343a0d0d1c7d Mon Sep 17 00:00:00 2001 From: Christiaan Landman Date: Mon, 26 May 2025 14:48:36 +0200 Subject: [PATCH 09/12] Capturing sync progress received by core extension. 
--- .../Stream/StreamingSyncImplementation.cs | 4 +- .../PowerSync.Common/DB/Crud/SyncProgress.cs | 52 +++---------------- .../PowerSync.Common/DB/Crud/SyncStatus.cs | 28 +++++++++- 3 files changed, 36 insertions(+), 48 deletions(-) diff --git a/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncImplementation.cs b/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncImplementation.cs index b2a0415..1639ca1 100644 --- a/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncImplementation.cs +++ b/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncImplementation.cs @@ -469,8 +469,7 @@ async Task HandleInstruction(Instruction instruction) DataFlow = new SyncDataFlowStatus { Downloading = info.Downloading != null, - // TODO CL - // DownloadProgress = info.Downloading?.Buckets + DownloadProgress = info.Downloading?.Buckets } }, new UpdateSyncStatusOptions @@ -645,6 +644,7 @@ protected void UpdateSyncStatus(SyncStatusOptions options, UpdateSyncStatusOptio UploadError = updateOptions?.ClearUploadError == true ? null : options.DataFlow?.UploadError ?? SyncStatus.DataFlowStatus.UploadError, + DownloadProgress = options.DataFlow?.DownloadProgress ?? SyncStatus.DataFlowStatus.DownloadProgress, }, PriorityStatusEntries = options.PriorityStatusEntries ?? 
SyncStatus.PriorityStatusEntries }); diff --git a/PowerSync/PowerSync.Common/DB/Crud/SyncProgress.cs b/PowerSync/PowerSync.Common/DB/Crud/SyncProgress.cs index b83c96b..7931b8a 100644 --- a/PowerSync/PowerSync.Common/DB/Crud/SyncProgress.cs +++ b/PowerSync/PowerSync.Common/DB/Crud/SyncProgress.cs @@ -1,3 +1,5 @@ +using PowerSync.Common.Client.Sync.Stream; + namespace PowerSync.Common.DB.Crud; /// @@ -18,12 +20,11 @@ namespace PowerSync.Common.DB.Crud; public class SyncProgress : ProgressWithOperations { public static readonly int FULL_SYNC_PRIORITY = 2147483647; - - private InternalProgressInformation internalProgress; - - public SyncProgress(InternalProgressInformation progress) + protected Dictionary InternalProgress { get; } + + public SyncProgress(Dictionary progress) { - this.internalProgress = progress; + this.InternalProgress = progress; var untilCompletion = UntilPriority(FULL_SYNC_PRIORITY); TotalOperations = untilCompletion.TotalOperations; @@ -31,12 +32,12 @@ public SyncProgress(InternalProgressInformation progress) DownloadedFraction = untilCompletion.DownloadedFraction; } - private ProgressWithOperations UntilPriority(int priority) + public ProgressWithOperations UntilPriority(int priority) { var total = 0; var downloaded = 0; - foreach (var progress in internalProgress.Buckets.Values) + foreach (var progress in InternalProgress.Values) { // Include higher-priority buckets, which are represented by lower numbers. if (progress.Priority <= priority) @@ -55,43 +56,6 @@ private ProgressWithOperations UntilPriority(int priority) } } -/// -/// Represents progress information for sync operations. -/// -public class InternalProgressInformation -{ - /// - /// Dictionary mapping bucket names to their progress information. - /// - public Dictionary Buckets { get; set; } = new(); -} - -/// -/// Represents progress information for a single bucket. 
-/// -public class BucketProgressInfo -{ - /// - /// Priority of the associated buckets - /// - public int Priority { get; set; } - - /// - /// Total ops at last completed sync, or 0 - /// - public int AtLast { get; set; } - - /// - /// Total ops since the last completed sync - /// - public int SinceLast { get; set; } - - /// - /// Total opcount for next checkpoint as indicated by service - /// - public int TargetCount { get; set; } -} - /// /// Information about a progressing download made by the PowerSync SDK. /// diff --git a/PowerSync/PowerSync.Common/DB/Crud/SyncStatus.cs b/PowerSync/PowerSync.Common/DB/Crud/SyncStatus.cs index fcdd838..7416816 100644 --- a/PowerSync/PowerSync.Common/DB/Crud/SyncStatus.cs +++ b/PowerSync/PowerSync.Common/DB/Crud/SyncStatus.cs @@ -1,5 +1,6 @@ namespace PowerSync.Common.DB.Crud; +using PowerSync.Common.Client.Sync.Stream; using Newtonsoft.Json; public class SyncDataFlowStatus @@ -21,12 +22,18 @@ public class SyncDataFlowStatus /// [JsonProperty("uploadError")] public Exception? UploadError { get; set; } = null; + + + /// + /// Internal information about how far we are downloading operations in buckets. + /// + public Dictionary? DownloadProgress { get; set; } = null; } public class SyncPriorityStatus { [JsonProperty("uploading")] public int Priority { get; set; } - + [JsonProperty("lastSyncedAt")] public DateTime? LastSyncedAt { get; set; } [JsonProperty("hasSynced")] public bool? HasSynced { get; set; } @@ -34,7 +41,9 @@ public class SyncPriorityStatus public class SyncStatusOptions { - public SyncStatusOptions() {} + public SyncStatusOptions() + { + } public SyncStatusOptions(SyncStatusOptions options) { @@ -93,6 +102,21 @@ public class SyncStatus(SyncStatusOptions options) .OrderBy(entry => entry.Priority) .ToArray(); + /// + /// A realtime progress report on how many operations have been downloaded and + /// how many are necessary in total to complete the next sync iteration. + /// + public SyncProgress? 
DownloadProgress() + { + var internalProgress = Options.DataFlow?.DownloadProgress; + if (internalProgress == null) + { + return null; + } + + return new SyncProgress(internalProgress); + } + /// /// Reports the sync status (a pair of HasSynced and LastSyncedAt fields) /// for a specific bucket priority level. From 13622cb8b8f030e5c550af43f1a23017042ed958 Mon Sep 17 00:00:00 2001 From: Christiaan Landman Date: Mon, 26 May 2025 20:46:35 +0200 Subject: [PATCH 10/12] Working through sync implementation issue. --- .../Client/PowerSyncDatabase.cs | 6 +- .../Client/Sync/Bucket/SqliteBucketStorage.cs | 10 +- .../Client/Sync/Stream/CoreInstructions.cs | 7 +- .../Stream/StreamingSyncImplementation.cs | 99 ++++++++++--------- PowerSync/PowerSync.Common/DB/IDBAdapter.cs | 6 +- .../MDSQLite/MDSQLiteAdapter.cs | 12 +-- .../MDSQLite/MDSQLiteConnection.cs | 2 +- 7 files changed, 74 insertions(+), 68 deletions(-) diff --git a/PowerSync/PowerSync.Common/Client/PowerSyncDatabase.cs b/PowerSync/PowerSync.Common/Client/PowerSyncDatabase.cs index 8348398..c7dfdcb 100644 --- a/PowerSync/PowerSync.Common/Client/PowerSyncDatabase.cs +++ b/PowerSync/PowerSync.Common/Client/PowerSyncDatabase.cs @@ -65,11 +65,11 @@ public interface IPowerSyncDatabase : IEventStream Task Execute(string query, object[]? parameters = null); - Task GetAll(string sql, params object[]? parameters); + Task GetAll(string sql, object[]? parameters = null); - Task GetOptional(string sql, params object[]? parameters); + Task GetOptional(string sql, object[]? parameters = null); - Task Get(string sql, params object[]? parameters); + Task Get(string sql, object[]? parameters = null); Task ReadLock(Func> fn, DBLockOptions? 
options = null); diff --git a/PowerSync/PowerSync.Common/Client/Sync/Bucket/SqliteBucketStorage.cs b/PowerSync/PowerSync.Common/Client/Sync/Bucket/SqliteBucketStorage.cs index 3c25cdc..405f620 100644 --- a/PowerSync/PowerSync.Common/Client/Sync/Bucket/SqliteBucketStorage.cs +++ b/PowerSync/PowerSync.Common/Client/Sync/Bucket/SqliteBucketStorage.cs @@ -271,14 +271,14 @@ public async Task HasCrud() record ControlResult(string? r); - public async Task Control(string op, object? payload) + public async Task Control(string op, object? payload = null) { + Console.WriteLine("Calling control on extension "+ op + " - "); return await db.WriteTransaction(async tx => { - var result = await tx.Get("SELECT powersync_control(?, ?) AS r", [op, payload]); - - - return result.r; + var result = await tx.Get("SELECT powersync_control(?, ?) AS r", [op, payload ?? ""]); + Console.WriteLine("completed op: " + op + " - " + JsonConvert.SerializeObject(result)); + return result.r!; }); } } \ No newline at end of file diff --git a/PowerSync/PowerSync.Common/Client/Sync/Stream/CoreInstructions.cs b/PowerSync/PowerSync.Common/Client/Sync/Stream/CoreInstructions.cs index 7842ff1..1dcd2c9 100644 --- a/PowerSync/PowerSync.Common/Client/Sync/Stream/CoreInstructions.cs +++ b/PowerSync/PowerSync.Common/Client/Sync/Stream/CoreInstructions.cs @@ -18,7 +18,12 @@ public static Instruction[] ParseInstructions(string rawResponse) foreach (JObject item in jsonArray) { - instructions.Add(ParseInstruction(item)); + var instruction = ParseInstruction(item); + if (instruction == null) + { + throw new JsonSerializationException("Failed to parse instruction from JSON."); + } + instructions.Add(instruction); } return instructions.ToArray(); diff --git a/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncImplementation.cs b/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncImplementation.cs index 1639ca1..50dbd66 100644 --- 
a/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncImplementation.cs +++ b/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncImplementation.cs @@ -137,7 +137,7 @@ public StreamingSyncImplementation(StreamingSyncImplementationOptions options) TriggerCrudUpload = () => { - if (!SyncStatus.Connected || SyncStatus.DataFlowStatus.Uploading) + if (!SyncStatus.Connected) { return; } @@ -280,8 +280,9 @@ protected async Task StreamingSync(CancellationToken? signal, PowerSyncConnectio { break; } - + Console.WriteLine("XXXX starting"); await StreamingSyncIteration(nestedCts.Token, options); + Console.WriteLine("XXXX ending"); // Continue immediately } catch (Exception ex) @@ -360,27 +361,10 @@ private async Task RustSyncIteration(CancellationToken? signal, RequiredPowerSyn var nestedCts = new CancellationTokenSource(); signal?.Register(() => { nestedCts.Cancel(); }); - - try - { - notifyCompletedUploads = () => { Task.Run(async () => await Control("completed_upload")); }; - - await Control("start", JsonConvert.SerializeObject(resolvedOptions.Params)); - if (receivingLines != null) - { - await receivingLines; - } - } - finally - { - notifyCompletedUploads = null; - await Stop(); - } - - return; - async Task Connect(EstablishSyncStream instruction) { + Console.WriteLine("----- We got het here again" + nestedCts.Token.IsCancellationRequested); + Console.WriteLine("-----" + JsonConvert.SerializeObject(instruction.Request)); var syncOptions = new SyncStreamOptions { Path = "/sync/stream", @@ -390,14 +374,19 @@ async Task Connect(EstablishSyncStream instruction) var stream = await Options.Remote.PostStreamRaw(syncOptions); using var reader = new StreamReader(stream, Encoding.UTF8); + + syncOptions.CancellationToken.Register(() => { + try { stream?.Close(); } catch { } + }); + string? 
line; while ((line = await reader.ReadLineAsync()) != null) { - logger.LogDebug("Parsing line for rust sync stream {message}", line); + logger.LogDebug("Parsing line for rust sync stream {message}", "xx"); await Control("line_text", line); - } + Console.WriteLine("Done"); } async Task Stop() @@ -410,31 +399,18 @@ async Task Control(string op, object? payload = null) logger.LogDebug("Control call {message}", op); var rawResponse = await Options.Adapter.Control(op, payload); - await HandleInstructions(Instruction.ParseInstructions(rawResponse)); + HandleInstructions(Instruction.ParseInstructions(rawResponse)); } - async Task HandleInstructions(Instruction[] instructions) + void HandleInstructions(Instruction[] instructions) { foreach (var instruction in instructions) { - await HandleInstruction(instruction); + HandleInstruction(instruction); } } - DB.Crud.SyncPriorityStatus CoreStatusToSyncStatus(SyncPriorityStatus status) - { - logger.LogWarning("Sync status {status}", - status?.LastSyncedAt != null ? new DateTime(status!.LastSyncedAt) : null); - return new DB.Crud.SyncPriorityStatus - { - Priority = status.Priority, - HasSynced = status.HasSynced ?? null, - // TODO check this value - LastSyncedAt = status?.LastSyncedAt != null ? new DateTime(status!.LastSyncedAt) : null - }; - } - - async Task HandleInstruction(Instruction instruction) + void HandleInstruction(Instruction instruction) { switch (instruction) { @@ -451,7 +427,6 @@ async Task HandleInstruction(Instruction instruction) logger.LogWarning("{message}", logLine.Line); break; } - break; case UpdateSyncStatus syncStatus: var info = syncStatus.Status; @@ -477,25 +452,20 @@ async Task HandleInstruction(Instruction instruction) ClearDownloadError = true, } ); - break; case EstablishSyncStream establishSyncStream: if (receivingLines != null) { - // Already connected, this shouldn't happen during a single iteration. 
throw new Exception("Unexpected request to establish sync stream, already connected"); } - receivingLines = Connect(establishSyncStream); break; - case FetchCredentials { DidExpire: true, }: - Options.Remote.InvalidateCredentials(); - break; - case FetchCredentials: + case FetchCredentials fetchCredentials: Options.Remote.InvalidateCredentials(); break; case CloseSyncStream: - CancellationTokenSource?.Cancel(); + nestedCts.Cancel(); + logger.LogWarning("Closing stream"); break; case FlushFileSystem: // ignore @@ -507,6 +477,27 @@ async Task HandleInstruction(Instruction instruction) break; } } + + try + { + notifyCompletedUploads = () => { Task.Run(async () => await Control("completed_upload")); }; + logger.LogError("START"); + await Control("start", JsonConvert.SerializeObject(resolvedOptions.Params)); + if (receivingLines != null) + { + await receivingLines; + logger.LogError("Done waiting"); + } + else + { + Console.WriteLine("No receiving lines task was started, this should not happen."); + } + } + finally + { + notifyCompletedUploads = null; + await Stop(); + } } public new void Close() @@ -668,6 +659,16 @@ private async Task DelayRetry() await Task.Delay(Options.RetryDelayMs.Value); } } + + private static DB.Crud.SyncPriorityStatus CoreStatusToSyncStatus(SyncPriorityStatus status) + { + return new DB.Crud.SyncPriorityStatus + { + Priority = status.Priority, + HasSynced = status.HasSynced ?? null, + LastSyncedAt = status?.LastSyncedAt != null ? new DateTime(status!.LastSyncedAt) : null + }; + } } enum LockType diff --git a/PowerSync/PowerSync.Common/DB/IDBAdapter.cs b/PowerSync/PowerSync.Common/DB/IDBAdapter.cs index b0c906b..a31ee49 100644 --- a/PowerSync/PowerSync.Common/DB/IDBAdapter.cs +++ b/PowerSync/PowerSync.Common/DB/IDBAdapter.cs @@ -30,13 +30,13 @@ public class QueryRows public interface IDBGetUtils { // Execute a read-only query and return results. - Task GetAll(string sql, params object[]? parameters); + Task GetAll(string sql, object[]? 
parameters = null); // Execute a read-only query and return the first result, or null if the ResultSet is empty. - Task GetOptional(string sql, params object[]? parameters); + Task GetOptional(string sql, object[]? parameters = null); // Execute a read-only query and return the first result, error if the ResultSet is empty. - Task Get(string sql, params object[]? parameters); + Task Get(string sql, object[]? parameters = null); } public interface ILockContext : IDBGetUtils diff --git a/PowerSync/PowerSync.Common/MDSQLite/MDSQLiteAdapter.cs b/PowerSync/PowerSync.Common/MDSQLite/MDSQLiteAdapter.cs index b29b788..a396540 100644 --- a/PowerSync/PowerSync.Common/MDSQLite/MDSQLiteAdapter.cs +++ b/PowerSync/PowerSync.Common/MDSQLite/MDSQLiteAdapter.cs @@ -155,18 +155,18 @@ public Task ExecuteBatch(string query, object[][]? parameters = nul throw new NotImplementedException(); } - public async Task Get(string sql, params object[]? parameters) + public async Task Get(string sql, object[]? parameters = null) { return await ReadLock((ctx) => ctx.Get(sql, parameters)); ; } - public async Task GetAll(string sql, params object[]? parameters) + public async Task GetAll(string sql, object[]? parameters = null) { return await ReadLock((ctx) => ctx.GetAll(sql, parameters)); } - public async Task GetOptional(string sql, params object[]? parameters) + public async Task GetOptional(string sql, object[]? parameters = null) { return await ReadLock((ctx) => ctx.GetOptional(sql, parameters)); } @@ -307,17 +307,17 @@ public Task Execute(string query, object[]? parameters = null) return connection.Execute(query, parameters); } - public Task Get(string sql, params object[]? parameters) + public Task Get(string sql, object[]? parameters = null) { return connection.Get(sql, parameters); } - public Task GetAll(string sql, params object[]? parameters) + public Task GetAll(string sql, object[]? 
parameters = null) { return connection.GetAll(sql, parameters); } - public Task GetOptional(string sql, params object[]? parameters) + public Task GetOptional(string sql, object[]? parameters = null) { return connection.GetOptional(sql, parameters); } diff --git a/PowerSync/PowerSync.Common/MDSQLite/MDSQLiteConnection.cs b/PowerSync/PowerSync.Common/MDSQLite/MDSQLiteConnection.cs index 16c5c30..6c73c8b 100644 --- a/PowerSync/PowerSync.Common/MDSQLite/MDSQLiteConnection.cs +++ b/PowerSync/PowerSync.Common/MDSQLite/MDSQLiteConnection.cs @@ -142,7 +142,7 @@ public async Task ExecuteQuery(string query, object[]? parameters = var row = new Dictionary(); for (int i = 0; i < reader.FieldCount; i++) { - row[reader.GetName(i)] = reader.IsDBNull(i) ? null : reader.GetValue(i); + row[reader.GetName(i)] = reader.IsDBNull(i) ? null! : reader.GetValue(i); } rows.Add(row); } From 8de793d546861d47335879df33a7cf2107eb169a Mon Sep 17 00:00:00 2001 From: Christiaan Landman Date: Mon, 26 May 2025 20:46:35 +0200 Subject: [PATCH 11/12] Working through sync implementation issue. --- .../Client/PowerSyncDatabase.cs | 6 +- .../Client/Sync/Bucket/SqliteBucketStorage.cs | 10 +- .../Client/Sync/Stream/CoreInstructions.cs | 7 +- .../Client/Sync/Stream/Remote.cs | 4 +- .../Stream/StreamingSyncImplementation.cs | 103 +++++++++--------- PowerSync/PowerSync.Common/DB/IDBAdapter.cs | 6 +- .../MDSQLite/MDSQLiteAdapter.cs | 12 +- .../MDSQLite/MDSQLiteConnection.cs | 2 +- 8 files changed, 77 insertions(+), 73 deletions(-) diff --git a/PowerSync/PowerSync.Common/Client/PowerSyncDatabase.cs b/PowerSync/PowerSync.Common/Client/PowerSyncDatabase.cs index 8348398..c7dfdcb 100644 --- a/PowerSync/PowerSync.Common/Client/PowerSyncDatabase.cs +++ b/PowerSync/PowerSync.Common/Client/PowerSyncDatabase.cs @@ -65,11 +65,11 @@ public interface IPowerSyncDatabase : IEventStream Task Execute(string query, object[]? parameters = null); - Task GetAll(string sql, params object[]? 
parameters); + Task GetAll(string sql, object[]? parameters = null); - Task GetOptional(string sql, params object[]? parameters); + Task GetOptional(string sql, object[]? parameters = null); - Task Get(string sql, params object[]? parameters); + Task Get(string sql, object[]? parameters = null); Task ReadLock(Func> fn, DBLockOptions? options = null); diff --git a/PowerSync/PowerSync.Common/Client/Sync/Bucket/SqliteBucketStorage.cs b/PowerSync/PowerSync.Common/Client/Sync/Bucket/SqliteBucketStorage.cs index 3c25cdc..405f620 100644 --- a/PowerSync/PowerSync.Common/Client/Sync/Bucket/SqliteBucketStorage.cs +++ b/PowerSync/PowerSync.Common/Client/Sync/Bucket/SqliteBucketStorage.cs @@ -271,14 +271,14 @@ public async Task HasCrud() record ControlResult(string? r); - public async Task Control(string op, object? payload) + public async Task Control(string op, object? payload = null) { + Console.WriteLine("Calling control on extension "+ op + " - "); return await db.WriteTransaction(async tx => { - var result = await tx.Get("SELECT powersync_control(?, ?) AS r", [op, payload]); - - - return result.r; + var result = await tx.Get("SELECT powersync_control(?, ?) AS r", [op, payload ?? 
""]); + Console.WriteLine("completed op: " + op + " - " + JsonConvert.SerializeObject(result)); + return result.r!; }); } } \ No newline at end of file diff --git a/PowerSync/PowerSync.Common/Client/Sync/Stream/CoreInstructions.cs b/PowerSync/PowerSync.Common/Client/Sync/Stream/CoreInstructions.cs index 7842ff1..1dcd2c9 100644 --- a/PowerSync/PowerSync.Common/Client/Sync/Stream/CoreInstructions.cs +++ b/PowerSync/PowerSync.Common/Client/Sync/Stream/CoreInstructions.cs @@ -18,7 +18,12 @@ public static Instruction[] ParseInstructions(string rawResponse) foreach (JObject item in jsonArray) { - instructions.Add(ParseInstruction(item)); + var instruction = ParseInstruction(item); + if (instruction == null) + { + throw new JsonSerializationException("Failed to parse instruction from JSON."); + } + instructions.Add(instruction); } return instructions.ToArray(); diff --git a/PowerSync/PowerSync.Common/Client/Sync/Stream/Remote.cs b/PowerSync/PowerSync.Common/Client/Sync/Stream/Remote.cs index 589bc36..2df27db 100644 --- a/PowerSync/PowerSync.Common/Client/Sync/Stream/Remote.cs +++ b/PowerSync/PowerSync.Common/Client/Sync/Stream/Remote.cs @@ -1,17 +1,15 @@ namespace PowerSync.Common.Client.Sync.Stream; +using Connection; using System.IO; using System.Net.Http; using System.Reflection; using System.Text; using System.Threading; using System.Threading.Tasks; -using System.Text.RegularExpressions; using Newtonsoft.Json; -using Newtonsoft.Json.Linq; -using PowerSync.Common.Client.Connection; public class SyncStreamOptions { diff --git a/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncImplementation.cs b/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncImplementation.cs index 1639ca1..69a6566 100644 --- a/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncImplementation.cs +++ b/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncImplementation.cs @@ -137,7 +137,7 @@ public StreamingSyncImplementation(StreamingSyncImplementationOptions 
options) TriggerCrudUpload = () => { - if (!SyncStatus.Connected || SyncStatus.DataFlowStatus.Uploading) + if (!SyncStatus.Connected) { return; } @@ -280,8 +280,9 @@ protected async Task StreamingSync(CancellationToken? signal, PowerSyncConnectio { break; } - + Console.WriteLine("XXXX starting"); await StreamingSyncIteration(nestedCts.Token, options); + Console.WriteLine("XXXX ending"); // Continue immediately } catch (Exception ex) @@ -346,41 +347,24 @@ await locks.ObtainLock(new LockOptions Params = options?.Params ?? DEFAULT_STREAM_CONNECTION_OPTIONS.Params }; - await RustSyncIteration(signal, resolvedOptions); + await SyncIteration(signal, resolvedOptions); return true; } }); } - private async Task RustSyncIteration(CancellationToken? signal, RequiredPowerSyncConnectionOptions resolvedOptions) + private async Task SyncIteration(CancellationToken? signal, RequiredPowerSyncConnectionOptions resolvedOptions) { Task? receivingLines = null; var nestedCts = new CancellationTokenSource(); signal?.Register(() => { nestedCts.Cancel(); }); - - try - { - notifyCompletedUploads = () => { Task.Run(async () => await Control("completed_upload")); }; - - await Control("start", JsonConvert.SerializeObject(resolvedOptions.Params)); - if (receivingLines != null) - { - await receivingLines; - } - } - finally - { - notifyCompletedUploads = null; - await Stop(); - } - - return; - async Task Connect(EstablishSyncStream instruction) { + Console.WriteLine("----- We got het here again" + nestedCts.Token.IsCancellationRequested); + Console.WriteLine("-----" + JsonConvert.SerializeObject(instruction.Request)); var syncOptions = new SyncStreamOptions { Path = "/sync/stream", @@ -390,14 +374,19 @@ async Task Connect(EstablishSyncStream instruction) var stream = await Options.Remote.PostStreamRaw(syncOptions); using var reader = new StreamReader(stream, Encoding.UTF8); + + syncOptions.CancellationToken.Register(() => { + try { stream?.Close(); } catch { } + }); + string? 
line; while ((line = await reader.ReadLineAsync()) != null) { - logger.LogDebug("Parsing line for rust sync stream {message}", line); + logger.LogDebug("Parsing line for rust sync stream {message}", "xx"); await Control("line_text", line); - } + Console.WriteLine("Done"); } async Task Stop() @@ -410,31 +399,18 @@ async Task Control(string op, object? payload = null) logger.LogDebug("Control call {message}", op); var rawResponse = await Options.Adapter.Control(op, payload); - await HandleInstructions(Instruction.ParseInstructions(rawResponse)); + HandleInstructions(Instruction.ParseInstructions(rawResponse)); } - async Task HandleInstructions(Instruction[] instructions) + void HandleInstructions(Instruction[] instructions) { foreach (var instruction in instructions) { - await HandleInstruction(instruction); + HandleInstruction(instruction); } } - DB.Crud.SyncPriorityStatus CoreStatusToSyncStatus(SyncPriorityStatus status) - { - logger.LogWarning("Sync status {status}", - status?.LastSyncedAt != null ? new DateTime(status!.LastSyncedAt) : null); - return new DB.Crud.SyncPriorityStatus - { - Priority = status.Priority, - HasSynced = status.HasSynced ?? null, - // TODO check this value - LastSyncedAt = status?.LastSyncedAt != null ? new DateTime(status!.LastSyncedAt) : null - }; - } - - async Task HandleInstruction(Instruction instruction) + void HandleInstruction(Instruction instruction) { switch (instruction) { @@ -451,7 +427,6 @@ async Task HandleInstruction(Instruction instruction) logger.LogWarning("{message}", logLine.Line); break; } - break; case UpdateSyncStatus syncStatus: var info = syncStatus.Status; @@ -477,25 +452,20 @@ async Task HandleInstruction(Instruction instruction) ClearDownloadError = true, } ); - break; case EstablishSyncStream establishSyncStream: if (receivingLines != null) { - // Already connected, this shouldn't happen during a single iteration. 
throw new Exception("Unexpected request to establish sync stream, already connected"); } - receivingLines = Connect(establishSyncStream); break; - case FetchCredentials { DidExpire: true, }: - Options.Remote.InvalidateCredentials(); - break; - case FetchCredentials: + case FetchCredentials fetchCredentials: Options.Remote.InvalidateCredentials(); break; case CloseSyncStream: - CancellationTokenSource?.Cancel(); + nestedCts.Cancel(); + logger.LogWarning("Closing stream"); break; case FlushFileSystem: // ignore @@ -507,6 +477,27 @@ async Task HandleInstruction(Instruction instruction) break; } } + + try + { + notifyCompletedUploads = () => { Task.Run(async () => await Control("completed_upload")); }; + logger.LogError("START"); + await Control("start", JsonConvert.SerializeObject(resolvedOptions.Params)); + if (receivingLines != null) + { + await receivingLines; + logger.LogError("Done waiting"); + } + else + { + Console.WriteLine("No receiving lines task was started, this should not happen."); + } + } + finally + { + notifyCompletedUploads = null; + await Stop(); + } } public new void Close() @@ -668,6 +659,16 @@ private async Task DelayRetry() await Task.Delay(Options.RetryDelayMs.Value); } } + + private static DB.Crud.SyncPriorityStatus CoreStatusToSyncStatus(SyncPriorityStatus status) + { + return new DB.Crud.SyncPriorityStatus + { + Priority = status.Priority, + HasSynced = status.HasSynced ?? null, + LastSyncedAt = status?.LastSyncedAt != null ? new DateTime(status!.LastSyncedAt) : null + }; + } } enum LockType diff --git a/PowerSync/PowerSync.Common/DB/IDBAdapter.cs b/PowerSync/PowerSync.Common/DB/IDBAdapter.cs index b0c906b..a31ee49 100644 --- a/PowerSync/PowerSync.Common/DB/IDBAdapter.cs +++ b/PowerSync/PowerSync.Common/DB/IDBAdapter.cs @@ -30,13 +30,13 @@ public class QueryRows public interface IDBGetUtils { // Execute a read-only query and return results. - Task GetAll(string sql, params object[]? parameters); + Task GetAll(string sql, object[]? 
parameters = null); // Execute a read-only query and return the first result, or null if the ResultSet is empty. - Task GetOptional(string sql, params object[]? parameters); + Task GetOptional(string sql, object[]? parameters = null); // Execute a read-only query and return the first result, error if the ResultSet is empty. - Task Get(string sql, params object[]? parameters); + Task Get(string sql, object[]? parameters = null); } public interface ILockContext : IDBGetUtils diff --git a/PowerSync/PowerSync.Common/MDSQLite/MDSQLiteAdapter.cs b/PowerSync/PowerSync.Common/MDSQLite/MDSQLiteAdapter.cs index b29b788..a396540 100644 --- a/PowerSync/PowerSync.Common/MDSQLite/MDSQLiteAdapter.cs +++ b/PowerSync/PowerSync.Common/MDSQLite/MDSQLiteAdapter.cs @@ -155,18 +155,18 @@ public Task ExecuteBatch(string query, object[][]? parameters = nul throw new NotImplementedException(); } - public async Task Get(string sql, params object[]? parameters) + public async Task Get(string sql, object[]? parameters = null) { return await ReadLock((ctx) => ctx.Get(sql, parameters)); ; } - public async Task GetAll(string sql, params object[]? parameters) + public async Task GetAll(string sql, object[]? parameters = null) { return await ReadLock((ctx) => ctx.GetAll(sql, parameters)); } - public async Task GetOptional(string sql, params object[]? parameters) + public async Task GetOptional(string sql, object[]? parameters = null) { return await ReadLock((ctx) => ctx.GetOptional(sql, parameters)); } @@ -307,17 +307,17 @@ public Task Execute(string query, object[]? parameters = null) return connection.Execute(query, parameters); } - public Task Get(string sql, params object[]? parameters) + public Task Get(string sql, object[]? parameters = null) { return connection.Get(sql, parameters); } - public Task GetAll(string sql, params object[]? parameters) + public Task GetAll(string sql, object[]? 
parameters = null) { return connection.GetAll(sql, parameters); } - public Task GetOptional(string sql, params object[]? parameters) + public Task GetOptional(string sql, object[]? parameters = null) { return connection.GetOptional(sql, parameters); } diff --git a/PowerSync/PowerSync.Common/MDSQLite/MDSQLiteConnection.cs b/PowerSync/PowerSync.Common/MDSQLite/MDSQLiteConnection.cs index 16c5c30..6c73c8b 100644 --- a/PowerSync/PowerSync.Common/MDSQLite/MDSQLiteConnection.cs +++ b/PowerSync/PowerSync.Common/MDSQLite/MDSQLiteConnection.cs @@ -142,7 +142,7 @@ public async Task ExecuteQuery(string query, object[]? parameters = var row = new Dictionary(); for (int i = 0; i < reader.FieldCount; i++) { - row[reader.GetName(i)] = reader.IsDBNull(i) ? null : reader.GetValue(i); + row[reader.GetName(i)] = reader.IsDBNull(i) ? null! : reader.GetValue(i); } rows.Add(row); } From 1468ce240c2258ddc11118da2818be24194e736e Mon Sep 17 00:00:00 2001 From: Christiaan Landman Date: Tue, 27 May 2025 13:27:59 +0200 Subject: [PATCH 12/12] Added new schema options (tracking metadata/previous data). --- .../Client/Sync/Bucket/SqliteBucketStorage.cs | 2 - .../Stream/StreamingSyncImplementation.cs | 11 --- .../PowerSync.Common/DB/Crud/CrudEntry.cs | 88 +++++++++-------- PowerSync/PowerSync.Common/DB/Schema/Table.cs | 96 ++++++++++++++++--- 4 files changed, 133 insertions(+), 64 deletions(-) diff --git a/PowerSync/PowerSync.Common/Client/Sync/Bucket/SqliteBucketStorage.cs b/PowerSync/PowerSync.Common/Client/Sync/Bucket/SqliteBucketStorage.cs index 405f620..1f78f2c 100644 --- a/PowerSync/PowerSync.Common/Client/Sync/Bucket/SqliteBucketStorage.cs +++ b/PowerSync/PowerSync.Common/Client/Sync/Bucket/SqliteBucketStorage.cs @@ -273,11 +273,9 @@ record ControlResult(string? r); public async Task Control(string op, object? 
payload = null) { - Console.WriteLine("Calling control on extension "+ op + " - "); return await db.WriteTransaction(async tx => { var result = await tx.Get("SELECT powersync_control(?, ?) AS r", [op, payload ?? ""]); - Console.WriteLine("completed op: " + op + " - " + JsonConvert.SerializeObject(result)); return result.r!; }); } diff --git a/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncImplementation.cs b/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncImplementation.cs index 69a6566..3581b50 100644 --- a/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncImplementation.cs +++ b/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncImplementation.cs @@ -280,9 +280,7 @@ protected async Task StreamingSync(CancellationToken? signal, PowerSyncConnectio { break; } - Console.WriteLine("XXXX starting"); await StreamingSyncIteration(nestedCts.Token, options); - Console.WriteLine("XXXX ending"); // Continue immediately } catch (Exception ex) @@ -363,8 +361,6 @@ private async Task SyncIteration(CancellationToken? 
signal, RequiredPowerSyncCon async Task Connect(EstablishSyncStream instruction) { - Console.WriteLine("----- We got het here again" + nestedCts.Token.IsCancellationRequested); - Console.WriteLine("-----" + JsonConvert.SerializeObject(instruction.Request)); var syncOptions = new SyncStreamOptions { Path = "/sync/stream", @@ -386,7 +382,6 @@ async Task Connect(EstablishSyncStream instruction) logger.LogDebug("Parsing line for rust sync stream {message}", "xx"); await Control("line_text", line); } - Console.WriteLine("Done"); } async Task Stop() @@ -481,16 +476,10 @@ void HandleInstruction(Instruction instruction) try { notifyCompletedUploads = () => { Task.Run(async () => await Control("completed_upload")); }; - logger.LogError("START"); await Control("start", JsonConvert.SerializeObject(resolvedOptions.Params)); if (receivingLines != null) { await receivingLines; - logger.LogError("Done waiting"); - } - else - { - Console.WriteLine("No receiving lines task was started, this should not happen."); } } finally diff --git a/PowerSync/PowerSync.Common/DB/Crud/CrudEntry.cs b/PowerSync/PowerSync.Common/DB/Crud/CrudEntry.cs index 5fe8f02..8a3539d 100644 --- a/PowerSync/PowerSync.Common/DB/Crud/CrudEntry.cs +++ b/PowerSync/PowerSync.Common/DB/Crud/CrudEntry.cs @@ -5,65 +5,62 @@ namespace PowerSync.Common.DB.Crud; public enum UpdateType { - [JsonProperty("PUT")] - PUT, + [JsonProperty("PUT")] PUT, - [JsonProperty("PATCH")] - PATCH, + [JsonProperty("PATCH")] PATCH, - [JsonProperty("DELETE")] - DELETE + [JsonProperty("DELETE")] DELETE } public class CrudEntryJSON { - [JsonProperty("id")] - public string Id { get; set; } = null!; + [JsonProperty("id")] public string Id { get; set; } = null!; - [JsonProperty("data")] - public string Data { get; set; } = null!; - - [JsonProperty("tx_id")] - public long? TransactionId { get; set; } + [JsonProperty("data")] public string Data { get; set; } = null!; + + [JsonProperty("tx_id")] public long? 
TransactionId { get; set; } } public class CrudEntryDataJSON { - [JsonProperty("data")] - public Dictionary? Data { get; set; } - - [JsonProperty("op")] - public UpdateType Op { get; set; } - - [JsonProperty("type")] - public string Type { get; set; } = null!; - - [JsonProperty("id")] - public string Id { get; set; } = null!; + [JsonProperty("data")] public Dictionary? Data { get; set; } + + [JsonProperty("old")] public Dictionary? Old { get; set; } + + [JsonProperty("op")] public UpdateType Op { get; set; } + + [JsonProperty("type")] public string Type { get; set; } = null!; + + [JsonProperty("id")] public string Id { get; set; } = null!; + + [JsonProperty("metadata")] public string? Metadata { get; set; } } public class CrudEntryOutputJSON { - [JsonProperty("op_id")] - public int OpId { get; set; } + [JsonProperty("op_id")] public int OpId { get; set; } - [JsonProperty("op")] - public UpdateType Op { get; set; } + [JsonProperty("op")] public UpdateType Op { get; set; } - [JsonProperty("type")] - public string Type { get; set; } = null!; + [JsonProperty("type")] public string Type { get; set; } = null!; - [JsonProperty("id")] - public string Id { get; set; } = null!; + [JsonProperty("id")] public string Id { get; set; } = null!; - [JsonProperty("tx_id")] - public long? TransactionId { get; set; } + [JsonProperty("tx_id")] public long? TransactionId { get; set; } - [JsonProperty("data")] - public Dictionary? Data { get; set; } + [JsonProperty("data")] public Dictionary? Data { get; set; } } -public class CrudEntry(int clientId, UpdateType op, string table, string id, long? transactionId = null, Dictionary? opData = null) +public class CrudEntry( + int clientId, + UpdateType op, + string table, + string id, + long? transactionId = null, + Dictionary? opData = null, + Dictionary? previousValues = null, + string? 
metadata = null +) { public int ClientId { get; private set; } = clientId; public string Id { get; private set; } = id; @@ -72,6 +69,19 @@ public class CrudEntry(int clientId, UpdateType op, string table, string id, lon public string Table { get; private set; } = table; public long? TransactionId { get; private set; } = transactionId; + /// + /// Previous values before this change. + /// + public Dictionary? PreviousValues { get; private set; } = previousValues; + + /// + /// Client-side metadata attached with this write. + /// + /// This field is only available when the `trackMetadata` option was set to `true` when creating a table + /// and the insert or update statement set the `_metadata` column. + /// + public string? Metadata { get; private set; } = metadata; + public static CrudEntry FromRow(CrudEntryJSON dbRow) { var data = JsonConvert.DeserializeObject(dbRow.Data) @@ -83,7 +93,9 @@ public static CrudEntry FromRow(CrudEntryJSON dbRow) data.Type, data.Id, dbRow.TransactionId, - data.Data + data.Data, + data.Old, + data.Metadata ); } diff --git a/PowerSync/PowerSync.Common/DB/Schema/Table.cs b/PowerSync/PowerSync.Common/DB/Schema/Table.cs index cd9d2b1..2476e79 100644 --- a/PowerSync/PowerSync.Common/DB/Schema/Table.cs +++ b/PowerSync/PowerSync.Common/DB/Schema/Table.cs @@ -7,15 +7,57 @@ public class TableOptions( Dictionary>? indexes = null, bool? localOnly = null, bool? insertOnly = null, - string? viewName = null) + string? viewName = null, + bool? trackMetadata = null, + TrackPreviousOptions? trackPreviousOptions = null, + bool? ignoreEmptyUpdates = null +) { public Dictionary> Indexes { get; set; } = indexes ?? []; - public bool LocalOnly { get; set; } = localOnly ?? false; + public bool LocalOnly { get; } = localOnly ?? false; - public bool InsertOnly { get; set; } = insertOnly ?? false; + public bool InsertOnly { get; } = insertOnly ?? false; - public string? ViewName { get; set; } = viewName; + public string? 
ViewName { get; } = viewName; + + /// + /// Whether to add a hidden `_metadata` column that will be enabled for updates to attach custom + /// information about writes that will be reported through [CrudEntry.metadata]. + /// + public bool TrackMetadata { get; } = trackMetadata ?? false; + + /// + /// When set to a non-null value, track old values of columns + /// + public TrackPreviousOptions? TrackPreviousOptions { get; } = trackPreviousOptions ?? null; + + /// + /// Whether an `UPDATE` statement that doesn't change any values should be ignored when creating + /// CRUD entries. + /// + public bool IgnoreEmptyUpdates { get; } = ignoreEmptyUpdates ?? false; +} + +/// +/// Whether to include previous column values when PowerSync tracks local changes. +/// Including old values may be helpful for some backend connector implementations, +/// which is why it can be enabled on a per-table or per-column basis. +/// +public class TrackPreviousOptions +{ + /// + /// When defined, a list of column names for which old values should be tracked. + /// + [JsonProperty("columns")] + public List? Columns { get; set; } + + /// + /// Whether to only include old values when they were changed by an update, instead of always + /// including all old values, + /// + [JsonProperty("onlyWhenChanged")] + public bool? OnlyWhenChanged { get; set; } } public class Table @@ -35,16 +77,21 @@ public Table(Dictionary columns, TableOptions? options = nul { ConvertedColumns = [.. columns.Select(kv => new Column(new ColumnOptions(kv.Key, kv.Value)))]; - ConvertedIndexes = [.. (Options?.Indexes ?? []) + ConvertedIndexes = + [ + .. (Options?.Indexes ?? []) .Select(kv => new Index(new IndexOptions( kv.Key, - [.. kv.Value.Select(name => - new IndexedColumn(new IndexColumnOptions( - name.Replace("-", ""), !name.StartsWith("-"))) - )] + [ + .. kv.Value.Select(name => + new IndexedColumn(new IndexColumnOptions( + name.Replace("-", ""), !name.StartsWith("-"))) + ) + ] )) - )]; + ) + ]; Options = options ?? 
new TableOptions(); @@ -61,7 +108,18 @@ public void Validate() if (Columns.Count > Column.MAX_AMOUNT_OF_COLUMNS) { - throw new Exception($"Table has too many columns. The maximum number of columns is {Column.MAX_AMOUNT_OF_COLUMNS}."); + throw new Exception( + $"Table has too many columns. The maximum number of columns is {Column.MAX_AMOUNT_OF_COLUMNS}."); + } + + if (Options.TrackMetadata && Options.LocalOnly) + { + throw new Exception("Can't include metadata for local-only tables."); + } + + if (Options.TrackPreviousOptions != null && Options.LocalOnly) + { + throw new Exception("Can't include old values for local-only tables."); } var columnNames = new HashSet { "id" }; @@ -103,15 +161,27 @@ public void Validate() public string ToJSON(string Name = "") { + var trackPrevious = Options.TrackPreviousOptions; + var jsonObject = new { view_name = Options.ViewName ?? Name, local_only = Options.LocalOnly, insert_only = Options.InsertOnly, columns = ConvertedColumns.Select(c => JsonConvert.DeserializeObject(c.ToJSON())).ToList(), - indexes = ConvertedIndexes.Select(e => JsonConvert.DeserializeObject(e.ToJSON(this))).ToList() + indexes = ConvertedIndexes.Select(e => JsonConvert.DeserializeObject(e.ToJSON(this))).ToList(), + + include_metadata = Options.TrackMetadata, + ignore_empty_update = Options.IgnoreEmptyUpdates, + include_old = (object)(trackPrevious switch + { + null => false, + { Columns: null } => true, + { Columns: var cols } => cols + }), + include_old_only_when_changed = trackPrevious?.OnlyWhenChanged ?? false }; return JsonConvert.SerializeObject(jsonObject); } -} +} \ No newline at end of file