diff --git a/LiteDB.Shell/Shell/Display.cs b/LiteDB.Shell/Shell/Display.cs
index cf741a006..dde69700e 100644
--- a/LiteDB.Shell/Shell/Display.cs
+++ b/LiteDB.Shell/Shell/Display.cs
@@ -46,6 +46,8 @@ public void WriteError(Exception ex)
 
         public void WriteResult(IBsonDataReader result, Env env)
         {
+            if (result == null) throw new ArgumentNullException(nameof(result));
+
             var index = 0;
             var writer = new JsonWriter(Console.Out)
             {
@@ -53,19 +55,26 @@ public void WriteResult(IBsonDataReader result, Env env)
                 Indent = 2
             };
 
-            foreach (var item in result.ToEnumerable())
+            try
             {
-                if (env.Running == false) return;
+                while (result.Read())
+                {
+                    if (env.Running == false) return;
 
-                this.Write(ConsoleColor.Cyan, string.Format("[{0}]: ", ++index));
+                    this.Write(ConsoleColor.Cyan, string.Format("[{0}]: ", ++index));
 
-                if (this.Pretty) Console.WriteLine();
+                    if (this.Pretty) Console.WriteLine();
 
-                Console.ForegroundColor = ConsoleColor.DarkCyan;
+                    Console.ForegroundColor = ConsoleColor.DarkCyan;
 
-                writer.Serialize(item);
+                    writer.Serialize(result.Current);
 
-                Console.WriteLine();
+                    Console.WriteLine();
+                }
+            }
+            finally
+            {
+                result.Dispose();
             }
         }
 
diff --git a/LiteDB.Tests/Database/Storage_Tests.cs b/LiteDB.Tests/Database/Storage_Tests.cs
index de861d20c..bce9a85e7 100644
--- a/LiteDB.Tests/Database/Storage_Tests.cs
+++ b/LiteDB.Tests/Database/Storage_Tests.cs
@@ -1,7 +1,8 @@
-using System;
+using System;
+using System.Collections.Generic;
 using System.IO;
-using System.Linq;
 using System.Security.Cryptography;
+using System.Threading.Tasks;
 using FluentAssertions;
 using LiteDB.Tests.Utils;
 using Xunit;
@@ -29,49 +30,64 @@ public Storage_Tests()
         }
 
         [Fact]
-        public void Storage_Upload_Download()
+        public async Task Storage_Upload_Download()
         {
-            using (var db = DatabaseFactory.Create())
-            //using (var db = new LiteDatabase(@"c:\temp\file.db"))
+            await using (var db = DatabaseFactory.Create())
             {
                 var fs = db.GetStorage<int>("_files", "_chunks");
 
-                var small = fs.Upload(10, "photo_small.png", new MemoryStream(_smallFile));
-                var big = fs.Upload(100, "photo_big.png", new MemoryStream(_bigFile));
+                var small = await fs.UploadAsync(10, "photo_small.png", new MemoryStream(_smallFile));
+                var big = await fs.UploadAsync(100, "photo_big.png", new MemoryStream(_bigFile));
 
-                _smallFile.Length.Should().Be((int) small.Length);
-                _bigFile.Length.Should().Be((int) big.Length);
+                _smallFile.Length.Should().Be((int)small.Length);
+                _bigFile.Length.Should().Be((int)big.Length);
 
-                var f0 = fs.Find(x => x.Filename == "photo_small.png").First();
-                var f1 = fs.Find(x => x.Filename == "photo_big.png").First();
+                var f0 = await FirstAsync(fs.FindAsync(x => x.Filename == "photo_small.png"));
+                var f1 = await FirstAsync(fs.FindAsync(x => x.Filename == "photo_big.png"));
 
-                this.HashFile(f0.OpenRead()).Should().Be(_smallHash);
-                this.HashFile(f1.OpenRead()).Should().Be(_bigHash);
+                await using (var reader0 = await f0.OpenReadAsync())
+                {
+                    var hash = await this.HashFileAsync(reader0);
+                    hash.Should().Be(_smallHash);
+                }
 
-                // now replace small content with big-content
-                var repl = fs.Upload(10, "new_photo.jpg", new MemoryStream(_bigFile));
+                await using (var reader1 = await f1.OpenReadAsync())
+                {
+                    var hash = await this.HashFileAsync(reader1);
+                    hash.Should().Be(_bigHash);
+                }
 
-                fs.Exists(10).Should().BeTrue();
+                var repl = await fs.UploadAsync(10, "new_photo.jpg", new MemoryStream(_bigFile));
 
-                var nrepl = fs.FindById(10);
+                (await fs.ExistsAsync(10)).Should().BeTrue();
+
+                var nrepl = await fs.FindByIdAsync(10);
nrepl.Chunks.Should().Be(repl.Chunks); - // update metadata - fs.SetMetadata(100, new BsonDocument {["x"] = 100, ["y"] = 99}); + await fs.SetMetadataAsync(100, new BsonDocument { ["x"] = 100, ["y"] = 99 }); - // find using metadata - var md = fs.Find(x => x.Metadata["x"] == 100).FirstOrDefault(); + var md = await FirstAsync(fs.FindAsync(x => x.Metadata["x"] == 100)); md.Metadata["y"].AsInt32.Should().Be(99); } } - private string HashFile(Stream stream) + private static async Task> FirstAsync(IAsyncEnumerable> source) + { + await foreach (var item in source) + { + return item; + } + + return null; + } + + private async Task HashFileAsync(Stream stream) { - var m = new MemoryStream(); - stream.CopyTo(m); - return this.HashFile(m.ToArray()); + await using var memory = new MemoryStream(); + await stream.CopyToAsync(memory); + return this.HashFile(memory.ToArray()); } private string HashFile(byte[] input) @@ -83,4 +99,4 @@ private string HashFile(byte[] input) } } } -} \ No newline at end of file +} diff --git a/LiteDB.Tests/Issues/Issue2112_Tests.cs b/LiteDB.Tests/Issues/Issue2112_Tests.cs index 7853d6f09..b791d894c 100644 --- a/LiteDB.Tests/Issues/Issue2112_Tests.cs +++ b/LiteDB.Tests/Issues/Issue2112_Tests.cs @@ -29,7 +29,7 @@ public void Deserialize_covariant_collection_succeed() var deserialized = _mapper.Deserialize(serialized); - Assert.Equal(1, deserialized.Bs.Count); + Assert.Single(deserialized.Bs); } interface IA diff --git a/LiteDB.Tests/Issues/Issue2265_Tests.cs b/LiteDB.Tests/Issues/Issue2265_Tests.cs index 57257c12c..cdc3b83cc 100644 --- a/LiteDB.Tests/Issues/Issue2265_Tests.cs +++ b/LiteDB.Tests/Issues/Issue2265_Tests.cs @@ -3,6 +3,8 @@ using LiteDB.Tests.Utils; using Xunit; +#nullable enable + namespace LiteDB.Tests.Issues; // issue 2265 diff --git a/LiteDB.Tests/Issues/Issue2458_Tests.cs b/LiteDB.Tests/Issues/Issue2458_Tests.cs index e538e18e9..851ac46f1 100644 --- a/LiteDB.Tests/Issues/Issue2458_Tests.cs +++ b/LiteDB.Tests/Issues/Issue2458_Tests.cs @@ -1,5 +1,6 @@ -using System; +using System; using System.IO; +using System.Threading.Tasks; using LiteDB.Tests.Utils; using Xunit; @@ -8,48 +9,48 @@ namespace LiteDB.Tests.Issues; public class Issue2458_Tests { [Fact] - public void NegativeSeekFails() + public async Task NegativeSeekFails() { - using var db = DatabaseFactory.Create(); + await using var db = DatabaseFactory.Create(); var fs = db.FileStorage; - AddTestFile("test", 1, fs); - using Stream stream = fs.OpenRead("test"); + await AddTestFileAsync("test", 1, fs); + await using var stream = await fs.OpenReadAsync("test"); Assert.Throws(() => stream.Position = -1); } - //https://learn.microsoft.com/en-us/dotnet/api/system.io.stream.position?view=net-8.0 says seeking to a position - //beyond the end of a stream is supported, so implementations should support it (error on read). 
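// Caller-side note on the file-storage changes above: Upload/OpenRead/Find become
// UploadAsync/OpenReadAsync/FindAsync, and FindAsync yields an async stream. A minimal
// sketch, assuming the async surface introduced in this PR; the database path, file id,
// payload and GetStorage<int> (mirroring the int file ids in Storage_Tests) are
// illustrative only.
using System.IO;
using System.Threading.Tasks;
using LiteDB;

public static class StorageMigrationSketch
{
    public static async Task RunAsync()
    {
        await using var db = new LiteDatabase("demo.db");
        var fs = db.GetStorage<int>("_files", "_chunks");

        // sync:  fs.Upload(1, "a.png", stream)
        // async: await fs.UploadAsync(1, "a.png", stream)
        using var stream = new MemoryStream(new byte[] { 1, 2, 3 });
        await fs.UploadAsync(1, "a.png", stream);

        // FindAsync now yields results lazily, so enumerate with await foreach
        await foreach (var file in fs.FindAsync(x => x.Filename == "a.png"))
        {
            await using var reader = await file.OpenReadAsync();
            // ... consume the chunked read stream here
        }
    }
}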
[Fact] - public void SeekPastFileSucceds() + public async Task SeekPastFileSucceds() { - using var db = DatabaseFactory.Create(); + await using var db = DatabaseFactory.Create(); var fs = db.FileStorage; - AddTestFile("test", 1, fs); - using Stream stream = fs.OpenRead("test"); - stream.Position = Int32.MaxValue; + await AddTestFileAsync("test", 1, fs); + await using var stream = await fs.OpenReadAsync("test"); + stream.Position = int.MaxValue; } [Fact] - public void SeekShortChunks() + public async Task SeekShortChunks() { - using var db = DatabaseFactory.Create(); + await using var db = DatabaseFactory.Create(); var fs = db.FileStorage; - using(Stream writeStream = fs.OpenWrite("test", "test")) + await using (var writeStream = await fs.OpenWriteAsync("test", "test")) { - writeStream.WriteByte(0); - writeStream.Flush(); //Create single-byte chunk just containing a 0 - writeStream.WriteByte(1); - writeStream.Flush(); - writeStream.WriteByte(2); + await writeStream.WriteAsync(new byte[] { 0 }, 0, 1); + await writeStream.FlushAsync(); + await writeStream.WriteAsync(new byte[] { 1 }, 0, 1); + await writeStream.FlushAsync(); + await writeStream.WriteAsync(new byte[] { 2 }, 0, 1); } - using Stream readStream = fs.OpenRead("test"); + + await using var readStream = await fs.OpenReadAsync("test"); readStream.Position = 2; Assert.Equal(2, readStream.ReadByte()); } - private void AddTestFile(string id, long length, ILiteStorage fs) + private static async Task AddTestFileAsync(string id, long length, ILiteStorage fs) { - using Stream writeStream = fs.OpenWrite(id, id); - writeStream.Write(new byte[length]); + await using var writeStream = await fs.OpenWriteAsync(id, id); + var buffer = new byte[length]; + await writeStream.WriteAsync(buffer, 0, buffer.Length); } -} \ No newline at end of file +} diff --git a/LiteDB.Tests/Issues/Issue2487_Tests.cs b/LiteDB.Tests/Issues/Issue2487_Tests.cs index cc3109f03..8b0f0ca1f 100644 --- a/LiteDB.Tests/Issues/Issue2487_Tests.cs +++ b/LiteDB.Tests/Issues/Issue2487_Tests.cs @@ -1,4 +1,5 @@ using FluentAssertions; +using LiteDB.Engine; using System.Diagnostics; @@ -42,4 +43,4 @@ public void Test_Contains_EmptyStrings() var shouldExecute = () => engine.Query("data", Query.All(Query.Contains("Foo", " "))); shouldExecute.Should().NotThrow(); } -} \ No newline at end of file +} diff --git a/LiteDB.Tests/Issues/Issue2506_Tests.cs b/LiteDB.Tests/Issues/Issue2506_Tests.cs index a06009a66..3370d728a 100644 --- a/LiteDB.Tests/Issues/Issue2506_Tests.cs +++ b/LiteDB.Tests/Issues/Issue2506_Tests.cs @@ -1,5 +1,6 @@ -using System.Collections.Generic; +using System.Collections.Generic; using System.IO; +using System.Threading.Tasks; using Xunit; namespace LiteDB.Tests.Issues; @@ -7,30 +8,34 @@ namespace LiteDB.Tests.Issues; public class Issue2506_Tests { [Fact] - public void Test() + public async Task Test() { - // Open database connection - using LiteDatabase dataBase = new("demo.db"); + await using LiteDatabase dataBase = new("demo.db"); - // Get the file metadata/chunks storage ILiteStorage fileStorage = dataBase.GetStorage("myFiles", "myChunks"); - // Upload empty test file to file storage using MemoryStream emptyStream = new(); - fileStorage.Upload("photos/2014/picture-01.jpg", "picture-01.jpg", emptyStream); + await fileStorage.UploadAsync("photos/2014/picture-01.jpg", "picture-01.jpg", emptyStream); - // Find file reference by its ID (returns null if not found) - LiteFileInfo file = fileStorage.FindById("photos/2014/picture-01.jpg"); + LiteFileInfo file = await 
fileStorage.FindByIdAsync("photos/2014/picture-01.jpg"); Assert.NotNull(file); - // Load and save file bytes to hard drive - file.SaveAs(Path.Combine(Path.GetTempPath(), "new-picture.jpg")); + await file.SaveAsAsync(Path.Combine(Path.GetTempPath(), "new-picture.jpg")); + + List> files = new(); + await foreach (var info in fileStorage.FindAsync("_id LIKE 'photos/2014/%'")) + { + files.Add(info); + } - // Find all files matching pattern - IEnumerable> files = fileStorage.Find("_id LIKE 'photos/2014/%'"); Assert.Single(files); - // Find all files matching pattern using parameters - IEnumerable> files2 = fileStorage.Find("_id LIKE @0", "photos/2014/%"); + + List> files2 = new(); + await foreach (var info in fileStorage.FindAsync("_id LIKE @0", cancellationToken: default, "photos/2014/%")) + { + files2.Add(info); + } + Assert.Single(files2); } -} \ No newline at end of file +} diff --git a/LiteDB/Client/Database/Collections/Aggregate.cs b/LiteDB/Client/Database/Collections/Aggregate.cs index b571348a6..f0769ccba 100644 --- a/LiteDB/Client/Database/Collections/Aggregate.cs +++ b/LiteDB/Client/Database/Collections/Aggregate.cs @@ -1,6 +1,8 @@ -using System; +using System; using System.Linq; using System.Linq.Expressions; +using System.Threading; +using System.Threading.Tasks; using static LiteDB.Constants; namespace LiteDB @@ -12,41 +14,54 @@ public partial class LiteCollection /// /// Get document count in collection /// - public int Count() + public Task CountAsync(CancellationToken cancellationToken = default) { - // do not use indexes - collections has DocumentCount property - return this.Query().Count(); + return this.Query().CountAsync(cancellationToken); } /// /// Get document count in collection using predicate filter expression /// - public int Count(BsonExpression predicate) + public Task CountAsync(BsonExpression predicate, CancellationToken cancellationToken = default) { if (predicate == null) throw new ArgumentNullException(nameof(predicate)); - return this.Query().Where(predicate).Count(); + return this.Query().Where(predicate).CountAsync(cancellationToken); } /// /// Get document count in collection using predicate filter expression /// - public int Count(string predicate, BsonDocument parameters) => this.Count(BsonExpression.Create(predicate, parameters)); + public Task CountAsync(string predicate, BsonDocument parameters, CancellationToken cancellationToken = default) + { + return this.CountAsync(BsonExpression.Create(predicate, parameters), cancellationToken); + } /// /// Get document count in collection using predicate filter expression /// - public int Count(string predicate, params BsonValue[] args) => this.Count(BsonExpression.Create(predicate, args)); + public Task CountAsync(string predicate, CancellationToken cancellationToken = default, params BsonValue[] args) + { + return this.CountAsync(BsonExpression.Create(predicate, args), cancellationToken); + } /// /// Count documents matching a query. This method does not deserialize any documents. 
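// Caller-side note on the CountAsync overloads above: the params-based overload places
// the CancellationToken before the BsonValue arguments, so positional arguments shift
// one slot to the right compared with the sync Count call. A hedged sketch; the
// predicate string and the Age field are illustrative only.
using System.Threading;
using System.Threading.Tasks;
using LiteDB;

public static class CountSketch
{
    public static async Task PrintAdultCountAsync(ILiteCollection<BsonDocument> col, CancellationToken ct)
    {
        // sync:  col.Count("Age >= @0", 18)
        // async: the token is supplied before the @0 argument
        var count = await col.CountAsync("Age >= @0", ct, 18);

        System.Console.WriteLine(count);
    }
}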
Needs indexes on query expression /// - public int Count(Expression> predicate) => this.Count(_mapper.GetExpression(predicate)); + public Task CountAsync(Expression> predicate, CancellationToken cancellationToken = default) + { + return this.CountAsync(_mapper.GetExpression(predicate), cancellationToken); + } /// /// Get document count in collection using predicate filter expression /// - public int Count(Query query) => new LiteQueryable(_engine, _mapper, _collection, query).Count(); + public Task CountAsync(Query query, CancellationToken cancellationToken = default) + { + if (query == null) throw new ArgumentNullException(nameof(query)); + + return new LiteQueryable(_engine, _mapper, _collection, query).CountAsync(cancellationToken); + } #endregion @@ -55,40 +70,54 @@ public int Count(BsonExpression predicate) /// /// Get document count in collection /// - public long LongCount() + public Task LongCountAsync(CancellationToken cancellationToken = default) { - return this.Query().LongCount(); + return this.Query().LongCountAsync(cancellationToken); } /// /// Get document count in collection using predicate filter expression /// - public long LongCount(BsonExpression predicate) + public Task LongCountAsync(BsonExpression predicate, CancellationToken cancellationToken = default) { if (predicate == null) throw new ArgumentNullException(nameof(predicate)); - return this.Query().Where(predicate).LongCount(); + return this.Query().Where(predicate).LongCountAsync(cancellationToken); } /// /// Get document count in collection using predicate filter expression /// - public long LongCount(string predicate, BsonDocument parameters) => this.LongCount(BsonExpression.Create(predicate, parameters)); + public Task LongCountAsync(string predicate, BsonDocument parameters, CancellationToken cancellationToken = default) + { + return this.LongCountAsync(BsonExpression.Create(predicate, parameters), cancellationToken); + } /// /// Get document count in collection using predicate filter expression /// - public long LongCount(string predicate, params BsonValue[] args) => this.LongCount(BsonExpression.Create(predicate, args)); + public Task LongCountAsync(string predicate, CancellationToken cancellationToken = default, params BsonValue[] args) + { + return this.LongCountAsync(BsonExpression.Create(predicate, args), cancellationToken); + } /// /// Get document count in collection using predicate filter expression /// - public long LongCount(Expression> predicate) => this.LongCount(_mapper.GetExpression(predicate)); + public Task LongCountAsync(Expression> predicate, CancellationToken cancellationToken = default) + { + return this.LongCountAsync(_mapper.GetExpression(predicate), cancellationToken); + } /// /// Get document count in collection using predicate filter expression /// - public long LongCount(Query query) => new LiteQueryable(_engine, _mapper, _collection, query).Count(); + public Task LongCountAsync(Query query, CancellationToken cancellationToken = default) + { + if (query == null) throw new ArgumentNullException(nameof(query)); + + return new LiteQueryable(_engine, _mapper, _collection, query).LongCountAsync(cancellationToken); + } #endregion @@ -97,32 +126,46 @@ public long LongCount(BsonExpression predicate) /// /// Get true if collection contains at least 1 document that satisfies the predicate expression /// - public bool Exists(BsonExpression predicate) + public Task ExistsAsync(BsonExpression predicate, CancellationToken cancellationToken = default) { if (predicate == null) throw new 
ArgumentNullException(nameof(predicate)); - return this.Query().Where(predicate).Exists(); + return this.Query().Where(predicate).ExistsAsync(cancellationToken); } /// /// Get true if collection contains at least 1 document that satisfies the predicate expression /// - public bool Exists(string predicate, BsonDocument parameters) => this.Exists(BsonExpression.Create(predicate, parameters)); + public Task ExistsAsync(string predicate, BsonDocument parameters, CancellationToken cancellationToken = default) + { + return this.ExistsAsync(BsonExpression.Create(predicate, parameters), cancellationToken); + } /// /// Get true if collection contains at least 1 document that satisfies the predicate expression /// - public bool Exists(string predicate, params BsonValue[] args) => this.Exists(BsonExpression.Create(predicate, args)); + public Task ExistsAsync(string predicate, CancellationToken cancellationToken = default, params BsonValue[] args) + { + return this.ExistsAsync(BsonExpression.Create(predicate, args), cancellationToken); + } /// /// Get true if collection contains at least 1 document that satisfies the predicate expression /// - public bool Exists(Expression> predicate) => this.Exists(_mapper.GetExpression(predicate)); + public Task ExistsAsync(Expression> predicate, CancellationToken cancellationToken = default) + { + return this.ExistsAsync(_mapper.GetExpression(predicate), cancellationToken); + } /// /// Get true if collection contains at least 1 document that satisfies the predicate expression /// - public bool Exists(Query query) => new LiteQueryable(_engine, _mapper, _collection, query).Exists(); + public Task ExistsAsync(Query query, CancellationToken cancellationToken = default) + { + if (query == null) throw new ArgumentNullException(nameof(query)); + + return new LiteQueryable(_engine, _mapper, _collection, query).ExistsAsync(cancellationToken); + } #endregion @@ -131,35 +174,39 @@ public bool Exists(BsonExpression predicate) /// /// Returns the min value from specified key value in collection /// - public BsonValue Min(BsonExpression keySelector) + public async Task MinAsync(BsonExpression keySelector, CancellationToken cancellationToken = default) { if (string.IsNullOrEmpty(keySelector)) throw new ArgumentNullException(nameof(keySelector)); - var doc = this.Query() + await foreach (var doc in this.Query() .OrderBy(keySelector) .Select(keySelector) - .ToDocuments() - .First(); + .ToDocumentsAsync(cancellationToken) + .WithCancellation(cancellationToken)) + { + return doc[doc.Keys.First()]; + } - // return first field of first document - return doc[doc.Keys.First()]; + throw new InvalidOperationException("Sequence contains no elements."); } /// /// Returns the min value of _id index /// - public BsonValue Min() => this.Min("_id"); + public Task MinAsync(CancellationToken cancellationToken = default) + { + return this.MinAsync("_id", cancellationToken); + } /// /// Returns the min value from specified key value in collection /// - public K Min(Expression> keySelector) + public async Task MinAsync(Expression> keySelector, CancellationToken cancellationToken = default) { if (keySelector == null) throw new ArgumentNullException(nameof(keySelector)); var expr = _mapper.GetExpression(keySelector); - - var value = this.Min(expr); + var value = await this.MinAsync(expr, cancellationToken).ConfigureAwait(false); return (K)_mapper.Deserialize(typeof(K), value); } @@ -167,39 +214,43 @@ public K Min(Expression> keySelector) /// /// Returns the max value from specified key value in 
collection /// - public BsonValue Max(BsonExpression keySelector) + public async Task MaxAsync(BsonExpression keySelector, CancellationToken cancellationToken = default) { if (string.IsNullOrEmpty(keySelector)) throw new ArgumentNullException(nameof(keySelector)); - var doc = this.Query() + await foreach (var doc in this.Query() .OrderByDescending(keySelector) .Select(keySelector) - .ToDocuments() - .First(); + .ToDocumentsAsync(cancellationToken) + .WithCancellation(cancellationToken)) + { + return doc[doc.Keys.First()]; + } - // return first field of first document - return doc[doc.Keys.First()]; + throw new InvalidOperationException("Sequence contains no elements."); } /// /// Returns the max _id index key value /// - public BsonValue Max() => this.Max("_id"); + public Task MaxAsync(CancellationToken cancellationToken = default) + { + return this.MaxAsync("_id", cancellationToken); + } /// /// Returns the last/max field using a linq expression /// - public K Max(Expression> keySelector) + public async Task MaxAsync(Expression> keySelector, CancellationToken cancellationToken = default) { if (keySelector == null) throw new ArgumentNullException(nameof(keySelector)); var expr = _mapper.GetExpression(keySelector); - - var value = this.Max(expr); + var value = await this.MaxAsync(expr, cancellationToken).ConfigureAwait(false); return (K)_mapper.Deserialize(typeof(K), value); } #endregion } -} \ No newline at end of file +} diff --git a/LiteDB/Client/Database/Collections/Delete.cs b/LiteDB/Client/Database/Collections/Delete.cs index 280ed9736..02ac501c7 100644 --- a/LiteDB/Client/Database/Collections/Delete.cs +++ b/LiteDB/Client/Database/Collections/Delete.cs @@ -1,5 +1,7 @@ -using System; +using System; using System.Linq.Expressions; +using System.Threading; +using System.Threading.Tasks; using static LiteDB.Constants; namespace LiteDB @@ -9,44 +11,59 @@ public partial class LiteCollection /// /// Delete a single document on collection based on _id index. Returns true if document was deleted /// - public bool Delete(BsonValue id) + public async Task DeleteAsync(BsonValue id, CancellationToken cancellationToken = default) { if (id == null || id.IsNull) throw new ArgumentNullException(nameof(id)); + cancellationToken.ThrowIfCancellationRequested(); - return _engine.Delete(_collection, new [] { id }) == 1; + var result = await _engine.DeleteAsync(_collection, new[] { id }, cancellationToken).ConfigureAwait(false); + + return result == 1; } /// /// Delete all documents inside collection. Returns how many documents was deleted. Run inside current transaction /// - public int DeleteAll() + public Task DeleteAllAsync(CancellationToken cancellationToken = default) { - return _engine.DeleteMany(_collection, null); + cancellationToken.ThrowIfCancellationRequested(); + + return _engine.DeleteManyAsync(_collection, null, cancellationToken); } /// /// Delete all documents based on predicate expression. Returns how many documents was deleted /// - public int DeleteMany(BsonExpression predicate) + public Task DeleteManyAsync(BsonExpression predicate, CancellationToken cancellationToken = default) { if (predicate == null) throw new ArgumentNullException(nameof(predicate)); + cancellationToken.ThrowIfCancellationRequested(); - return _engine.DeleteMany(_collection, predicate); + return _engine.DeleteManyAsync(_collection, predicate, cancellationToken); } /// /// Delete all documents based on predicate expression. 
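// The rewritten MinAsync/MaxAsync above read the first document of an ordered async
// stream and throw InvalidOperationException when the collection is empty, matching the
// old .First() behaviour. A hedged guard a caller might add; the "$.Age" field is
// illustrative only.
using System.Threading.Tasks;
using LiteDB;

public static class MinMaxSketch
{
    public static async Task<BsonValue> TryGetMaxAgeAsync(ILiteCollection<BsonDocument> col)
    {
        // avoid the empty-sequence exception by checking the count first
        if (await col.LongCountAsync() == 0) return BsonValue.Null;

        return await col.MaxAsync(BsonExpression.Create("$.Age"));
    }
}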
Returns how many documents was deleted /// - public int DeleteMany(string predicate, BsonDocument parameters) => this.DeleteMany(BsonExpression.Create(predicate, parameters)); + public Task DeleteManyAsync(string predicate, BsonDocument parameters, CancellationToken cancellationToken = default) + { + return this.DeleteManyAsync(BsonExpression.Create(predicate, parameters), cancellationToken); + } /// /// Delete all documents based on predicate expression. Returns how many documents was deleted /// - public int DeleteMany(string predicate, params BsonValue[] args) => this.DeleteMany(BsonExpression.Create(predicate, args)); + public Task DeleteManyAsync(string predicate, CancellationToken cancellationToken = default, params BsonValue[] args) + { + return this.DeleteManyAsync(BsonExpression.Create(predicate, args), cancellationToken); + } /// /// Delete all documents based on predicate expression. Returns how many documents was deleted /// - public int DeleteMany(Expression> predicate) => this.DeleteMany(_mapper.GetExpression(predicate)); + public Task DeleteManyAsync(Expression> predicate, CancellationToken cancellationToken = default) + { + return this.DeleteManyAsync(_mapper.GetExpression(predicate), cancellationToken); + } } -} \ No newline at end of file +} diff --git a/LiteDB/Client/Database/Collections/Find.cs b/LiteDB/Client/Database/Collections/Find.cs index e82df8754..a407964e3 100644 --- a/LiteDB/Client/Database/Collections/Find.cs +++ b/LiteDB/Client/Database/Collections/Find.cs @@ -1,8 +1,11 @@ -using LiteDB.Engine; +using LiteDB.Engine; using System; using System.Collections.Generic; using System.Linq; using System.Linq.Expressions; +using System.Threading; +using System.Threading.Tasks; +using System.Runtime.CompilerServices; using static LiteDB.Constants; namespace LiteDB @@ -22,36 +25,26 @@ public ILiteQueryable Query() /// /// Find documents inside a collection using predicate expression. /// - public IEnumerable Find(BsonExpression predicate, int skip = 0, int limit = int.MaxValue) + public IAsyncEnumerable FindAsync(BsonExpression predicate, int skip = 0, int limit = int.MaxValue, CancellationToken cancellationToken = default) { - if (predicate == null) throw new ArgumentNullException(nameof(predicate)); - - return this.Query() - .Include(_includes) - .Where(predicate) - .Skip(skip) - .Limit(limit) - .ToEnumerable(); + return ToAsyncEnumerable(this.FindSync(predicate, skip, limit), cancellationToken); } /// /// Find documents inside a collection using query definition. /// - public IEnumerable Find(Query query, int skip = 0, int limit = int.MaxValue) + public IAsyncEnumerable FindAsync(Query query, int skip = 0, int limit = int.MaxValue, CancellationToken cancellationToken = default) { - if (query == null) throw new ArgumentNullException(nameof(query)); - - if (skip != 0) query.Offset = skip; - if (limit != int.MaxValue) query.Limit = limit; - - return new LiteQueryable(_engine, _mapper, _collection, query) - .ToEnumerable(); + return ToAsyncEnumerable(this.FindSync(query, skip, limit), cancellationToken); } /// /// Find documents inside a collection using predicate expression. 
/// - public IEnumerable Find(Expression> predicate, int skip = 0, int limit = int.MaxValue) => this.Find(_mapper.GetExpression(predicate), skip, limit); + public IAsyncEnumerable FindAsync(Expression> predicate, int skip = 0, int limit = int.MaxValue, CancellationToken cancellationToken = default) + { + return ToAsyncEnumerable(this.FindSync(predicate, skip, limit), cancellationToken); + } #endregion @@ -60,43 +53,125 @@ public IEnumerable Find(Query query, int skip = 0, int limit = int.MaxValue) /// /// Find a document using Document Id. Returns null if not found. /// - public T FindById(BsonValue id) + public Task FindByIdAsync(BsonValue id, CancellationToken cancellationToken = default) { if (id == null || id.IsNull) throw new ArgumentNullException(nameof(id)); - return this.Find(BsonExpression.Create("_id = @0", id)).FirstOrDefault(); + return Task.FromResult(this.FindSync(BsonExpression.Create("_id = @0", id), cancellationToken: cancellationToken).FirstOrDefault()); } /// /// Find the first document using predicate expression. Returns null if not found /// - public T FindOne(BsonExpression predicate) => this.Find(predicate).FirstOrDefault(); + public Task FindOneAsync(BsonExpression predicate, CancellationToken cancellationToken = default) + { + if (predicate == null) throw new ArgumentNullException(nameof(predicate)); + + return Task.FromResult(this.FindSync(predicate, cancellationToken: cancellationToken).FirstOrDefault()); + } /// /// Find the first document using predicate expression. Returns null if not found /// - public T FindOne(string predicate, BsonDocument parameters) => this.FindOne(BsonExpression.Create(predicate, parameters)); + public Task FindOneAsync(string predicate, BsonDocument parameters, CancellationToken cancellationToken = default) + { + return Task.FromResult(this.FindSync(BsonExpression.Create(predicate, parameters), cancellationToken: cancellationToken).FirstOrDefault()); + } /// /// Find the first document using predicate expression. Returns null if not found /// - public T FindOne(BsonExpression predicate, params BsonValue[] args) => this.FindOne(BsonExpression.Create(predicate, args)); + public Task FindOneAsync(BsonExpression predicate, CancellationToken cancellationToken = default, params BsonValue[] args) + { + return Task.FromResult(this.FindSync(BsonExpression.Create(predicate, args), cancellationToken: cancellationToken).FirstOrDefault()); + } /// /// Find the first document using predicate expression. Returns null if not found /// - public T FindOne(Expression> predicate) => this.FindOne(_mapper.GetExpression(predicate)); + public Task FindOneAsync(Expression> predicate, CancellationToken cancellationToken = default) + { + return Task.FromResult(this.FindSync(predicate, cancellationToken: cancellationToken).FirstOrDefault()); + } /// /// Find the first document using defined query structure. Returns null if not found /// - public T FindOne(Query query) => this.Find(query).FirstOrDefault(); + public Task FindOneAsync(Query query, CancellationToken cancellationToken = default) + { + if (query == null) throw new ArgumentNullException(nameof(query)); + + return Task.FromResult(this.FindSync(query, cancellationToken: cancellationToken).FirstOrDefault()); + } /// /// Returns all documents inside collection order by _id index. 
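// Consumption sketch for the Find methods above: FindOneAsync returns a single awaited
// document, while FindAsync returns an IAsyncEnumerable enumerated with await foreach.
// The expression, parameter values and field names are illustrative only.
using System.Threading;
using System.Threading.Tasks;
using LiteDB;

public static class FindSketch
{
    public static async Task ListAdultsAsync(ILiteCollection<BsonDocument> col, CancellationToken ct)
    {
        var first = await col.FindOneAsync(BsonExpression.Create("Age >= @0", 18), ct);
        if (first == null) return;

        await foreach (var doc in col.FindAsync(BsonExpression.Create("Age >= @0", 18), cancellationToken: ct))
        {
            System.Console.WriteLine(doc["Name"].AsString);
        }
    }
}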
/// - public IEnumerable FindAll() => this.Query().Include(_includes).ToEnumerable(); + public IAsyncEnumerable FindAllAsync(CancellationToken cancellationToken = default) + { + return ToAsyncEnumerable(this.FindAllSync(), cancellationToken); + } #endregion + + internal IEnumerable FindSync(BsonExpression predicate, int skip = 0, int limit = int.MaxValue, CancellationToken cancellationToken = default) + { + if (predicate == null) throw new ArgumentNullException(nameof(predicate)); + + if (cancellationToken.IsCancellationRequested) cancellationToken.ThrowIfCancellationRequested(); + + var liteQueryable = new LiteQueryable(_engine, _mapper, _collection, new Query()); + + if (_includes.Count > 0) + { + liteQueryable.Include(_includes); + } + + liteQueryable.Where(predicate); + + liteQueryable.Skip(skip); + liteQueryable.Limit(limit); + + return liteQueryable.Enumerate(); + } + + internal IEnumerable FindSync(Query query, int skip = 0, int limit = int.MaxValue, CancellationToken cancellationToken = default) + { + if (query == null) throw new ArgumentNullException(nameof(query)); + + if (skip != 0) query.Offset = skip; + if (limit != int.MaxValue) query.Limit = limit; + + var liteQueryable = new LiteQueryable(_engine, _mapper, _collection, query); + + return liteQueryable.Enumerate(); + } + + internal IEnumerable FindSync(Expression> predicate, int skip = 0, int limit = int.MaxValue, CancellationToken cancellationToken = default) + { + return this.FindSync(_mapper.GetExpression(predicate), skip, limit, cancellationToken); + } + + internal IEnumerable FindAllSync() + { + var liteQueryable = new LiteQueryable(_engine, _mapper, _collection, new Query()); + + if (_includes.Count > 0) + { + liteQueryable.Include(_includes); + } + + return liteQueryable.Enumerate(); + } + + private static async IAsyncEnumerable ToAsyncEnumerable(IEnumerable source, [EnumeratorCancellation] CancellationToken cancellationToken) + { + foreach (var item in source) + { + cancellationToken.ThrowIfCancellationRequested(); + yield return item; + await Task.Yield(); + } + } } -} \ No newline at end of file +} diff --git a/LiteDB/Client/Database/Collections/Index.cs b/LiteDB/Client/Database/Collections/Index.cs index aac550567..4a7146b34 100644 --- a/LiteDB/Client/Database/Collections/Index.cs +++ b/LiteDB/Client/Database/Collections/Index.cs @@ -1,8 +1,10 @@ -using System; +using System; using System.Collections.Generic; using System.Linq; using System.Linq.Expressions; using System.Text.RegularExpressions; +using System.Threading; +using System.Threading.Tasks; using static LiteDB.Constants; namespace LiteDB @@ -15,12 +17,14 @@ public partial class LiteCollection /// Index name - unique name for this collection /// Create a custom expression function to be indexed /// If is a unique index - public bool EnsureIndex(string name, BsonExpression expression, bool unique = false) + /// A token to cancel the asynchronous operation. 
+ public Task EnsureIndexAsync(string name, BsonExpression expression, bool unique = false, CancellationToken cancellationToken = default) { if (string.IsNullOrEmpty(name)) throw new ArgumentNullException(nameof(name)); if (expression == null) throw new ArgumentNullException(nameof(expression)); + cancellationToken.ThrowIfCancellationRequested(); - return _engine.EnsureIndex(_collection, name, expression, unique); + return _engine.EnsureIndexAsync(_collection, name, expression, unique, cancellationToken); } /// @@ -28,13 +32,14 @@ public bool EnsureIndex(string name, BsonExpression expression, bool unique = fa /// /// Document field/expression /// If is a unique index - public bool EnsureIndex(BsonExpression expression, bool unique = false) + /// A token to cancel the asynchronous operation. + public Task EnsureIndexAsync(BsonExpression expression, bool unique = false, CancellationToken cancellationToken = default) { if (expression == null) throw new ArgumentNullException(nameof(expression)); - var name = Regex.Replace(expression.Source, @"[^a-z0-9]", "", RegexOptions.IgnoreCase | RegexOptions.Compiled); + var name = Regex.Replace(expression.Source, @"[^a-z0-9]", string.Empty, RegexOptions.IgnoreCase | RegexOptions.Compiled); - return this.EnsureIndex(name, expression, unique); + return this.EnsureIndexAsync(name, expression, unique, cancellationToken); } /// @@ -42,11 +47,12 @@ public bool EnsureIndex(BsonExpression expression, bool unique = false) /// /// LinqExpression to be converted into BsonExpression to be indexed /// Create a unique keys index? - public bool EnsureIndex(Expression> keySelector, bool unique = false) + /// A token to cancel the asynchronous operation. + public Task EnsureIndexAsync(Expression> keySelector, bool unique = false, CancellationToken cancellationToken = default) { var expression = this.GetIndexExpression(keySelector); - return this.EnsureIndex(expression, unique); + return this.EnsureIndexAsync(expression, unique, cancellationToken); } /// @@ -55,11 +61,12 @@ public bool EnsureIndex(Expression> keySelector, bool unique = fal /// Index name - unique name for this collection /// LinqExpression to be converted into BsonExpression to be indexed /// Create a unique keys index? - public bool EnsureIndex(string name, Expression> keySelector, bool unique = false) + /// A token to cancel the asynchronous operation. + public Task EnsureIndexAsync(string name, Expression> keySelector, bool unique = false, CancellationToken cancellationToken = default) { var expression = this.GetIndexExpression(keySelector); - return this.EnsureIndex(name, expression, unique); + return this.EnsureIndexAsync(name, expression, unique, cancellationToken); } /// @@ -73,9 +80,6 @@ private BsonExpression GetIndexExpression(Expression> keySelector) { if (expression.Type == BsonExpressionType.Path) { - // convert LINQ expression that returns an IEnumerable but expression returns a single value - // `x => x.Phones` --> `$.Phones[*]` - // works only if exression is a simple path expression = expression.Source + "[*]"; } else @@ -90,9 +94,13 @@ private BsonExpression GetIndexExpression(Expression> keySelector) /// /// Drop index and release slot for another index /// - public bool DropIndex(string name) + /// The index name to drop. + /// A token to cancel the asynchronous operation. 
+ public Task DropIndexAsync(string name, CancellationToken cancellationToken = default) { - return _engine.DropIndex(_collection, name); + cancellationToken.ThrowIfCancellationRequested(); + + return _engine.DropIndexAsync(_collection, name, cancellationToken); } } -} \ No newline at end of file +} diff --git a/LiteDB/Client/Database/Collections/Insert.cs b/LiteDB/Client/Database/Collections/Insert.cs index f77e96688..e16ef54ee 100644 --- a/LiteDB/Client/Database/Collections/Insert.cs +++ b/LiteDB/Client/Database/Collections/Insert.cs @@ -1,6 +1,8 @@ using System; using System.Collections.Generic; using System.Linq; +using System.Threading; +using System.Threading.Tasks; using static LiteDB.Constants; namespace LiteDB @@ -10,21 +12,22 @@ public partial class LiteCollection /// /// Insert a new entity to this collection. Document Id must be a new value in collection - Returns document Id /// - public BsonValue Insert(T entity) + public async Task InsertAsync(T entity, CancellationToken cancellationToken = default) { if (entity == null) throw new ArgumentNullException(nameof(entity)); + cancellationToken.ThrowIfCancellationRequested(); var doc = _mapper.ToDocument(entity); var removed = this.RemoveDocId(doc); - _engine.Insert(_collection, new[] { doc }, _autoId); + await _engine.InsertAsync(_collection, new[] { doc }, _autoId, cancellationToken).ConfigureAwait(false); var id = doc["_id"]; // checks if must update _id value in entity if (removed) { - _id.Setter(entity, id.RawValue); + _id?.Setter(entity, id.RawValue); } return id; @@ -33,46 +36,51 @@ public BsonValue Insert(T entity) /// /// Insert a new document to this collection using passed id value. /// - public void Insert(BsonValue id, T entity) + public async Task InsertAsync(BsonValue id, T entity, CancellationToken cancellationToken = default) { if (entity == null) throw new ArgumentNullException(nameof(entity)); if (id == null || id.IsNull) throw new ArgumentNullException(nameof(id)); + cancellationToken.ThrowIfCancellationRequested(); var doc = _mapper.ToDocument(entity); doc["_id"] = id; - _engine.Insert(_collection, new [] { doc }, _autoId); + await _engine.InsertAsync(_collection, new [] { doc }, _autoId, cancellationToken).ConfigureAwait(false); + + return; } /// /// Insert an array of new documents to this collection. Document Id must be a new value in collection. Can be set buffer size to commit at each N documents /// - public int Insert(IEnumerable entities) + public Task InsertAsync(IEnumerable entities, CancellationToken cancellationToken = default) { if (entities == null) throw new ArgumentNullException(nameof(entities)); - return _engine.Insert(_collection, this.GetBsonDocs(entities), _autoId); + return _engine.InsertAsync(_collection, this.GetBsonDocs(entities, cancellationToken), _autoId, cancellationToken); } /// /// Implements bulk insert documents in a collection. Usefull when need lots of documents. 
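// Usage sketch combining the index methods above with InsertAsync: EnsureIndexAsync
// creates the index only if it is missing, and DropIndexAsync releases the slot again.
// The collection shape, index name and document values are illustrative only.
using System.Threading.Tasks;
using LiteDB;

public static class IndexSketch
{
    public static async Task SeedAsync(ILiteCollection<BsonDocument> col)
    {
        await col.EnsureIndexAsync("Name", "$.Name");
        await col.InsertAsync(new BsonDocument { ["Name"] = "John" });

        // later, if the index is no longer needed:
        await col.DropIndexAsync("Name");
    }
}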
/// - [Obsolete("Use normal Insert()")] - public int InsertBulk(IEnumerable entities, int batchSize = 5000) + [Obsolete("Use InsertAsync()")] + public Task InsertBulkAsync(IEnumerable entities, int batchSize = 5000, CancellationToken cancellationToken = default) { if (entities == null) throw new ArgumentNullException(nameof(entities)); - return _engine.Insert(_collection, this.GetBsonDocs(entities), _autoId); + return _engine.InsertAsync(_collection, this.GetBsonDocs(entities, cancellationToken), _autoId, cancellationToken); } /// /// Convert each T document in a BsonDocument, setting autoId for each one /// - private IEnumerable GetBsonDocs(IEnumerable documents) + private IEnumerable GetBsonDocs(IEnumerable documents, CancellationToken cancellationToken) { foreach (var document in documents) { + cancellationToken.ThrowIfCancellationRequested(); + var doc = _mapper.ToDocument(document); var removed = this.RemoveDocId(doc); diff --git a/LiteDB/Client/Database/Collections/Update.cs b/LiteDB/Client/Database/Collections/Update.cs index 7e02b0289..45ae966a8 100644 --- a/LiteDB/Client/Database/Collections/Update.cs +++ b/LiteDB/Client/Database/Collections/Update.cs @@ -1,7 +1,8 @@ -using System; +using System; using System.Collections.Generic; -using System.Linq; using System.Linq.Expressions; +using System.Threading; +using System.Threading.Tasks; using static LiteDB.Constants; namespace LiteDB @@ -11,68 +12,81 @@ public partial class LiteCollection /// /// Update a document in this collection. Returns false if not found document in collection /// - public bool Update(T entity) + public async Task UpdateAsync(T entity, CancellationToken cancellationToken = default) { if (entity == null) throw new ArgumentNullException(nameof(entity)); + cancellationToken.ThrowIfCancellationRequested(); - // get BsonDocument from object var doc = _mapper.ToDocument(entity); - return _engine.Update(_collection, new BsonDocument[] { doc }) > 0; + var result = await _engine.UpdateAsync(_collection, new BsonDocument[] { doc }, cancellationToken).ConfigureAwait(false); + + return result > 0; } /// /// Update a document in this collection. Returns false if not found document in collection /// - public bool Update(BsonValue id, T entity) + public async Task UpdateAsync(BsonValue id, T entity, CancellationToken cancellationToken = default) { if (entity == null) throw new ArgumentNullException(nameof(entity)); if (id == null || id.IsNull) throw new ArgumentNullException(nameof(id)); + cancellationToken.ThrowIfCancellationRequested(); - // get BsonDocument from object var doc = _mapper.ToDocument(entity); - // set document _id using id parameter doc["_id"] = id; - return _engine.Update(_collection, new BsonDocument[] { doc }) > 0; + var result = await _engine.UpdateAsync(_collection, new BsonDocument[] { doc }, cancellationToken).ConfigureAwait(false); + + return result > 0; } /// /// Update all documents /// - public int Update(IEnumerable entities) + public Task UpdateAsync(IEnumerable entities, CancellationToken cancellationToken = default) { if (entities == null) throw new ArgumentNullException(nameof(entities)); - return _engine.Update(_collection, entities.Select(x => _mapper.ToDocument(x))); + var docs = new List(); + + foreach (var entity in entities) + { + cancellationToken.ThrowIfCancellationRequested(); + docs.Add(_mapper.ToDocument(entity)); + } + + return _engine.UpdateAsync(_collection, docs, cancellationToken); } /// /// Update many documents based on transform expression. 
This expression must return a new document that will be replaced over current document (according with predicate). /// Eg: col.UpdateMany("{ Name: UPPER($.Name), Age }", "_id > 0") /// - public int UpdateMany(BsonExpression transform, BsonExpression predicate) + public Task UpdateManyAsync(BsonExpression transform, BsonExpression predicate, CancellationToken cancellationToken = default) { if (transform == null) throw new ArgumentNullException(nameof(transform)); if (predicate == null) throw new ArgumentNullException(nameof(predicate)); + cancellationToken.ThrowIfCancellationRequested(); if (transform.Type != BsonExpressionType.Document) { - throw new ArgumentException("Extend expression must return a document. Eg: `col.UpdateMany('{ Name: UPPER(Name) }', 'Age > 10')`"); + throw new ArgumentException("Extend expression must return a document. Eg: `col.UpdateMany('{ Name: UPPER(Name)}', 'Age > 10')`"); } - return _engine.UpdateMany(_collection, transform, predicate); + return _engine.UpdateManyAsync(_collection, transform, predicate, cancellationToken); } /// - /// Update many document based on merge current document with extend expression. Use your class with initializers. + /// Update many document based on merge current document with extend expression. Use your class with initializers. /// Eg: col.UpdateMany(x => new Customer { Name = x.Name.ToUpper(), Salary: 100 }, x => x.Name == "John") /// - public int UpdateMany(Expression> extend, Expression> predicate) + public Task UpdateManyAsync(Expression> extend, Expression> predicate, CancellationToken cancellationToken = default) { if (extend == null) throw new ArgumentNullException(nameof(extend)); if (predicate == null) throw new ArgumentNullException(nameof(predicate)); + cancellationToken.ThrowIfCancellationRequested(); var ext = _mapper.GetExpression(extend); var pred = _mapper.GetExpression(predicate); @@ -82,7 +96,7 @@ public int UpdateMany(Expression> extend, Expression> p throw new ArgumentException("Extend expression must return an anonymous class to be merge with entities. Eg: `col.UpdateMany(x => new { Name = x.Name.ToUpper() }, x => x.Age > 10)`"); } - return _engine.UpdateMany(_collection, ext, pred); + return _engine.UpdateManyAsync(_collection, ext, pred, cancellationToken); } } -} \ No newline at end of file +} diff --git a/LiteDB/Client/Database/Collections/Upsert.cs b/LiteDB/Client/Database/Collections/Upsert.cs index cc2befffc..73dfcbad3 100644 --- a/LiteDB/Client/Database/Collections/Upsert.cs +++ b/LiteDB/Client/Database/Collections/Upsert.cs @@ -1,6 +1,8 @@ using System; using System.Collections.Generic; using System.Linq; +using System.Threading; +using System.Threading.Tasks; using static LiteDB.Constants; namespace LiteDB @@ -10,30 +12,34 @@ public partial class LiteCollection /// /// Insert or Update a document in this collection. 
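// Usage sketch for UpdateManyAsync above: the transform must be a document expression
// and the predicate selects which documents are rewritten, the same rule the sync
// UpdateMany enforced. Field names are illustrative only.
using System.Threading.Tasks;
using LiteDB;

public static class UpdateManySketch
{
    public static async Task UpperCaseNamesAsync(ILiteCollection<BsonDocument> col)
    {
        await col.UpdateManyAsync("{ Name: UPPER($.Name) }", "_id > 0");
    }
}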
/// - public bool Upsert(T entity) + public async Task UpsertAsync(T entity, CancellationToken cancellationToken = default) { if (entity == null) throw new ArgumentNullException(nameof(entity)); + cancellationToken.ThrowIfCancellationRequested(); - return this.Upsert(new T[] { entity }) == 1; + var count = await this.UpsertAsync(new T[] { entity }, cancellationToken).ConfigureAwait(false); + + return count == 1; } /// /// Insert or Update all documents /// - public int Upsert(IEnumerable entities) + public Task UpsertAsync(IEnumerable entities, CancellationToken cancellationToken = default) { if (entities == null) throw new ArgumentNullException(nameof(entities)); - return _engine.Upsert(_collection, this.GetBsonDocs(entities), _autoId); + return _engine.UpsertAsync(_collection, this.GetBsonDocs(entities, cancellationToken), _autoId, cancellationToken); } /// /// Insert or Update a document in this collection. /// - public bool Upsert(BsonValue id, T entity) + public async Task UpsertAsync(BsonValue id, T entity, CancellationToken cancellationToken = default) { if (entity == null) throw new ArgumentNullException(nameof(entity)); if (id == null || id.IsNull) throw new ArgumentNullException(nameof(id)); + cancellationToken.ThrowIfCancellationRequested(); // get BsonDocument from object var doc = _mapper.ToDocument(entity); @@ -41,7 +47,9 @@ public bool Upsert(BsonValue id, T entity) // set document _id using id parameter doc["_id"] = id; - return _engine.Upsert(_collection, new[] { doc }, _autoId) > 0; + var result = await _engine.UpsertAsync(_collection, new[] { doc }, _autoId, cancellationToken).ConfigureAwait(false); + + return result > 0; } } } \ No newline at end of file diff --git a/LiteDB/Client/Database/ILiteCollection.cs b/LiteDB/Client/Database/ILiteCollection.cs index 96dce9f40..c69f44874 100644 --- a/LiteDB/Client/Database/ILiteCollection.cs +++ b/LiteDB/Client/Database/ILiteCollection.cs @@ -1,6 +1,8 @@ using System; using System.Collections.Generic; using System.Linq.Expressions; +using System.Threading; +using System.Threading.Tasks; namespace LiteDB { @@ -36,64 +38,94 @@ public interface ILiteCollection /// /// Insert or Update a document in this collection. /// - bool Upsert(T entity); + /// The entity to insert or update. + /// A token to cancel the asynchronous operation. + Task UpsertAsync(T entity, CancellationToken cancellationToken = default); /// /// Insert or Update all documents /// - int Upsert(IEnumerable entities); + /// The entities to insert or update. + /// A token to cancel the asynchronous operation. + Task UpsertAsync(IEnumerable entities, CancellationToken cancellationToken = default); /// /// Insert or Update a document in this collection. /// - bool Upsert(BsonValue id, T entity); + /// The identifier of the entity. + /// The entity to insert or update. + /// A token to cancel the asynchronous operation. + Task UpsertAsync(BsonValue id, T entity, CancellationToken cancellationToken = default); /// /// Update a document in this collection. Returns false if not found document in collection /// - bool Update(T entity); + /// The entity to update. + /// A token to cancel the asynchronous operation. + Task UpdateAsync(T entity, CancellationToken cancellationToken = default); /// /// Update a document in this collection. Returns false if not found document in collection /// - bool Update(BsonValue id, T entity); + /// The identifier of the entity. + /// The entity to update. + /// A token to cancel the asynchronous operation. 
+ Task UpdateAsync(BsonValue id, T entity, CancellationToken cancellationToken = default); /// /// Update all documents /// - int Update(IEnumerable entities); + /// The entities to update. + /// A token to cancel the asynchronous operation. + Task UpdateAsync(IEnumerable entities, CancellationToken cancellationToken = default); /// /// Update many documents based on transform expression. This expression must return a new document that will be replaced over current document (according with predicate). /// Eg: col.UpdateMany("{ Name: UPPER($.Name), Age }", "_id > 0") /// - int UpdateMany(BsonExpression transform, BsonExpression predicate); + /// The expression that produces the replacement document. + /// The filter expression that selects documents to update. + /// A token to cancel the asynchronous operation. + Task UpdateManyAsync(BsonExpression transform, BsonExpression predicate, CancellationToken cancellationToken = default); /// - /// Update many document based on merge current document with extend expression. Use your class with initializers. + /// Update many document based on merge current document with extend expression. Use your class with initializers. /// Eg: col.UpdateMany(x => new Customer { Name = x.Name.ToUpper(), Salary: 100 }, x => x.Name == "John") /// - int UpdateMany(Expression> extend, Expression> predicate); + /// The expression that merges values into the existing document. + /// The filter expression that selects documents to update. + /// A token to cancel the asynchronous operation. + Task UpdateManyAsync(Expression> extend, Expression> predicate, CancellationToken cancellationToken = default); /// /// Insert a new entity to this collection. Document Id must be a new value in collection - Returns document Id /// - BsonValue Insert(T entity); + /// The entity to insert. + /// A token to cancel the asynchronous operation. + Task InsertAsync(T entity, CancellationToken cancellationToken = default); /// /// Insert a new document to this collection using passed id value. /// - void Insert(BsonValue id, T entity); + /// The identifier to assign to the new entity. + /// The entity to insert. + /// A token to cancel the asynchronous operation. + Task InsertAsync(BsonValue id, T entity, CancellationToken cancellationToken = default); /// /// Insert an array of new documents to this collection. Document Id must be a new value in collection. Can be set buffer size to commit at each N documents /// - int Insert(IEnumerable entities); + /// The entities to insert. + /// A token to cancel the asynchronous operation. + Task InsertAsync(IEnumerable entities, CancellationToken cancellationToken = default); /// /// Implements bulk insert documents in a collection. Usefull when need lots of documents. /// - int InsertBulk(IEnumerable entities, int batchSize = 5000); + /// The entities to insert. + /// The number of documents to batch together per transaction. + /// A token to cancel the asynchronous operation. + Task InsertBulkAsync(IEnumerable entities, int batchSize = 5000, CancellationToken cancellationToken = default); /// /// Create a new permanent index in all documents inside this collections if index not exists already. 
Returns true if index was created or false if already exits @@ -101,21 +133,24 @@ public interface ILiteCollection /// Index name - unique name for this collection /// Create a custom expression function to be indexed /// If is a unique index - bool EnsureIndex(string name, BsonExpression expression, bool unique = false); + /// A token to cancel the asynchronous operation. + Task EnsureIndexAsync(string name, BsonExpression expression, bool unique = false, CancellationToken cancellationToken = default); /// /// Create a new permanent index in all documents inside this collections if index not exists already. Returns true if index was created or false if already exits /// /// Document field/expression /// If is a unique index - bool EnsureIndex(BsonExpression expression, bool unique = false); + /// A token to cancel the asynchronous operation. + Task EnsureIndexAsync(BsonExpression expression, bool unique = false, CancellationToken cancellationToken = default); /// /// Create a new permanent index in all documents inside this collections if index not exists already. /// /// LinqExpression to be converted into BsonExpression to be indexed /// Create a unique keys index? - bool EnsureIndex(Expression> keySelector, bool unique = false); + /// A token to cancel the asynchronous operation. + Task EnsureIndexAsync(Expression> keySelector, bool unique = false, CancellationToken cancellationToken = default); /// /// Create a new permanent index in all documents inside this collections if index not exists already. @@ -123,12 +158,15 @@ public interface ILiteCollection /// Index name - unique name for this collection /// LinqExpression to be converted into BsonExpression to be indexed /// Create a unique keys index? - bool EnsureIndex(string name, Expression> keySelector, bool unique = false); + /// A token to cancel the asynchronous operation. + Task EnsureIndexAsync(string name, Expression> keySelector, bool unique = false, CancellationToken cancellationToken = default); /// /// Drop index and release slot for another index /// - bool DropIndex(string name); + /// The name of the index to drop. + /// A token to cancel the asynchronous operation. + Task DropIndexAsync(string name, CancellationToken cancellationToken = default); /// /// Return a new LiteQueryable to build more complex queries @@ -138,196 +176,284 @@ public interface ILiteCollection /// /// Find documents inside a collection using predicate expression. /// - IEnumerable Find(BsonExpression predicate, int skip = 0, int limit = int.MaxValue); + /// The filter expression to apply. + /// The number of documents to skip. + /// The maximum number of documents to return. + /// A token to cancel the asynchronous operation. + IAsyncEnumerable FindAsync(BsonExpression predicate, int skip = 0, int limit = int.MaxValue, CancellationToken cancellationToken = default); /// /// Find documents inside a collection using query definition. /// - IEnumerable Find(Query query, int skip = 0, int limit = int.MaxValue); + /// The query definition to execute. + /// The number of documents to skip. + /// The maximum number of documents to return. + /// A token to cancel the asynchronous operation. + IAsyncEnumerable FindAsync(Query query, int skip = 0, int limit = int.MaxValue, CancellationToken cancellationToken = default); /// /// Find documents inside a collection using predicate expression. /// - IEnumerable Find(Expression> predicate, int skip = 0, int limit = int.MaxValue); + /// The filter expression to apply. + /// The number of documents to skip. 
+ /// The maximum number of documents to return. + /// A token to cancel the asynchronous operation. + IAsyncEnumerable FindAsync(Expression> predicate, int skip = 0, int limit = int.MaxValue, CancellationToken cancellationToken = default); /// /// Find a document using Document Id. Returns null if not found. /// - T FindById(BsonValue id); + /// The identifier of the document. + /// A token to cancel the asynchronous operation. + Task FindByIdAsync(BsonValue id, CancellationToken cancellationToken = default); /// /// Find the first document using predicate expression. Returns null if not found /// - T FindOne(BsonExpression predicate); + /// The filter expression to apply. + /// A token to cancel the asynchronous operation. + Task FindOneAsync(BsonExpression predicate, CancellationToken cancellationToken = default); /// /// Find the first document using predicate expression. Returns null if not found /// - T FindOne(string predicate, BsonDocument parameters); + /// The filter expression to apply. + /// Parameters used in the expression. + /// A token to cancel the asynchronous operation. + Task FindOneAsync(string predicate, BsonDocument parameters, CancellationToken cancellationToken = default); /// /// Find the first document using predicate expression. Returns null if not found /// - T FindOne(BsonExpression predicate, params BsonValue[] args); + /// The filter expression to apply. + /// Parameters used in the expression. + /// A token to cancel the asynchronous operation. + Task FindOneAsync(BsonExpression predicate, CancellationToken cancellationToken = default, params BsonValue[] args); /// /// Find the first document using predicate expression. Returns null if not found /// - T FindOne(Expression> predicate); + /// The filter expression to apply. + /// A token to cancel the asynchronous operation. + Task FindOneAsync(Expression> predicate, CancellationToken cancellationToken = default); /// /// Find the first document using defined query structure. Returns null if not found /// - T FindOne(Query query); + /// The query definition to execute. + /// A token to cancel the asynchronous operation. + Task FindOneAsync(Query query, CancellationToken cancellationToken = default); /// /// Returns all documents inside collection order by _id index. /// - IEnumerable FindAll(); + /// A token to cancel the asynchronous operation. + IAsyncEnumerable FindAllAsync(CancellationToken cancellationToken = default); /// /// Delete a single document on collection based on _id index. Returns true if document was deleted /// - bool Delete(BsonValue id); + /// The identifier of the document to delete. + /// A token to cancel the asynchronous operation. + Task DeleteAsync(BsonValue id, CancellationToken cancellationToken = default); /// /// Delete all documents inside collection. Returns how many documents was deleted. Run inside current transaction /// - int DeleteAll(); + /// A token to cancel the asynchronous operation. + Task DeleteAllAsync(CancellationToken cancellationToken = default); /// /// Delete all documents based on predicate expression. Returns how many documents was deleted /// - int DeleteMany(BsonExpression predicate); + /// The filter expression to apply. + /// A token to cancel the asynchronous operation. + Task DeleteManyAsync(BsonExpression predicate, CancellationToken cancellationToken = default); /// /// Delete all documents based on predicate expression. 
Returns how many documents was deleted /// - int DeleteMany(string predicate, BsonDocument parameters); + /// The filter expression to apply. + /// Parameters used in the expression. + /// A token to cancel the asynchronous operation. + Task DeleteManyAsync(string predicate, BsonDocument parameters, CancellationToken cancellationToken = default); /// /// Delete all documents based on predicate expression. Returns how many documents was deleted /// - int DeleteMany(string predicate, params BsonValue[] args); + /// The filter expression to apply. + /// Parameters used in the expression. + /// A token to cancel the asynchronous operation. + Task DeleteManyAsync(string predicate, CancellationToken cancellationToken = default, params BsonValue[] args); /// /// Delete all documents based on predicate expression. Returns how many documents was deleted /// - int DeleteMany(Expression> predicate); + /// The filter expression to apply. + /// A token to cancel the asynchronous operation. + Task DeleteManyAsync(Expression> predicate, CancellationToken cancellationToken = default); /// /// Get document count using property on collection. /// - int Count(); + /// A token to cancel the asynchronous operation. + Task CountAsync(CancellationToken cancellationToken = default); /// /// Count documents matching a query. This method does not deserialize any document. Needs indexes on query expression /// - int Count(BsonExpression predicate); + /// The filter expression to apply. + /// A token to cancel the asynchronous operation. + Task CountAsync(BsonExpression predicate, CancellationToken cancellationToken = default); /// /// Count documents matching a query. This method does not deserialize any document. Needs indexes on query expression /// - int Count(string predicate, BsonDocument parameters); + /// The filter expression to apply. + /// Parameters used in the expression. + /// A token to cancel the asynchronous operation. + Task CountAsync(string predicate, BsonDocument parameters, CancellationToken cancellationToken = default); /// /// Count documents matching a query. This method does not deserialize any document. Needs indexes on query expression /// - int Count(string predicate, params BsonValue[] args); + /// The filter expression to apply. + /// Parameters used in the expression. + /// A token to cancel the asynchronous operation. + Task CountAsync(string predicate, CancellationToken cancellationToken = default, params BsonValue[] args); /// /// Count documents matching a query. This method does not deserialize any documents. Needs indexes on query expression /// - int Count(Expression> predicate); + /// The filter expression to apply. + /// A token to cancel the asynchronous operation. + Task CountAsync(Expression> predicate, CancellationToken cancellationToken = default); /// /// Count documents matching a query. This method does not deserialize any documents. Needs indexes on query expression /// - int Count(Query query); + /// The query definition to execute. + /// A token to cancel the asynchronous operation. + Task CountAsync(Query query, CancellationToken cancellationToken = default); /// /// Get document count using property on collection. /// - long LongCount(); + /// A token to cancel the asynchronous operation. + Task LongCountAsync(CancellationToken cancellationToken = default); /// /// Count documents matching a query. This method does not deserialize any documents. Needs indexes on query expression /// - long LongCount(BsonExpression predicate); + /// The filter expression to apply. 
+ /// A token to cancel the asynchronous operation. + Task LongCountAsync(BsonExpression predicate, CancellationToken cancellationToken = default); /// /// Count documents matching a query. This method does not deserialize any documents. Needs indexes on query expression /// - long LongCount(string predicate, BsonDocument parameters); + /// The filter expression to apply. + /// Parameters used in the expression. + /// A token to cancel the asynchronous operation. + Task LongCountAsync(string predicate, BsonDocument parameters, CancellationToken cancellationToken = default); /// /// Count documents matching a query. This method does not deserialize any documents. Needs indexes on query expression /// - long LongCount(string predicate, params BsonValue[] args); + /// The filter expression to apply. + /// Parameters used in the expression. + /// A token to cancel the asynchronous operation. + Task LongCountAsync(string predicate, CancellationToken cancellationToken = default, params BsonValue[] args); /// /// Count documents matching a query. This method does not deserialize any documents. Needs indexes on query expression /// - long LongCount(Expression> predicate); + /// The filter expression to apply. + /// A token to cancel the asynchronous operation. + Task LongCountAsync(Expression> predicate, CancellationToken cancellationToken = default); /// /// Count documents matching a query. This method does not deserialize any documents. Needs indexes on query expression /// - long LongCount(Query query); + /// The query definition to execute. + /// A token to cancel the asynchronous operation. + Task LongCountAsync(Query query, CancellationToken cancellationToken = default); /// /// Returns true if query returns any document. This method does not deserialize any document. Needs indexes on query expression /// - bool Exists(BsonExpression predicate); + /// The filter expression to apply. + /// A token to cancel the asynchronous operation. + Task ExistsAsync(BsonExpression predicate, CancellationToken cancellationToken = default); /// /// Returns true if query returns any document. This method does not deserialize any document. Needs indexes on query expression /// - bool Exists(string predicate, BsonDocument parameters); + /// The filter expression to apply. + /// Parameters used in the expression. + /// A token to cancel the asynchronous operation. + Task ExistsAsync(string predicate, BsonDocument parameters, CancellationToken cancellationToken = default); /// /// Returns true if query returns any document. This method does not deserialize any document. Needs indexes on query expression /// - bool Exists(string predicate, params BsonValue[] args); + /// The filter expression to apply. + /// Parameters used in the expression. + /// A token to cancel the asynchronous operation. + Task ExistsAsync(string predicate, CancellationToken cancellationToken = default, params BsonValue[] args); /// /// Returns true if query returns any document. This method does not deserialize any document. Needs indexes on query expression /// - bool Exists(Expression> predicate); + /// The filter expression to apply. + /// A token to cancel the asynchronous operation. + Task ExistsAsync(Expression> predicate, CancellationToken cancellationToken = default); /// /// Returns true if query returns any document. This method does not deserialize any document. Needs indexes on query expression /// - bool Exists(Query query); + /// The query definition to execute. + /// A token to cancel the asynchronous operation. 
+ Task ExistsAsync(Query query, CancellationToken cancellationToken = default); /// /// Returns the min value from specified key value in collection /// - BsonValue Min(BsonExpression keySelector); + /// The expression that selects the field to evaluate. + /// A token to cancel the asynchronous operation. + Task MinAsync(BsonExpression keySelector, CancellationToken cancellationToken = default); /// /// Returns the min value of _id index /// - BsonValue Min(); + /// A token to cancel the asynchronous operation. + Task MinAsync(CancellationToken cancellationToken = default); /// /// Returns the min value from specified key value in collection /// - K Min(Expression> keySelector); + /// The expression that selects the field to evaluate. + /// A token to cancel the asynchronous operation. + Task MinAsync(Expression> keySelector, CancellationToken cancellationToken = default); /// /// Returns the max value from specified key value in collection /// - BsonValue Max(BsonExpression keySelector); + /// The expression that selects the field to evaluate. + /// A token to cancel the asynchronous operation. + Task MaxAsync(BsonExpression keySelector, CancellationToken cancellationToken = default); /// /// Returns the max _id index key value /// - BsonValue Max(); + /// A token to cancel the asynchronous operation. + Task MaxAsync(CancellationToken cancellationToken = default); /// /// Returns the last/max field using a linq expression /// - K Max(Expression> keySelector); + /// The expression that selects the field to evaluate. + /// A token to cancel the asynchronous operation. + Task MaxAsync(Expression> keySelector, CancellationToken cancellationToken = default); } } \ No newline at end of file diff --git a/LiteDB/Client/Database/ILiteDatabase.cs b/LiteDB/Client/Database/ILiteDatabase.cs index 0fb056c79..e28e4715e 100644 --- a/LiteDB/Client/Database/ILiteDatabase.cs +++ b/LiteDB/Client/Database/ILiteDatabase.cs @@ -1,11 +1,13 @@ using System; using System.Collections.Generic; using System.IO; +using System.Threading; +using System.Threading.Tasks; using LiteDB.Engine; namespace LiteDB { - public interface ILiteDatabase : IDisposable + public interface ILiteDatabase : IDisposable, IAsyncDisposable { /// /// Get current instance of BsonMapper used in this database instance (can be BsonMapper.Global) @@ -45,17 +47,17 @@ public interface ILiteDatabase : IDisposable /// Initialize a new transaction. Transaction are created "per-thread". There is only one single transaction per thread. /// Return true if transaction was created or false if current thread already in a transaction. /// - bool BeginTrans(); + Task BeginTransAsync(CancellationToken cancellationToken = default); /// /// Commit current transaction /// - bool Commit(); + Task CommitAsync(CancellationToken cancellationToken = default); /// /// Rollback current transaction /// - bool Rollback(); + Task RollbackAsync(CancellationToken cancellationToken = default); /// /// Get new instance of Storage using custom FileId type, custom "_files" collection name and custom "_chunks" collection. LiteDB support multiples file storages (using different files/chunks collection names) @@ -85,27 +87,32 @@ public interface ILiteDatabase : IDisposable /// /// Execute SQL commands and return as data reader. 
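// --------------------------------------------------------------------------------------
// Illustrative consumer sketch (not part of the patch). The async members used here
// (EnsureIndexAsync, UpsertAsync, FindAsync, CountAsync, BeginTransAsync, CommitAsync,
// RollbackAsync, and IAsyncDisposable on ILiteDatabase) are the ones declared in this
// diff; the Customer POCO, the "demo.db" path and the collection name are hypothetical.
using System;
using System.Threading;
using System.Threading.Tasks;
using LiteDB;

public class Customer
{
    public int Id { get; set; }
    public string Name { get; set; }
}

public static class AsyncUsageSketch
{
    public static async Task RunAsync(CancellationToken cancellationToken = default)
    {
        // ILiteDatabase now implements IAsyncDisposable, so "await using" is available.
        await using var db = new LiteDatabase("demo.db");

        var customers = db.GetCollection<Customer>("customers");

        // Index creation and writes are awaited instead of blocking the caller.
        await customers.EnsureIndexAsync(x => x.Name, false, cancellationToken);

        await db.BeginTransAsync(cancellationToken);
        try
        {
            await customers.UpsertAsync(new Customer { Id = 1, Name = "Ana" });
            await db.CommitAsync(cancellationToken);
        }
        catch
        {
            await db.RollbackAsync(cancellationToken);
            throw;
        }

        // FindAsync returns an IAsyncEnumerable<T>, consumed with await foreach.
        await foreach (var customer in customers.FindAsync(x => x.Name != null, cancellationToken: cancellationToken))
        {
            Console.WriteLine(customer.Name);
        }

        var total = await customers.CountAsync(cancellationToken);
        Console.WriteLine($"{total} customers");
    }
}
// --------------------------------------------------------------------------------------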
/// - IBsonDataReader Execute(TextReader commandReader, BsonDocument parameters = null); + Task ExecuteAsync(TextReader commandReader, BsonDocument parameters = null, CancellationToken cancellationToken = default); /// /// Execute SQL commands and return as data reader /// - IBsonDataReader Execute(string command, BsonDocument parameters = null); + Task ExecuteAsync(string command, BsonDocument parameters = null, CancellationToken cancellationToken = default); /// /// Execute SQL commands and return as data reader /// - IBsonDataReader Execute(string command, params BsonValue[] args); + Task ExecuteAsync(string command, CancellationToken cancellationToken, params BsonValue[] args); + + /// + /// Execute SQL commands and return as data reader + /// + Task ExecuteAsync(string command, params BsonValue[] args); /// /// Do database checkpoint. Copy all commited transaction from log file into datafile. /// - void Checkpoint(); + Task CheckpointAsync(CancellationToken cancellationToken = default); /// /// Rebuild all database to remove unused pages - reduce data file /// - long Rebuild(RebuildOptions options = null); + Task RebuildAsync(RebuildOptions options = null, CancellationToken cancellationToken = default); /// /// Get value from internal engine variables diff --git a/LiteDB/Client/Database/ILiteQueryable.cs b/LiteDB/Client/Database/ILiteQueryable.cs index 46de8e413..629f8a85f 100644 --- a/LiteDB/Client/Database/ILiteQueryable.cs +++ b/LiteDB/Client/Database/ILiteQueryable.cs @@ -2,6 +2,8 @@ using System.Collections.Generic; using System.Linq; using System.Linq.Expressions; +using System.Threading; +using System.Threading.Tasks; namespace LiteDB { @@ -39,22 +41,22 @@ public interface ILiteQueryableResult ILiteQueryableResult Offset(int offset); ILiteQueryableResult ForUpdate(); - BsonDocument GetPlan(); - IBsonDataReader ExecuteReader(); - IEnumerable ToDocuments(); - IEnumerable ToEnumerable(); - List ToList(); - T[] ToArray(); + Task GetPlanAsync(CancellationToken cancellationToken = default); + Task ExecuteReaderAsync(CancellationToken cancellationToken = default); + IAsyncEnumerable ToDocumentsAsync(CancellationToken cancellationToken = default); + IAsyncEnumerable ToAsyncEnumerable(CancellationToken cancellationToken = default); + Task> ToListAsync(CancellationToken cancellationToken = default); + Task ToArrayAsync(CancellationToken cancellationToken = default); - int Into(string newCollection, BsonAutoId autoId = BsonAutoId.ObjectId); + Task IntoAsync(string newCollection, BsonAutoId autoId = BsonAutoId.ObjectId, CancellationToken cancellationToken = default); - T First(); - T FirstOrDefault(); - T Single(); - T SingleOrDefault(); + Task FirstAsync(CancellationToken cancellationToken = default); + Task FirstOrDefaultAsync(CancellationToken cancellationToken = default); + Task SingleAsync(CancellationToken cancellationToken = default); + Task SingleOrDefaultAsync(CancellationToken cancellationToken = default); - int Count(); - long LongCount(); - bool Exists(); + Task CountAsync(CancellationToken cancellationToken = default); + Task LongCountAsync(CancellationToken cancellationToken = default); + Task ExistsAsync(CancellationToken cancellationToken = default); } } \ No newline at end of file diff --git a/LiteDB/Client/Database/LiteCollectionSyncExtensions.cs b/LiteDB/Client/Database/LiteCollectionSyncExtensions.cs new file mode 100644 index 000000000..36b40db1a --- /dev/null +++ b/LiteDB/Client/Database/LiteCollectionSyncExtensions.cs @@ -0,0 +1,527 @@ +using System; +using 
System.Collections.Generic; +using System.Linq; +using System.Linq.Expressions; +using System.Threading; + +namespace LiteDB +{ + /// + /// Temporary synchronous shims that bridge existing call sites to the new asynchronous-first collection contract. + /// + public static class LiteCollectionSyncExtensions + { + [Obsolete("Use UpsertAsync and await the result instead of blocking.")] + public static bool Upsert(this ILiteCollection collection, T entity) + { + if (collection == null) throw new ArgumentNullException(nameof(collection)); + + return collection.UpsertAsync(entity).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use UpsertAsync and await the result instead of blocking.")] + public static int Upsert(this ILiteCollection collection, IEnumerable entities) + { + if (collection == null) throw new ArgumentNullException(nameof(collection)); + + return collection.UpsertAsync(entities).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use UpsertAsync and await the result instead of blocking.")] + public static bool Upsert(this ILiteCollection collection, BsonValue id, T entity) + { + if (collection == null) throw new ArgumentNullException(nameof(collection)); + + return collection.UpsertAsync(id, entity).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use UpdateAsync and await the result instead of blocking.")] + public static bool Update(this ILiteCollection collection, T entity) + { + if (collection == null) throw new ArgumentNullException(nameof(collection)); + + return collection.UpdateAsync(entity).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use UpdateAsync and await the result instead of blocking.")] + public static bool Update(this ILiteCollection collection, BsonValue id, T entity) + { + if (collection == null) throw new ArgumentNullException(nameof(collection)); + + return collection.UpdateAsync(id, entity).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use UpdateAsync and await the result instead of blocking.")] + public static int Update(this ILiteCollection collection, IEnumerable entities) + { + if (collection == null) throw new ArgumentNullException(nameof(collection)); + + return collection.UpdateAsync(entities).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use UpdateManyAsync and await the result instead of blocking.")] + public static int UpdateMany(this ILiteCollection collection, BsonExpression transform, BsonExpression predicate) + { + if (collection == null) throw new ArgumentNullException(nameof(collection)); + + return collection.UpdateManyAsync(transform, predicate).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use UpdateManyAsync and await the result instead of blocking.")] + public static int UpdateMany(this ILiteCollection collection, Expression> extend, Expression> predicate) + { + if (collection == null) throw new ArgumentNullException(nameof(collection)); + + return collection.UpdateManyAsync(extend, predicate).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use InsertAsync and await the result instead of blocking.")] + public static BsonValue Insert(this ILiteCollection collection, T entity) + { + if (collection == null) throw new ArgumentNullException(nameof(collection)); + + return collection.InsertAsync(entity).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use InsertAsync and await the result instead of blocking.")] + public static void Insert(this ILiteCollection collection, 
BsonValue id, T entity) + { + if (collection == null) throw new ArgumentNullException(nameof(collection)); + + collection.InsertAsync(id, entity).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use InsertAsync and await the result instead of blocking.")] + public static int Insert(this ILiteCollection collection, IEnumerable entities) + { + if (collection == null) throw new ArgumentNullException(nameof(collection)); + + return collection.InsertAsync(entities).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use InsertBulkAsync and await the result instead of blocking.")] + public static int InsertBulk(this ILiteCollection collection, IEnumerable entities, int batchSize = 5000) + { + if (collection == null) throw new ArgumentNullException(nameof(collection)); + + return collection.InsertBulkAsync(entities, batchSize).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use EnsureIndexAsync and await the result instead of blocking.")] + public static bool EnsureIndex(this ILiteCollection collection, string name, BsonExpression expression, bool unique = false) + { + if (collection == null) throw new ArgumentNullException(nameof(collection)); + + return collection.EnsureIndexAsync(name, expression, unique).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use EnsureIndexAsync and await the result instead of blocking.")] + public static bool EnsureIndex(this ILiteCollection collection, BsonExpression expression, bool unique = false) + { + if (collection == null) throw new ArgumentNullException(nameof(collection)); + + return collection.EnsureIndexAsync(expression, unique).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use EnsureIndexAsync and await the result instead of blocking.")] + public static bool EnsureIndex(this ILiteCollection collection, Expression> keySelector, bool unique = false) + { + if (collection == null) throw new ArgumentNullException(nameof(collection)); + + return collection.EnsureIndexAsync(keySelector, unique).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use EnsureIndexAsync and await the result instead of blocking.")] + public static bool EnsureIndex(this ILiteCollection collection, string name, Expression> keySelector, bool unique = false) + { + if (collection == null) throw new ArgumentNullException(nameof(collection)); + + return collection.EnsureIndexAsync(name, keySelector, unique).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use DropIndexAsync and await the result instead of blocking.")] + public static bool DropIndex(this ILiteCollection collection, string name) + { + if (collection == null) throw new ArgumentNullException(nameof(collection)); + + return collection.DropIndexAsync(name).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use FindAsync and await the result instead of blocking.")] + public static IEnumerable Find(this ILiteCollection collection, BsonExpression predicate, int skip = 0, int limit = int.MaxValue) + { + if (collection == null) throw new ArgumentNullException(nameof(collection)); + + if (collection is LiteCollection liteCollection) + { + return liteCollection.FindSync(predicate, skip, limit); + } + + return Materialize(collection.FindAsync(predicate, skip, limit)); + } + + [Obsolete("Use FindAsync and await the result instead of blocking.")] + public static IEnumerable Find(this ILiteCollection collection, Query query, int skip = 0, int limit = int.MaxValue) + { + if (collection == null) throw 
new ArgumentNullException(nameof(collection)); + + if (collection is LiteCollection liteCollection) + { + return liteCollection.FindSync(query, skip, limit); + } + + return Materialize(collection.FindAsync(query, skip, limit)); + } + + [Obsolete("Use FindAsync and await the result instead of blocking.")] + public static IEnumerable Find(this ILiteCollection collection, Expression> predicate, int skip = 0, int limit = int.MaxValue) + { + if (collection == null) throw new ArgumentNullException(nameof(collection)); + + if (collection is LiteCollection liteCollection) + { + return liteCollection.FindSync(predicate, skip, limit); + } + + return Materialize(collection.FindAsync(predicate, skip, limit)); + } + + [Obsolete("Use FindByIdAsync and await the result instead of blocking.")] + public static T FindById(this ILiteCollection collection, BsonValue id) + { + if (collection == null) throw new ArgumentNullException(nameof(collection)); + + return collection.FindByIdAsync(id).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use FindOneAsync and await the result instead of blocking.")] + public static T FindOne(this ILiteCollection collection, BsonExpression predicate) + { + if (collection == null) throw new ArgumentNullException(nameof(collection)); + + if (collection is LiteCollection liteCollection) + { + return liteCollection.FindSync(predicate).FirstOrDefault(); + } + + return collection.FindOneAsync(predicate).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use FindOneAsync and await the result instead of blocking.")] + public static T FindOne(this ILiteCollection collection, string predicate, BsonDocument parameters) + { + if (collection == null) throw new ArgumentNullException(nameof(collection)); + + if (collection is LiteCollection liteCollection) + { + return liteCollection.FindSync(BsonExpression.Create(predicate, parameters)).FirstOrDefault(); + } + + return collection.FindOneAsync(predicate, parameters).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use FindOneAsync and await the result instead of blocking.")] + public static T FindOne(this ILiteCollection collection, BsonExpression predicate, params BsonValue[] args) + { + if (collection == null) throw new ArgumentNullException(nameof(collection)); + + if (collection is LiteCollection liteCollection) + { + return liteCollection.FindSync(BsonExpression.Create(predicate, args)).FirstOrDefault(); + } + + return collection.FindOneAsync(predicate, default, args).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use FindOneAsync and await the result instead of blocking.")] + public static T FindOne(this ILiteCollection collection, Expression> predicate) + { + if (collection == null) throw new ArgumentNullException(nameof(collection)); + + if (collection is LiteCollection liteCollection) + { + return liteCollection.FindSync(predicate).FirstOrDefault(); + } + + return collection.FindOneAsync(predicate).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use FindOneAsync and await the result instead of blocking.")] + public static T FindOne(this ILiteCollection collection, Query query) + { + if (collection == null) throw new ArgumentNullException(nameof(collection)); + + if (collection is LiteCollection liteCollection) + { + return liteCollection.FindSync(query).FirstOrDefault(); + } + + return collection.FindOneAsync(query).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use FindAllAsync and await the result instead of blocking.")] 
+ public static IEnumerable FindAll(this ILiteCollection collection) + { + if (collection == null) throw new ArgumentNullException(nameof(collection)); + + if (collection is LiteCollection liteCollection) + { + return liteCollection.FindAllSync(); + } + + return Materialize(collection.FindAllAsync()); + } + + [Obsolete("Use DeleteAsync and await the result instead of blocking.")] + public static bool Delete(this ILiteCollection collection, BsonValue id) + { + if (collection == null) throw new ArgumentNullException(nameof(collection)); + + return collection.DeleteAsync(id).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use DeleteAllAsync and await the result instead of blocking.")] + public static int DeleteAll(this ILiteCollection collection) + { + if (collection == null) throw new ArgumentNullException(nameof(collection)); + + return collection.DeleteAllAsync().ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use DeleteManyAsync and await the result instead of blocking.")] + public static int DeleteMany(this ILiteCollection collection, BsonExpression predicate) + { + if (collection == null) throw new ArgumentNullException(nameof(collection)); + + return collection.DeleteManyAsync(predicate).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use DeleteManyAsync and await the result instead of blocking.")] + public static int DeleteMany(this ILiteCollection collection, string predicate, BsonDocument parameters) + { + if (collection == null) throw new ArgumentNullException(nameof(collection)); + + return collection.DeleteManyAsync(predicate, parameters).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use DeleteManyAsync and await the result instead of blocking.")] + public static int DeleteMany(this ILiteCollection collection, string predicate, params BsonValue[] args) + { + if (collection == null) throw new ArgumentNullException(nameof(collection)); + + return collection.DeleteManyAsync(predicate, default, args).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use DeleteManyAsync and await the result instead of blocking.")] + public static int DeleteMany(this ILiteCollection collection, Expression> predicate) + { + if (collection == null) throw new ArgumentNullException(nameof(collection)); + + return collection.DeleteManyAsync(predicate).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use CountAsync and await the result instead of blocking.")] + public static int Count(this ILiteCollection collection) + { + if (collection == null) throw new ArgumentNullException(nameof(collection)); + + return collection.CountAsync().ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use CountAsync and await the result instead of blocking.")] + public static int Count(this ILiteCollection collection, BsonExpression predicate) + { + if (collection == null) throw new ArgumentNullException(nameof(collection)); + + return collection.CountAsync(predicate).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use CountAsync and await the result instead of blocking.")] + public static int Count(this ILiteCollection collection, string predicate, BsonDocument parameters) + { + if (collection == null) throw new ArgumentNullException(nameof(collection)); + + return collection.CountAsync(predicate, parameters).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use CountAsync and await the result instead of blocking.")] + public static int Count(this ILiteCollection 
collection, string predicate, params BsonValue[] args) + { + if (collection == null) throw new ArgumentNullException(nameof(collection)); + + return collection.CountAsync(predicate, default, args).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use CountAsync and await the result instead of blocking.")] + public static int Count(this ILiteCollection collection, Expression> predicate) + { + if (collection == null) throw new ArgumentNullException(nameof(collection)); + + return collection.CountAsync(predicate).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use CountAsync and await the result instead of blocking.")] + public static int Count(this ILiteCollection collection, Query query) + { + if (collection == null) throw new ArgumentNullException(nameof(collection)); + + return collection.CountAsync(query).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use LongCountAsync and await the result instead of blocking.")] + public static long LongCount(this ILiteCollection collection) + { + if (collection == null) throw new ArgumentNullException(nameof(collection)); + + return collection.LongCountAsync().ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use LongCountAsync and await the result instead of blocking.")] + public static long LongCount(this ILiteCollection collection, BsonExpression predicate) + { + if (collection == null) throw new ArgumentNullException(nameof(collection)); + + return collection.LongCountAsync(predicate).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use LongCountAsync and await the result instead of blocking.")] + public static long LongCount(this ILiteCollection collection, string predicate, BsonDocument parameters) + { + if (collection == null) throw new ArgumentNullException(nameof(collection)); + + return collection.LongCountAsync(predicate, parameters).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use LongCountAsync and await the result instead of blocking.")] + public static long LongCount(this ILiteCollection collection, string predicate, params BsonValue[] args) + { + if (collection == null) throw new ArgumentNullException(nameof(collection)); + + return collection.LongCountAsync(predicate, default, args).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use LongCountAsync and await the result instead of blocking.")] + public static long LongCount(this ILiteCollection collection, Expression> predicate) + { + if (collection == null) throw new ArgumentNullException(nameof(collection)); + + return collection.LongCountAsync(predicate).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use LongCountAsync and await the result instead of blocking.")] + public static long LongCount(this ILiteCollection collection, Query query) + { + if (collection == null) throw new ArgumentNullException(nameof(collection)); + + return collection.LongCountAsync(query).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use ExistsAsync and await the result instead of blocking.")] + public static bool Exists(this ILiteCollection collection, BsonExpression predicate) + { + if (collection == null) throw new ArgumentNullException(nameof(collection)); + + return collection.ExistsAsync(predicate).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use ExistsAsync and await the result instead of blocking.")] + public static bool Exists(this ILiteCollection collection, string predicate, BsonDocument parameters) + { + 
if (collection == null) throw new ArgumentNullException(nameof(collection)); + + return collection.ExistsAsync(predicate, parameters).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use ExistsAsync and await the result instead of blocking.")] + public static bool Exists(this ILiteCollection collection, string predicate, params BsonValue[] args) + { + if (collection == null) throw new ArgumentNullException(nameof(collection)); + + return collection.ExistsAsync(predicate, default, args).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use ExistsAsync and await the result instead of blocking.")] + public static bool Exists(this ILiteCollection collection, Expression> predicate) + { + if (collection == null) throw new ArgumentNullException(nameof(collection)); + + return collection.ExistsAsync(predicate).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use ExistsAsync and await the result instead of blocking.")] + public static bool Exists(this ILiteCollection collection, Query query) + { + if (collection == null) throw new ArgumentNullException(nameof(collection)); + + return collection.ExistsAsync(query).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use MinAsync and await the result instead of blocking.")] + public static BsonValue Min(this ILiteCollection collection, BsonExpression keySelector) + { + if (collection == null) throw new ArgumentNullException(nameof(collection)); + + return collection.MinAsync(keySelector).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use MinAsync and await the result instead of blocking.")] + public static BsonValue Min(this ILiteCollection collection) + { + if (collection == null) throw new ArgumentNullException(nameof(collection)); + + return collection.MinAsync().ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use MinAsync and await the result instead of blocking.")] + public static K Min(this ILiteCollection collection, Expression> keySelector) + { + if (collection == null) throw new ArgumentNullException(nameof(collection)); + + return collection.MinAsync(keySelector).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use MaxAsync and await the result instead of blocking.")] + public static BsonValue Max(this ILiteCollection collection, BsonExpression keySelector) + { + if (collection == null) throw new ArgumentNullException(nameof(collection)); + + return collection.MaxAsync(keySelector).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use MaxAsync and await the result instead of blocking.")] + public static BsonValue Max(this ILiteCollection collection) + { + if (collection == null) throw new ArgumentNullException(nameof(collection)); + + return collection.MaxAsync().ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use MaxAsync and await the result instead of blocking.")] + public static K Max(this ILiteCollection collection, Expression> keySelector) + { + if (collection == null) throw new ArgumentNullException(nameof(collection)); + + return collection.MaxAsync(keySelector).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + private static IEnumerable Materialize(IAsyncEnumerable source) + { + var list = new List(); + var enumerator = source.GetAsyncEnumerator(CancellationToken.None); + + try + { + while (enumerator.MoveNextAsync().ConfigureAwait(false).GetAwaiter().GetResult()) + { + list.Add(enumerator.Current); + } + } + finally + { + 
enumerator.DisposeAsync().ConfigureAwait(false).GetAwaiter().GetResult(); + } + + return list; + } + } +} diff --git a/LiteDB/Client/Database/LiteDatabase.cs b/LiteDB/Client/Database/LiteDatabase.cs index 812ae629c..797fafbfe 100644 --- a/LiteDB/Client/Database/LiteDatabase.cs +++ b/LiteDB/Client/Database/LiteDatabase.cs @@ -4,6 +4,7 @@ using System.IO; using System.Linq; using System.Threading; +using System.Threading.Tasks; using LiteDB.Engine; using static LiteDB.Constants; @@ -128,17 +129,32 @@ public ILiteCollection GetCollection(string name, BsonAutoId autoI /// Initialize a new transaction. Transaction are created "per-thread". There is only one single transaction per thread. /// Return true if transaction was created or false if current thread already in a transaction. /// - public bool BeginTrans() => _engine.BeginTrans(); + public Task BeginTransAsync(CancellationToken cancellationToken = default) + { + cancellationToken.ThrowIfCancellationRequested(); + + return _engine.BeginTransAsync(cancellationToken); + } /// /// Commit current transaction /// - public bool Commit() => _engine.Commit(); + public Task CommitAsync(CancellationToken cancellationToken = default) + { + cancellationToken.ThrowIfCancellationRequested(); + + return _engine.CommitAsync(cancellationToken); + } /// /// Rollback current transaction /// - public bool Rollback() => _engine.Rollback(); + public Task RollbackAsync(CancellationToken cancellationToken = default) + { + cancellationToken.ThrowIfCancellationRequested(); + + return _engine.RollbackAsync(cancellationToken); + } #endregion @@ -199,7 +215,7 @@ public bool DropCollection(string name) { if (name.IsNullOrWhiteSpace()) throw new ArgumentNullException(nameof(name)); - return _engine.DropCollection(name); + return this.RunSync(() => _engine.DropCollectionAsync(name)); } /// @@ -210,7 +226,7 @@ public bool RenameCollection(string oldName, string newName) if (oldName.IsNullOrWhiteSpace()) throw new ArgumentNullException(nameof(oldName)); if (newName.IsNullOrWhiteSpace()) throw new ArgumentNullException(nameof(newName)); - return _engine.RenameCollection(oldName, newName); + return this.RunSync(() => _engine.RenameCollectionAsync(oldName, newName)); } #endregion @@ -220,46 +236,62 @@ public bool RenameCollection(string oldName, string newName) /// /// Execute SQL commands and return as data reader. 
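// --------------------------------------------------------------------------------------
// Background sketch (not part of the patch): the [Obsolete] shims above bridge legacy
// callers to the async contract by blocking on the returned task and by draining
// IAsyncEnumerable<T> into a list (see Materialize/RunSync). The pattern is reduced to
// its core below; the helper names are hypothetical.
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;

internal static class SyncBridgeSketch
{
    // Block synchronously on an async call. Sync-over-async like this can deadlock on
    // single-threaded synchronization contexts, which is why the shims are marked
    // [Obsolete] and callers are steered toward awaiting instead.
    public static T RunSync<T>(Func<Task<T>> action)
    {
        return action().ConfigureAwait(false).GetAwaiter().GetResult();
    }

    // Eagerly buffer an async sequence so it can be handed to IEnumerable<T> callers.
    public static IReadOnlyList<T> Drain<T>(IAsyncEnumerable<T> source)
    {
        var list = new List<T>();
        var enumerator = source.GetAsyncEnumerator(CancellationToken.None);

        try
        {
            while (enumerator.MoveNextAsync().ConfigureAwait(false).GetAwaiter().GetResult())
            {
                list.Add(enumerator.Current);
            }
        }
        finally
        {
            enumerator.DisposeAsync().ConfigureAwait(false).GetAwaiter().GetResult();
        }

        return list;
    }
}
// --------------------------------------------------------------------------------------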
/// - public IBsonDataReader Execute(TextReader commandReader, BsonDocument parameters = null) + public Task ExecuteAsync(TextReader commandReader, BsonDocument parameters = null, CancellationToken cancellationToken = default) { if (commandReader == null) throw new ArgumentNullException(nameof(commandReader)); + cancellationToken.ThrowIfCancellationRequested(); + var tokenizer = new Tokenizer(commandReader); var sql = new SqlParser(_engine, tokenizer, parameters); var reader = sql.Execute(); - return reader; + return Task.FromResult(reader); } /// /// Execute SQL commands and return as data reader /// - public IBsonDataReader Execute(string command, BsonDocument parameters = null) + public Task ExecuteAsync(string command, BsonDocument parameters = null, CancellationToken cancellationToken = default) { if (command == null) throw new ArgumentNullException(nameof(command)); + cancellationToken.ThrowIfCancellationRequested(); + var tokenizer = new Tokenizer(command); var sql = new SqlParser(_engine, tokenizer, parameters); var reader = sql.Execute(); - return reader; + return Task.FromResult(reader); } /// /// Execute SQL commands and return as data reader /// - public IBsonDataReader Execute(string command, params BsonValue[] args) + public Task ExecuteAsync(string command, CancellationToken cancellationToken, params BsonValue[] args) { + if (command == null) throw new ArgumentNullException(nameof(command)); + + cancellationToken.ThrowIfCancellationRequested(); + var p = new BsonDocument(); var index = 0; - foreach (var arg in args) + foreach (var arg in args ?? Array.Empty()) { p[index.ToString()] = arg; index++; } - return this.Execute(command, p); + return this.ExecuteAsync(command, p, cancellationToken); + } + + /// + /// Execute SQL commands and return as data reader + /// + public Task ExecuteAsync(string command, params BsonValue[] args) + { + return this.ExecuteAsync(command, default, args); } #endregion @@ -269,17 +301,21 @@ public IBsonDataReader Execute(string command, params BsonValue[] args) /// /// Do database checkpoint. Copy all commited transaction from log file into datafile. /// - public void Checkpoint() + public Task CheckpointAsync(CancellationToken cancellationToken = default) { - _engine.Checkpoint(); + cancellationToken.ThrowIfCancellationRequested(); + + return _engine.CheckpointAsync(cancellationToken); } /// /// Rebuild all database to remove unused pages - reduce data file /// - public long Rebuild(RebuildOptions options = null) + public Task RebuildAsync(RebuildOptions options = null, CancellationToken cancellationToken = default) { - return _engine.Rebuild(options ?? new RebuildOptions()); + cancellationToken.ThrowIfCancellationRequested(); + + return _engine.RebuildAsync(options ?? 
new RebuildOptions(), cancellationToken); } #endregion @@ -291,7 +327,7 @@ public long Rebuild(RebuildOptions options = null) /// public BsonValue Pragma(string name) { - return _engine.Pragma(name); + return this.RunSync(() => _engine.PragmaAsync(name)); } /// @@ -299,7 +335,7 @@ public BsonValue Pragma(string name) /// public BsonValue Pragma(string name, BsonValue value) { - return _engine.Pragma(name, value); + return this.RunSync(() => _engine.PragmaAsync(name, value)); } /// @@ -307,8 +343,8 @@ public BsonValue Pragma(string name, BsonValue value) /// public int UserVersion { - get => _engine.Pragma(Pragmas.USER_VERSION); - set => _engine.Pragma(Pragmas.USER_VERSION, value); + get => this.RunSync(() => _engine.PragmaAsync(Pragmas.USER_VERSION)); + set => this.RunSync(() => _engine.PragmaAsync(Pragmas.USER_VERSION, value)); } /// @@ -316,8 +352,8 @@ public int UserVersion /// public TimeSpan Timeout { - get => TimeSpan.FromSeconds(_engine.Pragma(Pragmas.TIMEOUT).AsInt32); - set => _engine.Pragma(Pragmas.TIMEOUT, (int)value.TotalSeconds); + get => TimeSpan.FromSeconds(this.RunSync(() => _engine.PragmaAsync(Pragmas.TIMEOUT)).AsInt32); + set => this.RunSync(() => _engine.PragmaAsync(Pragmas.TIMEOUT, (int)value.TotalSeconds)); } /// @@ -325,8 +361,8 @@ public TimeSpan Timeout /// public bool UtcDate { - get => _engine.Pragma(Pragmas.UTC_DATE); - set => _engine.Pragma(Pragmas.UTC_DATE, value); + get => this.RunSync(() => _engine.PragmaAsync(Pragmas.UTC_DATE)); + set => this.RunSync(() => _engine.PragmaAsync(Pragmas.UTC_DATE, value)); } /// @@ -334,8 +370,8 @@ public bool UtcDate /// public long LimitSize { - get => _engine.Pragma(Pragmas.LIMIT_SIZE); - set => _engine.Pragma(Pragmas.LIMIT_SIZE, value); + get => this.RunSync(() => _engine.PragmaAsync(Pragmas.LIMIT_SIZE)); + set => this.RunSync(() => _engine.PragmaAsync(Pragmas.LIMIT_SIZE, value)); } /// @@ -344,8 +380,8 @@ public long LimitSize /// public int CheckpointSize { - get => _engine.Pragma(Pragmas.CHECKPOINT); - set => _engine.Pragma(Pragmas.CHECKPOINT, value); + get => this.RunSync(() => _engine.PragmaAsync(Pragmas.CHECKPOINT)); + set => this.RunSync(() => _engine.PragmaAsync(Pragmas.CHECKPOINT, value)); } /// @@ -353,7 +389,7 @@ public int CheckpointSize /// public Collation Collation { - get => new Collation(_engine.Pragma(Pragmas.COLLATION).AsString); + get => new Collation(this.RunSync(() => _engine.PragmaAsync(Pragmas.COLLATION)).AsString); } #endregion @@ -364,6 +400,12 @@ public void Dispose() GC.SuppressFinalize(this); } + public ValueTask DisposeAsync() + { + this.Dispose(); + return default; + } + ~LiteDatabase() { this.Dispose(false); @@ -376,5 +418,10 @@ protected virtual void Dispose(bool disposing) _engine.Dispose(); } } + + private T RunSync(Func> action) + { + return action().ConfigureAwait(false).GetAwaiter().GetResult(); + } } } diff --git a/LiteDB/Client/Database/LiteDatabaseSyncExtensions.cs b/LiteDB/Client/Database/LiteDatabaseSyncExtensions.cs new file mode 100644 index 000000000..97893d009 --- /dev/null +++ b/LiteDB/Client/Database/LiteDatabaseSyncExtensions.cs @@ -0,0 +1,86 @@ +using System; +using System.IO; +using System.Threading; +using System.Threading.Tasks; +using LiteDB.Engine; + +namespace LiteDB +{ + /// + /// Temporary synchronous shims that bridge existing call sites to the new asynchronous-first database contract. 
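// --------------------------------------------------------------------------------------
// Migration sketch (not part of the patch): existing call sites keep compiling through
// the [Obsolete] extension shims declared below, while new code awaits the async
// members directly. The method bodies are hypothetical examples.
using System.Threading;
using System.Threading.Tasks;
using LiteDB;

public static class MigrationSketch
{
    // Legacy style: resolves to the synchronous extension shims and raises CS0618
    // obsolete warnings, nudging callers toward the async surface.
    public static void Legacy(ILiteDatabase db)
    {
#pragma warning disable CS0618
        db.BeginTrans();
        db.Commit();
        db.Checkpoint();
#pragma warning restore CS0618
    }

    // Target style after migration: the same operations, awaited.
    public static async Task MigratedAsync(ILiteDatabase db, CancellationToken ct = default)
    {
        await db.BeginTransAsync(ct);
        await db.CommitAsync(ct);
        await db.CheckpointAsync(ct);
    }
}
// --------------------------------------------------------------------------------------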
+ /// + public static class LiteDatabaseSyncExtensions + { + [Obsolete("Use BeginTransAsync and await the result instead of blocking.")] + public static bool BeginTrans(this ILiteDatabase database, CancellationToken cancellationToken = default) + { + if (database == null) throw new ArgumentNullException(nameof(database)); + + return database.BeginTransAsync(cancellationToken).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use CommitAsync and await the result instead of blocking.")] + public static bool Commit(this ILiteDatabase database, CancellationToken cancellationToken = default) + { + if (database == null) throw new ArgumentNullException(nameof(database)); + + return database.CommitAsync(cancellationToken).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use RollbackAsync and await the result instead of blocking.")] + public static bool Rollback(this ILiteDatabase database, CancellationToken cancellationToken = default) + { + if (database == null) throw new ArgumentNullException(nameof(database)); + + return database.RollbackAsync(cancellationToken).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use ExecuteAsync(TextReader, ...) and await the result instead of blocking.")] + public static IBsonDataReader Execute(this ILiteDatabase database, TextReader commandReader, BsonDocument parameters = null, CancellationToken cancellationToken = default) + { + if (database == null) throw new ArgumentNullException(nameof(database)); + + return database.ExecuteAsync(commandReader, parameters, cancellationToken).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use ExecuteAsync(string, BsonDocument, ...) and await the result instead of blocking.")] + public static IBsonDataReader Execute(this ILiteDatabase database, string command, BsonDocument parameters = null, CancellationToken cancellationToken = default) + { + if (database == null) throw new ArgumentNullException(nameof(database)); + + return database.ExecuteAsync(command, parameters, cancellationToken).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use ExecuteAsync(string, params BsonValue[]) and await the result instead of blocking.")] + public static IBsonDataReader Execute(this ILiteDatabase database, string command, CancellationToken cancellationToken, params BsonValue[] args) + { + if (database == null) throw new ArgumentNullException(nameof(database)); + + return database.ExecuteAsync(command, cancellationToken, args).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use ExecuteAsync(string, params BsonValue[]) and await the result instead of blocking.")] + public static IBsonDataReader Execute(this ILiteDatabase database, string command, params BsonValue[] args) + { + if (database == null) throw new ArgumentNullException(nameof(database)); + + return database.ExecuteAsync(command, args).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use CheckpointAsync and await the result instead of blocking.")] + public static void Checkpoint(this ILiteDatabase database, CancellationToken cancellationToken = default) + { + if (database == null) throw new ArgumentNullException(nameof(database)); + + database.CheckpointAsync(cancellationToken).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use RebuildAsync and await the result instead of blocking.")] + public static long Rebuild(this ILiteDatabase database, RebuildOptions options = null, CancellationToken cancellationToken = default) + { + if (database 
== null) throw new ArgumentNullException(nameof(database)); + + return database.RebuildAsync(options, cancellationToken).ConfigureAwait(false).GetAwaiter().GetResult(); + } + } +} diff --git a/LiteDB/Client/Database/LiteQueryable.cs b/LiteDB/Client/Database/LiteQueryable.cs index 07624b34b..9182ce514 100644 --- a/LiteDB/Client/Database/LiteQueryable.cs +++ b/LiteDB/Client/Database/LiteQueryable.cs @@ -5,6 +5,9 @@ using System.Linq; using System.Linq.Expressions; using System.Reflection; +using System.Runtime.CompilerServices; +using System.Threading; +using System.Threading.Tasks; using static LiteDB.Constants; namespace LiteDB @@ -268,71 +271,86 @@ public ILiteQueryableResult Limit(int limit) /// /// Execute query and returns resultset as generic BsonDataReader /// - public IBsonDataReader ExecuteReader() + public Task ExecuteReaderAsync(CancellationToken cancellationToken = default) { - _query.ExplainPlan = false; + cancellationToken.ThrowIfCancellationRequested(); - return _engine.Query(_collection, _query); + return _engine.QueryAsync(_collection, _query, cancellationToken); } /// - /// Execute query and return resultset as IEnumerable of documents + /// Execute query and return resultset as asynchronous sequence of documents /// - public IEnumerable ToDocuments() + public async IAsyncEnumerable ToDocumentsAsync([EnumeratorCancellation] CancellationToken cancellationToken = default) { - using (var reader = this.ExecuteReader()) + await foreach (var document in this.EnumerateDocumentsAsync(cancellationToken).ConfigureAwait(false)) { - while (reader.Read()) - { - yield return reader.Current as BsonDocument; - } + yield return document; } } /// - /// Execute query and return resultset as IEnumerable of T. If T is a ValueType or String, return values only (not documents) + /// Execute query and return resultset as asynchronous sequence of T. 
If T is a ValueType or String, return values only (not documents) /// - public IEnumerable ToEnumerable() + public async IAsyncEnumerable ToAsyncEnumerable([EnumeratorCancellation] CancellationToken cancellationToken = default) { - if (_isSimpleType) + await foreach (var item in this.EnumerateAsync(cancellationToken).ConfigureAwait(false)) { - return this.ToDocuments() - .Select(x => x[x.Keys.First()]) - .Select(x => (T)_mapper.Deserialize(typeof(T), x)); - } - else - { - return this.ToDocuments() - .Select(x => (T)_mapper.Deserialize(typeof(T), x)); + yield return item; } } /// /// Execute query and return results as a List /// - public List ToList() + public async Task> ToListAsync(CancellationToken cancellationToken = default) { - return this.ToEnumerable().ToList(); + cancellationToken.ThrowIfCancellationRequested(); + + var list = new List(); + + await foreach (var item in this.ToAsyncEnumerable(cancellationToken).ConfigureAwait(false)) + { + list.Add(item); + } + + return list; } /// /// Execute query and return results as an Array /// - public T[] ToArray() + public async Task ToArrayAsync(CancellationToken cancellationToken = default) { - return this.ToEnumerable().ToArray(); + var list = await this.ToListAsync(cancellationToken).ConfigureAwait(false); + return list.ToArray(); } /// /// Get execution plan over current query definition to see how engine will execute query /// - public BsonDocument GetPlan() + public async Task GetPlanAsync(CancellationToken cancellationToken = default) { + cancellationToken.ThrowIfCancellationRequested(); + + var previousExplain = _query.ExplainPlan; _query.ExplainPlan = true; - var reader = _engine.Query(_collection, _query); + try + { + await using var reader = await _engine.QueryAsync(_collection, _query, cancellationToken).ConfigureAwait(false); + + if (await reader.ReadAsync(cancellationToken).ConfigureAwait(false)) + { + return reader.Current.AsDocument; + } - return reader.ToEnumerable().FirstOrDefault()?.AsDocument; + return null; + } + finally + { + _query.ExplainPlan = previousExplain; + } } #endregion @@ -342,33 +360,84 @@ public BsonDocument GetPlan() /// /// Returns the only document of resultset, and throw an exception if there not exactly one document in the sequence /// - public T Single() + public async Task SingleAsync(CancellationToken cancellationToken = default) { - return this.ToEnumerable().Single(); + cancellationToken.ThrowIfCancellationRequested(); + + T value = default; + var found = false; + + await foreach (var item in this.ToAsyncEnumerable(cancellationToken).ConfigureAwait(false)) + { + if (found) + { + throw new InvalidOperationException("Sequence contains more than one element"); + } + + found = true; + value = item; + } + + if (!found) + { + throw new InvalidOperationException("Sequence contains no elements"); + } + + return value; } /// /// Returns the only document of resultset, or null if resultset are empty; this method throw an exception if there not exactly one document in the sequence /// - public T SingleOrDefault() + public async Task SingleOrDefaultAsync(CancellationToken cancellationToken = default) { - return this.ToEnumerable().SingleOrDefault(); + cancellationToken.ThrowIfCancellationRequested(); + + T value = default; + var found = false; + + await foreach (var item in this.ToAsyncEnumerable(cancellationToken).ConfigureAwait(false)) + { + if (found) + { + throw new InvalidOperationException("Sequence contains more than one element"); + } + + found = true; + value = item; + } + + return value; } /// 
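// --------------------------------------------------------------------------------------
// Query sketch (not part of the patch): the fluent builder stays synchronous, only the
// terminal operators become awaited. Customer is the same hypothetical POCO as in the
// earlier sketch; the predicate and page size are examples.
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using LiteDB;

public static class QuerySketch
{
    public static async Task RunAsync(ILiteCollection<Customer> customers, CancellationToken ct = default)
    {
        var query = customers.Query()
            .Where(x => x.Name != null)
            .OrderBy(x => x.Name)
            .Limit(10);

        // Materialize a page at once...
        List<Customer> page = await query.ToListAsync(ct);

        // ...or stream items without buffering the whole resultset.
        await foreach (var customer in query.ToAsyncEnumerable(ct))
        {
            Console.WriteLine(customer.Name);
        }

        // Scalar terminals are awaited as well.
        var first = await query.FirstOrDefaultAsync(ct);
        var count = await query.CountAsync(ct);
        var plan = await query.GetPlanAsync(ct);   // BsonDocument describing the query plan
    }
}
// --------------------------------------------------------------------------------------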
/// Returns first document of resultset /// - public T First() + public async Task FirstAsync(CancellationToken cancellationToken = default) { - return this.ToEnumerable().First(); + cancellationToken.ThrowIfCancellationRequested(); + + await foreach (var item in this.ToAsyncEnumerable(cancellationToken).ConfigureAwait(false)) + { + return item; + } + + throw new InvalidOperationException("Sequence contains no elements"); } /// /// Returns first document of resultset or null if resultset are empty /// - public T FirstOrDefault() + public async Task FirstOrDefaultAsync(CancellationToken cancellationToken = default) { - return this.ToEnumerable().FirstOrDefault(); + cancellationToken.ThrowIfCancellationRequested(); + + await foreach (var item in this.ToAsyncEnumerable(cancellationToken).ConfigureAwait(false)) + { + return item; + } + + return default; } #endregion @@ -376,18 +445,24 @@ public T FirstOrDefault() #region Execute Count /// - /// Execute Count methos in filter query + /// Execute Count method in filter query /// - public int Count() + public async Task CountAsync(CancellationToken cancellationToken = default) { + cancellationToken.ThrowIfCancellationRequested(); + var oldSelect = _query.Select; try { this.Select($"{{ count: COUNT(*._id) }}"); - var ret = this.ToDocuments().Single()["count"].AsInt32; - return ret; + await foreach (var doc in this.ToDocumentsAsync(cancellationToken).ConfigureAwait(false)) + { + return doc["count"].AsInt32; + } + + return 0; } finally { @@ -396,18 +471,24 @@ public int Count() } /// - /// Execute Count methos in filter query + /// Execute Count method in filter query /// - public long LongCount() + public async Task LongCountAsync(CancellationToken cancellationToken = default) { + cancellationToken.ThrowIfCancellationRequested(); + var oldSelect = _query.Select; try { this.Select($"{{ count: COUNT(*._id) }}"); - var ret = this.ToDocuments().Single()["count"].AsInt64; - return ret; + await foreach (var doc in this.ToDocumentsAsync(cancellationToken).ConfigureAwait(false)) + { + return doc["count"].AsInt64; + } + + return 0L; } finally { @@ -418,16 +499,22 @@ public long LongCount() /// /// Returns true/false if query returns any result /// - public bool Exists() + public async Task ExistsAsync(CancellationToken cancellationToken = default) { + cancellationToken.ThrowIfCancellationRequested(); + var oldSelect = _query.Select; try { this.Select($"{{ exists: ANY(*._id) }}"); - var ret = this.ToDocuments().Single()["exists"].AsBoolean; - return ret; + await foreach (var doc in this.ToDocumentsAsync(cancellationToken).ConfigureAwait(false)) + { + return doc["exists"].AsBoolean; + } + + return false; } finally { @@ -439,17 +526,89 @@ public bool Exists() #region Execute Into - public int Into(string newCollection, BsonAutoId autoId = BsonAutoId.ObjectId) + public async Task IntoAsync(string newCollection, BsonAutoId autoId = BsonAutoId.ObjectId, CancellationToken cancellationToken = default) { + cancellationToken.ThrowIfCancellationRequested(); + _query.Into = newCollection; _query.IntoAutoId = autoId; - using (var reader = this.ExecuteReader()) + await using var reader = await this.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false); + + return reader.Current.AsInt32; + } + + #endregion + + private IBsonDataReader ExecuteReaderCore() + { + _query.ExplainPlan = false; + + return _engine.QueryAsync(_collection, _query).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + internal async IAsyncEnumerable 
EnumerateDocumentsAsync([EnumeratorCancellation] CancellationToken cancellationToken = default) + { + await using var reader = await this.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false); + + while (await reader.ReadAsync(cancellationToken).ConfigureAwait(false)) { - return reader.Current.AsInt32; + cancellationToken.ThrowIfCancellationRequested(); + + if (reader.Current is BsonDocument document) + { + yield return document; + } + else + { + yield return reader.Current.AsDocument; + } } } - #endregion + internal async IAsyncEnumerable EnumerateAsync([EnumeratorCancellation] CancellationToken cancellationToken = default) + { + if (_isSimpleType) + { + await foreach (var doc in this.EnumerateDocumentsAsync(cancellationToken).ConfigureAwait(false)) + { + var value = doc[doc.Keys.First()]; + yield return (T)_mapper.Deserialize(typeof(T), value); + } + } + else + { + await foreach (var doc in this.EnumerateDocumentsAsync(cancellationToken).ConfigureAwait(false)) + { + yield return (T)_mapper.Deserialize(typeof(T), doc); + } + } + } + + internal IEnumerable EnumerateDocuments() + { + using (var reader = this.ExecuteReaderCore()) + { + while (reader.Read()) + { + yield return reader.Current as BsonDocument; + } + } + } + + internal IEnumerable Enumerate() + { + if (_isSimpleType) + { + return this.EnumerateDocuments() + .Select(x => x[x.Keys.First()]) + .Select(x => (T)_mapper.Deserialize(typeof(T), x)); + } + else + { + return this.EnumerateDocuments() + .Select(x => (T)_mapper.Deserialize(typeof(T), x)); + } + } } } \ No newline at end of file diff --git a/LiteDB/Client/Database/LiteQueryableSyncExtensions.cs b/LiteDB/Client/Database/LiteQueryableSyncExtensions.cs new file mode 100644 index 000000000..276e15222 --- /dev/null +++ b/LiteDB/Client/Database/LiteQueryableSyncExtensions.cs @@ -0,0 +1,156 @@ +using System; +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; + +namespace LiteDB +{ + /// + /// Temporary synchronous shims that bridge the asynchronous queryable surface with legacy callers. 
+ /// + public static class LiteQueryableSyncExtensions + { + [Obsolete("Use ExecuteReaderAsync and await the result instead of blocking.")] + public static IBsonDataReader ExecuteReader(this ILiteQueryableResult queryable, CancellationToken cancellationToken = default) + { + if (queryable == null) throw new ArgumentNullException(nameof(queryable)); + + return queryable.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use ToDocumentsAsync and await the result instead of blocking.")] + public static IEnumerable ToDocuments(this ILiteQueryableResult queryable, CancellationToken cancellationToken = default) + { + if (queryable == null) throw new ArgumentNullException(nameof(queryable)); + + if (queryable is LiteQueryable liteQueryable) + { + cancellationToken.ThrowIfCancellationRequested(); + + return liteQueryable.EnumerateDocuments(); + } + + return Materialize(queryable.ToDocumentsAsync(cancellationToken)); + } + + [Obsolete("Use ToAsyncEnumerable and await the result instead of blocking.")] + public static IEnumerable ToEnumerable(this ILiteQueryableResult queryable, CancellationToken cancellationToken = default) + { + if (queryable == null) throw new ArgumentNullException(nameof(queryable)); + + if (queryable is LiteQueryable liteQueryable) + { + cancellationToken.ThrowIfCancellationRequested(); + + return liteQueryable.Enumerate(); + } + + return Materialize(queryable.ToAsyncEnumerable(cancellationToken)); + } + + [Obsolete("Use ToListAsync and await the result instead of blocking.")] + public static List ToList(this ILiteQueryableResult queryable, CancellationToken cancellationToken = default) + { + if (queryable == null) throw new ArgumentNullException(nameof(queryable)); + + return queryable.ToListAsync(cancellationToken).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use ToArrayAsync and await the result instead of blocking.")] + public static T[] ToArray(this ILiteQueryableResult queryable, CancellationToken cancellationToken = default) + { + if (queryable == null) throw new ArgumentNullException(nameof(queryable)); + + return queryable.ToArrayAsync(cancellationToken).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use GetPlanAsync and await the result instead of blocking.")] + public static BsonDocument GetPlan(this ILiteQueryableResult queryable, CancellationToken cancellationToken = default) + { + if (queryable == null) throw new ArgumentNullException(nameof(queryable)); + + return queryable.GetPlanAsync(cancellationToken).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use FirstAsync and await the result instead of blocking.")] + public static T First(this ILiteQueryableResult queryable, CancellationToken cancellationToken = default) + { + if (queryable == null) throw new ArgumentNullException(nameof(queryable)); + + return queryable.FirstAsync(cancellationToken).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use FirstOrDefaultAsync and await the result instead of blocking.")] + public static T FirstOrDefault(this ILiteQueryableResult queryable, CancellationToken cancellationToken = default) + { + if (queryable == null) throw new ArgumentNullException(nameof(queryable)); + + return queryable.FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use SingleAsync and await the result instead of blocking.")] + public static T Single(this ILiteQueryableResult queryable, CancellationToken 
cancellationToken = default) + { + if (queryable == null) throw new ArgumentNullException(nameof(queryable)); + + return queryable.SingleAsync(cancellationToken).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use SingleOrDefaultAsync and await the result instead of blocking.")] + public static T SingleOrDefault(this ILiteQueryableResult queryable, CancellationToken cancellationToken = default) + { + if (queryable == null) throw new ArgumentNullException(nameof(queryable)); + + return queryable.SingleOrDefaultAsync(cancellationToken).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use CountAsync and await the result instead of blocking.")] + public static int Count(this ILiteQueryableResult queryable, CancellationToken cancellationToken = default) + { + if (queryable == null) throw new ArgumentNullException(nameof(queryable)); + + return queryable.CountAsync(cancellationToken).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use LongCountAsync and await the result instead of blocking.")] + public static long LongCount(this ILiteQueryableResult queryable, CancellationToken cancellationToken = default) + { + if (queryable == null) throw new ArgumentNullException(nameof(queryable)); + + return queryable.LongCountAsync(cancellationToken).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use ExistsAsync and await the result instead of blocking.")] + public static bool Exists(this ILiteQueryableResult queryable, CancellationToken cancellationToken = default) + { + if (queryable == null) throw new ArgumentNullException(nameof(queryable)); + + return queryable.ExistsAsync(cancellationToken).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use IntoAsync and await the result instead of blocking.")] + public static int Into(this ILiteQueryableResult queryable, string newCollection, BsonAutoId autoId = BsonAutoId.ObjectId, CancellationToken cancellationToken = default) + { + if (queryable == null) throw new ArgumentNullException(nameof(queryable)); + + return queryable.IntoAsync(newCollection, autoId, cancellationToken).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + private static IEnumerable Materialize(IAsyncEnumerable source) + { + return MaterializeAsync(source).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + private static async Task> MaterializeAsync(IAsyncEnumerable source) + { + var list = new List(); + + await foreach (var item in source) + { + list.Add(item); + } + + return list; + } + } +} diff --git a/LiteDB/Client/Shared/SharedDataReader.cs b/LiteDB/Client/Shared/SharedDataReader.cs index a81a961ee..84a4bc8ec 100644 --- a/LiteDB/Client/Shared/SharedDataReader.cs +++ b/LiteDB/Client/Shared/SharedDataReader.cs @@ -3,6 +3,8 @@ using System.Collections.Generic; using System.Linq; using System.Linq.Expressions; +using System.Threading; +using System.Threading.Tasks; namespace LiteDB { @@ -29,6 +31,8 @@ public SharedDataReader(IBsonDataReader reader, Action dispose) public bool Read() => _reader.Read(); + public ValueTask ReadAsync(CancellationToken cancellationToken = default) => _reader.ReadAsync(cancellationToken); + public void Dispose() { this.Dispose(true); @@ -52,5 +56,26 @@ protected virtual void Dispose(bool disposing) _dispose(); } } + + public async ValueTask DisposeAsync() + { + if (_disposed) + { + return; + } + + _disposed = true; + + GC.SuppressFinalize(this); + + try + { + _dispose(); + } + finally + { + await _reader.DisposeAsync().ConfigureAwait(false); + } + } } } \ No 
newline at end of file diff --git a/LiteDB/Client/Shared/SharedEngine.cs b/LiteDB/Client/Shared/SharedEngine.cs index c25e7d591..867568058 100644 --- a/LiteDB/Client/Shared/SharedEngine.cs +++ b/LiteDB/Client/Shared/SharedEngine.cs @@ -1,8 +1,9 @@ -using LiteDB.Engine; +using LiteDB.Engine; using System; using System.Collections.Generic; using System.IO; using System.Threading; +using System.Threading.Tasks; #if NETFRAMEWORK using System.Security.AccessControl; using System.Security.Principal; @@ -95,13 +96,15 @@ private void CloseDatabase() #region Transaction Operations - public bool BeginTrans() + public async Task BeginTransAsync(CancellationToken cancellationToken = default) { + cancellationToken.ThrowIfCancellationRequested(); + OpenDatabase(); try { - _transactionRunning = _engine.BeginTrans(); + _transactionRunning = await _engine.BeginTransAsync(cancellationToken).ConfigureAwait(false); return _transactionRunning; } @@ -112,13 +115,15 @@ public bool BeginTrans() } } - public bool Commit() + public async Task CommitAsync(CancellationToken cancellationToken = default) { + cancellationToken.ThrowIfCancellationRequested(); + if (_engine == null) return false; try { - return _engine.Commit(); + return await _engine.CommitAsync(cancellationToken).ConfigureAwait(false); } finally { @@ -127,13 +132,15 @@ public bool Commit() } } - public bool Rollback() + public async Task RollbackAsync(CancellationToken cancellationToken = default) { + cancellationToken.ThrowIfCancellationRequested(); + if (_engine == null) return false; try { - return _engine.Rollback(); + return await _engine.RollbackAsync(cancellationToken).ConfigureAwait(false); } finally { @@ -146,93 +153,107 @@ public bool Rollback() #region Read Operation - public IBsonDataReader Query(string collection, Query query) + public async Task QueryAsync(string collection, Query query, CancellationToken cancellationToken = default) { - bool opened = OpenDatabase(); + cancellationToken.ThrowIfCancellationRequested(); - var reader = _engine.Query(collection, query); + var opened = OpenDatabase(); - return new SharedDataReader(reader, () => + try + { + var reader = await _engine.QueryAsync(collection, query, cancellationToken).ConfigureAwait(false); + + return new SharedDataReader(reader, () => + { + if (opened) + { + CloseDatabase(); + } + }); + } + catch { if (opened) { CloseDatabase(); } - }); + + throw; + } } - public BsonValue Pragma(string name) + public Task PragmaAsync(string name, CancellationToken cancellationToken = default) { - return QueryDatabase(() => _engine.Pragma(name)); + return this.QueryDatabaseAsync(() => _engine.PragmaAsync(name, cancellationToken), cancellationToken); } - public bool Pragma(string name, BsonValue value) + public Task PragmaAsync(string name, BsonValue value, CancellationToken cancellationToken = default) { - return QueryDatabase(() => _engine.Pragma(name, value)); + return this.QueryDatabaseAsync(() => _engine.PragmaAsync(name, value, cancellationToken), cancellationToken); } #endregion #region Write Operations - public int Checkpoint() + public Task CheckpointAsync(CancellationToken cancellationToken = default) { - return QueryDatabase(() => _engine.Checkpoint()); + return this.QueryDatabaseAsync(() => _engine.CheckpointAsync(cancellationToken), cancellationToken); } - public long Rebuild(RebuildOptions options) + public Task RebuildAsync(RebuildOptions options, CancellationToken cancellationToken = default) { - return QueryDatabase(() => _engine.Rebuild(options)); + return 
this.QueryDatabaseAsync(() => _engine.RebuildAsync(options, cancellationToken), cancellationToken); } - public int Insert(string collection, IEnumerable docs, BsonAutoId autoId) + public Task InsertAsync(string collection, IEnumerable docs, BsonAutoId autoId, CancellationToken cancellationToken = default) { - return QueryDatabase(() => _engine.Insert(collection, docs, autoId)); + return this.QueryDatabaseAsync(() => _engine.InsertAsync(collection, docs, autoId, cancellationToken), cancellationToken); } - public int Update(string collection, IEnumerable docs) + public Task UpdateAsync(string collection, IEnumerable docs, CancellationToken cancellationToken = default) { - return QueryDatabase(() => _engine.Update(collection, docs)); + return this.QueryDatabaseAsync(() => _engine.UpdateAsync(collection, docs, cancellationToken), cancellationToken); } - public int UpdateMany(string collection, BsonExpression extend, BsonExpression predicate) + public Task UpdateManyAsync(string collection, BsonExpression extend, BsonExpression predicate, CancellationToken cancellationToken = default) { - return QueryDatabase(() => _engine.UpdateMany(collection, extend, predicate)); + return this.QueryDatabaseAsync(() => _engine.UpdateManyAsync(collection, extend, predicate, cancellationToken), cancellationToken); } - public int Upsert(string collection, IEnumerable docs, BsonAutoId autoId) + public Task UpsertAsync(string collection, IEnumerable docs, BsonAutoId autoId, CancellationToken cancellationToken = default) { - return QueryDatabase(() => _engine.Upsert(collection, docs, autoId)); + return this.QueryDatabaseAsync(() => _engine.UpsertAsync(collection, docs, autoId, cancellationToken), cancellationToken); } - public int Delete(string collection, IEnumerable ids) + public Task DeleteAsync(string collection, IEnumerable ids, CancellationToken cancellationToken = default) { - return QueryDatabase(() => _engine.Delete(collection, ids)); + return this.QueryDatabaseAsync(() => _engine.DeleteAsync(collection, ids, cancellationToken), cancellationToken); } - public int DeleteMany(string collection, BsonExpression predicate) + public Task DeleteManyAsync(string collection, BsonExpression predicate, CancellationToken cancellationToken = default) { - return QueryDatabase(() => _engine.DeleteMany(collection, predicate)); + return this.QueryDatabaseAsync(() => _engine.DeleteManyAsync(collection, predicate, cancellationToken), cancellationToken); } - public bool DropCollection(string name) + public Task DropCollectionAsync(string name, CancellationToken cancellationToken = default) { - return QueryDatabase(() => _engine.DropCollection(name)); + return this.QueryDatabaseAsync(() => _engine.DropCollectionAsync(name, cancellationToken), cancellationToken); } - public bool RenameCollection(string name, string newName) + public Task RenameCollectionAsync(string name, string newName, CancellationToken cancellationToken = default) { - return QueryDatabase(() => _engine.RenameCollection(name, newName)); + return this.QueryDatabaseAsync(() => _engine.RenameCollectionAsync(name, newName, cancellationToken), cancellationToken); } - public bool DropIndex(string collection, string name) + public Task DropIndexAsync(string collection, string name, CancellationToken cancellationToken = default) { - return QueryDatabase(() => _engine.DropIndex(collection, name)); + return this.QueryDatabaseAsync(() => _engine.DropIndexAsync(collection, name, cancellationToken), cancellationToken); } - public bool EnsureIndex(string collection, string 
name, BsonExpression expression, bool unique) + public Task EnsureIndexAsync(string collection, string name, BsonExpression expression, bool unique, CancellationToken cancellationToken = default) { - return QueryDatabase(() => _engine.EnsureIndex(collection, name, expression, unique)); + return this.QueryDatabaseAsync(() => _engine.EnsureIndexAsync(collection, name, expression, unique, cancellationToken), cancellationToken); } #endregion @@ -261,12 +282,26 @@ protected virtual void Dispose(bool disposing) } } - private T QueryDatabase(Func Query) + public async ValueTask DisposeAsync() + { + if (_engine != null) + { + await _engine.DisposeAsync().ConfigureAwait(false); + _engine = null; + _mutex.ReleaseMutex(); + } + + GC.SuppressFinalize(this); + } + + private async Task QueryDatabaseAsync(Func> query, CancellationToken cancellationToken) { - bool opened = OpenDatabase(); + cancellationToken.ThrowIfCancellationRequested(); + + var opened = OpenDatabase(); try { - return Query(); + return await query().ConfigureAwait(false); } finally { @@ -277,4 +312,4 @@ private T QueryDatabase(Func Query) } } } -} \ No newline at end of file +} diff --git a/LiteDB/Client/SqlParser/Commands/Begin.cs b/LiteDB/Client/SqlParser/Commands/Begin.cs index 6003f0e73..8ae4072af 100644 --- a/LiteDB/Client/SqlParser/Commands/Begin.cs +++ b/LiteDB/Client/SqlParser/Commands/Begin.cs @@ -22,7 +22,7 @@ private BsonDataReader ParseBegin() _tokenizer.ReadToken().Expect(TokenType.EOF, TokenType.SemiColon); } - var transactionId = _engine.BeginTrans(); + var transactionId = _engine.BeginTransAsync().ConfigureAwait(false).GetAwaiter().GetResult(); return new BsonDataReader(transactionId); } diff --git a/LiteDB/Client/SqlParser/Commands/Checkpoint.cs b/LiteDB/Client/SqlParser/Commands/Checkpoint.cs index f6e7152c1..6dbba5c49 100644 --- a/LiteDB/Client/SqlParser/Commands/Checkpoint.cs +++ b/LiteDB/Client/SqlParser/Commands/Checkpoint.cs @@ -18,7 +18,7 @@ private BsonDataReader ParseCheckpoint() // read or ; _tokenizer.ReadToken().Expect(TokenType.EOF, TokenType.SemiColon); - var result = _engine.Checkpoint(); + var result = _engine.CheckpointAsync().ConfigureAwait(false).GetAwaiter().GetResult(); return new BsonDataReader(result); } diff --git a/LiteDB/Client/SqlParser/Commands/Commit.cs b/LiteDB/Client/SqlParser/Commands/Commit.cs index 7e34577f7..dd6f39747 100644 --- a/LiteDB/Client/SqlParser/Commands/Commit.cs +++ b/LiteDB/Client/SqlParser/Commands/Commit.cs @@ -22,7 +22,7 @@ private BsonDataReader ParseCommit() _tokenizer.ReadToken().Expect(TokenType.EOF, TokenType.SemiColon); } - var result = _engine.Commit(); + var result = _engine.CommitAsync().ConfigureAwait(false).GetAwaiter().GetResult(); return new BsonDataReader(result); } diff --git a/LiteDB/Client/SqlParser/Commands/Create.cs b/LiteDB/Client/SqlParser/Commands/Create.cs index f71564222..7debb3115 100644 --- a/LiteDB/Client/SqlParser/Commands/Create.cs +++ b/LiteDB/Client/SqlParser/Commands/Create.cs @@ -45,7 +45,7 @@ private BsonDataReader ParseCreate() // read EOF or ; _tokenizer.ReadToken().Expect(TokenType.EOF, TokenType.SemiColon); - var result = _engine.EnsureIndex(collection, name, expr, unique); + var result = _engine.EnsureIndexAsync(collection, name, expr, unique).ConfigureAwait(false).GetAwaiter().GetResult(); return new BsonDataReader(result); } diff --git a/LiteDB/Client/SqlParser/Commands/Delete.cs b/LiteDB/Client/SqlParser/Commands/Delete.cs index ec4318f2a..b18da89da 100644 --- a/LiteDB/Client/SqlParser/Commands/Delete.cs +++ 
b/LiteDB/Client/SqlParser/Commands/Delete.cs @@ -31,7 +31,7 @@ private BsonDataReader ParseDelete() _tokenizer.ReadToken(); - var result = _engine.DeleteMany(collection, where); + var result = _engine.DeleteManyAsync(collection, where).ConfigureAwait(false).GetAwaiter().GetResult(); return new BsonDataReader(result); } diff --git a/LiteDB/Client/SqlParser/Commands/Drop.cs b/LiteDB/Client/SqlParser/Commands/Drop.cs index 4797316e8..7409c0b3f 100644 --- a/LiteDB/Client/SqlParser/Commands/Drop.cs +++ b/LiteDB/Client/SqlParser/Commands/Drop.cs @@ -26,7 +26,7 @@ private BsonDataReader ParseDrop() _tokenizer.ReadToken().Expect(TokenType.EOF, TokenType.SemiColon); - var result = _engine.DropIndex(collection, name); + var result = _engine.DropIndexAsync(collection, name).ConfigureAwait(false).GetAwaiter().GetResult(); return new BsonDataReader(result); } @@ -36,7 +36,7 @@ private BsonDataReader ParseDrop() _tokenizer.ReadToken().Expect(TokenType.EOF, TokenType.SemiColon); - var result = _engine.DropCollection(collection); + var result = _engine.DropCollectionAsync(collection).ConfigureAwait(false).GetAwaiter().GetResult(); return new BsonDataReader(result); } diff --git a/LiteDB/Client/SqlParser/Commands/Insert.cs b/LiteDB/Client/SqlParser/Commands/Insert.cs index eb5de3796..eb6727232 100644 --- a/LiteDB/Client/SqlParser/Commands/Insert.cs +++ b/LiteDB/Client/SqlParser/Commands/Insert.cs @@ -26,7 +26,7 @@ private BsonDataReader ParseInsert() // will validate EOF or ; var docs = this.ParseListOfDocuments(); - var result = _engine.Insert(collection, docs, autoId); + var result = _engine.InsertAsync(collection, docs, autoId).ConfigureAwait(false).GetAwaiter().GetResult(); return new BsonDataReader(result); } diff --git a/LiteDB/Client/SqlParser/Commands/Pragma.cs b/LiteDB/Client/SqlParser/Commands/Pragma.cs index 6d26cb673..3685ad2db 100644 --- a/LiteDB/Client/SqlParser/Commands/Pragma.cs +++ b/LiteDB/Client/SqlParser/Commands/Pragma.cs @@ -25,7 +25,7 @@ private IBsonDataReader ParsePragma() { _tokenizer.ReadToken(); - var result = _engine.Pragma(name); + var result = _engine.PragmaAsync(name).ConfigureAwait(false).GetAwaiter().GetResult(); return new BsonDataReader(result); } @@ -41,7 +41,7 @@ private IBsonDataReader ParsePragma() // read last ; \ _tokenizer.ReadToken().Expect(TokenType.EOF, TokenType.SemiColon); - var result = _engine.Pragma(name, value); + var result = _engine.PragmaAsync(name, value).ConfigureAwait(false).GetAwaiter().GetResult(); return new BsonDataReader(result); } diff --git a/LiteDB/Client/SqlParser/Commands/Rebuild.cs b/LiteDB/Client/SqlParser/Commands/Rebuild.cs index 92f03fcb3..4f7bdaa7f 100644 --- a/LiteDB/Client/SqlParser/Commands/Rebuild.cs +++ b/LiteDB/Client/SqlParser/Commands/Rebuild.cs @@ -44,7 +44,7 @@ private BsonDataReader ParseRebuild() } } - var diff = _engine.Rebuild(options); + var diff = _engine.RebuildAsync(options).ConfigureAwait(false).GetAwaiter().GetResult(); return new BsonDataReader((int)diff); } diff --git a/LiteDB/Client/SqlParser/Commands/Rename.cs b/LiteDB/Client/SqlParser/Commands/Rename.cs index a5949c767..10f89cdc9 100644 --- a/LiteDB/Client/SqlParser/Commands/Rename.cs +++ b/LiteDB/Client/SqlParser/Commands/Rename.cs @@ -24,7 +24,7 @@ private BsonDataReader ParseRename() _tokenizer.ReadToken().Expect(TokenType.EOF, TokenType.SemiColon); - var result = _engine.RenameCollection(collection, newName); + var result = _engine.RenameCollectionAsync(collection, newName).ConfigureAwait(false).GetAwaiter().GetResult(); return new BsonDataReader(result); 
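// NOTE: illustrative sketch only - the parser commands in these files stay synchronous by
// bridging onto the async engine with ConfigureAwait(false).GetAwaiter().GetResult().
// Unlike .Result, GetResult() rethrows the original exception rather than wrapping it in
// an AggregateException. "SyncBridge"/"RunSync" below are invented names used to show the
// pattern in one place; they are not part of this codebase.
using System;
using System.Threading.Tasks;

internal static class SyncBridge
{
    public static T RunSync<T>(Func<Task<T>> operation)
    {
        // blocks the calling thread until the engine task completes
        return operation().ConfigureAwait(false).GetAwaiter().GetResult();
    }
}

// hypothetical usage inside a parser command:
//   var result = SyncBridge.RunSync(() => _engine.RenameCollectionAsync(collection, newName));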
} diff --git a/LiteDB/Client/SqlParser/Commands/Rollback.cs b/LiteDB/Client/SqlParser/Commands/Rollback.cs index 3255c9a98..c764c4a24 100644 --- a/LiteDB/Client/SqlParser/Commands/Rollback.cs +++ b/LiteDB/Client/SqlParser/Commands/Rollback.cs @@ -22,7 +22,7 @@ private BsonDataReader ParseRollback() _tokenizer.ReadToken().Expect(TokenType.EOF, TokenType.SemiColon); } - var result = _engine.Rollback(); + var result = _engine.RollbackAsync().ConfigureAwait(false).GetAwaiter().GetResult(); return new BsonDataReader(result); } diff --git a/LiteDB/Client/SqlParser/Commands/Select.cs b/LiteDB/Client/SqlParser/Commands/Select.cs index 5e09a1d06..d3a0f480e 100644 --- a/LiteDB/Client/SqlParser/Commands/Select.cs +++ b/LiteDB/Client/SqlParser/Commands/Select.cs @@ -188,7 +188,7 @@ private IBsonDataReader ParseSelect() // read eof/; _tokenizer.ReadToken().Expect(TokenType.EOF, TokenType.SemiColon); - return _engine.Query(collection, query); + return _engine.QueryAsync(collection, query).ConfigureAwait(false).GetAwaiter().GetResult(); } /// diff --git a/LiteDB/Client/SqlParser/Commands/Update.cs b/LiteDB/Client/SqlParser/Commands/Update.cs index 4104b0f9a..f535d9cd9 100644 --- a/LiteDB/Client/SqlParser/Commands/Update.cs +++ b/LiteDB/Client/SqlParser/Commands/Update.cs @@ -39,7 +39,7 @@ private BsonDataReader ParseUpdate() // read eof _tokenizer.ReadToken().Expect(TokenType.EOF, TokenType.SemiColon); - var result = _engine.UpdateMany(collection, transform, where); + var result = _engine.UpdateManyAsync(collection, transform, where).ConfigureAwait(false).GetAwaiter().GetResult(); return new BsonDataReader(result); } diff --git a/LiteDB/Client/SqlParser/SqlParser.cs b/LiteDB/Client/SqlParser/SqlParser.cs index 53515928e..2f0c35388 100644 --- a/LiteDB/Client/SqlParser/SqlParser.cs +++ b/LiteDB/Client/SqlParser/SqlParser.cs @@ -22,7 +22,7 @@ public SqlParser(ILiteEngine engine, Tokenizer tokenizer, BsonDocument parameter _engine = engine; _tokenizer = tokenizer; _parameters = parameters ?? new BsonDocument(); - _collation = new Lazy(() => new Collation(_engine.Pragma(Pragmas.COLLATION))); + _collation = new Lazy(() => new Collation(_engine.PragmaAsync(Pragmas.COLLATION).ConfigureAwait(false).GetAwaiter().GetResult())); } public IBsonDataReader Execute() diff --git a/LiteDB/Client/Storage/ILiteStorage.cs b/LiteDB/Client/Storage/ILiteStorage.cs index 02a62806b..57556a8a5 100644 --- a/LiteDB/Client/Storage/ILiteStorage.cs +++ b/LiteDB/Client/Storage/ILiteStorage.cs @@ -2,6 +2,8 @@ using System.Collections.Generic; using System.IO; using System.Linq.Expressions; +using System.Threading; +using System.Threading.Tasks; namespace LiteDB { @@ -10,76 +12,76 @@ public interface ILiteStorage /// /// Find a file inside datafile and returns LiteFileInfo instance. Returns null if not found /// - LiteFileInfo FindById(TFileId id); + Task> FindByIdAsync(TFileId id, CancellationToken cancellationToken = default); /// /// Find all files that match with predicate expression. /// - IEnumerable> Find(BsonExpression predicate); + IAsyncEnumerable> FindAsync(BsonExpression predicate, CancellationToken cancellationToken = default); /// /// Find all files that match with predicate expression. /// - IEnumerable> Find(string predicate, BsonDocument parameters); + IAsyncEnumerable> FindAsync(string predicate, BsonDocument parameters, CancellationToken cancellationToken = default); /// /// Find all files that match with predicate expression. 
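// NOTE: illustrative sketch only - FindAsync now streams results as an async sequence,
// so callers consume it with await foreach instead of materializing a list. The
// "metadata.category" field and the string file-id type are hypothetical; the
// GetStorage accessor is assumed to keep its existing shape.
using System;
using System.Threading;
using System.Threading.Tasks;

internal static class StorageFindSketch
{
    public static async Task ListReportFilesAsync(ILiteDatabase db, CancellationToken ct = default)
    {
        var fs = db.GetStorage<string>("_files", "_chunks");

        // string-predicate overload: the CancellationToken comes before the params args
        await foreach (var file in fs.FindAsync("metadata.category = @0", ct, "reports"))
        {
            Console.WriteLine($"{file.Id}: {file.Filename} ({file.Length} bytes)");
        }
    }
}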
/// - IEnumerable> Find(string predicate, params BsonValue[] args); + IAsyncEnumerable> FindAsync(string predicate, CancellationToken cancellationToken = default, params BsonValue[] args); /// /// Find all files that match with predicate expression. /// - IEnumerable> Find(Expression, bool>> predicate); + IAsyncEnumerable> FindAsync(Expression, bool>> predicate, CancellationToken cancellationToken = default); /// /// Find all files inside file collections /// - IEnumerable> FindAll(); + IAsyncEnumerable> FindAllAsync(CancellationToken cancellationToken = default); /// /// Returns if a file exisits in database /// - bool Exists(TFileId id); + Task ExistsAsync(TFileId id, CancellationToken cancellationToken = default); /// /// Open/Create new file storage and returns linked Stream to write operations. /// - LiteFileStream OpenWrite(TFileId id, string filename, BsonDocument metadata = null); + Task> OpenWriteAsync(TFileId id, string filename, BsonDocument metadata = null, CancellationToken cancellationToken = default); /// /// Upload a file based on stream data /// - LiteFileInfo Upload(TFileId id, string filename, Stream stream, BsonDocument metadata = null); + Task> UploadAsync(TFileId id, string filename, Stream stream, BsonDocument metadata = null, CancellationToken cancellationToken = default); /// /// Upload a file based on file system data /// - LiteFileInfo Upload(TFileId id, string filename); + Task> UploadAsync(TFileId id, string filename, CancellationToken cancellationToken = default); /// /// Update metadata on a file. File must exist. /// - bool SetMetadata(TFileId id, BsonDocument metadata); + Task SetMetadataAsync(TFileId id, BsonDocument metadata, CancellationToken cancellationToken = default); /// /// Load data inside storage and returns as Stream /// - LiteFileStream OpenRead(TFileId id); + Task> OpenReadAsync(TFileId id, CancellationToken cancellationToken = default); /// /// Copy all file content to a steam /// - LiteFileInfo Download(TFileId id, Stream stream); + Task> DownloadAsync(TFileId id, Stream stream, CancellationToken cancellationToken = default); /// /// Copy all file content to a file /// - LiteFileInfo Download(TFileId id, string filename, bool overwritten); + Task> DownloadAsync(TFileId id, string filename, bool overwritten, CancellationToken cancellationToken = default); /// /// Delete a file inside datafile and all metadata related /// - bool Delete(TFileId id); + Task DeleteAsync(TFileId id, CancellationToken cancellationToken = default); } -} \ No newline at end of file +} diff --git a/LiteDB/Client/Storage/LiteFileInfo.cs b/LiteDB/Client/Storage/LiteFileInfo.cs index ab4a6523c..8071e4111 100644 --- a/LiteDB/Client/Storage/LiteFileInfo.cs +++ b/LiteDB/Client/Storage/LiteFileInfo.cs @@ -2,6 +2,8 @@ using System.Collections.Generic; using System.IO; using System.Text.RegularExpressions; +using System.Threading; +using System.Threading.Tasks; using static LiteDB.Constants; namespace LiteDB @@ -46,46 +48,57 @@ internal void SetReference(BsonValue fileId, ILiteCollection /// Open file stream to read from database /// - public LiteFileStream OpenRead() + public Task> OpenReadAsync(CancellationToken cancellationToken = default) { - return new LiteFileStream(_files, _chunks, this, _fileId, FileAccess.Read); + return Task.FromResult(new LiteFileStream(_files, _chunks, this, _fileId, FileAccess.Read)); } /// /// Open file stream to write to database /// - public LiteFileStream OpenWrite() + public async Task> OpenWriteAsync(CancellationToken cancellationToken = 
default) { + if (this.Length > 0) + { + var deleted = await _chunks.DeleteManyAsync("_id BETWEEN { f: @0, n: 0 } AND { f: @0, n: @1 }", cancellationToken, _fileId, int.MaxValue).ConfigureAwait(false); + + ENSURE(deleted == this.Chunks); + + this.Length = 0; + this.Chunks = 0; + } + return new LiteFileStream(_files, _chunks, this, _fileId, FileAccess.Write); } /// /// Copy file content to another stream /// - public void CopyTo(Stream stream) + public async Task CopyToAsync(Stream stream, CancellationToken cancellationToken = default) { if (stream == null) throw new ArgumentNullException(nameof(stream)); - using (var reader = this.OpenRead()) - { - reader.CopyTo(stream); - } + await using var reader = await this.OpenReadAsync(cancellationToken).ConfigureAwait(false); + + cancellationToken.ThrowIfCancellationRequested(); + await reader.CopyToAsync(stream).ConfigureAwait(false); + cancellationToken.ThrowIfCancellationRequested(); } /// /// Save file content to a external file /// - public void SaveAs(string filename, bool overwritten = true) + public async Task SaveAsAsync(string filename, bool overwritten = true, CancellationToken cancellationToken = default) { if (filename.IsNullOrWhiteSpace()) throw new ArgumentNullException(nameof(filename)); - using (var file = File.Open(filename, overwritten ? FileMode.Create : FileMode.CreateNew)) - { - using (var stream = this.OpenRead()) - { - stream.CopyTo(file); - } - } + using var file = new FileStream(filename, overwritten ? FileMode.Create : FileMode.CreateNew, FileAccess.Write, FileShare.None, bufferSize: 81920, FileOptions.Asynchronous | FileOptions.SequentialScan); + + await using var stream = await this.OpenReadAsync(cancellationToken).ConfigureAwait(false); + + cancellationToken.ThrowIfCancellationRequested(); + await stream.CopyToAsync(file).ConfigureAwait(false); + cancellationToken.ThrowIfCancellationRequested(); } } } \ No newline at end of file diff --git a/LiteDB/Client/Storage/LiteFileStream.Read.cs b/LiteDB/Client/Storage/LiteFileStream.Read.cs index 0b6236736..32c9a49dd 100644 --- a/LiteDB/Client/Storage/LiteFileStream.Read.cs +++ b/LiteDB/Client/Storage/LiteFileStream.Read.cs @@ -1,15 +1,21 @@ -using System; +using System; using System.Collections.Generic; using System.IO; -using System.Linq; -using static LiteDB.Constants; +using System.Threading; +using System.Threading.Tasks; namespace LiteDB { public partial class LiteFileStream : Stream { - private Dictionary _chunkLengths = new Dictionary(); + private readonly Dictionary _chunkLengths = new Dictionary(); + public override int Read(byte[] buffer, int offset, int count) + { + return this.ReadAsync(buffer, offset, count, CancellationToken.None).GetAwaiter().GetResult(); + } + + public override async Task ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken) { if (_mode != FileAccess.Read) throw new NotSupportedException(); if (_streamPosition == Length) @@ -17,6 +23,11 @@ public override int Read(byte[] buffer, int offset, int count) return 0; } + if (_currentChunkData == null) + { + _currentChunkData = await this.GetChunkDataAsync(_currentChunkIndex, cancellationToken).ConfigureAwait(false); + } + var bytesLeft = count; while (_currentChunkData != null && bytesLeft > 0) @@ -33,46 +44,32 @@ public override int Read(byte[] buffer, int offset, int count) if (_positionInChunk >= _currentChunkData.Length) { _positionInChunk = 0; - - _currentChunkData = this.GetChunkData(++_currentChunkIndex); + _currentChunkData = await 
this.GetChunkDataAsync(++_currentChunkIndex, cancellationToken).ConfigureAwait(false); } } return count - bytesLeft; } - private byte[] GetChunkData(int index) - { - // check if there is no more chunks in this file - var chunk = _chunks - .FindOne("_id = { f: @0, n: @1 }", _fileId, index); - - // if chunk is null there is no more chunks - byte[] result = chunk?["data"].AsBinary; - if (result != null) - { - _chunkLengths[index] = result.Length; - } - return result; - } - private void SetReadStreamPosition(long newPosition) { if (newPosition < 0) { throw new ArgumentOutOfRangeException(); } + if (newPosition >= Length) { _streamPosition = Length; return; } + _streamPosition = newPosition; - // calculate new chunk position long seekStreamPosition = 0; int loadedChunk = _currentChunkIndex; int newChunkIndex = 0; + while (seekStreamPosition <= _streamPosition) { if (_chunkLengths.TryGetValue(newChunkIndex, out long length)) @@ -82,20 +79,56 @@ private void SetReadStreamPosition(long newPosition) else { loadedChunk = newChunkIndex; - _currentChunkData = GetChunkData(newChunkIndex); + _currentChunkData = GetChunkDataSync(newChunkIndex); + if (_currentChunkData == null) + { + break; + } + seekStreamPosition += _currentChunkData.Length; } + newChunkIndex++; } - + newChunkIndex--; - seekStreamPosition -= _chunkLengths[newChunkIndex]; + + if (newChunkIndex >= 0 && _chunkLengths.TryGetValue(newChunkIndex, out long chunkLength)) + { + seekStreamPosition -= chunkLength; + } + _positionInChunk = (int)(_streamPosition - seekStreamPosition); - _currentChunkIndex = newChunkIndex; + _currentChunkIndex = Math.Max(0, newChunkIndex); + if (loadedChunk != _currentChunkIndex) { - _currentChunkData = GetChunkData(_currentChunkIndex); + _currentChunkData = GetChunkDataSync(_currentChunkIndex); + } + } + + private async Task GetChunkDataAsync(int index, CancellationToken cancellationToken) + { + var chunkId = new BsonDocument + { + ["f"] = _fileId, + ["n"] = index + }; + + var chunk = await _chunks.FindByIdAsync(chunkId, cancellationToken).ConfigureAwait(false); + + byte[] result = chunk?["data"].AsBinary; + if (result != null) + { + _chunkLengths[index] = result.Length; } + + return result; + } + + private byte[] GetChunkDataSync(int index) + { + return this.GetChunkDataAsync(index, CancellationToken.None).GetAwaiter().GetResult(); } } -} \ No newline at end of file +} diff --git a/LiteDB/Client/Storage/LiteFileStream.Write.cs b/LiteDB/Client/Storage/LiteFileStream.Write.cs index a24c05c2e..ccfe74189 100644 --- a/LiteDB/Client/Storage/LiteFileStream.Write.cs +++ b/LiteDB/Client/Storage/LiteFileStream.Write.cs @@ -1,7 +1,7 @@ -using System; +using System; using System.IO; -using System.Linq; -using static LiteDB.Constants; +using System.Threading; +using System.Threading.Tasks; namespace LiteDB { @@ -9,69 +9,88 @@ public partial class LiteFileStream : Stream { public override void Write(byte[] buffer, int offset, int count) { + this.WriteAsync(buffer, offset, count, CancellationToken.None).GetAwaiter().GetResult(); + } + + public override async Task WriteAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken) + { + if (_mode != FileAccess.Write) throw new NotSupportedException(); + _streamPosition += count; - _buffer.Write(buffer, offset, count); + await _buffer.WriteAsync(buffer, offset, count, cancellationToken).ConfigureAwait(false); if (_buffer.Length >= MAX_CHUNK_SIZE) { - this.WriteChunks(false); + await this.WriteChunksAsync(flush: false, cancellationToken).ConfigureAwait(false); } } 
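// NOTE: illustrative sketch only - the write path above buffers bytes in memory and
// persists a chunk document whenever MAX_CHUNK_SIZE bytes accumulate; the final partial
// chunk and the file document are written on FlushAsync/DisposeAsync. The id and
// filename below are hypothetical.
using System.IO;
using System.Threading.Tasks;

internal static class StorageWriteSketch
{
    public static async Task UploadViaOpenWriteAsync(ILiteStorage<string> fs, Stream source)
    {
        // OpenWriteAsync deletes any existing chunks for this id before re-writing
        await using (var writer = await fs.OpenWriteAsync("reports/2024.pdf", "2024.pdf"))
        {
            await source.CopyToAsync(writer);   // chunked by the WriteAsync override above
        }                                       // DisposeAsync -> FlushAsync writes the tail chunk
    }
}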
public override void Flush() { - // write last unsaved chunks - this.WriteChunks(true); + this.FlushAsync(CancellationToken.None).GetAwaiter().GetResult(); } - /// - /// Consume all _buffer bytes and write to chunk collection - /// - private void WriteChunks(bool flush) + public override Task FlushAsync(CancellationToken cancellationToken) { - var buffer = new byte[MAX_CHUNK_SIZE]; - var read = 0; + return this.WriteChunksAsync(flush: true, cancellationToken); + } + + private async Task WriteChunksAsync(bool flush, CancellationToken cancellationToken) + { + if (_buffer == null || _buffer.Length == 0) + { + if (flush && _buffer != null) + { + _buffer.SetLength(0); + _buffer.Position = 0; + } + + if (flush) + { + _file.UploadDate = DateTime.Now; + _file.Length = _streamPosition; + + await _files.UpsertAsync(_file, cancellationToken).ConfigureAwait(false); + } + + return; + } + + var chunkBuffer = new byte[MAX_CHUNK_SIZE]; _buffer.Seek(0, SeekOrigin.Begin); - while ((read = _buffer.Read(buffer, 0, MAX_CHUNK_SIZE)) > 0) + int read; + while ((read = await _buffer.ReadAsync(chunkBuffer, 0, MAX_CHUNK_SIZE, cancellationToken).ConfigureAwait(false)) > 0) { var chunk = new BsonDocument { ["_id"] = new BsonDocument { ["f"] = _fileId, - ["n"] = _file.Chunks++ // zero-based index + ["n"] = _file.Chunks++ } }; - // get chunk byte array part - if (read != MAX_CHUNK_SIZE) - { - var bytes = new byte[read]; - Buffer.BlockCopy(buffer, 0, bytes, 0, read); - chunk["data"] = bytes; - } - else - { - chunk["data"] = buffer; - } + var bytes = new byte[read]; + Buffer.BlockCopy(chunkBuffer, 0, bytes, 0, read); + + chunk["data"] = bytes; - // insert chunk part - _chunks.Insert(chunk); + await _chunks.InsertAsync(chunk, cancellationToken).ConfigureAwait(false); } - // if stream was closed/flush, update file too if (flush) { _file.UploadDate = DateTime.Now; _file.Length = _streamPosition; - _files.Upsert(_file); + await _files.UpsertAsync(_file, cancellationToken).ConfigureAwait(false); } - _buffer = new MemoryStream(); + _buffer.SetLength(0); + _buffer.Position = 0; } } -} \ No newline at end of file +} diff --git a/LiteDB/Client/Storage/LiteFileStream.cs b/LiteDB/Client/Storage/LiteFileStream.cs index 1b476f645..f4b6831fd 100644 --- a/LiteDB/Client/Storage/LiteFileStream.cs +++ b/LiteDB/Client/Storage/LiteFileStream.cs @@ -1,11 +1,12 @@ -using System; +using System; using System.IO; -using System.Linq; +using System.Threading; +using System.Threading.Tasks; using static LiteDB.Constants; namespace LiteDB { - public partial class LiteFileStream : Stream + public partial class LiteFileStream : Stream, IAsyncDisposable { /// /// Number of bytes on each chunk document to store @@ -32,46 +33,39 @@ internal LiteFileStream(ILiteCollection> files, ILiteColle _fileId = fileId; _mode = mode; - if (mode == FileAccess.Read) - { - // initialize first data block - _currentChunkData = this.GetChunkData(_currentChunkIndex); - } - else if(mode == FileAccess.Write) + if (mode == FileAccess.Write) { _buffer = new MemoryStream(MAX_CHUNK_SIZE); - - if (_file.Length > 0) - { - // delete all chunks before re-write - var count = _chunks.DeleteMany("_id BETWEEN { f: @0, n: 0 } AND { f: @0, n: 99999999 }", _fileId); - - ENSURE(count == _file.Chunks); - - // clear file content length+chunks - _file.Length = 0; - _file.Chunks = 0; - } } } /// /// Get file information /// - public LiteFileInfo FileInfo { get { return _file; } } + public LiteFileInfo FileInfo => _file; - public override long Length { get { return _file.Length; } } + public 
override long Length => _file.Length; - public override bool CanRead { get { return _mode == FileAccess.Read; } } + public override bool CanRead => _mode == FileAccess.Read; - public override bool CanWrite { get { return _mode == FileAccess.Write; } } + public override bool CanWrite => _mode == FileAccess.Write; - public override bool CanSeek { get { return _mode == FileAccess.Read; } } + public override bool CanSeek => _mode == FileAccess.Read; public override long Position { - get { return _streamPosition; } - set { if (_mode == FileAccess.Read) { this.SetReadStreamPosition(value); } else { throw new NotSupportedException(); } } + get => _streamPosition; + set + { + if (_mode == FileAccess.Read) + { + this.SetReadStreamPosition(value); + } + else + { + throw new NotSupportedException(); + } + } } public override long Seek(long offset, SeekOrigin origin) @@ -93,6 +87,7 @@ public override long Seek(long offset, SeekOrigin origin) this.SetReadStreamPosition(Length + offset); break; } + return _streamPosition; } @@ -102,18 +97,77 @@ public override long Seek(long offset, SeekOrigin origin) protected override void Dispose(bool disposing) { + if (_disposed) + { + base.Dispose(disposing); + return; + } + + if (disposing && this.CanWrite) + { + this.FlushAsync(CancellationToken.None).GetAwaiter().GetResult(); +#if NETSTANDARD2_0 + _buffer?.Dispose(); +#else + if (_buffer != null) + { + _buffer.Dispose(); + } +#endif + } + + _disposed = true; + base.Dispose(disposing); + } - if (_disposed) return; +#if !NETSTANDARD2_0 + public override async ValueTask DisposeAsync() + { + if (_disposed) + { + await base.DisposeAsync().ConfigureAwait(false); + return; + } - if (disposing && this.CanWrite) + if (this.CanWrite) { - this.Flush(); + await this.FlushAsync(CancellationToken.None).ConfigureAwait(false); +#if NETSTANDARD2_0 _buffer?.Dispose(); +#else + if (_buffer != null) + { + await _buffer.DisposeAsync().ConfigureAwait(false); + } +#endif } _disposed = true; + + await base.DisposeAsync().ConfigureAwait(false); + } +#endif + +#if NETSTANDARD2_0 + public async ValueTask DisposeAsync() + { + if (_disposed) + { + return; + } + + if (this.CanWrite) + { + await this.FlushAsync(CancellationToken.None).ConfigureAwait(false); + _buffer?.Dispose(); + } + + _disposed = true; + + base.Dispose(false); } +#endif #endregion @@ -126,4 +180,4 @@ public override void SetLength(long value) #endregion } -} \ No newline at end of file +} diff --git a/LiteDB/Client/Storage/LiteStorage.cs b/LiteDB/Client/Storage/LiteStorage.cs index 237543ad7..17eb9e3da 100644 --- a/LiteDB/Client/Storage/LiteStorage.cs +++ b/LiteDB/Client/Storage/LiteStorage.cs @@ -3,6 +3,9 @@ using System.IO; using System.Linq; using System.Linq.Expressions; +using System.Runtime.CompilerServices; +using System.Threading; +using System.Threading.Tasks; using static LiteDB.Constants; namespace LiteDB @@ -28,13 +31,13 @@ public LiteStorage(ILiteDatabase db, string filesCollection, string chunksCollec /// /// Find a file inside datafile and returns LiteFileInfo instance. 
Returns null if not found /// - public LiteFileInfo FindById(TFileId id) + public async Task> FindByIdAsync(TFileId id, CancellationToken cancellationToken = default) { if (id == null) throw new ArgumentNullException(nameof(id)); var fileId = _db.Mapper.Serialize(typeof(TFileId), id); - var file = _files.FindById(fileId); + var file = await _files.FindByIdAsync(fileId, cancellationToken).ConfigureAwait(false); if (file == null) return null; @@ -46,7 +49,7 @@ public LiteFileInfo FindById(TFileId id) /// /// Find all files that match with predicate expression. /// - public IEnumerable> Find(BsonExpression predicate) + public async IAsyncEnumerable> FindAsync(BsonExpression predicate, [EnumeratorCancellation] CancellationToken cancellationToken = default) { var query = _files.Query(); @@ -55,7 +58,7 @@ public IEnumerable> Find(BsonExpression predicate) query = query.Where(predicate); } - foreach (var file in query.ToEnumerable()) + await foreach (var file in query.ToAsyncEnumerable(cancellationToken).ConfigureAwait(false)) { var fileId = _db.Mapper.Serialize(typeof(TFileId), file.Id); @@ -68,33 +71,33 @@ public IEnumerable> Find(BsonExpression predicate) /// /// Find all files that match with predicate expression. /// - public IEnumerable> Find(string predicate, BsonDocument parameters) => this.Find(BsonExpression.Create(predicate, parameters)); + public IAsyncEnumerable> FindAsync(string predicate, BsonDocument parameters, CancellationToken cancellationToken = default) => this.FindAsync(BsonExpression.Create(predicate, parameters), cancellationToken); /// /// Find all files that match with predicate expression. /// - public IEnumerable> Find(string predicate, params BsonValue[] args) => this.Find(BsonExpression.Create(predicate, args)); + public IAsyncEnumerable> FindAsync(string predicate, CancellationToken cancellationToken = default, params BsonValue[] args) => this.FindAsync(BsonExpression.Create(predicate, args), cancellationToken); /// /// Find all files that match with predicate expression. /// - public IEnumerable> Find(Expression, bool>> predicate) => this.Find(_db.Mapper.GetExpression(predicate)); + public IAsyncEnumerable> FindAsync(Expression, bool>> predicate, CancellationToken cancellationToken = default) => this.FindAsync(_db.Mapper.GetExpression(predicate), cancellationToken); /// /// Find all files inside file collections /// - public IEnumerable> FindAll() => this.Find((BsonExpression)null); + public IAsyncEnumerable> FindAllAsync(CancellationToken cancellationToken = default) => this.FindAsync((BsonExpression)null, cancellationToken); /// /// Returns if a file exisits in database /// - public bool Exists(TFileId id) + public Task ExistsAsync(TFileId id, CancellationToken cancellationToken = default) { if (id == null) throw new ArgumentNullException(nameof(id)); var fileId = _db.Mapper.Serialize(typeof(TFileId), id); - return _files.Exists("_id = @0", fileId); + return _files.ExistsAsync("_id = @0", cancellationToken, fileId); } #endregion @@ -104,13 +107,13 @@ public bool Exists(TFileId id) /// /// Open/Create new file storage and returns linked Stream to write operations. 
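// NOTE: illustrative sketch only - a full async round trip through the storage API in
// this file: upload from a stream, download it back, then delete. The integer id and
// file name are hypothetical.
using System.IO;
using System.Threading;
using System.Threading.Tasks;

internal static class StorageRoundTripSketch
{
    public static async Task RoundTripAsync(ILiteStorage<int> fs, byte[] payload, CancellationToken ct = default)
    {
        using var source = new MemoryStream(payload);

        var info = await fs.UploadAsync(1, "payload.bin", source, metadata: null, cancellationToken: ct);

        using var target = new MemoryStream();
        await fs.DownloadAsync(1, target, ct);   // copies every chunk into target

        // target.Length should now equal info.Length

        await fs.DeleteAsync(1, ct);             // removes the file document and all its chunks
    }
}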
/// - public LiteFileStream OpenWrite(TFileId id, string filename, BsonDocument metadata = null) + public async Task> OpenWriteAsync(TFileId id, string filename, BsonDocument metadata = null, CancellationToken cancellationToken = default) { // get _id as BsonValue var fileId = _db.Mapper.Serialize(typeof(TFileId), id); // checks if file exists - var file = this.FindById(id); + var file = await this.FindByIdAsync(id, cancellationToken).ConfigureAwait(false); if (file == null) { @@ -133,47 +136,49 @@ public LiteFileStream OpenWrite(TFileId id, string filename, BsonDocume file.Metadata = metadata ?? file.Metadata; } - return file.OpenWrite(); + return await file.OpenWriteAsync(cancellationToken).ConfigureAwait(false); } /// /// Upload a file based on stream data /// - public LiteFileInfo Upload(TFileId id, string filename, Stream stream, BsonDocument metadata = null) + public async Task> UploadAsync(TFileId id, string filename, Stream stream, BsonDocument metadata = null, CancellationToken cancellationToken = default) { - using (var writer = this.OpenWrite(id, filename, metadata)) - { - stream.CopyTo(writer); + if (stream == null) throw new ArgumentNullException(nameof(stream)); - return writer.FileInfo; - } + await using var writer = await this.OpenWriteAsync(id, filename, metadata, cancellationToken).ConfigureAwait(false); + + cancellationToken.ThrowIfCancellationRequested(); + await stream.CopyToAsync(writer).ConfigureAwait(false); + cancellationToken.ThrowIfCancellationRequested(); + + return writer.FileInfo; } /// /// Upload a file based on file system data /// - public LiteFileInfo Upload(TFileId id, string filename) + public async Task> UploadAsync(TFileId id, string filename, CancellationToken cancellationToken = default) { if (filename.IsNullOrWhiteSpace()) throw new ArgumentNullException(nameof(filename)); - using (var stream = File.OpenRead(filename)) - { - return this.Upload(id, Path.GetFileName(filename), stream); - } + using var stream = new FileStream(filename, FileMode.Open, FileAccess.Read, FileShare.Read, bufferSize: 81920, FileOptions.Asynchronous | FileOptions.SequentialScan); + + return await this.UploadAsync(id, Path.GetFileName(filename), stream, null, cancellationToken).ConfigureAwait(false); } /// /// Update metadata on a file. File must exist. /// - public bool SetMetadata(TFileId id, BsonDocument metadata) + public async Task SetMetadataAsync(TFileId id, BsonDocument metadata, CancellationToken cancellationToken = default) { - var file = this.FindById(id); + var file = await this.FindByIdAsync(id, cancellationToken).ConfigureAwait(false); if (file == null) return false; file.Metadata = metadata ?? 
new BsonDocument(); - _files.Update(file); + await _files.UpdateAsync(file, cancellationToken).ConfigureAwait(false); return true; } @@ -185,23 +190,27 @@ public bool SetMetadata(TFileId id, BsonDocument metadata) /// /// Load data inside storage and returns as Stream /// - public LiteFileStream OpenRead(TFileId id) + public async Task> OpenReadAsync(TFileId id, CancellationToken cancellationToken = default) { - var file = this.FindById(id); + var file = await this.FindByIdAsync(id, cancellationToken).ConfigureAwait(false); if (file == null) throw LiteException.FileNotFound(id.ToString()); - return file.OpenRead(); + return await file.OpenReadAsync(cancellationToken).ConfigureAwait(false); } /// /// Copy all file content to a steam /// - public LiteFileInfo Download(TFileId id, Stream stream) + public async Task> DownloadAsync(TFileId id, Stream stream, CancellationToken cancellationToken = default) { - var file = this.FindById(id) ?? throw LiteException.FileNotFound(id.ToString()); + if (stream == null) throw new ArgumentNullException(nameof(stream)); + + var file = await this.FindByIdAsync(id, cancellationToken).ConfigureAwait(false) ?? throw LiteException.FileNotFound(id.ToString()); - file.CopyTo(stream); + cancellationToken.ThrowIfCancellationRequested(); + await file.CopyToAsync(stream).ConfigureAwait(false); + cancellationToken.ThrowIfCancellationRequested(); return file; } @@ -209,11 +218,13 @@ public LiteFileInfo Download(TFileId id, Stream stream) /// /// Copy all file content to a file /// - public LiteFileInfo Download(TFileId id, string filename, bool overwritten) + public async Task> DownloadAsync(TFileId id, string filename, bool overwritten, CancellationToken cancellationToken = default) { - var file = this.FindById(id) ?? throw LiteException.FileNotFound(id.ToString()); + if (filename.IsNullOrWhiteSpace()) throw new ArgumentNullException(nameof(filename)); + + var file = await this.FindByIdAsync(id, cancellationToken).ConfigureAwait(false) ?? 
throw LiteException.FileNotFound(id.ToString()); - file.SaveAs(filename, overwritten); + await file.SaveAsAsync(filename, overwritten, cancellationToken).ConfigureAwait(false); return file; } @@ -225,7 +236,7 @@ public LiteFileInfo Download(TFileId id, string filename, bool overwrit /// /// Delete a file inside datafile and all metadata related /// - public bool Delete(TFileId id) + public async Task DeleteAsync(TFileId id, CancellationToken cancellationToken = default) { if (id == null) throw new ArgumentNullException(nameof(id)); @@ -233,12 +244,12 @@ public bool Delete(TFileId id) var fileId = _db.Mapper.Serialize(typeof(TFileId), id); // remove file reference - var deleted = _files.Delete(fileId); + var deleted = await _files.DeleteAsync(fileId, cancellationToken).ConfigureAwait(false); if (deleted) { // delete all chunks - _chunks.DeleteMany("_id BETWEEN { f: @0, n: 0} AND {f: @0, n: @1 }", fileId, int.MaxValue); + await _chunks.DeleteManyAsync("_id BETWEEN { f: @0, n: 0} AND {f: @0, n: @1 }", cancellationToken, fileId, int.MaxValue).ConfigureAwait(false); } return deleted; diff --git a/LiteDB/Document/DataReader/BsonDataReader.cs b/LiteDB/Document/DataReader/BsonDataReader.cs index 33131362a..67f37eb0a 100644 --- a/LiteDB/Document/DataReader/BsonDataReader.cs +++ b/LiteDB/Document/DataReader/BsonDataReader.cs @@ -3,6 +3,8 @@ using System.Collections; using System.Collections.Generic; using System.Linq; +using System.Threading; +using System.Threading.Tasks; using static LiteDB.Constants; namespace LiteDB @@ -86,36 +88,12 @@ internal BsonDataReader(IEnumerable values, string collection, Engine /// public bool Read() { - if (!_hasValues) return false; + return this.ReadCore(default); + } - if (_isFirst) - { - _isFirst = false; - return true; - } - else - { - if (_source != null) - { - _state.Validate(); // checks if engine still open - - try - { - var read = _source.MoveNext(); // can throw any error here - _current = _state.ReadTransform(_collection, _source.Current); - return read; - } - catch (Exception ex) - { - _state.Handle(ex); - throw ex; - } - } - else - { - return false; - } - } + public ValueTask ReadAsync(CancellationToken cancellationToken = default) + { + return new ValueTask(this.ReadCore(cancellationToken)); } public BsonValue this[string field] @@ -132,6 +110,12 @@ public void Dispose() GC.SuppressFinalize(this); } + public ValueTask DisposeAsync() + { + this.Dispose(); + return default; + } + ~BsonDataReader() { this.Dispose(false); @@ -148,5 +132,37 @@ protected virtual void Dispose(bool disposing) _source?.Dispose(); } } + + private bool ReadCore(CancellationToken cancellationToken) + { + if (!_hasValues) return false; + + cancellationToken.ThrowIfCancellationRequested(); + + if (_isFirst) + { + _isFirst = false; + return true; + } + + if (_source != null) + { + _state.Validate(); + + try + { + var read = _source.MoveNext(); + _current = _state.ReadTransform(_collection, _source.Current); + return read; + } + catch (Exception ex) + { + _state.Handle(ex); + throw; + } + } + + return false; + } } } \ No newline at end of file diff --git a/LiteDB/Document/DataReader/BsonDataReaderExtensions.cs b/LiteDB/Document/DataReader/BsonDataReaderExtensions.cs index f7ba9ab72..8a118fa7a 100644 --- a/LiteDB/Document/DataReader/BsonDataReaderExtensions.cs +++ b/LiteDB/Document/DataReader/BsonDataReaderExtensions.cs @@ -1,19 +1,122 @@ -using LiteDB.Engine; using System; -using System.Collections; using System.Collections.Generic; using System.Linq; -using static LiteDB.Constants; 
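// NOTE: illustrative sketch only - with ReadAsync/DisposeAsync now on IBsonDataReader,
// the async helpers in this file let a reader be drained without blocking a thread.
// Query.All() and ILiteEngine.QueryAsync are assumed to keep the shapes shown elsewhere
// in this patch.
using System;
using System.Threading;
using System.Threading.Tasks;

internal static class DataReaderSketch
{
    public static async Task<int> DumpCollectionAsync(ILiteEngine engine, string collection, CancellationToken ct = default)
    {
        var reader = await engine.QueryAsync(collection, Query.All(), ct);

        var count = 0;

        // ToAsyncEnumerable disposes the reader once iteration finishes
        await foreach (var value in reader.ToAsyncEnumerable(ct))
        {
            Console.WriteLine(value.ToString());
            count++;
        }

        return count;
    }
}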
+using System.Runtime.CompilerServices; +using System.Threading; +using System.Threading.Tasks; namespace LiteDB { /// - /// Implement some Enumerable methods to IBsonDataReader + /// Asynchronous helpers for consuming instances. /// public static class BsonDataReaderExtensions { + public static async IAsyncEnumerable ToAsyncEnumerable( + this IBsonDataReader reader, + [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + if (reader == null) throw new ArgumentNullException(nameof(reader)); + + try + { + while (await reader.ReadAsync(cancellationToken).ConfigureAwait(false)) + { + yield return reader.Current; + } + } + finally + { + await reader.DisposeAsync().ConfigureAwait(false); + } + } + + public static async Task ToArrayAsync(this IBsonDataReader reader, CancellationToken cancellationToken = default) + { + var list = await reader.ToListAsync(cancellationToken).ConfigureAwait(false); + return list.ToArray(); + } + + public static async Task> ToListAsync(this IBsonDataReader reader, CancellationToken cancellationToken = default) + { + var list = new List(); + + await foreach (var item in reader.ToAsyncEnumerable(cancellationToken).ConfigureAwait(false)) + { + list.Add(item); + } + + return list; + } + + public static async Task FirstAsync(this IBsonDataReader reader, CancellationToken cancellationToken = default) + { + await foreach (var item in reader.ToAsyncEnumerable(cancellationToken).ConfigureAwait(false)) + { + return item; + } + + throw new InvalidOperationException("Sequence contains no elements"); + } + + public static async Task FirstOrDefaultAsync(this IBsonDataReader reader, CancellationToken cancellationToken = default) + { + await foreach (var item in reader.ToAsyncEnumerable(cancellationToken).ConfigureAwait(false)) + { + return item; + } + + return null; + } + + public static async Task SingleAsync(this IBsonDataReader reader, CancellationToken cancellationToken = default) + { + BsonValue value = null; + var found = false; + + await foreach (var item in reader.ToAsyncEnumerable(cancellationToken).ConfigureAwait(false)) + { + if (found) + { + throw new InvalidOperationException("Sequence contains more than one element"); + } + + found = true; + value = item; + } + + if (!found) + { + throw new InvalidOperationException("Sequence contains no elements"); + } + + return value!; + } + + public static async Task SingleOrDefaultAsync(this IBsonDataReader reader, CancellationToken cancellationToken = default) + { + BsonValue value = null; + var found = false; + + await foreach (var item in reader.ToAsyncEnumerable(cancellationToken).ConfigureAwait(false)) + { + if (found) + { + throw new InvalidOperationException("Sequence contains more than one element"); + } + + found = true; + value = item; + } + + return value; + } + + [Obsolete("Use ToAsyncEnumerable and await the result instead of blocking.")] public static IEnumerable ToEnumerable(this IBsonDataReader reader) { + if (reader == null) throw new ArgumentNullException(nameof(reader)); + try { while (reader.Read()) @@ -27,16 +130,22 @@ public static IEnumerable ToEnumerable(this IBsonDataReader reader) } } - public static BsonValue[] ToArray(this IBsonDataReader reader) => ToEnumerable(reader).ToArray(); - + [Obsolete("Use ToListAsync and await the result instead of blocking.")] public static IList ToList(this IBsonDataReader reader) => ToEnumerable(reader).ToList(); + [Obsolete("Use ToArrayAsync and await the result instead of blocking.")] + public static BsonValue[] ToArray(this IBsonDataReader 
reader) => ToEnumerable(reader).ToArray(); + + [Obsolete("Use FirstAsync and await the result instead of blocking.")] public static BsonValue First(this IBsonDataReader reader) => ToEnumerable(reader).First(); + [Obsolete("Use FirstOrDefaultAsync and await the result instead of blocking.")] public static BsonValue FirstOrDefault(this IBsonDataReader reader) => ToEnumerable(reader).FirstOrDefault(); + [Obsolete("Use SingleAsync and await the result instead of blocking.")] public static BsonValue Single(this IBsonDataReader reader) => ToEnumerable(reader).Single(); + [Obsolete("Use SingleOrDefaultAsync and await the result instead of blocking.")] public static BsonValue SingleOrDefault(this IBsonDataReader reader) => ToEnumerable(reader).SingleOrDefault(); } -} \ No newline at end of file +} diff --git a/LiteDB/Document/DataReader/IBsonDataReader.cs b/LiteDB/Document/DataReader/IBsonDataReader.cs index 535f926f7..d0361d7e6 100644 --- a/LiteDB/Document/DataReader/IBsonDataReader.cs +++ b/LiteDB/Document/DataReader/IBsonDataReader.cs @@ -1,8 +1,10 @@ -using System; +using System; +using System.Threading; +using System.Threading.Tasks; namespace LiteDB { - public interface IBsonDataReader : IDisposable + public interface IBsonDataReader : IDisposable, IAsyncDisposable { BsonValue this[string field] { get; } @@ -11,5 +13,7 @@ public interface IBsonDataReader : IDisposable bool HasValues { get; } bool Read(); + + ValueTask ReadAsync(CancellationToken cancellationToken = default); } -} \ No newline at end of file +} diff --git a/LiteDB/Engine/Disk/DiskService.cs b/LiteDB/Engine/Disk/DiskService.cs index 73e7910b5..461a41598 100644 --- a/LiteDB/Engine/Disk/DiskService.cs +++ b/LiteDB/Engine/Disk/DiskService.cs @@ -3,6 +3,7 @@ using System.Collections.Generic; using System.IO; using System.Threading; +using System.Threading.Tasks; using static LiteDB.Constants; namespace LiteDB.Engine @@ -21,6 +22,8 @@ internal class DiskService : IDisposable private StreamPool _dataPool; private readonly StreamPool _logPool; + private readonly SemaphoreSlim _dataWriterLock = new SemaphoreSlim(1, 1); + private readonly SemaphoreSlim _logWriterLock = new SemaphoreSlim(1, 1); private readonly Lazy _writer; private long _dataLength; @@ -164,49 +167,52 @@ public PageBuffer NewPage() /// /// Write all pages inside log file in a thread safe operation /// - public int WriteLogDisk(IEnumerable pages) + public async ValueTask WriteLogDiskAsync(IEnumerable pages, CancellationToken cancellationToken = default) { var count = 0; var stream = _writer.Value; - // do a global write lock - only 1 thread can write on disk at time - lock(stream) + await _logWriterLock.WaitAsync(cancellationToken).ConfigureAwait(false); + + try { foreach (var page in pages) { + cancellationToken.ThrowIfCancellationRequested(); + ENSURE(page.ShareCounter == BUFFER_WRITABLE, "to enqueue page, page must be writable"); - // adding this page into file AS new page (at end of file) - // must add into cache to be sure that new readers can see this page page.Position = Interlocked.Add(ref _logLength, PAGE_SIZE); - - // should mark page origin to log because async queue works only for log file - // if this page came from data file, must be changed before MoveToReadable page.Origin = FileOrigin.Log; - // mark this page as readable and get cached paged to enqueue var readable = _cache.MoveToReadable(page); - // set log stream position to page stream.Position = page.Position; #if DEBUG _state.SimulateDiskWriteFail?.Invoke(page); #endif - // and write to disk in a 
sync mode - stream.Write(page.Array, page.Offset, PAGE_SIZE); + await stream.WriteAsync(page.Array, page.Offset, PAGE_SIZE, cancellationToken).ConfigureAwait(false); - // release page here (no page use after this) page.Release(); count++; } } + finally + { + _logWriterLock.Release(); + } return count; } + public int WriteLogDisk(IEnumerable pages) + { + return this.WriteLogDiskAsync(pages).GetAwaiter().GetResult(); + } + /// /// Get file length based on data/log length variables (no direct on disk) /// @@ -287,41 +293,72 @@ public IEnumerable ReadFull(FileOrigin origin) /// /// Write pages DIRECT in disk. This pages are not cached and are not shared - WORKS FOR DATA FILE ONLY /// - public void WriteDataDisk(IEnumerable pages) + public async ValueTask WriteDataDiskAsync(IEnumerable pages, CancellationToken cancellationToken = default) { var stream = _dataPool.Writer.Value; - foreach (var page in pages) + await _dataWriterLock.WaitAsync(cancellationToken).ConfigureAwait(false); + + try { - ENSURE(page.ShareCounter == 0, "this page can't be shared to use sync operation - do not use cached pages"); + foreach (var page in pages) + { + cancellationToken.ThrowIfCancellationRequested(); + + ENSURE(page.ShareCounter == 0, "this page can't be shared to use sync operation - do not use cached pages"); - _dataLength = Math.Max(_dataLength, page.Position); + _dataLength = Math.Max(_dataLength, page.Position); - stream.Position = page.Position; + stream.Position = page.Position; + + await stream.WriteAsync(page.Array, page.Offset, PAGE_SIZE, cancellationToken).ConfigureAwait(false); + } - stream.Write(page.Array, page.Offset, PAGE_SIZE); + await stream.FlushToDiskAsync(cancellationToken).ConfigureAwait(false); } + finally + { + _dataWriterLock.Release(); + } + } - stream.FlushToDisk(); + public void WriteDataDisk(IEnumerable pages) + { + this.WriteDataDiskAsync(pages).GetAwaiter().GetResult(); } /// /// Set new length for file in sync mode. Queue must be empty before set length /// - public void SetLength(long length, FileOrigin origin) + public async ValueTask SetLengthAsync(long length, FileOrigin origin, CancellationToken cancellationToken = default) { var stream = origin == FileOrigin.Log ? _logPool.Writer : _dataPool.Writer; + var gate = origin == FileOrigin.Log ? _logWriterLock : _dataWriterLock; - if (origin == FileOrigin.Log) + await gate.WaitAsync(cancellationToken).ConfigureAwait(false); + + try { - Interlocked.Exchange(ref _logLength, length - PAGE_SIZE); + if (origin == FileOrigin.Log) + { + Interlocked.Exchange(ref _logLength, length - PAGE_SIZE); + } + else + { + Interlocked.Exchange(ref _dataLength, length - PAGE_SIZE); + } + + stream.Value.SetLength(length); } - else + finally { - Interlocked.Exchange(ref _dataLength, length - PAGE_SIZE); + gate.Release(); } + } - stream.Value.SetLength(length); + public void SetLength(long length, FileOrigin origin) + { + this.SetLengthAsync(length, origin).GetAwaiter().GetResult(); } /// @@ -344,6 +381,9 @@ public void Dispose() _dataPool.Dispose(); _logPool.Dispose(); + _dataWriterLock.Dispose(); + _logWriterLock.Dispose(); + if (delete) _logFactory.Delete(); // other disposes diff --git a/LiteDB/Engine/Disk/StreamFactory/FileStreamFactory.cs b/LiteDB/Engine/Disk/StreamFactory/FileStreamFactory.cs index cc3e95bb4..60369d31f 100644 --- a/LiteDB/Engine/Disk/StreamFactory/FileStreamFactory.cs +++ b/LiteDB/Engine/Disk/StreamFactory/FileStreamFactory.cs @@ -43,7 +43,7 @@ public Stream GetStream(bool canWrite, bool sequencial) var fileMode = _readonly ? 
System.IO.FileMode.Open : System.IO.FileMode.OpenOrCreate; var fileAccess = write ? FileAccess.ReadWrite : FileAccess.Read; var fileShare = write ? FileShare.Read : FileShare.ReadWrite; - var fileOptions = sequencial ? FileOptions.SequentialScan : FileOptions.RandomAccess; + var fileOptions = (sequencial ? FileOptions.SequentialScan : FileOptions.RandomAccess) | FileOptions.Asynchronous; var isNewFile = write && this.Exists() == false; diff --git a/LiteDB/Engine/Disk/Streams/AesStream.cs b/LiteDB/Engine/Disk/Streams/AesStream.cs index fe12526bb..3d0207d6c 100644 --- a/LiteDB/Engine/Disk/Streams/AesStream.cs +++ b/LiteDB/Engine/Disk/Streams/AesStream.cs @@ -3,6 +3,8 @@ using System.IO; using System.Linq; using System.Security.Cryptography; +using System.Threading; +using System.Threading.Tasks; using static LiteDB.Constants; namespace LiteDB.Engine @@ -197,6 +199,28 @@ public override void Write(byte[] array, int offset, int count) _writer.Write(array, offset, count); } + public override async Task ReadAsync(byte[] array, int offset, int count, CancellationToken cancellationToken) + { + ENSURE(this.Position % PAGE_SIZE == 0, "AesRead: position must be in PAGE_SIZE module. Position={0}, File={1}", this.Position, _name); + + var r = await _reader.ReadAsync(array, offset, count, cancellationToken).ConfigureAwait(false); + + if (this.IsBlank(array, offset)) + { + array.Fill(0, offset, count); + } + + return r; + } + + public override async Task WriteAsync(byte[] array, int offset, int count, CancellationToken cancellationToken) + { + ENSURE(count == PAGE_SIZE || count == 1, "buffer size must be PAGE_SIZE"); + ENSURE(this.Position == HeaderPage.P_INVALID_DATAFILE_STATE || this.Position % PAGE_SIZE == 0, "AesWrite: position must be in PAGE_SIZE module. Position={0}, File={1}", this.Position, _name); + + await _writer.WriteAsync(array, offset, count, cancellationToken).ConfigureAwait(false); + } + protected override void Dispose(bool disposing) { base.Dispose(disposing); @@ -229,6 +253,11 @@ public override void Flush() _stream.Flush(); } + public override Task FlushAsync(CancellationToken cancellationToken) + { + return _stream.FlushAsync(cancellationToken); + } + public override long Seek(long offset, SeekOrigin origin) { return _stream.Seek(offset + PAGE_SIZE, origin); diff --git a/LiteDB/Engine/Disk/Streams/ConcurrentStream.cs b/LiteDB/Engine/Disk/Streams/ConcurrentStream.cs index b3b0659ff..d311efabf 100644 --- a/LiteDB/Engine/Disk/Streams/ConcurrentStream.cs +++ b/LiteDB/Engine/Disk/Streams/ConcurrentStream.cs @@ -1,5 +1,7 @@ using System; using System.IO; +using System.Threading; +using System.Threading.Tasks; using static LiteDB.Constants; namespace LiteDB.Engine @@ -14,6 +16,7 @@ internal class ConcurrentStream : Stream private readonly bool _canWrite; private long _position = 0; + private readonly SemaphoreSlim _mutex = new SemaphoreSlim(1, 1); public ConcurrentStream(Stream stream, bool canWrite) { @@ -31,15 +34,38 @@ public ConcurrentStream(Stream stream, bool canWrite) public override long Position { get => _position; set => _position = value; } - public override void Flush() => _stream.Flush(); + public override void Flush() + { + _mutex.Wait(); + + try + { + _stream.Flush(); + } + finally + { + _mutex.Release(); + } + } + + public override Task FlushAsync(CancellationToken cancellationToken) + { + return this.WithLockAsync(() => _stream.FlushAsync(cancellationToken), cancellationToken); + } public override void SetLength(long value) => _stream.SetLength(value); - protected override 
void Dispose(bool disposing) => _stream.Dispose(); + protected override void Dispose(bool disposing) + { + _stream.Dispose(); + _mutex.Dispose(); + } public override long Seek(long offset, SeekOrigin origin) { - lock(_stream) + _mutex.Wait(); + + try { var position = origin == SeekOrigin.Begin ? offset : @@ -50,31 +76,94 @@ public override long Seek(long offset, SeekOrigin origin) return _position; } + finally + { + _mutex.Release(); + } } public override int Read(byte[] buffer, int offset, int count) { - // lock internal stream and set position before read - lock (_stream) + _mutex.Wait(); + + try { _stream.Position = _position; var read = _stream.Read(buffer, offset, count); _position = _stream.Position; return read; } + finally + { + _mutex.Release(); + } } public override void Write(byte[] buffer, int offset, int count) { if (_canWrite == false) throw new NotSupportedException("Current stream are readonly"); - // lock internal stream and set position before write - lock (_stream) + _mutex.Wait(); + + try { _stream.Position = _position; _stream.Write(buffer, offset, count); _position = _stream.Position; } + finally + { + _mutex.Release(); + } + } + + public override async Task ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken) + { + await _mutex.WaitAsync(cancellationToken).ConfigureAwait(false); + + try + { + _stream.Position = _position; + var read = await _stream.ReadAsync(buffer, offset, count, cancellationToken).ConfigureAwait(false); + _position = _stream.Position; + return read; + } + finally + { + _mutex.Release(); + } + } + + public override async Task WriteAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken) + { + if (_canWrite == false) throw new NotSupportedException("Current stream are readonly"); + + await _mutex.WaitAsync(cancellationToken).ConfigureAwait(false); + + try + { + _stream.Position = _position; + await _stream.WriteAsync(buffer, offset, count, cancellationToken).ConfigureAwait(false); + _position = _stream.Position; + } + finally + { + _mutex.Release(); + } + } + + private async Task WithLockAsync(Func body, CancellationToken cancellationToken) + { + await _mutex.WaitAsync(cancellationToken).ConfigureAwait(false); + + try + { + await body().ConfigureAwait(false); + } + finally + { + _mutex.Release(); + } } } } \ No newline at end of file diff --git a/LiteDB/Engine/Engine/Collection.cs b/LiteDB/Engine/Engine/Collection.cs index b132e2e6a..4ba4baa24 100644 --- a/LiteDB/Engine/Engine/Collection.cs +++ b/LiteDB/Engine/Engine/Collection.cs @@ -2,6 +2,8 @@ using System.Collections.Generic; using System.Linq; using System.Text; +using System.Threading; +using System.Threading.Tasks; using static LiteDB.Constants; namespace LiteDB.Engine @@ -19,19 +21,21 @@ public IEnumerable GetCollectionNames() /// /// Drop collection including all documents, indexes and extended pages (do not support transactions) /// - public bool DropCollection(string name) + public Task DropCollectionAsync(string name, CancellationToken cancellationToken = default) { if (name.IsNullOrWhiteSpace()) throw new ArgumentNullException(nameof(name)); + cancellationToken.ThrowIfCancellationRequested(); + // drop collection is possible only in exclusive transaction for this if (_locker.IsInTransaction) throw LiteException.AlreadyExistsTransaction(); - return this.AutoTransaction(transaction => + return this.AutoTransactionAsync((transaction, token) => { var snapshot = transaction.CreateSnapshot(LockMode.Write, name, false); // if collection do 
not exist, just exit - if (snapshot.CollectionPage == null) return false; + if (snapshot.CollectionPage == null) return new ValueTask(false); LOG($"drop collection `{name}`", "COMMAND"); @@ -41,14 +45,14 @@ public bool DropCollection(string name) // remove sequence number (if exists) _sequences.TryRemove(name, out var dummy); - return true; - }); + return new ValueTask(true); + }, cancellationToken); } /// /// Rename a collection (do not support transactions) /// - public bool RenameCollection(string collection, string newName) + public Task RenameCollectionAsync(string collection, string newName, CancellationToken cancellationToken = default) { if (collection.IsNullOrWhiteSpace()) throw new ArgumentNullException(nameof(collection)); if (newName.IsNullOrWhiteSpace()) throw new ArgumentNullException(nameof(newName)); @@ -65,12 +69,14 @@ public bool RenameCollection(string collection, string newName) // rename collection is possible only in exclusive transaction for this if (_locker.IsInTransaction) throw LiteException.AlreadyExistsTransaction(); - return this.AutoTransaction(transaction => + cancellationToken.ThrowIfCancellationRequested(); + + return this.AutoTransactionAsync((transaction, token) => { var currentSnapshot = transaction.CreateSnapshot(LockMode.Write, collection, false); var newSnapshot = transaction.CreateSnapshot(LockMode.Write, newName, false); - if (currentSnapshot.CollectionPage == null) return false; + if (currentSnapshot.CollectionPage == null) return new ValueTask(false); // checks if do not already exists this collection name if (_header.GetCollectionPageID(newName) != uint.MaxValue) @@ -84,8 +90,8 @@ public bool RenameCollection(string collection, string newName) h.RenameCollection(collection, newName); }; - return true; - }); + return new ValueTask(true); + }, cancellationToken); } } } \ No newline at end of file diff --git a/LiteDB/Engine/Engine/Delete.cs b/LiteDB/Engine/Engine/Delete.cs index 1f8a5471d..2b064b0a4 100644 --- a/LiteDB/Engine/Engine/Delete.cs +++ b/LiteDB/Engine/Engine/Delete.cs @@ -1,6 +1,8 @@ using System; using System.Collections.Generic; using System.Linq; +using System.Threading; +using System.Threading.Tasks; using static LiteDB.Constants; namespace LiteDB.Engine @@ -10,19 +12,21 @@ public partial class LiteEngine /// /// Implements delete based on IDs enumerable /// - public int Delete(string collection, IEnumerable ids) + public Task DeleteAsync(string collection, IEnumerable ids, CancellationToken cancellationToken = default) { if (collection.IsNullOrWhiteSpace()) throw new ArgumentNullException(nameof(collection)); if (ids == null) throw new ArgumentNullException(nameof(ids)); - return this.AutoTransaction(transaction => + cancellationToken.ThrowIfCancellationRequested(); + + return this.AutoTransactionAsync((transaction, token) => { var snapshot = transaction.CreateSnapshot(LockMode.Write, collection, false); var collectionPage = snapshot.CollectionPage; var data = new DataService(snapshot, _disk.MAX_ITEMS_COUNT); var indexer = new IndexService(snapshot, _header.Pragmas.Collation, _disk.MAX_ITEMS_COUNT); - if (collectionPage == null) return 0; + if (collectionPage == null) return new ValueTask(0); LOG($"delete `{collection}`", "COMMAND"); @@ -31,6 +35,8 @@ public int Delete(string collection, IEnumerable ids) foreach (var id in ids) { + token.ThrowIfCancellationRequested(); + var pkNode = indexer.Find(pk, id, false, LiteDB.Query.Ascending); // if pk not found, continue @@ -49,57 +55,57 @@ public int Delete(string collection, IEnumerable 
ids) count++; } - return count; - }); + return new ValueTask(count); + }, cancellationToken); } /// /// Implements delete based on filter expression /// - public int DeleteMany(string collection, BsonExpression predicate) + public async Task DeleteManyAsync(string collection, BsonExpression predicate, CancellationToken cancellationToken = default) { if (collection.IsNullOrWhiteSpace()) throw new ArgumentNullException(nameof(collection)); + cancellationToken.ThrowIfCancellationRequested(); // do optimization for when using "_id = value" key if (predicate != null && - predicate.Type == BsonExpressionType.Equal && - predicate.Left.Type == BsonExpressionType.Path && - predicate.Left.Source == "$._id" && + predicate.Type == BsonExpressionType.Equal && + predicate.Left.Type == BsonExpressionType.Path && + predicate.Left.Source == "$._id" && predicate.Right.IsValue) { var id = predicate.Right.Execute(_header.Pragmas.Collation).First(); - return this.Delete(collection, new BsonValue[] { id }); + return await this.DeleteAsync(collection, new BsonValue[] { id }, cancellationToken).ConfigureAwait(false); } else { - IEnumerable getIds() + // this is interesting: if _id returns a document (like in FileStorage) you can't run direct _id + // field because "reader.Current" will return _id document - but not - { _id: [document] } + // create inner document to ensure _id will be a document + var query = new Query { Select = "{ i: _id }", ForUpdate = true }; + + if (predicate != null) { - // this is intresting: if _id returns an document (like in FileStorage) you can't run direct _id - // field because "reader.Current" will return _id document - but not - { _id: [document] } - // create inner document to ensure _id will be a document - var query = new Query { Select = "{ i: _id }", ForUpdate = true }; + query.Where.Add(predicate); + } - if(predicate != null) - { - query.Where.Add(predicate); - } + var ids = new List(); - using (var reader = this.Query(collection, query)) + await using (var reader = await this.QueryAsync(collection, query, cancellationToken).ConfigureAwait(false)) + { + while (await reader.ReadAsync(cancellationToken).ConfigureAwait(false)) { - while (reader.Read()) - { - var value = reader.Current["i"]; + var value = reader.Current["i"]; - if (value != BsonValue.Null) - { - yield return value; - } + if (value != BsonValue.Null) + { + ids.Add(value); } } } - return this.Delete(collection, getIds()); + return await this.DeleteAsync(collection, ids, cancellationToken).ConfigureAwait(false); } } } diff --git a/LiteDB/Engine/Engine/Index.cs b/LiteDB/Engine/Engine/Index.cs index 157542848..9494ea076 100644 --- a/LiteDB/Engine/Engine/Index.cs +++ b/LiteDB/Engine/Engine/Index.cs @@ -2,6 +2,8 @@ using System.Collections.Generic; using System.Linq; using System.Text.RegularExpressions; +using System.Threading; +using System.Threading.Tasks; using static LiteDB.Constants; namespace LiteDB.Engine @@ -11,7 +13,7 @@ public partial class LiteEngine /// /// Create a new index (or do nothing if already exists) to a collection/field /// - public bool EnsureIndex(string collection, string name, BsonExpression expression, bool unique) + public Task EnsureIndexAsync(string collection, string name, BsonExpression expression, bool unique, CancellationToken cancellationToken = default) { if (collection.IsNullOrWhiteSpace()) throw new ArgumentNullException(nameof(collection)); if (name.IsNullOrWhiteSpace()) throw new ArgumentNullException(nameof(name)); @@ -23,9 +25,11 @@ public bool EnsureIndex(string collection, 
string name, BsonExpression expressio if (name.StartsWith("$")) throw LiteException.InvalidIndexName(name, collection, "Index name can't start with `$`"); if (expression.IsScalar == false && unique) throw new LiteException(0, "Multikey index expression do not support unique option"); - if (expression.Source == "$._id") return false; // always exists + if (expression.Source == "$._id") return Task.FromResult(false); // always exists - return this.AutoTransaction(transaction => + cancellationToken.ThrowIfCancellationRequested(); + + return this.AutoTransactionAsync((transaction, token) => { var snapshot = transaction.CreateSnapshot(LockMode.Write, collection, true); var collectionPage = snapshot.CollectionPage; @@ -41,7 +45,7 @@ public bool EnsureIndex(string collection, string name, BsonExpression expressio // but if expression are different, throw error if (current.Expression != expression.Source) throw LiteException.IndexAlreadyExist(name); - return false; + return new ValueTask(false); } LOG($"create index `{collection}.{name}`", "COMMAND"); @@ -53,6 +57,8 @@ public bool EnsureIndex(string collection, string name, BsonExpression expressio // read all objects (read from PK index) foreach (var pkNode in new IndexAll("_id", LiteDB.Query.Ascending).Run(collectionPage, indexer)) { + token.ThrowIfCancellationRequested(); + using (var reader = new BufferReader(data.Read(pkNode.DataBlock))) { var doc = reader.ReadDocument(expression.Fields).GetValue(); @@ -67,6 +73,8 @@ public bool EnsureIndex(string collection, string name, BsonExpression expressio // adding index node for each value foreach (var key in keys) { + token.ThrowIfCancellationRequested(); + _state.Validate(); // insert new index node @@ -90,43 +98,45 @@ public bool EnsureIndex(string collection, string name, BsonExpression expressio transaction.Safepoint(); } - return true; - }); + return new ValueTask(true); + }, cancellationToken); } /// /// Drop an index from a collection /// - public bool DropIndex(string collection, string name) + public Task DropIndexAsync(string collection, string name, CancellationToken cancellationToken = default) { if (collection.IsNullOrWhiteSpace()) throw new ArgumentNullException(nameof(collection)); if (name.IsNullOrWhiteSpace()) throw new ArgumentNullException(nameof(name)); if (name == "_id") throw LiteException.IndexDropId(); - return this.AutoTransaction(transaction => + cancellationToken.ThrowIfCancellationRequested(); + + return this.AutoTransactionAsync((transaction, token) => { var snapshot = transaction.CreateSnapshot(LockMode.Write, collection, false); var col = snapshot.CollectionPage; var indexer = new IndexService(snapshot, _header.Pragmas.Collation, _disk.MAX_ITEMS_COUNT); - + // no collection, no index - if (col == null) return false; - + if (col == null) return new ValueTask(false); + // search for index reference var index = col.GetCollectionIndex(name); - + // no index, no drop - if (index == null) return false; + if (index == null) return new ValueTask(false); // delete all data pages + indexes pages indexer.DropIndex(index); // remove index entry in collection page snapshot.CollectionPage.DeleteCollectionIndex(name); - - return true; - }); + + return new ValueTask(true); + }, cancellationToken); } } } \ No newline at end of file diff --git a/LiteDB/Engine/Engine/Insert.cs b/LiteDB/Engine/Engine/Insert.cs index 7f3097df0..532148d01 100644 --- a/LiteDB/Engine/Engine/Insert.cs +++ b/LiteDB/Engine/Engine/Insert.cs @@ -3,6 +3,8 @@ using System.Collections.Generic; using 
System.Diagnostics; using System.Linq; +using System.Threading; +using System.Threading.Tasks; using static LiteDB.Constants; namespace LiteDB.Engine @@ -12,12 +14,14 @@ public partial class LiteEngine /// /// Insert all documents in collection. If document has no _id, use AutoId generation. /// - public int Insert(string collection, IEnumerable docs, BsonAutoId autoId) + public Task InsertAsync(string collection, IEnumerable docs, BsonAutoId autoId, CancellationToken cancellationToken = default) { if (collection.IsNullOrWhiteSpace()) throw new ArgumentNullException(nameof(collection)); if (docs == null) throw new ArgumentNullException(nameof(docs)); - return this.AutoTransaction(transaction => + cancellationToken.ThrowIfCancellationRequested(); + + return this.AutoTransactionAsync((transaction, token) => { var snapshot = transaction.CreateSnapshot(LockMode.Write, collection, true); var count = 0; @@ -28,6 +32,7 @@ public int Insert(string collection, IEnumerable docs, BsonAutoId foreach (var doc in docs) { + token.ThrowIfCancellationRequested(); _state.Validate(); transaction.Safepoint(); @@ -37,8 +42,8 @@ public int Insert(string collection, IEnumerable docs, BsonAutoId count++; } - return count; - }); + return new ValueTask(count); + }, cancellationToken); } /// diff --git a/LiteDB/Engine/Engine/Pragma.cs b/LiteDB/Engine/Engine/Pragma.cs index bc7d10f91..beda6e35a 100644 --- a/LiteDB/Engine/Engine/Pragma.cs +++ b/LiteDB/Engine/Engine/Pragma.cs @@ -2,6 +2,8 @@ using System.Collections.Concurrent; using System.Collections.Generic; using System.Linq; +using System.Threading; +using System.Threading.Tasks; using static LiteDB.Constants; namespace LiteDB.Engine @@ -11,30 +13,34 @@ public partial class LiteEngine /// /// Get engine internal pragma value /// - public BsonValue Pragma(string name) + public Task PragmaAsync(string name, CancellationToken cancellationToken = default) { - return _header.Pragmas.Get(name); + cancellationToken.ThrowIfCancellationRequested(); + + return Task.FromResult(_header.Pragmas.Get(name)); } /// /// Set engine pragma new value (some pragmas will be affected only after realod) /// - public bool Pragma(string name, BsonValue value) + public Task PragmaAsync(string name, BsonValue value, CancellationToken cancellationToken = default) { - if (this.Pragma(name) == value) return false; + cancellationToken.ThrowIfCancellationRequested(); + + if (_header.Pragmas.Get(name) == value) return Task.FromResult(false); if (_locker.IsInTransaction) throw LiteException.AlreadyExistsTransaction(); - // do a inside transaction to edit pragma on commit event - return this.AutoTransaction(transaction => + // do a inside transaction to edit pragma on commit event + return this.AutoTransactionAsync((transaction, token) => { transaction.Pages.Commit += (h) => { h.Pragmas.Set(name, value, true); }; - return true; - }); + return new ValueTask(true); + }, cancellationToken); } } } \ No newline at end of file diff --git a/LiteDB/Engine/Engine/Query.cs b/LiteDB/Engine/Engine/Query.cs index d4a807b64..e02fa2648 100644 --- a/LiteDB/Engine/Engine/Query.cs +++ b/LiteDB/Engine/Engine/Query.cs @@ -2,6 +2,8 @@ using System.Collections.Generic; using System.Diagnostics; using System.Linq; +using System.Threading; +using System.Threading.Tasks; using static LiteDB.Constants; namespace LiteDB.Engine @@ -12,11 +14,13 @@ public partial class LiteEngine /// Run query over collection using a query definition. 
/// Returns a new IBsonDataReader that run and return first document result (open transaction) /// - public IBsonDataReader Query(string collection, Query query) + public Task QueryAsync(string collection, Query query, CancellationToken cancellationToken = default) { if (string.IsNullOrWhiteSpace(collection)) throw new ArgumentNullException(nameof(collection)); if (query == null) throw new ArgumentNullException(nameof(query)); + cancellationToken.ThrowIfCancellationRequested(); + IEnumerable source = null; // test if is an system collection @@ -42,7 +46,7 @@ public IBsonDataReader Query(string collection, Query query) query, source); - return exec.ExecuteQuery(); + return Task.FromResult(exec.ExecuteQuery()); } } } \ No newline at end of file diff --git a/LiteDB/Engine/Engine/Rebuild.cs b/LiteDB/Engine/Engine/Rebuild.cs index 37036bad6..447d6724d 100644 --- a/LiteDB/Engine/Engine/Rebuild.cs +++ b/LiteDB/Engine/Engine/Rebuild.cs @@ -3,6 +3,8 @@ using System.IO; using System.Linq; using System.Text; +using System.Threading; +using System.Threading.Tasks; using static LiteDB.Constants; @@ -15,9 +17,11 @@ public partial class LiteEngine /// A backup copy will be created with -backup extention. All data will be readed and re created in another database /// After run, will re-open database /// - public long Rebuild(RebuildOptions options) + public Task RebuildAsync(RebuildOptions options, CancellationToken cancellationToken = default) { - if (string.IsNullOrEmpty(_settings.Filename)) return 0; // works only with os file + if (string.IsNullOrEmpty(_settings.Filename)) return Task.FromResult(0L); // works only with os file + + cancellationToken.ThrowIfCancellationRequested(); this.Close(); @@ -32,18 +36,20 @@ public long Rebuild(RebuildOptions options) _state.Disposed = false; - return diff; + return Task.FromResult(diff); } /// /// Implement a full rebuild database. A backup copy will be created with -backup extention. All data will be readed and re created in another database /// - public long Rebuild() + public Task RebuildAsync(CancellationToken cancellationToken = default) { - var collation = new Collation(this.Pragma(Pragmas.COLLATION)); + cancellationToken.ThrowIfCancellationRequested(); + + var collation = _header.Pragmas.Collation; var password = _settings.Password; - return this.Rebuild(new RebuildOptions { Password = password, Collation = collation }); + return this.RebuildAsync(new RebuildOptions { Password = password, Collation = collation }, cancellationToken); } /// @@ -77,10 +83,12 @@ internal void RebuildContent(IFileReader reader) // first create all user indexes (exclude _id index) foreach (var index in reader.GetIndexes(collection)) { - this.EnsureIndex(collection, + this.EnsureIndexAsync( + collection, index.Name, BsonExpression.Create(index.Expression), - index.Unique); + index.Unique, + CancellationToken.None).ConfigureAwait(false).GetAwaiter().GetResult(); } } diff --git a/LiteDB/Engine/Engine/Transaction.cs b/LiteDB/Engine/Engine/Transaction.cs index 4db9f57b3..91e67bde7 100644 --- a/LiteDB/Engine/Engine/Transaction.cs +++ b/LiteDB/Engine/Engine/Transaction.cs @@ -2,6 +2,7 @@ using System.Collections.Concurrent; using System.Collections.Generic; using System.Threading; +using System.Threading.Tasks; using static LiteDB.Constants; namespace LiteDB.Engine @@ -12,8 +13,10 @@ public partial class LiteEngine /// Initialize a new transaction. Transaction are created "per-thread". There is only one single transaction per thread. 
/// Return true if transaction was created or false if current thread already in a transaction. /// - public bool BeginTrans() + public Task BeginTransAsync(CancellationToken cancellationToken = default) { + cancellationToken.ThrowIfCancellationRequested(); + _state.Validate(); var transacion = _monitor.GetTransaction(true, false, out var isNew); @@ -24,14 +27,16 @@ public bool BeginTrans() LOG(isNew, $"begin trans", "COMMAND"); - return isNew; + return Task.FromResult(isNew); } /// /// Persist all dirty pages into LOG file /// - public bool Commit() + public async Task CommitAsync(CancellationToken cancellationToken = default) { + cancellationToken.ThrowIfCancellationRequested(); + _state.Validate(); var transaction = _monitor.GetTransaction(false, false, out _); @@ -43,7 +48,7 @@ public bool Commit() if (transaction.State == TransactionState.Active) { - this.CommitAndReleaseTransaction(transaction); + await this.CommitAndReleaseTransactionAsync(transaction, cancellationToken).ConfigureAwait(false); return true; } @@ -55,8 +60,10 @@ public bool Commit() /// /// Do rollback to current transaction. Clear dirty pages in memory and return new pages to main empty linked-list /// - public bool Rollback() + public Task RollbackAsync(CancellationToken cancellationToken = default) { + cancellationToken.ThrowIfCancellationRequested(); + _state.Validate(); var transaction = _monitor.GetTransaction(false, false, out _); @@ -67,16 +74,16 @@ public bool Rollback() _monitor.ReleaseTransaction(transaction); - return true; + return Task.FromResult(true); } - return false; + return Task.FromResult(false); } /// /// Create (or reuse) a transaction an add try/catch block. Commit transaction if is new transaction /// - private T AutoTransaction(Func fn) + private async Task AutoTransactionAsync(Func> fn, CancellationToken cancellationToken) { _state.Validate(); @@ -84,11 +91,13 @@ private T AutoTransaction(Func fn) try { - var result = fn(transaction); + var result = await fn(transaction, cancellationToken).ConfigureAwait(false); // if this transaction was auto-created for this operation, commit & dispose now if (isNew) - this.CommitAndReleaseTransaction(transaction); + { + await this.CommitAndReleaseTransactionAsync(transaction, cancellationToken).ConfigureAwait(false); + } return result; } @@ -105,18 +114,22 @@ private T AutoTransaction(Func fn) } } - private void CommitAndReleaseTransaction(TransactionService transaction) + private ValueTask CommitAndReleaseTransactionAsync(TransactionService transaction, CancellationToken cancellationToken) { + cancellationToken.ThrowIfCancellationRequested(); + transaction.Commit(); _monitor.ReleaseTransaction(transaction); // try checkpoint when finish transaction and log file are bigger than checkpoint pragma value (in pages) - if (_header.Pragmas.Checkpoint > 0 && + if (_header.Pragmas.Checkpoint > 0 && _disk.GetFileLength(FileOrigin.Log) > (_header.Pragmas.Checkpoint * PAGE_SIZE)) { _walIndex.TryCheckpoint(); } + + return default; } } } \ No newline at end of file diff --git a/LiteDB/Engine/Engine/Update.cs b/LiteDB/Engine/Engine/Update.cs index 0d825beea..96bc4f2f4 100644 --- a/LiteDB/Engine/Engine/Update.cs +++ b/LiteDB/Engine/Engine/Update.cs @@ -1,6 +1,8 @@ using System; using System.Collections.Generic; using System.Linq; +using System.Threading; +using System.Threading.Tasks; using static LiteDB.Constants; namespace LiteDB.Engine @@ -10,12 +12,14 @@ public partial class LiteEngine /// /// Implement update command to a document inside a collection. 
Return number of documents updated /// - public int Update(string collection, IEnumerable docs) + public Task UpdateAsync(string collection, IEnumerable docs, CancellationToken cancellationToken = default) { if (collection.IsNullOrWhiteSpace()) throw new ArgumentNullException(nameof(collection)); if (docs == null) throw new ArgumentNullException(nameof(docs)); - return this.AutoTransaction(transaction => + cancellationToken.ThrowIfCancellationRequested(); + + return this.AutoTransactionAsync((transaction, token) => { var snapshot = transaction.CreateSnapshot(LockMode.Write, collection, false); var collectionPage = snapshot.CollectionPage; @@ -23,12 +27,13 @@ public int Update(string collection, IEnumerable docs) var data = new DataService(snapshot, _disk.MAX_ITEMS_COUNT); var count = 0; - if (collectionPage == null) return 0; + if (collectionPage == null) return new ValueTask(0); LOG($"update `{collection}`", "COMMAND"); foreach (var doc in docs) { + token.ThrowIfCancellationRequested(); _state.Validate(); transaction.Safepoint(); @@ -39,56 +44,59 @@ public int Update(string collection, IEnumerable docs) } } - return count; - }); + return new ValueTask(count); + }, cancellationToken); } /// /// Update documents using transform expression (must return a scalar/document value) using predicate as filter /// - public int UpdateMany(string collection, BsonExpression transform, BsonExpression predicate) + public async Task UpdateManyAsync(string collection, BsonExpression transform, BsonExpression predicate, CancellationToken cancellationToken = default) { if (collection.IsNullOrWhiteSpace()) throw new ArgumentNullException(nameof(collection)); if (transform == null) throw new ArgumentNullException(nameof(transform)); - return this.Update(collection, transformDocs()); + cancellationToken.ThrowIfCancellationRequested(); + + var q = new Query { Select = "$", ForUpdate = true }; - IEnumerable transformDocs() + if (predicate != null) { - var q = new Query { Select = "$", ForUpdate = true }; + q.Where.Add(predicate); + } - if (predicate != null) - { - q.Where.Add(predicate); - } + var transformed = new List(); - using (var reader = this.Query(collection, q)) + await using (var reader = await this.QueryAsync(collection, q, cancellationToken).ConfigureAwait(false)) + { + while (await reader.ReadAsync(cancellationToken).ConfigureAwait(false)) { - while (reader.Read()) - { - var doc = reader.Current.AsDocument; + cancellationToken.ThrowIfCancellationRequested(); - var id = doc["_id"]; - var value = transform.ExecuteScalar(doc, _header.Pragmas.Collation); + var doc = reader.Current.AsDocument; - if (!value.IsDocument) throw new ArgumentException("Extend expression must return a document", nameof(transform)); + var id = doc["_id"]; + var value = transform.ExecuteScalar(doc, _header.Pragmas.Collation); - var result = BsonExpressionMethods.EXTEND(doc, value.AsDocument).AsDocument; + if (!value.IsDocument) throw new ArgumentException("Extend expression must return a document", nameof(transform)); - // be sure result document will contain same _id as current doc - if (result.TryGetValue("_id", out var newId)) - { - if (newId != id) throw LiteException.InvalidUpdateField("_id"); - } - else - { - result["_id"] = id; - } + var result = BsonExpressionMethods.EXTEND(doc, value.AsDocument).AsDocument; - yield return result; + // be sure result document will contain same _id as current doc + if (result.TryGetValue("_id", out var newId)) + { + if (newId != id) throw LiteException.InvalidUpdateField("_id"); + } + else 
+ { + result["_id"] = id; } + + transformed.Add(result); } } + + return await this.UpdateAsync(collection, transformed, cancellationToken).ConfigureAwait(false); } /// diff --git a/LiteDB/Engine/Engine/Upsert.cs b/LiteDB/Engine/Engine/Upsert.cs index e9545c0ea..a648a8eae 100644 --- a/LiteDB/Engine/Engine/Upsert.cs +++ b/LiteDB/Engine/Engine/Upsert.cs @@ -1,5 +1,7 @@ using System; using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; using static LiteDB.Constants; namespace LiteDB.Engine @@ -11,12 +13,14 @@ public partial class LiteEngine /// then any documents not updated are then attempted to insert. /// This will have the side effect of throwing if duplicate items are attempted to be inserted. /// - public int Upsert(string collection, IEnumerable docs, BsonAutoId autoId) + public Task UpsertAsync(string collection, IEnumerable docs, BsonAutoId autoId, CancellationToken cancellationToken = default) { if (collection.IsNullOrWhiteSpace()) throw new ArgumentNullException(nameof(collection)); if (docs == null) throw new ArgumentNullException(nameof(docs)); - return this.AutoTransaction(transaction => + cancellationToken.ThrowIfCancellationRequested(); + + return this.AutoTransactionAsync((transaction, token) => { var snapshot = transaction.CreateSnapshot(LockMode.Write, collection, true); var collectionPage = snapshot.CollectionPage; @@ -28,6 +32,7 @@ public int Upsert(string collection, IEnumerable docs, BsonAutoId foreach (var doc in docs) { + token.ThrowIfCancellationRequested(); _state.Validate(); transaction.Safepoint(); @@ -39,10 +44,10 @@ public int Upsert(string collection, IEnumerable docs, BsonAutoId count++; } } - + // returns how many document was inserted - return count; - }); + return new ValueTask(count); + }, cancellationToken); } } } \ No newline at end of file diff --git a/LiteDB/Engine/ILiteEngine.cs b/LiteDB/Engine/ILiteEngine.cs index 067589999..497509d03 100644 --- a/LiteDB/Engine/ILiteEngine.cs +++ b/LiteDB/Engine/ILiteEngine.cs @@ -1,33 +1,35 @@ -using System; +using System; using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; namespace LiteDB.Engine { - public interface ILiteEngine : IDisposable + public interface ILiteEngine : IDisposable, IAsyncDisposable { - int Checkpoint(); - long Rebuild(RebuildOptions options); + Task CheckpointAsync(CancellationToken cancellationToken = default); + Task RebuildAsync(RebuildOptions options, CancellationToken cancellationToken = default); - bool BeginTrans(); - bool Commit(); - bool Rollback(); + Task BeginTransAsync(CancellationToken cancellationToken = default); + Task CommitAsync(CancellationToken cancellationToken = default); + Task RollbackAsync(CancellationToken cancellationToken = default); - IBsonDataReader Query(string collection, Query query); + Task QueryAsync(string collection, Query query, CancellationToken cancellationToken = default); - int Insert(string collection, IEnumerable docs, BsonAutoId autoId); - int Update(string collection, IEnumerable docs); - int UpdateMany(string collection, BsonExpression transform, BsonExpression predicate); - int Upsert(string collection, IEnumerable docs, BsonAutoId autoId); - int Delete(string collection, IEnumerable ids); - int DeleteMany(string collection, BsonExpression predicate); + Task InsertAsync(string collection, IEnumerable docs, BsonAutoId autoId, CancellationToken cancellationToken = default); + Task UpdateAsync(string collection, IEnumerable docs, CancellationToken cancellationToken = 
default); + Task UpdateManyAsync(string collection, BsonExpression transform, BsonExpression predicate, CancellationToken cancellationToken = default); + Task UpsertAsync(string collection, IEnumerable docs, BsonAutoId autoId, CancellationToken cancellationToken = default); + Task DeleteAsync(string collection, IEnumerable ids, CancellationToken cancellationToken = default); + Task DeleteManyAsync(string collection, BsonExpression predicate, CancellationToken cancellationToken = default); - bool DropCollection(string name); - bool RenameCollection(string name, string newName); + Task DropCollectionAsync(string name, CancellationToken cancellationToken = default); + Task RenameCollectionAsync(string name, string newName, CancellationToken cancellationToken = default); - bool EnsureIndex(string collection, string name, BsonExpression expression, bool unique); - bool DropIndex(string collection, string name); + Task EnsureIndexAsync(string collection, string name, BsonExpression expression, bool unique, CancellationToken cancellationToken = default); + Task DropIndexAsync(string collection, string name, CancellationToken cancellationToken = default); - BsonValue Pragma(string name); - bool Pragma(string name, BsonValue value); + Task PragmaAsync(string name, CancellationToken cancellationToken = default); + Task PragmaAsync(string name, BsonValue value, CancellationToken cancellationToken = default); } -} \ No newline at end of file +} diff --git a/LiteDB/Engine/LiteEngine.cs b/LiteDB/Engine/LiteEngine.cs index 59bb84b4c..eb1fa3dd4 100644 --- a/LiteDB/Engine/LiteEngine.cs +++ b/LiteDB/Engine/LiteEngine.cs @@ -7,6 +7,7 @@ using System.IO; using System.Linq; using System.Threading; +using System.Threading.Tasks; using static LiteDB.Constants; namespace LiteDB.Engine @@ -253,7 +254,12 @@ internal List Close(Exception ex) /// /// Run checkpoint command to copy log file into data file /// - public int Checkpoint() => _walIndex.Checkpoint(); + public Task CheckpointAsync(CancellationToken cancellationToken = default) + { + cancellationToken.ThrowIfCancellationRequested(); + + return Task.FromResult(_walIndex.Checkpoint()); + } public void Dispose() { @@ -265,5 +271,11 @@ protected virtual void Dispose(bool disposing) { this.Close(); } + + public ValueTask DisposeAsync() + { + this.Dispose(); + return default; + } } } \ No newline at end of file diff --git a/LiteDB/Engine/LiteEngineSyncExtensions.cs b/LiteDB/Engine/LiteEngineSyncExtensions.cs new file mode 100644 index 000000000..a40ed9f61 --- /dev/null +++ b/LiteDB/Engine/LiteEngineSyncExtensions.cs @@ -0,0 +1,157 @@ +using System; +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; + +namespace LiteDB.Engine +{ + /// + /// Temporary synchronous shims for callers that still rely on the legacy blocking contract. 
+ /// + public static class LiteEngineSyncExtensions + { + [Obsolete("Use BeginTransAsync and await the result instead of blocking.")] + public static bool BeginTrans(this ILiteEngine engine, CancellationToken cancellationToken = default) + { + if (engine == null) throw new ArgumentNullException(nameof(engine)); + + return engine.BeginTransAsync(cancellationToken).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use CommitAsync and await the result instead of blocking.")] + public static bool Commit(this ILiteEngine engine, CancellationToken cancellationToken = default) + { + if (engine == null) throw new ArgumentNullException(nameof(engine)); + + return engine.CommitAsync(cancellationToken).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use RollbackAsync and await the result instead of blocking.")] + public static bool Rollback(this ILiteEngine engine, CancellationToken cancellationToken = default) + { + if (engine == null) throw new ArgumentNullException(nameof(engine)); + + return engine.RollbackAsync(cancellationToken).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use QueryAsync and await the result instead of blocking.")] + public static IBsonDataReader Query(this ILiteEngine engine, string collection, Query query, CancellationToken cancellationToken = default) + { + if (engine == null) throw new ArgumentNullException(nameof(engine)); + + return engine.QueryAsync(collection, query, cancellationToken).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use InsertAsync and await the result instead of blocking.")] + public static int Insert(this ILiteEngine engine, string collection, IEnumerable docs, BsonAutoId autoId, CancellationToken cancellationToken = default) + { + if (engine == null) throw new ArgumentNullException(nameof(engine)); + + return engine.InsertAsync(collection, docs, autoId, cancellationToken).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use UpdateAsync and await the result instead of blocking.")] + public static int Update(this ILiteEngine engine, string collection, IEnumerable docs, CancellationToken cancellationToken = default) + { + if (engine == null) throw new ArgumentNullException(nameof(engine)); + + return engine.UpdateAsync(collection, docs, cancellationToken).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use UpdateManyAsync and await the result instead of blocking.")] + public static int UpdateMany(this ILiteEngine engine, string collection, BsonExpression transform, BsonExpression predicate, CancellationToken cancellationToken = default) + { + if (engine == null) throw new ArgumentNullException(nameof(engine)); + + return engine.UpdateManyAsync(collection, transform, predicate, cancellationToken).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use UpsertAsync and await the result instead of blocking.")] + public static int Upsert(this ILiteEngine engine, string collection, IEnumerable docs, BsonAutoId autoId, CancellationToken cancellationToken = default) + { + if (engine == null) throw new ArgumentNullException(nameof(engine)); + + return engine.UpsertAsync(collection, docs, autoId, cancellationToken).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use DeleteAsync and await the result instead of blocking.")] + public static int Delete(this ILiteEngine engine, string collection, IEnumerable ids, CancellationToken cancellationToken = default) + { + if (engine == null) throw new 
ArgumentNullException(nameof(engine)); + + return engine.DeleteAsync(collection, ids, cancellationToken).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use DeleteManyAsync and await the result instead of blocking.")] + public static int DeleteMany(this ILiteEngine engine, string collection, BsonExpression predicate, CancellationToken cancellationToken = default) + { + if (engine == null) throw new ArgumentNullException(nameof(engine)); + + return engine.DeleteManyAsync(collection, predicate, cancellationToken).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use DropCollectionAsync and await the result instead of blocking.")] + public static bool DropCollection(this ILiteEngine engine, string name, CancellationToken cancellationToken = default) + { + if (engine == null) throw new ArgumentNullException(nameof(engine)); + + return engine.DropCollectionAsync(name, cancellationToken).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use RenameCollectionAsync and await the result instead of blocking.")] + public static bool RenameCollection(this ILiteEngine engine, string name, string newName, CancellationToken cancellationToken = default) + { + if (engine == null) throw new ArgumentNullException(nameof(engine)); + + return engine.RenameCollectionAsync(name, newName, cancellationToken).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use EnsureIndexAsync and await the result instead of blocking.")] + public static bool EnsureIndex(this ILiteEngine engine, string collection, string name, BsonExpression expression, bool unique, CancellationToken cancellationToken = default) + { + if (engine == null) throw new ArgumentNullException(nameof(engine)); + + return engine.EnsureIndexAsync(collection, name, expression, unique, cancellationToken).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use DropIndexAsync and await the result instead of blocking.")] + public static bool DropIndex(this ILiteEngine engine, string collection, string name, CancellationToken cancellationToken = default) + { + if (engine == null) throw new ArgumentNullException(nameof(engine)); + + return engine.DropIndexAsync(collection, name, cancellationToken).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use PragmaAsync and await the result instead of blocking.")] + public static BsonValue Pragma(this ILiteEngine engine, string name, CancellationToken cancellationToken = default) + { + if (engine == null) throw new ArgumentNullException(nameof(engine)); + + return engine.PragmaAsync(name, cancellationToken).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use PragmaAsync(name, value) and await the result instead of blocking.")] + public static bool Pragma(this ILiteEngine engine, string name, BsonValue value, CancellationToken cancellationToken = default) + { + if (engine == null) throw new ArgumentNullException(nameof(engine)); + + return engine.PragmaAsync(name, value, cancellationToken).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use CheckpointAsync and await the result instead of blocking.")] + public static int Checkpoint(this ILiteEngine engine, CancellationToken cancellationToken = default) + { + if (engine == null) throw new ArgumentNullException(nameof(engine)); + + return engine.CheckpointAsync(cancellationToken).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + [Obsolete("Use RebuildAsync and await the result instead of blocking.")] + public static long Rebuild(this 
ILiteEngine engine, RebuildOptions options, CancellationToken cancellationToken = default) + { + if (engine == null) throw new ArgumentNullException(nameof(engine)); + + return engine.RebuildAsync(options, cancellationToken).ConfigureAwait(false).GetAwaiter().GetResult(); + } + } +} diff --git a/LiteDB/Engine/Services/LockService.cs b/LiteDB/Engine/Services/LockService.cs index 1165d8d0b..646ecb65b 100644 --- a/LiteDB/Engine/Services/LockService.cs +++ b/LiteDB/Engine/Services/LockService.cs @@ -1,7 +1,6 @@ -using System; +using System; using System.Collections.Concurrent; using System.Collections.Generic; -using System.Linq; using System.Threading; using System.Threading.Tasks; using static LiteDB.Constants; @@ -9,16 +8,21 @@ namespace LiteDB.Engine { /// - /// Lock service are collection-based locks. Lock will support any threads reading at same time. Writing operations will be locked - /// based on collection. Eventualy, write operation can change header page that has an exclusive locker for. + /// Lock service are collection-based locks. Lock will support any threads reading at same time. Writing operations will be + /// locked based on collection. Eventually, write operation can change header page that has an exclusive locker for. /// [ThreadSafe] /// internal class LockService : IDisposable { private readonly EnginePragmas _pragmas; - private readonly ReaderWriterLockSlim _transaction = new ReaderWriterLockSlim(LockRecursionPolicy.NoRecursion); - private readonly ConcurrentDictionary _collections = new ConcurrentDictionary(StringComparer.OrdinalIgnoreCase); + private readonly SemaphoreSlim _writerLock = new SemaphoreSlim(1, 1); + private readonly SemaphoreSlim _readerSemaphore = new SemaphoreSlim(1, 1); + private readonly ConcurrentDictionary _collections = new ConcurrentDictionary(StringComparer.OrdinalIgnoreCase); + private readonly AsyncLocal _scope = new AsyncLocal(); + + private int _disposed; + private int _readerCount; internal LockService(EnginePragmas pragmas) { @@ -26,24 +30,41 @@ internal LockService(EnginePragmas pragmas) } /// - /// Return if current thread have open transaction + /// Return if current logical context has an open transaction /// - public bool IsInTransaction => _transaction.IsReadLockHeld || _transaction.IsWriteLockHeld; + public bool IsInTransaction + { + get + { + var scope = _scope.Value; + return scope != null && (scope.TransactionDepth > 0 || scope.ExclusiveDepth > 0); + } + } /// /// Return how many transactions are opened /// - public int TransactionsCount => _transaction.CurrentReadCount; + public int TransactionsCount => Volatile.Read(ref _readerCount); /// - /// Enter transaction read lock - should be called just before enter a new transaction + /// Enter transaction read lock - should be called just before entering a new transaction /// public void EnterTransaction() { - // if current thread already in exclusive mode, just exit - if (_transaction.IsWriteLockHeld) return; + this.EnterTransactionAsync().GetAwaiter().GetResult(); + } + + public ValueTask EnterTransactionAsync(CancellationToken cancellationToken = default) + { + var scope = this.GetOrCreateScope(); + + if (scope.ExclusiveDepth > 0 || scope.TransactionDepth > 0) + { + scope.TransactionDepth++; + return default; + } - if (_transaction.TryEnterReadLock(_pragmas.Timeout) == false) throw LiteException.LockTimeout("transaction", _pragmas.Timeout); + return new ValueTask(this.EnterTransactionSlowAsync(scope, cancellationToken)); } /// @@ -51,18 +72,39 @@ public void EnterTransaction() 
/// public void ExitTransaction() { - // if current thread are in reserved mode, do not exit transaction (will be exit from ExitExclusive) - if (_transaction.IsWriteLockHeld) return; - - //This can be called when a lock has either been released by the slim or somewhere else therefore there is no lock to release from ExitReadLock() - if (_transaction.IsReadLockHeld) + this.ExitTransactionAsync().GetAwaiter().GetResult(); + } + + public ValueTask ExitTransactionAsync() + { + if (Volatile.Read(ref _disposed) != 0) { - try - { - _transaction.ExitReadLock(); - } - catch { } + return default; + } + + var scope = _scope.Value; + + if (scope == null || scope.TransactionDepth == 0) + { + return default; } + + scope.TransactionDepth--; + + if (scope.TransactionDepth > 0 || scope.ExclusiveDepth > 0) + { + return default; + } + + if (!scope.HoldsSharedLock) + { + this.TryClearScope(scope); + return default; + } + + scope.HoldsSharedLock = false; + + return new ValueTask(this.ExitTransactionSlowAsync(scope)); } /// @@ -70,36 +112,101 @@ public void ExitTransaction() /// public void EnterLock(string collectionName) { - ENSURE(_transaction.IsReadLockHeld || _transaction.IsWriteLockHeld, "Use EnterTransaction() before EnterLock(name)"); + this.EnterLockAsync(collectionName).GetAwaiter().GetResult(); + } + + public ValueTask EnterLockAsync(string collectionName, CancellationToken cancellationToken = default) + { + var scope = this.GetOrCreateScope(); + + ENSURE(scope.TransactionDepth > 0 || scope.ExclusiveDepth > 0, "Use EnterTransaction() before EnterLock(name)"); + + if (scope.CollectionLocks.TryGetValue(collectionName, out var counter)) + { + scope.CollectionLocks[collectionName] = counter + 1; + return default; + } - // get collection object lock from dictionary (or create new if doesnt exists) - var collection = _collections.GetOrAdd(collectionName, (s) => new object()); + var semaphore = _collections.GetOrAdd(collectionName, _ => new SemaphoreSlim(1, 1)); - if (Monitor.TryEnter(collection, _pragmas.Timeout) == false) throw LiteException.LockTimeout("write", collectionName, _pragmas.Timeout); + return new ValueTask(this.EnterCollectionLockAsync(scope, collectionName, semaphore, cancellationToken)); + } + + private async Task EnterCollectionLockAsync(LockScope scope, string collectionName, SemaphoreSlim semaphore, CancellationToken cancellationToken) + { + if (!await semaphore.WaitAsync(_pragmas.Timeout, cancellationToken).ConfigureAwait(false)) + { + throw LiteException.LockTimeout("write", collectionName, _pragmas.Timeout); + } + + scope.CollectionLocks[collectionName] = 1; } /// - /// Exit collection in reserved lock + /// Exit collection reserved lock /// public void ExitLock(string collectionName) { - if (_collections.TryGetValue(collectionName, out var collection) == false) throw LiteException.CollectionLockerNotFound(collectionName); + var scope = _scope.Value ?? throw LiteException.CollectionLockerNotFound(collectionName); + + if (scope.CollectionLocks.TryGetValue(collectionName, out var counter) == false) + { + throw LiteException.CollectionLockerNotFound(collectionName); + } + + counter--; + + if (counter > 0) + { + scope.CollectionLocks[collectionName] = counter; + return; + } + + scope.CollectionLocks.Remove(collectionName); - Monitor.Exit(collection); + if (_collections.TryGetValue(collectionName, out var semaphore)) + { + TryRelease(semaphore); + } + + this.TryClearScope(scope); } /// - /// Enter all database in exclusive lock. Wait for all transactions finish. 
In exclusive mode no one can enter in new transaction (for read/write) - /// If current thread already in exclusive mode, returns false + /// Enter all database in exclusive lock. Wait for all transactions to finish. In exclusive mode no one can enter a new transaction (for read/write) + /// If current context already in exclusive mode, returns false /// public bool EnterExclusive() { - // if current thread already in exclusive mode - if (_transaction.IsWriteLockHeld) return false; + return this.EnterExclusiveAsync().GetAwaiter().GetResult(); + } + + public ValueTask EnterExclusiveAsync(CancellationToken cancellationToken = default) + { + var scope = this.GetOrCreateScope(); - // wait finish all transactions before enter in reserved mode - if (_transaction.TryEnterWriteLock(_pragmas.Timeout) == false) throw LiteException.LockTimeout("exclusive", _pragmas.Timeout); + if (scope.ExclusiveDepth > 0) + { + scope.ExclusiveDepth++; + return new ValueTask(false); + } + if (scope.TransactionDepth > 0) + { + throw new InvalidOperationException("Cannot enter exclusive mode while holding transaction locks."); + } + + return new ValueTask(this.EnterExclusiveSlowAsync(scope, cancellationToken)); + } + + private async Task EnterExclusiveSlowAsync(LockScope scope, CancellationToken cancellationToken) + { + if (!await _writerLock.WaitAsync(_pragmas.Timeout, cancellationToken).ConfigureAwait(false)) + { + throw LiteException.LockTimeout("exclusive", _pragmas.Timeout); + } + + scope.ExclusiveDepth = 1; return true; } @@ -109,32 +216,30 @@ public bool EnterExclusive() /// public bool TryEnterExclusive(out bool mustExit) { - // if already in exclusive mode return true but "enter" indicator must be false (do not exit) - if (_transaction.IsWriteLockHeld) + var scope = this.GetOrCreateScope(); + + if (scope.ExclusiveDepth > 0) { + scope.ExclusiveDepth++; mustExit = false; return true; } - // if there is any open transaction, exit with false - if (_transaction.IsReadLockHeld || _transaction.CurrentReadCount > 0) + if (scope.TransactionDepth > 0) { mustExit = false; return false; } - // try enter in exclusive mode - but if not possible, just exit with false - if (_transaction.TryEnterWriteLock(10) == false) + if (_writerLock.Wait(0)) { - mustExit = false; - return false; + scope.ExclusiveDepth = 1; + mustExit = true; + return true; } - ENSURE(_transaction.RecursiveReadCount == 0, "must have no other transaction here"); - - // now, current thread are in exclusive mode (must run ExitExclusive to exit) - mustExit = true; - return true; + mustExit = false; + return false; } /// @@ -142,18 +247,180 @@ public bool TryEnterExclusive(out bool mustExit) /// public void ExitExclusive() { - _transaction.ExitWriteLock(); + var scope = _scope.Value ?? 
throw new SynchronizationLockException("No exclusive lock held by the current context."); + + if (scope.ExclusiveDepth == 0) + { + throw new SynchronizationLockException("No exclusive lock held by the current context."); + } + + scope.ExclusiveDepth--; + + if (scope.ExclusiveDepth > 0) + { + return; + } + + _writerLock.Release(); + this.TryClearScope(scope); } public void Dispose() { + if (Interlocked.Exchange(ref _disposed, 1) != 0) + { + return; + } + + _writerLock.Dispose(); + _readerSemaphore.Dispose(); + + foreach (var semaphore in _collections.Values) + { + semaphore.Dispose(); + } + } + + private async Task EnterTransactionSlowAsync(LockScope scope, CancellationToken cancellationToken) + { + var timeout = _pragmas.Timeout; + + try + { + if (!await _readerSemaphore.WaitAsync(timeout, cancellationToken).ConfigureAwait(false)) + { + throw LiteException.LockTimeout("transaction", timeout); + } + } + catch (ObjectDisposedException) + { + return; + } + + var acquiredWriter = false; + var incrementedReader = false; + try { - _transaction.Dispose(); + if (_readerCount == 0) + { + try + { + if (!await _writerLock.WaitAsync(timeout, cancellationToken).ConfigureAwait(false)) + { + throw LiteException.LockTimeout("transaction", timeout); + } + } + catch (ObjectDisposedException) + { + return; + } + + acquiredWriter = true; + } + + _readerCount++; + incrementedReader = true; + } + catch + { + if (incrementedReader) + { + _readerCount--; + + if (_readerCount == 0 && acquiredWriter) + { + TryRelease(_writerLock); + } + } + else if (acquiredWriter) + { + TryRelease(_writerLock); + } + + throw; + } + finally + { + TryRelease(_readerSemaphore); } - catch (SynchronizationLockException) + + scope.TransactionDepth = 1; + scope.HoldsSharedLock = true; + } + + private async Task ExitTransactionSlowAsync(LockScope scope) + { + try { + await _readerSemaphore.WaitAsync().ConfigureAwait(false); } + catch (ObjectDisposedException) + { + return; + } + + try + { + _readerCount--; + + if (_readerCount == 0) + { + TryRelease(_writerLock); + } + } + finally + { + TryRelease(_readerSemaphore); + } + + this.TryClearScope(scope); + } + + private LockScope GetOrCreateScope() + { + var scope = _scope.Value; + + if (scope == null) + { + scope = new LockScope(); + _scope.Value = scope; + } + + return scope; + } + + private void TryClearScope(LockScope scope) + { + if (scope.TransactionDepth == 0 && scope.ExclusiveDepth == 0 && scope.CollectionLocks.Count == 0) + { + if (ReferenceEquals(_scope.Value, scope)) + { + _scope.Value = null; + } + } + } + + private static void TryRelease(SemaphoreSlim semaphore) + { + try + { + semaphore.Release(); + } + catch (ObjectDisposedException) + { + } + catch (SemaphoreFullException) + { + } + } + + private sealed class LockScope + { + public int TransactionDepth; + public bool HoldsSharedLock; + public int ExclusiveDepth; + public Dictionary CollectionLocks { get; } = new Dictionary(StringComparer.OrdinalIgnoreCase); } } } diff --git a/LiteDB/Engine/Services/RebuildService.cs b/LiteDB/Engine/Services/RebuildService.cs index b4eaa155e..b1306e4ad 100644 --- a/LiteDB/Engine/Services/RebuildService.cs +++ b/LiteDB/Engine/Services/RebuildService.cs @@ -60,7 +60,7 @@ public long Rebuild(RebuildOptions options) })) { // copy all database to new Log file with NO checkpoint during all rebuild - engine.Pragma(Pragmas.CHECKPOINT, 0); + engine.PragmaAsync(Pragmas.CHECKPOINT, 0).ConfigureAwait(false).GetAwaiter().GetResult(); // rebuild all content from reader into new engine 
engine.RebuildContent(reader); @@ -70,20 +70,20 @@ public long Rebuild(RebuildOptions options) { var report = options.GetErrorReport(); - engine.Insert("_rebuild_errors", report, BsonAutoId.Int32); + engine.InsertAsync("_rebuild_errors", report, BsonAutoId.Int32).ConfigureAwait(false).GetAwaiter().GetResult(); } // update pragmas var pragmas = reader.GetPragmas(); - engine.Pragma(Pragmas.CHECKPOINT, pragmas[Pragmas.CHECKPOINT]); - engine.Pragma(Pragmas.TIMEOUT, pragmas[Pragmas.TIMEOUT]); - engine.Pragma(Pragmas.LIMIT_SIZE, pragmas[Pragmas.LIMIT_SIZE]); - engine.Pragma(Pragmas.UTC_DATE, pragmas[Pragmas.UTC_DATE]); - engine.Pragma(Pragmas.USER_VERSION, pragmas[Pragmas.USER_VERSION]); + engine.PragmaAsync(Pragmas.CHECKPOINT, pragmas[Pragmas.CHECKPOINT]).ConfigureAwait(false).GetAwaiter().GetResult(); + engine.PragmaAsync(Pragmas.TIMEOUT, pragmas[Pragmas.TIMEOUT]).ConfigureAwait(false).GetAwaiter().GetResult(); + engine.PragmaAsync(Pragmas.LIMIT_SIZE, pragmas[Pragmas.LIMIT_SIZE]).ConfigureAwait(false).GetAwaiter().GetResult(); + engine.PragmaAsync(Pragmas.UTC_DATE, pragmas[Pragmas.UTC_DATE]).ConfigureAwait(false).GetAwaiter().GetResult(); + engine.PragmaAsync(Pragmas.USER_VERSION, pragmas[Pragmas.USER_VERSION]).ConfigureAwait(false).GetAwaiter().GetResult(); // after rebuild, copy log bytes into data file - engine.Checkpoint(); + engine.CheckpointAsync().ConfigureAwait(false).GetAwaiter().GetResult(); } } diff --git a/LiteDB/Engine/Services/TransactionMonitor.cs b/LiteDB/Engine/Services/TransactionMonitor.cs index ebfee2ce3..d1397ab1e 100644 --- a/LiteDB/Engine/Services/TransactionMonitor.cs +++ b/LiteDB/Engine/Services/TransactionMonitor.cs @@ -1,8 +1,6 @@ -using System; -using System.Collections.Concurrent; +using System; using System.Collections.Generic; using System.Linq; -using System.Runtime.InteropServices; using System.Threading; using static LiteDB.Constants; @@ -15,7 +13,7 @@ namespace LiteDB.Engine internal class TransactionMonitor : IDisposable { private readonly Dictionary _transactions = new Dictionary(); - private readonly ThreadLocal _slot = new ThreadLocal(); + private readonly AsyncLocal _context = new AsyncLocal(); private readonly HeaderPage _header; private readonly LockService _locker; @@ -40,61 +38,31 @@ public TransactionMonitor(HeaderPage header, LockService locker, DiskService dis // initialize free pages with all avaiable pages in memory _freePages = MAX_TRANSACTION_SIZE; - // initial size + // initial size _initialSize = MAX_TRANSACTION_SIZE / MAX_OPEN_TRANSACTIONS; } public TransactionService GetTransaction(bool create, bool queryOnly, out bool isNew) { - var transaction = _slot.Value; + var context = create ? 
this.GetOrCreateContext() : _context.Value; + TransactionService transaction = null; - if (create && transaction == null) + if (context != null) { - isNew = true; - - bool alreadyLock; - - // must lock _transaction before work with _transactions (GetInitialSize use _transactions) - lock (_transactions) + if (queryOnly) { - if (_transactions.Count >= MAX_OPEN_TRANSACTIONS) throw new LiteException(0, "Maximum number of transactions reached"); - - var initialSize = this.GetInitialSize(); - - // check if current thread contains any transaction - alreadyLock = _transactions.Values.Any(x => x.ThreadID == Environment.CurrentManagedThreadId); - - transaction = new TransactionService(_header, _locker, _disk, _walIndex, initialSize, this, queryOnly); - - // add transaction to execution transaction dict - _transactions[transaction.TransactionID] = transaction; + transaction = context.WriteTransaction ?? PeekQuery(context); } - - // enter in lock transaction after release _transaction lock - if (alreadyLock == false) + else { - try - { - _locker.EnterTransaction(); - } - catch - { - transaction.Dispose(); - lock (_transactions) - { - // return pages - _freePages += transaction.MaxTransactionSize; - _transactions.Remove(transaction.TransactionID); - } - throw; - } + transaction = context.WriteTransaction; } + } - // do not store in thread query-only transaction - if (queryOnly == false) - { - _slot.Value = transaction; - } + if (transaction == null && create) + { + transaction = this.CreateTransaction(queryOnly, context!); + isNew = true; } else { @@ -109,37 +77,22 @@ public TransactionService GetTransaction(bool create, bool queryOnly, out bool i /// public void ReleaseTransaction(TransactionService transaction) { - // dispose current transaction transaction.Dispose(); - bool keepLocked; - lock (_transactions) { - // remove from "open transaction" list _transactions.Remove(transaction.TransactionID); // return freePages used area _freePages += transaction.MaxTransactionSize; - - // check if current thread contains more query transactions - keepLocked = _transactions.Values.Any(x => x.ThreadID == Environment.CurrentManagedThreadId); } - // unlock thread-transaction only if there is no more transactions - if (keepLocked == false) + if (transaction.Context != null) { - _locker.ExitTransaction(); + this.UnregisterTransaction(transaction); } - // remove transaction from thread if are no queryOnly transaction - if (transaction.QueryOnly == false) - { - ENSURE(_slot.Value == transaction, "current thread must contains transaction parameter"); - - // clear thread slot for new transaction - _slot.Value = null; - } + _locker.ExitTransaction(); } /// @@ -148,11 +101,16 @@ public void ReleaseTransaction(TransactionService transaction) /// public TransactionService GetThreadTransaction() { + var context = _context.Value; + + if (context != null) + { + return context.WriteTransaction ?? PeekQuery(context); + } + lock (_transactions) { - return - _slot.Value ?? 
- _transactions.Values.FirstOrDefault(x => x.ThreadID == Environment.CurrentManagedThreadId); + return _transactions.Values.FirstOrDefault(); } } @@ -211,7 +169,7 @@ private bool TryExtend(TransactionService trans) /// public bool CheckSafepoint(TransactionService trans) { - return + return trans.Pages.TransactionSize >= trans.MaxTransactionSize && this.TryExtend(trans) == false; } @@ -231,5 +189,133 @@ public void Dispose() _transactions.Clear(); } } + + private TransactionService CreateTransaction(bool queryOnly, TransactionContext context) + { + TransactionService transaction; + int initialSize; + + lock (_transactions) + { + if (_transactions.Count >= MAX_OPEN_TRANSACTIONS) throw new LiteException(0, "Maximum number of transactions reached"); + + initialSize = this.GetInitialSize(); + + transaction = new TransactionService(_header, _locker, _disk, _walIndex, initialSize, this, queryOnly); + _transactions[transaction.TransactionID] = transaction; + } + + try + { + _locker.EnterTransaction(); + } + catch + { + this.CleanupFailedTransaction(transaction); + throw; + } + + transaction.Context = context; + this.RegisterTransaction(context, transaction); + + return transaction; + } + + private void RegisterTransaction(TransactionContext context, TransactionService transaction) + { + if (transaction.QueryOnly) + { + context.QueryTransactions.Push(transaction); + } + else + { + context.WriteTransaction = transaction; + } + } + + private void CleanupFailedTransaction(TransactionService transaction) + { + transaction.Dispose(); + + lock (_transactions) + { + _freePages += transaction.MaxTransactionSize; + _transactions.Remove(transaction.TransactionID); + } + + transaction.Context = null; + } + + private void UnregisterTransaction(TransactionService transaction) + { + var context = transaction.Context; + + if (context == null) + { + return; + } + + if (transaction.QueryOnly) + { + if (context.QueryTransactions.Count > 0 && ReferenceEquals(context.QueryTransactions.Peek(), transaction)) + { + context.QueryTransactions.Pop(); + } + else if (context.QueryTransactions.Count > 0) + { + var buffer = new Stack(); + + while (context.QueryTransactions.Count > 0) + { + var current = context.QueryTransactions.Pop(); + + if (!ReferenceEquals(current, transaction)) + { + buffer.Push(current); + } + } + + while (buffer.Count > 0) + { + context.QueryTransactions.Push(buffer.Pop()); + } + } + } + else if (ReferenceEquals(context.WriteTransaction, transaction)) + { + context.WriteTransaction = null; + } + + transaction.Context = null; + + if (ReferenceEquals(_context.Value, context) && context.WriteTransaction == null && context.QueryTransactions.Count == 0) + { + _context.Value = null; + } + } + + private TransactionContext GetOrCreateContext() + { + var context = _context.Value; + + if (context == null) + { + context = new TransactionContext(); + _context.Value = context; + } + + return context; + } + + private static TransactionService PeekQuery(TransactionContext context) + { + return context.QueryTransactions.Count > 0 ? 
context.QueryTransactions.Peek() : null; + } + + internal sealed class TransactionContext + { + public TransactionService WriteTransaction; + public Stack QueryTransactions { get; } = new Stack(); + } } -} \ No newline at end of file +} diff --git a/LiteDB/Engine/Services/TransactionService.cs b/LiteDB/Engine/Services/TransactionService.cs index 7373251da..63ee1a021 100644 --- a/LiteDB/Engine/Services/TransactionService.cs +++ b/LiteDB/Engine/Services/TransactionService.cs @@ -43,6 +43,8 @@ internal class TransactionService : IDisposable public IEnumerable Snapshots => _snapshots.Values; public bool QueryOnly { get; } + internal TransactionMonitor.TransactionContext Context { get; set; } + // get/set public int MaxTransactionSize { get; set; } @@ -75,7 +77,7 @@ public TransactionService(HeaderPage header, LockService locker, DiskService dis } /// - /// Finalizer: Will be called once a thread is closed. The TransactionMonitor._slot releases the used TransactionService. + /// Finalizer: Will be called once a thread is closed. The TransactionMonitor releases the used TransactionService. /// ~TransactionService() { @@ -403,25 +405,20 @@ protected virtual void Dispose(bool dispose) // clean snapshots if there is no commit/rollback if (_state == TransactionState.Active && _snapshots.Count > 0) { - // release writable snapshots - foreach (var snapshot in _snapshots.Values.Where(x => x.Mode == LockMode.Write)) - { - // discard all dirty pages - _disk.DiscardDirtyPages(snapshot.GetWritablePages(true, true).Select(x => x.Buffer)); - - // discard all clean pages - _disk.DiscardCleanPages(snapshot.GetWritablePages(false, true).Select(x => x.Buffer)); - } - - // release buffers in read-only snaphosts - foreach (var snapshot in _snapshots.Values.Where(x => x.Mode == LockMode.Read)) + foreach (var snapshot in _snapshots.Values) { - foreach (var page in snapshot.LocalPages) + try { - page.Buffer.Release(); + if (snapshot.Mode == LockMode.Write) + { + _disk.DiscardDirtyPages(snapshot.GetWritablePages(true, true).Select(x => x.Buffer)); + _disk.DiscardCleanPages(snapshot.GetWritablePages(false, true).Select(x => x.Buffer)); + } + } + finally + { + snapshot.Dispose(); } - - snapshot.CollectionPage?.Buffer.Release(); } } diff --git a/LiteDB/LiteDB.csproj b/LiteDB/LiteDB.csproj index 7ebb6d4ce..ea4fc53a6 100644 --- a/LiteDB/LiteDB.csproj +++ b/LiteDB/LiteDB.csproj @@ -53,7 +53,11 @@ - - - - + + + + + + + + diff --git a/LiteDB/Utils/Extensions/StreamExtensions.cs b/LiteDB/Utils/Extensions/StreamExtensions.cs index 3b037e6e6..265017aee 100644 --- a/LiteDB/Utils/Extensions/StreamExtensions.cs +++ b/LiteDB/Utils/Extensions/StreamExtensions.cs @@ -1,5 +1,7 @@ using System; using System.IO; +using System.Threading; +using System.Threading.Tasks; using static LiteDB.Constants; namespace LiteDB @@ -20,5 +22,19 @@ public static void FlushToDisk(this Stream stream) stream.Flush(); } } + + /// + /// Flushes the stream contents to disk asynchronously, avoiding OS level buffering when possible. 
+ /// + public static ValueTask FlushToDiskAsync(this Stream stream, CancellationToken cancellationToken = default) + { + if (stream is FileStream fstream) + { + fstream.Flush(true); + return default; + } + + return new ValueTask(stream.FlushAsync(cancellationToken)); + } } } \ No newline at end of file diff --git a/README.md b/README.md index 2859e3b1d..e21c4304a 100644 --- a/README.md +++ b/README.md @@ -48,6 +48,8 @@ New UI to manage and visualize your database: Visit [the Wiki](https://github.com/mbdavid/LiteDB/wiki) for full documentation. For simplified chinese version, [check here](https://github.com/lidanger/LiteDB.wiki_Translation_zh-cn). +Developers tracking the async-first transition can follow the shared assumptions in [`docs/async-baseline.md`](docs/async-baseline.md). + ## LiteDB Community Help LiteDB grow its user community by answering this [simple survey](https://docs.google.com/forms/d/e/1FAIpQLSc4cNG7wyLKXXcOLIt7Ea4TlXCG6s-51_EfHPu2p5WZ2dIx7A/viewform?usp=sf_link) diff --git a/docs/async-baseline.md b/docs/async-baseline.md new file mode 100644 index 000000000..8bc3de851 --- /dev/null +++ b/docs/async-baseline.md @@ -0,0 +1,32 @@ +# Async-first Baseline + +This document captures the shared assumptions behind the LiteDB async-first overhaul. It lists the primitives every new API will +use and the build-time dependencies that keep the multi-targeted package consistent. + +## Core async primitives + +LiteDB code and new public entry points will prefer the following types: + +- `Task` for asynchronous operations that do not need custom pooling. +- `ValueTask` for performance sensitive paths where allocations must stay predictable. +- `IAsyncEnumerable` for streaming results from queries, cursors, and file storage. +- `CancellationToken` for all operations that can be interrupted by the caller. +- `IAsyncDisposable` for database, engine, and reader lifetimes. + +These primitives are supported on all target frameworks the library currently ships (netstandard2.0 and net8.0). + +## Multi-target compatibility + +`netstandard2.0` does not ship the newer async abstractions in-box, so the project references +`Microsoft.Bcl.AsyncInterfaces` to provide them. Consumers on .NET Framework or Xamarin will therefore receive the necessary +interfaces transitively, while `net8.0` builds fall back to the implementations already available in the runtime. + +## Guidance for dependent projects + +- Keep the package reference to `Microsoft.Bcl.AsyncInterfaces` for the `netstandard2.0` target in `LiteDB.csproj`. +- Do not add `Microsoft.Bcl.AsyncInterfaces` to the `net8.0` target. The types exist in the base class library and pulling the + package would only increase deployment size. +- When new projects are added to the solution, ensure they either target a framework where the async interfaces are in-box or + reference the same package explicitly. + +Following this baseline lets the rest of the async-first work proceed without forcing breaking framework changes.
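+
+## Illustrative baseline sketch
+
+The snippet below is a minimal, self-contained sketch of how the primitives listed above compose. It is non-normative and
+deliberately uses throwaway names (`StreamingSource`, `ReadAllAsync`, `Demo`) instead of LiteDB types, so it should be read
+as an illustration of the baseline, not as part of the committed public API.
+
+```csharp
+using System;
+using System.Collections.Generic;
+using System.Runtime.CompilerServices;
+using System.Threading;
+using System.Threading.Tasks;
+
+// Illustrative type only: shows IAsyncDisposable, ValueTask, IAsyncEnumerable<T> and CancellationToken together.
+public sealed class StreamingSource : IAsyncDisposable
+{
+    // Results are produced lazily; the token is observed between items so iteration can be cancelled mid-stream.
+    public async IAsyncEnumerable<int> ReadAllAsync(
+        [EnumeratorCancellation] CancellationToken cancellationToken = default)
+    {
+        for (var i = 0; i < 3; i++)
+        {
+            cancellationToken.ThrowIfCancellationRequested();
+            await Task.Delay(10, cancellationToken); // stand-in for real asynchronous I/O
+            yield return i;
+        }
+    }
+
+    // ValueTask lets the common "nothing to release" path complete synchronously without an allocation.
+    public ValueTask DisposeAsync() => default;
+}
+
+public static class Demo
+{
+    public static async Task Main()
+    {
+        using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(5));
+
+        await using var source = new StreamingSource();
+
+        await foreach (var item in source.ReadAllAsync(cts.Token))
+        {
+            Console.WriteLine(item);
+        }
+    }
+}
+```
+
+On `netstandard2.0` these shapes rely on the `Microsoft.Bcl.AsyncInterfaces` package described above; on `net8.0` the same
+code uses the implementations that ship in the runtime.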