From e71f72e4db755df67339e5874e39aced4fc2ab82 Mon Sep 17 00:00:00 2001
From: DavidNemecek <38526555+DavidNemecek@users.noreply.github.com>
Date: Sat, 20 Sep 2025 13:48:40 +0200
Subject: [PATCH 01/53] Use MinVer to compute package versions
---
.github/workflows/ci.yml | 32 +++++++++++
.github/workflows/publish-prerelease.yml | 63 +++++++++++++++++++++
.github/workflows/publish-release.yml | 63 +++++++++++++++++++++
Directory.Build.props | 11 ++++
LiteDB.Benchmarks/LiteDB.Benchmarks.csproj | 10 ++--
LiteDB.Shell/LiteDB.Shell.csproj | 5 +-
LiteDB.Stress/LiteDB.Stress.csproj | 2 +-
LiteDB.Tests/LiteDB.Tests.csproj | 6 +-
LiteDB/LiteDB.csproj | 55 +++++++-----------
LiteDB/LiteDB.snk | Bin 596 -> 0 bytes
LiteDB/Utils/Constants.cs | 2 +-
appveyor.yml | 26 ---------
12 files changed, 198 insertions(+), 77 deletions(-)
create mode 100644 .github/workflows/ci.yml
create mode 100644 .github/workflows/publish-prerelease.yml
create mode 100644 .github/workflows/publish-release.yml
create mode 100644 Directory.Build.props
delete mode 100644 LiteDB/LiteDB.snk
delete mode 100644 appveyor.yml
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
new file mode 100644
index 000000000..df895551d
--- /dev/null
+++ b/.github/workflows/ci.yml
@@ -0,0 +1,32 @@
+name: CI
+
+on:
+ push:
+ branches:
+ - main
+ - dev
+ pull_request:
+
+jobs:
+ build:
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: Check out repository
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 0
+
+ - name: Set up .NET SDK
+ uses: actions/setup-dotnet@v4
+ with:
+ dotnet-version: 8.0.x
+
+ - name: Restore
+ run: dotnet restore LiteDB.sln
+
+ - name: Build
+ run: dotnet build LiteDB.sln --configuration Release --no-restore
+
+ - name: Test
+ run: dotnet test LiteDB.sln --configuration Release --no-build --verbosity normal
diff --git a/.github/workflows/publish-prerelease.yml b/.github/workflows/publish-prerelease.yml
new file mode 100644
index 000000000..a89ad1518
--- /dev/null
+++ b/.github/workflows/publish-prerelease.yml
@@ -0,0 +1,63 @@
+name: Publish prerelease
+
+on:
+ push:
+ branches:
+ - dev
+
+jobs:
+ publish:
+ runs-on: ubuntu-latest
+ permissions:
+ contents: write
+ env:
+ MinVerDefaultPreReleaseIdentifiers: prerelease.${{ github.run_number }}
+
+ steps:
+ - name: Check out repository
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 0
+
+ - name: Set up .NET SDK
+ uses: actions/setup-dotnet@v4
+ with:
+ dotnet-version: 8.0.x
+
+ - name: Restore
+ run: dotnet restore LiteDB.sln
+
+ - name: Build
+ run: dotnet build LiteDB.sln --configuration Release --no-restore
+
+ - name: Test
+ run: dotnet test LiteDB.sln --configuration Release --no-build --verbosity normal
+
+ - name: Pack
+ run: |
+ dotnet pack LiteDB/LiteDB.csproj --configuration Release --no-build -o artifacts
+
+ - name: Capture package version
+ id: version
+ run: |
+ PACKAGE_PATH=$(ls artifacts/LiteDB.*.nupkg | head -n 1)
+ PACKAGE_FILENAME=$(basename "$PACKAGE_PATH")
+ PACKAGE_VERSION=${PACKAGE_FILENAME#LiteDB.}
+ PACKAGE_VERSION=${PACKAGE_VERSION%.nupkg}
+ echo "package_version=${PACKAGE_VERSION}" >> "$GITHUB_OUTPUT"
+
+ - name: Push package to NuGet
+ env:
+ NUGET_API_KEY: ${{ secrets.NUGET_API_KEY }}
+ run: |
+ dotnet nuget push "artifacts/*.nupkg" --api-key "$NUGET_API_KEY" --source https://api.nuget.org/v3/index.json --skip-duplicate
+
+ - name: Publish GitHub prerelease
+ uses: softprops/action-gh-release@v2
+ with:
+ tag_name: v${{ steps.version.outputs.package_version }}
+ name: LiteDB ${{ steps.version.outputs.package_version }}
+ prerelease: true
+ files: artifacts/*.nupkg
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.github/workflows/publish-release.yml b/.github/workflows/publish-release.yml
new file mode 100644
index 000000000..40f35bfb7
--- /dev/null
+++ b/.github/workflows/publish-release.yml
@@ -0,0 +1,63 @@
+name: Publish release
+
+on:
+ push:
+ branches:
+ - main
+ tags:
+ - v*
+
+jobs:
+ publish:
+ if: startsWith(github.ref, 'refs/tags/v')
+ runs-on: ubuntu-latest
+ permissions:
+ contents: write
+
+ steps:
+ - name: Check out repository
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 0
+
+ - name: Set up .NET SDK
+ uses: actions/setup-dotnet@v4
+ with:
+ dotnet-version: 8.0.x
+
+ - name: Restore
+ run: dotnet restore LiteDB.sln
+
+ - name: Build
+ run: dotnet build LiteDB.sln --configuration Release --no-restore
+
+ - name: Test
+ run: dotnet test LiteDB.sln --configuration Release --no-build --verbosity normal
+
+ - name: Pack
+ run: |
+ dotnet pack LiteDB/LiteDB.csproj --configuration Release --no-build -o artifacts
+
+ - name: Capture package version
+ id: version
+ run: |
+ PACKAGE_PATH=$(ls artifacts/LiteDB.*.nupkg | head -n 1)
+ PACKAGE_FILENAME=$(basename "$PACKAGE_PATH")
+ PACKAGE_VERSION=${PACKAGE_FILENAME#LiteDB.}
+ PACKAGE_VERSION=${PACKAGE_VERSION%.nupkg}
+ echo "package_version=${PACKAGE_VERSION}" >> "$GITHUB_OUTPUT"
+
+ - name: Push package to NuGet
+ env:
+ NUGET_API_KEY: ${{ secrets.NUGET_API_KEY }}
+ run: |
+ dotnet nuget push "artifacts/*.nupkg" --api-key "$NUGET_API_KEY" --source https://api.nuget.org/v3/index.json --skip-duplicate
+
+ - name: Publish GitHub release
+ uses: softprops/action-gh-release@v2
+ with:
+ tag_name: v${{ steps.version.outputs.package_version }}
+ name: LiteDB ${{ steps.version.outputs.package_version }}
+ files: artifacts/*.nupkg
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
diff --git a/Directory.Build.props b/Directory.Build.props
new file mode 100644
index 000000000..6f556c035
--- /dev/null
+++ b/Directory.Build.props
@@ -0,0 +1,11 @@
+
+
+ v
+ prerelease.0
+ 6.0
+
+
+
+
+
+
diff --git a/LiteDB.Benchmarks/LiteDB.Benchmarks.csproj b/LiteDB.Benchmarks/LiteDB.Benchmarks.csproj
index 80a1ebdde..1bb264165 100644
--- a/LiteDB.Benchmarks/LiteDB.Benchmarks.csproj
+++ b/LiteDB.Benchmarks/LiteDB.Benchmarks.csproj
@@ -1,10 +1,10 @@
-
- Exe
- net472;net6
- 8
-
+
+ Exe
+ net8.0
+ latest
+
diff --git a/LiteDB.Shell/LiteDB.Shell.csproj b/LiteDB.Shell/LiteDB.Shell.csproj
index 4b361b0f7..56d24672b 100644
--- a/LiteDB.Shell/LiteDB.Shell.csproj
+++ b/LiteDB.Shell/LiteDB.Shell.csproj
@@ -1,14 +1,11 @@
- net6
+ net8.0
LiteDB.Shell
LiteDB.Shell
Exe
LiteDB.Shell
- 5.0.6.0
- 5.0.6
- 5.0.6
Maurício David
MIT
en-US
diff --git a/LiteDB.Stress/LiteDB.Stress.csproj b/LiteDB.Stress/LiteDB.Stress.csproj
index f34f8fcb8..dd5ee37f9 100644
--- a/LiteDB.Stress/LiteDB.Stress.csproj
+++ b/LiteDB.Stress/LiteDB.Stress.csproj
@@ -2,7 +2,7 @@
Exe
- net8
+ net8.0
diff --git a/LiteDB.Tests/LiteDB.Tests.csproj b/LiteDB.Tests/LiteDB.Tests.csproj
index 47c3bd292..2c0dec1d3 100644
--- a/LiteDB.Tests/LiteDB.Tests.csproj
+++ b/LiteDB.Tests/LiteDB.Tests.csproj
@@ -1,7 +1,7 @@
- net8
+ net8.0
LiteDB.Tests
LiteDB.Tests
Maurício David
@@ -9,9 +9,7 @@
en-US
false
1701;1702;1705;1591;0618
- True
- ..\LiteDB\LiteDB.snk
-
+
diff --git a/LiteDB/LiteDB.csproj b/LiteDB/LiteDB.csproj
index 67eeb45f1..7ebb6d4ce 100644
--- a/LiteDB/LiteDB.csproj
+++ b/LiteDB/LiteDB.csproj
@@ -1,11 +1,7 @@
-
- net4.5;netstandard1.3;netstandard2.0
- 5.0.21
- 5.0.21
- 5.0.21
- 5.0.21
+
+ netstandard2.0;net8.0
Maurício David
LiteDB
LiteDB - A lightweight embedded .NET NoSQL document store in a single datafile
@@ -16,20 +12,17 @@
database nosql embedded
icon_64x64.png
MIT
- https://www.litedb.org
- https://github.com/mbdavid/LiteDB
+ https://www.litedb.org
+ https://github.com/litedb-org/LiteDB
git
LiteDB
LiteDB
true
- 1.6.1
- 1701;1702;1705;1591;0618
- bin\$(Configuration)\$(TargetFramework)\LiteDB.xml
- true
- LiteDB.snk
- true
- latest
-
+ 1701;1702;1705;1591;0618
+ bin\$(Configuration)\$(TargetFramework)\LiteDB.xml
+ true
+ latest
+
@@ -57,19 +50,9 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+
+
diff --git a/LiteDB/LiteDB.snk b/LiteDB/LiteDB.snk
deleted file mode 100644
index baaa3405584f27dfa598600713022b7812e10190..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001
literal 596
zcmV-a0;~N80ssI2Bme+XQ$aES1ONa50096g=4p`PArQ_$rx|`X`1mdJGb}ltX>IrN
zeF@=C<&dl`+H+k;cm7pOLUlmB={pqD(rg5C9F{YK+^QIboQ%BGp{c`^(I8JpOe(hW
zIxec;f$NHTJv_%YScZ!O0v0=xqG3#+P^)k|<2*hccJ*5k^!R+YHCXeiX~E8NZJy{q
zo2l5yS*71r@oEsE698E*R^eB@5q)jz7=s7KfjN)t;F{d3&)|-cfH*31**T*Ds*$ek
ze)3&M{!hj?(9QXmtgldWDFpO;5jG*L5LT701@XBWW@6)il9I=07z=c
zq65FCOX_Z)wgG)
i-rLy7{@9zdUPj}((9--5R0E}+Dt
Date: Sat, 20 Sep 2025 14:03:02 +0200
Subject: [PATCH 02/53] Remove MinVerDefaultPreReleaseIdentifiers environment
variable from publish workflow
---
.github/workflows/publish-prerelease.yml | 2 --
1 file changed, 2 deletions(-)
diff --git a/.github/workflows/publish-prerelease.yml b/.github/workflows/publish-prerelease.yml
index a89ad1518..404c1ff18 100644
--- a/.github/workflows/publish-prerelease.yml
+++ b/.github/workflows/publish-prerelease.yml
@@ -10,8 +10,6 @@ jobs:
runs-on: ubuntu-latest
permissions:
contents: write
- env:
- MinVerDefaultPreReleaseIdentifiers: prerelease.${{ github.run_number }}
steps:
- name: Check out repository
From a12aea67d6d070762276364213dc14fb6ef07c34 Mon Sep 17 00:00:00 2001
From: Jonas Kamsker <11245306+JKamsker@users.noreply.github.com>
Date: Sat, 20 Sep 2025 14:06:23 +0200
Subject: [PATCH 03/53] Update MinVerDefaultPreReleaseIdentifiers to use a
single prerelease identifier
---
Directory.Build.props | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/Directory.Build.props b/Directory.Build.props
index 6f556c035..dbc5e80b9 100644
--- a/Directory.Build.props
+++ b/Directory.Build.props
@@ -1,7 +1,8 @@
v
- prerelease.0
+
+ prerelease
6.0
From e2eb0c243d87455a26e59d45bd89c0512294e929 Mon Sep 17 00:00:00 2001
From: Jonas Kamsker <11245306+JKamsker@users.noreply.github.com>
Date: Sat, 20 Sep 2025 15:15:04 +0200
Subject: [PATCH 04/53] Add Bitwarden secret retrieval for NuGet API key in
publish workflows
---
.github/workflows/publish-prerelease.yml | 11 +++++++++--
.github/workflows/publish-release.yml | 11 +++++++++--
2 files changed, 18 insertions(+), 4 deletions(-)
diff --git a/.github/workflows/publish-prerelease.yml b/.github/workflows/publish-prerelease.yml
index 404c1ff18..20ba6a83b 100644
--- a/.github/workflows/publish-prerelease.yml
+++ b/.github/workflows/publish-prerelease.yml
@@ -44,9 +44,15 @@ jobs:
PACKAGE_VERSION=${PACKAGE_VERSION%.nupkg}
echo "package_version=${PACKAGE_VERSION}" >> "$GITHUB_OUTPUT"
+ - name: Retrieve secrets from Bitwarden
+ uses: bitwarden/sm-action@v2
+ with:
+ access_token: ${{ secrets.BW_ACCESS_TOKEN }}
+ base_url: https://vault.bitwarden.eu
+ secrets: |
+ 265b2fb6-2cf0-4859-9bc8-b24c00ab4378 > NUGET_API_KEY
+
- name: Push package to NuGet
- env:
- NUGET_API_KEY: ${{ secrets.NUGET_API_KEY }}
run: |
dotnet nuget push "artifacts/*.nupkg" --api-key "$NUGET_API_KEY" --source https://api.nuget.org/v3/index.json --skip-duplicate
@@ -55,6 +61,7 @@ jobs:
with:
tag_name: v${{ steps.version.outputs.package_version }}
name: LiteDB ${{ steps.version.outputs.package_version }}
+ generate_release_notes: true
prerelease: true
files: artifacts/*.nupkg
env:
diff --git a/.github/workflows/publish-release.yml b/.github/workflows/publish-release.yml
index 40f35bfb7..28318d567 100644
--- a/.github/workflows/publish-release.yml
+++ b/.github/workflows/publish-release.yml
@@ -47,9 +47,15 @@ jobs:
PACKAGE_VERSION=${PACKAGE_VERSION%.nupkg}
echo "package_version=${PACKAGE_VERSION}" >> "$GITHUB_OUTPUT"
+ - name: Retrieve secrets from Bitwarden
+ uses: bitwarden/sm-action@v2
+ with:
+ access_token: ${{ secrets.BW_ACCESS_TOKEN }}
+ base_url: https://vault.bitwarden.eu
+ secrets: |
+ 265b2fb6-2cf0-4859-9bc8-b24c00ab4378 > NUGET_API_KEY
+
- name: Push package to NuGet
- env:
- NUGET_API_KEY: ${{ secrets.NUGET_API_KEY }}
run: |
dotnet nuget push "artifacts/*.nupkg" --api-key "$NUGET_API_KEY" --source https://api.nuget.org/v3/index.json --skip-duplicate
@@ -58,6 +64,7 @@ jobs:
with:
tag_name: v${{ steps.version.outputs.package_version }}
name: LiteDB ${{ steps.version.outputs.package_version }}
+ generate_release_notes: true
files: artifacts/*.nupkg
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
From 92b3f1b687ce3837fe2ab89bb0e252ec319ffceb Mon Sep 17 00:00:00 2001
From: Jonas Kamsker <11245306+JKamsker@users.noreply.github.com>
Date: Sat, 20 Sep 2025 15:25:56 +0200
Subject: [PATCH 05/53] Add test run settings and set timeout for test jobs in
CI workflows
---
.github/workflows/ci.yml | 4 +++-
.github/workflows/publish-prerelease.yml | 4 +++-
.github/workflows/publish-release.yml | 4 +++-
tests.runsettings | 7 +++++++
4 files changed, 16 insertions(+), 3 deletions(-)
create mode 100644 tests.runsettings
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index df895551d..f37cee1ed 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -29,4 +29,6 @@ jobs:
run: dotnet build LiteDB.sln --configuration Release --no-restore
- name: Test
- run: dotnet test LiteDB.sln --configuration Release --no-build --verbosity normal
+ timeout-minutes: 5
+ run: dotnet test LiteDB.sln --configuration Release --no-build --verbosity normal --settings tests.runsettings
+
diff --git a/.github/workflows/publish-prerelease.yml b/.github/workflows/publish-prerelease.yml
index 20ba6a83b..e5bcc7f8f 100644
--- a/.github/workflows/publish-prerelease.yml
+++ b/.github/workflows/publish-prerelease.yml
@@ -29,7 +29,8 @@ jobs:
run: dotnet build LiteDB.sln --configuration Release --no-restore
- name: Test
- run: dotnet test LiteDB.sln --configuration Release --no-build --verbosity normal
+ timeout-minutes: 5
+ run: dotnet test LiteDB.sln --configuration Release --no-build --verbosity normal --settings tests.runsettings
- name: Pack
run: |
@@ -66,3 +67,4 @@ jobs:
files: artifacts/*.nupkg
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+
diff --git a/.github/workflows/publish-release.yml b/.github/workflows/publish-release.yml
index 28318d567..94c43b6ad 100644
--- a/.github/workflows/publish-release.yml
+++ b/.github/workflows/publish-release.yml
@@ -32,7 +32,8 @@ jobs:
run: dotnet build LiteDB.sln --configuration Release --no-restore
- name: Test
- run: dotnet test LiteDB.sln --configuration Release --no-build --verbosity normal
+ timeout-minutes: 5
+ run: dotnet test LiteDB.sln --configuration Release --no-build --verbosity normal --settings tests.runsettings
- name: Pack
run: |
@@ -68,3 +69,4 @@ jobs:
files: artifacts/*.nupkg
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+
diff --git a/tests.runsettings b/tests.runsettings
new file mode 100644
index 000000000..c97856d4c
--- /dev/null
+++ b/tests.runsettings
@@ -0,0 +1,7 @@
+
+
+
+ 300000
+ 30000
+
+
From 6604e370f755f2ec4f968f86bf59c2a887b956a7 Mon Sep 17 00:00:00 2001
From: JKamsker <11245306+JKamsker@users.noreply.github.com>
Date: Sat, 20 Sep 2025 21:05:49 +0200
Subject: [PATCH 06/53] Add agents
---
AGENTS.md | 20 ++++++++++++++++++++
1 file changed, 20 insertions(+)
create mode 100644 AGENTS.md
diff --git a/AGENTS.md b/AGENTS.md
new file mode 100644
index 000000000..bfe51d3e9
--- /dev/null
+++ b/AGENTS.md
@@ -0,0 +1,20 @@
+# Repository Guidelines
+
+## Project Structure & Module Organization
+The `LiteDB/` library is the heart of the solution and is split into domains such as `Engine/`, `Document/`, `Client/`, and `Utils/` for low-level storage, document modeling, and public APIs. Companion apps live beside it: `LiteDB.Shell/` provides the interactive CLI, `LiteDB.Benchmarks/` and `LiteDB.Stress/` target performance and endurance scenarios, while `LiteDB.Tests/` houses unit coverage grouped by feature area. Sample integrations and temporary packaging output stay in `ConsoleApp1/` and `artifacts_temp/` respectively.
+
+## Build, Test, and Development Commands
+Restore and build with `dotnet build LiteDB.sln -c Release` after a `dotnet restore`. Execute `dotnet test LiteDB.sln --settings tests.runsettings` to honor the solution-wide timeout profile, or scope to a single project with `dotnet test LiteDB.Tests -f net8.0`. Launch the shell locally via `dotnet run --project LiteDB.Shell/LiteDB.Shell.csproj -- MyData.db`. Produce NuGet artifacts using `dotnet pack LiteDB/LiteDB.csproj -c Release` when preparing releases.
+
+## Coding Style & Naming Conventions
+Follow the repository’s C# conventions: four-space indentation, Allman braces, and grouped `using` directives with system namespaces first. Prefer `var` only when the right-hand side is obvious, keep public APIs XML-documented (the build emits `LiteDB.xml`), and avoid introducing nullable warnings in both `netstandard2.0` and `net8.0` targets. Unsafe code is enabled; justify its use with comments tied to the relevant `Engine` component.
+
+## Testing Guidelines
+Tests are written with xUnit and FluentAssertions; mirror the production folder names (`Engine`, `Query`, `Issues`, etc.) when adding scenarios. Name files after the type under test and choose expressive `[Fact]` / `[Theory]` method names describing the behavior. Long-running tests must finish within the 300-second session timeout defined in `tests.runsettings`; run focused suites with `dotnet test LiteDB.Tests --filter FullyQualifiedName~Engine` to triage regressions quickly.
+
+## Commit & Pull Request Guidelines
+Commits use concise, present-tense subject lines (e.g., `Add test run settings`) and may reference issues inline (`Fix #123`). Each PR should describe the problem, the approach, and include before/after notes or perf metrics when touching storage internals. Link to tracking issues, attach shell transcripts or benchmarks where relevant, and confirm `dotnet test` output so reviewers can spot regressions.
+
+## Versioning & Release Prep
+Semantic versions are generated by MinVer; create annotated tags like `v6.0.0` on the main branch rather than editing project files manually. Before tagging, ensure Release builds are clean, pack outputs land in `artifacts_temp/`, and update any shell or benchmark usage notes affected by the change. Update this guide whenever you discover repository practices worth sharing.
+
From 9276f0804f0e2bceaea469294c4aa44f5075e2e3 Mon Sep 17 00:00:00 2001
From: Jonas Kamsker <11245306+JKamsker@users.noreply.github.com>
Date: Sat, 20 Sep 2025 21:22:24 +0200
Subject: [PATCH 07/53] Enhance test logging and introduce
CpuBoundFactAttribute for CPU-bound tests
---
.github/workflows/ci.yml | 3 +--
.github/workflows/publish-prerelease.yml | 3 +--
.github/workflows/publish-release.yml | 3 +--
LiteDB.Tests/Engine/Transactions_Tests.cs | 14 +++++++++-----
LiteDB.Tests/Utils/CpuBoundFactAttribute.cs | 15 +++++++++++++++
5 files changed, 27 insertions(+), 11 deletions(-)
create mode 100644 LiteDB.Tests/Utils/CpuBoundFactAttribute.cs
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index f37cee1ed..d87f992ce 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -30,5 +30,4 @@ jobs:
- name: Test
timeout-minutes: 5
- run: dotnet test LiteDB.sln --configuration Release --no-build --verbosity normal --settings tests.runsettings
-
+ run: dotnet test LiteDB.sln --configuration Release --no-build --verbosity normal --settings tests.runsettings --logger "trx;LogFileName=TestResults.trx" --logger "console;verbosity=detailed"
diff --git a/.github/workflows/publish-prerelease.yml b/.github/workflows/publish-prerelease.yml
index e5bcc7f8f..c7c45d19e 100644
--- a/.github/workflows/publish-prerelease.yml
+++ b/.github/workflows/publish-prerelease.yml
@@ -30,7 +30,7 @@ jobs:
- name: Test
timeout-minutes: 5
- run: dotnet test LiteDB.sln --configuration Release --no-build --verbosity normal --settings tests.runsettings
+ run: dotnet test LiteDB.sln --configuration Release --no-build --verbosity normal --settings tests.runsettings --logger "trx;LogFileName=TestResults.trx" --logger "console;verbosity=detailed"
- name: Pack
run: |
@@ -67,4 +67,3 @@ jobs:
files: artifacts/*.nupkg
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-
diff --git a/.github/workflows/publish-release.yml b/.github/workflows/publish-release.yml
index 94c43b6ad..baf394b85 100644
--- a/.github/workflows/publish-release.yml
+++ b/.github/workflows/publish-release.yml
@@ -33,7 +33,7 @@ jobs:
- name: Test
timeout-minutes: 5
- run: dotnet test LiteDB.sln --configuration Release --no-build --verbosity normal --settings tests.runsettings
+ run: dotnet test LiteDB.sln --configuration Release --no-build --verbosity normal --settings tests.runsettings --logger "trx;LogFileName=TestResults.trx" --logger "console;verbosity=detailed"
- name: Pack
run: |
@@ -69,4 +69,3 @@ jobs:
files: artifacts/*.nupkg
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-
diff --git a/LiteDB.Tests/Engine/Transactions_Tests.cs b/LiteDB.Tests/Engine/Transactions_Tests.cs
index 9fd567a34..b10b4fb02 100644
--- a/LiteDB.Tests/Engine/Transactions_Tests.cs
+++ b/LiteDB.Tests/Engine/Transactions_Tests.cs
@@ -4,7 +4,9 @@
using System.Threading.Tasks;
using FluentAssertions;
using LiteDB.Engine;
+using LiteDB.Tests.Utils;
using Xunit;
+using Xunit.Sdk;
namespace LiteDB.Tests.Engine
{
@@ -12,7 +14,9 @@ namespace LiteDB.Tests.Engine
public class Transactions_Tests
{
- [Fact]
+ const int MIN_CPU_COUNT = 2;
+
+ [CpuBoundFact(MIN_CPU_COUNT)]
public async Task Transaction_Write_Lock_Timeout()
{
var data1 = DataGen.Person(1, 100).ToArray();
@@ -69,7 +73,7 @@ public async Task Transaction_Write_Lock_Timeout()
}
- [Fact]
+ [CpuBoundFact(MIN_CPU_COUNT)]
public async Task Transaction_Avoid_Dirty_Read()
{
var data1 = DataGen.Person(1, 100).ToArray();
@@ -129,7 +133,7 @@ public async Task Transaction_Avoid_Dirty_Read()
}
}
- [Fact]
+ [CpuBoundFact(MIN_CPU_COUNT)]
public async Task Transaction_Read_Version()
{
var data1 = DataGen.Person(1, 100).ToArray();
@@ -186,7 +190,7 @@ public async Task Transaction_Read_Version()
}
}
- [Fact]
+ [CpuBoundFact(MIN_CPU_COUNT)]
public void Test_Transaction_States()
{
var data0 = DataGen.Person(1, 10).ToArray();
@@ -243,7 +247,7 @@ public override void Write(byte[] buffer, int offset, int count)
}
}
- [Fact]
+ [CpuBoundFact(MIN_CPU_COUNT)]
public void Test_Transaction_ReleaseWhenFailToStart()
{
var blockingStream = new BlockingStream();
diff --git a/LiteDB.Tests/Utils/CpuBoundFactAttribute.cs b/LiteDB.Tests/Utils/CpuBoundFactAttribute.cs
new file mode 100644
index 000000000..50c4a26bb
--- /dev/null
+++ b/LiteDB.Tests/Utils/CpuBoundFactAttribute.cs
@@ -0,0 +1,15 @@
+using System;
+using Xunit;
+
+namespace LiteDB.Tests.Utils;
+
+class CpuBoundFactAttribute : FactAttribute
+{
+ public CpuBoundFactAttribute(int minCpuCount = 1)
+ {
+ if (minCpuCount > Environment.ProcessorCount)
+ {
+ Skip = $"This test requires at least {minCpuCount} processors to run properly.";
+ }
+ }
+}
\ No newline at end of file
From 1b8a9aa9fe821dc1b0d6ffb1dcfc7a62d71fb9e7 Mon Sep 17 00:00:00 2001
From: Jonas Kamsker <11245306+JKamsker@users.noreply.github.com>
Date: Sat, 20 Sep 2025 21:44:48 +0200
Subject: [PATCH 08/53] Comment out CI workflow steps for future reference
---
.github/workflows/ci.yml | 52 ++++++++++++++++++++--------------------
1 file changed, 26 insertions(+), 26 deletions(-)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index d87f992ce..8fd6567be 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -1,33 +1,33 @@
-name: CI
+# name: CI
-on:
- push:
- branches:
- - main
- - dev
- pull_request:
+# on:
+# push:
+# branches:
+# - main
+# - dev
+# pull_request:
-jobs:
- build:
- runs-on: ubuntu-latest
+# jobs:
+# build:
+# runs-on: ubuntu-latest
- steps:
- - name: Check out repository
- uses: actions/checkout@v4
- with:
- fetch-depth: 0
+# steps:
+# - name: Check out repository
+# uses: actions/checkout@v4
+# with:
+# fetch-depth: 0
- - name: Set up .NET SDK
- uses: actions/setup-dotnet@v4
- with:
- dotnet-version: 8.0.x
+# - name: Set up .NET SDK
+# uses: actions/setup-dotnet@v4
+# with:
+# dotnet-version: 8.0.x
- - name: Restore
- run: dotnet restore LiteDB.sln
+# - name: Restore
+# run: dotnet restore LiteDB.sln
- - name: Build
- run: dotnet build LiteDB.sln --configuration Release --no-restore
+# - name: Build
+# run: dotnet build LiteDB.sln --configuration Release --no-restore
- - name: Test
- timeout-minutes: 5
- run: dotnet test LiteDB.sln --configuration Release --no-build --verbosity normal --settings tests.runsettings --logger "trx;LogFileName=TestResults.trx" --logger "console;verbosity=detailed"
+# - name: Test
+# timeout-minutes: 5
+# run: dotnet test LiteDB.sln --configuration Release --no-build --verbosity normal --settings tests.runsettings --logger "trx;LogFileName=TestResults.trx" --logger "console;verbosity=detailed"
From c594e44a0b75d0bee5246defa41d0cd10400cee7 Mon Sep 17 00:00:00 2001
From: Jonas Kamsker <11245306+JKamsker@users.noreply.github.com>
Date: Sun, 21 Sep 2025 01:27:13 +0200
Subject: [PATCH 09/53] Add synthetic zip payloads for rebuild tests
---
LiteDB.Tests/Engine/Rebuild_Tests.cs | 55 +++++++++++++++++++++++-----
LiteDB.Tests/Utils/Models/Zip.cs | 1 +
2 files changed, 47 insertions(+), 9 deletions(-)
diff --git a/LiteDB.Tests/Engine/Rebuild_Tests.cs b/LiteDB.Tests/Engine/Rebuild_Tests.cs
index 8de711bf2..1a32935be 100644
--- a/LiteDB.Tests/Engine/Rebuild_Tests.cs
+++ b/LiteDB.Tests/Engine/Rebuild_Tests.cs
@@ -1,6 +1,7 @@
using FluentAssertions;
using LiteDB.Engine;
using System;
+using System.Collections.Generic;
using System.IO;
using System.Linq;
@@ -18,7 +19,7 @@ public void Rebuild_After_DropCollection()
{
var col = db.GetCollection("zip");
- col.Insert(DataGen.Zip());
+ col.Insert(CreateSyntheticZipData(200, SurvivorId));
db.DropCollection("zip");
@@ -54,25 +55,27 @@ void DoTest(ILiteDatabase db, ILiteCollection col)
col.EnsureIndex("city", false);
- var inserted = col.Insert(DataGen.Zip()); // 29.353 docs
- var deleted = col.DeleteMany(x => x.Id != "01001"); // delete 29.352 docs
+ const int documentCount = 200;
- Assert.Equal(29353, inserted);
- Assert.Equal(29352, deleted);
+ var inserted = col.Insert(CreateSyntheticZipData(documentCount, SurvivorId));
+ var deleted = col.DeleteMany(x => x.Id != SurvivorId);
+
+ Assert.Equal(documentCount, inserted);
+ Assert.Equal(documentCount - 1, deleted);
Assert.Equal(1, col.Count());
// must checkpoint
db.Checkpoint();
- // file still large than 5mb (even with only 1 document)
- Assert.True(file.Size > 5 * 1024 * 1024);
+ // file still larger than 1 MB (even with only 1 document)
+ Assert.True(file.Size > 1 * 1024 * 1024);
// reduce datafile
var reduced = db.Rebuild();
- // now file are small than 50kb
- Assert.True(file.Size < 50 * 1024);
+ // now file should be small again
+ Assert.True(file.Size < 256 * 1024);
DoTest(db, col);
}
@@ -91,6 +94,40 @@ void DoTest(ILiteDatabase db, ILiteCollection col)
}
}
+ private const string SurvivorId = "01001";
+
+ private static IEnumerable CreateSyntheticZipData(int totalCount, string survivingId)
+ {
+ if (totalCount < 1)
+ {
+ throw new ArgumentOutOfRangeException(nameof(totalCount));
+ }
+
+ const int payloadLength = 32 * 1024; // 32 KB payload to force file growth
+
+ for (var i = 0; i < totalCount; i++)
+ {
+ var id = (20000 + i).ToString("00000");
+
+ if (!string.IsNullOrEmpty(survivingId) && i == 0)
+ {
+ id = survivingId;
+ }
+
+ var payload = new byte[payloadLength];
+ Array.Fill(payload, (byte)(i % 256));
+
+ yield return new Zip
+ {
+ Id = id,
+ City = $"City {i:D4}",
+ Loc = new[] { (double)i, (double)i + 0.5 },
+ State = "ST",
+ Payload = payload
+ };
+ }
+ }
+
[Fact (Skip = "Not supported yet")]
public void Rebuild_Change_Culture_Error()
{
diff --git a/LiteDB.Tests/Utils/Models/Zip.cs b/LiteDB.Tests/Utils/Models/Zip.cs
index 8f8a01960..3fddc7d2a 100644
--- a/LiteDB.Tests/Utils/Models/Zip.cs
+++ b/LiteDB.Tests/Utils/Models/Zip.cs
@@ -16,6 +16,7 @@ public class Zip : IEqualityComparer, IComparable
public string City { get; set; }
public double[] Loc { get; set; }
public string State { get; set; }
+ public byte[] Payload { get; set; }
public int CompareTo(Zip other)
{
From 305aba7ca8b28b4818c8d8af779f4995b93aca17 Mon Sep 17 00:00:00 2001
From: Jonas Kamsker <11245306+JKamsker@users.noreply.github.com>
Date: Sun, 21 Sep 2025 01:27:17 +0200
Subject: [PATCH 10/53] Use deterministic sample for descending sort test
---
LiteDB.Tests/Internals/Sort_Tests.cs | 17 ++++++++---------
1 file changed, 8 insertions(+), 9 deletions(-)
diff --git a/LiteDB.Tests/Internals/Sort_Tests.cs b/LiteDB.Tests/Internals/Sort_Tests.cs
index 8e55aa5df..7a15ee278 100644
--- a/LiteDB.Tests/Internals/Sort_Tests.cs
+++ b/LiteDB.Tests/Internals/Sort_Tests.cs
@@ -43,25 +43,24 @@ public void Sort_String_Asc()
[Fact]
public void Sort_Int_Desc()
{
- var rnd = new Random();
- var source = Enumerable.Range(0, 20000)
- .Select(x => new KeyValuePair(rnd.Next(1, 30000), PageAddress.Empty))
+ var source = Enumerable.Range(0, 900)
+ .Select(x => (x * 37) % 1000)
+ .Select(x => new KeyValuePair(x, PageAddress.Empty))
.ToArray();
var pragmas = new EnginePragmas(null);
pragmas.Set(Pragmas.COLLATION, Collation.Binary.ToString(), false);
- using (var tempDisk = new SortDisk(_factory, 10 * 8192, pragmas))
+ using (var tempDisk = new SortDisk(_factory, 8192, pragmas))
using (var s = new SortService(tempDisk, Query.Descending, pragmas))
{
s.Insert(source);
- s.Count.Should().Be(20000);
- s.Containers.Count.Should().Be(3);
+ s.Count.Should().Be(900);
+ s.Containers.Count.Should().Be(2);
- s.Containers.ElementAt(0).Count.Should().Be(8192);
- s.Containers.ElementAt(1).Count.Should().Be(8192);
- s.Containers.ElementAt(2).Count.Should().Be(3616);
+ s.Containers.ElementAt(0).Count.Should().Be(819);
+ s.Containers.ElementAt(1).Count.Should().Be(81);
var output = s.Sort().ToArray();
From d6dd57c66bc31a52e864e53ad9544a7f0a4354af Mon Sep 17 00:00:00 2001
From: Jonas Kamsker <11245306+JKamsker@users.noreply.github.com>
Date: Sun, 21 Sep 2025 01:27:22 +0200
Subject: [PATCH 11/53] Clarify write lock timeout comment
---
LiteDB.Tests/Engine/Transactions_Tests.cs | 35 +++++++++++++++++++----
1 file changed, 29 insertions(+), 6 deletions(-)
diff --git a/LiteDB.Tests/Engine/Transactions_Tests.cs b/LiteDB.Tests/Engine/Transactions_Tests.cs
index b10b4fb02..dcab8e31f 100644
--- a/LiteDB.Tests/Engine/Transactions_Tests.cs
+++ b/LiteDB.Tests/Engine/Transactions_Tests.cs
@@ -1,5 +1,6 @@
using System.IO;
using System.Linq;
+using System.Reflection;
using System.Threading;
using System.Threading.Tasks;
using FluentAssertions;
@@ -24,8 +25,9 @@ public async Task Transaction_Write_Lock_Timeout()
using (var db = new LiteDatabase("filename=:memory:"))
{
- // small timeout
+ // configure the minimal pragma timeout and then override the engine to a few milliseconds
db.Pragma(Pragmas.TIMEOUT, 1);
+ SetEngineTimeout(db, TimeSpan.FromMilliseconds(20));
var person = db.GetCollection();
@@ -35,8 +37,8 @@ public async Task Transaction_Write_Lock_Timeout()
var taskASemaphore = new SemaphoreSlim(0, 1);
var taskBSemaphore = new SemaphoreSlim(0, 1);
- // task A will open transaction and will insert +100 documents
- // but will commit only 2s later
+ // task A will open transaction and will insert +100 documents
+ // but will commit only after task B observes the timeout
var ta = Task.Run(() =>
{
db.BeginTrans();
@@ -53,7 +55,7 @@ public async Task Transaction_Write_Lock_Timeout()
db.Commit();
});
- // task B will try delete all documents but will be locked during 1 second
+ // task B will try delete all documents but will be locked until the short timeout is hit
var tb = Task.Run(() =>
{
taskBSemaphore.Wait();
@@ -251,7 +253,8 @@ public override void Write(byte[] buffer, int offset, int count)
public void Test_Transaction_ReleaseWhenFailToStart()
{
var blockingStream = new BlockingStream();
- var db = new LiteDatabase(blockingStream) { Timeout = TimeSpan.FromSeconds(1) };
+ var db = new LiteDatabase(blockingStream);
+ SetEngineTimeout(db, TimeSpan.FromMilliseconds(50));
Thread lockerThread = null;
try
{
@@ -263,7 +266,7 @@ public void Test_Transaction_ReleaseWhenFailToStart()
db.Dispose();
});
lockerThread.Start();
- blockingStream.Blocked.WaitOne(1000).Should().BeTrue();
+ blockingStream.Blocked.WaitOne(200).Should().BeTrue();
Assert.Throws(() => db.GetCollection().Insert(new Person())).Message.Should().Contain("timeout");
Assert.Throws(() => db.GetCollection().Insert(new Person())).Message.Should().Contain("timeout");
}
@@ -273,5 +276,25 @@ public void Test_Transaction_ReleaseWhenFailToStart()
lockerThread?.Join();
}
}
+
+ private static void SetEngineTimeout(LiteDatabase database, TimeSpan timeout)
+ {
+ var engineField = typeof(LiteDatabase).GetField("_engine", BindingFlags.Instance | BindingFlags.NonPublic);
+ var engine = engineField?.GetValue(database);
+
+ if (engine is not LiteEngine liteEngine)
+ {
+ throw new InvalidOperationException("Unable to retrieve LiteEngine instance for timeout override.");
+ }
+
+ var headerField = typeof(LiteEngine).GetField("_header", BindingFlags.Instance | BindingFlags.NonPublic);
+ var header = headerField?.GetValue(liteEngine) ?? throw new InvalidOperationException("LiteEngine header not available.");
+ var pragmasProp = header.GetType().GetProperty("Pragmas", BindingFlags.Instance | BindingFlags.Public) ?? throw new InvalidOperationException("Engine pragmas not accessible.");
+ var pragmas = pragmasProp.GetValue(header) ?? throw new InvalidOperationException("Engine pragmas not available.");
+ var timeoutProp = pragmas.GetType().GetProperty("Timeout", BindingFlags.Instance | BindingFlags.Public) ?? throw new InvalidOperationException("Timeout property not found.");
+ var setter = timeoutProp.GetSetMethod(true) ?? throw new InvalidOperationException("Timeout setter not accessible.");
+
+ setter.Invoke(pragmas, new object[] { timeout });
+ }
}
}
\ No newline at end of file
From 6cbaeac34014901cb4d0ad6f0ec99c33c372fb82 Mon Sep 17 00:00:00 2001
From: Jonas Kamsker <11245306+JKamsker@users.noreply.github.com>
Date: Sun, 21 Sep 2025 01:40:28 +0200
Subject: [PATCH 12/53] Refactor CI workflow by removing commented-out code for
clarity
---
.github/workflows/ci.yml | 48 ++++++++++++++++++----------------------
1 file changed, 22 insertions(+), 26 deletions(-)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 8fd6567be..2ef778838 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -1,33 +1,29 @@
-# name: CI
+name: CI
-# on:
-# push:
-# branches:
-# - main
-# - dev
-# pull_request:
+on:
+ pull_request:
-# jobs:
-# build:
-# runs-on: ubuntu-latest
+jobs:
+ build:
+ runs-on: ubuntu-latest
-# steps:
-# - name: Check out repository
-# uses: actions/checkout@v4
-# with:
-# fetch-depth: 0
+ steps:
+ - name: Check out repository
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 0
-# - name: Set up .NET SDK
-# uses: actions/setup-dotnet@v4
-# with:
-# dotnet-version: 8.0.x
+ - name: Set up .NET SDK
+ uses: actions/setup-dotnet@v4
+ with:
+ dotnet-version: 8.0.x
-# - name: Restore
-# run: dotnet restore LiteDB.sln
+ - name: Restore
+ run: dotnet restore LiteDB.sln
-# - name: Build
-# run: dotnet build LiteDB.sln --configuration Release --no-restore
+ - name: Build
+ run: dotnet build LiteDB.sln --configuration Release --no-restore
-# - name: Test
-# timeout-minutes: 5
-# run: dotnet test LiteDB.sln --configuration Release --no-build --verbosity normal --settings tests.runsettings --logger "trx;LogFileName=TestResults.trx" --logger "console;verbosity=detailed"
+ - name: Test
+ timeout-minutes: 5
+ run: dotnet test LiteDB.sln --configuration Release --no-build --verbosity normal --settings tests.runsettings --logger "trx;LogFileName=TestResults.trx" --logger "console;verbosity=detailed"
From 23f5fa4987a8459e00b3eef134aa83b56b4bedae Mon Sep 17 00:00:00 2001
From: Jonas Kamsker <11245306+JKamsker@users.noreply.github.com>
Date: Sun, 21 Sep 2025 01:53:05 +0200
Subject: [PATCH 13/53] Add test database factory
---
LiteDB.Tests/Database/AutoId_Tests.cs | 3 +-
.../Database/Create_Database_Tests.cs | 3 +-
.../Database/Database_Pragmas_Tests.cs | 3 +-
LiteDB.Tests/Database/DeleteMany_Tests.cs | 3 +-
LiteDB.Tests/Database/Delete_By_Name_Tests.cs | 3 +-
LiteDB.Tests/Database/Document_Size_Tests.cs | 3 +-
LiteDB.Tests/Database/FindAll_Tests.cs | 5 ++-
.../Database/IndexSortAndFilter_Tests.cs | 3 +-
.../Database/MultiKey_Mapper_Tests.cs | 3 +-
LiteDB.Tests/Database/NonIdPoco_Tests.cs | 3 +-
LiteDB.Tests/Database/Query_Min_Max_Tests.cs | 3 +-
LiteDB.Tests/Database/Site_Tests.cs | 3 +-
.../Database/Snapshot_Upgrade_Tests.cs | 3 +-
LiteDB.Tests/Database/Storage_Tests.cs | 3 +-
LiteDB.Tests/Database/Upgrade_Tests.cs | 9 +++--
.../Database/Writing_While_Reading_Test.cs | 7 ++--
LiteDB.Tests/Engine/DropCollection_Tests.cs | 7 ++--
LiteDB.Tests/Engine/Index_Tests.cs | 11 ++---
LiteDB.Tests/Engine/Rebuild_Tests.cs | 9 +++--
LiteDB.Tests/Engine/Transactions_Tests.cs | 2 +-
LiteDB.Tests/Engine/UserVersion_Tests.cs | 5 ++-
.../Internals/ExtendedLength_Tests.cs | 3 +-
LiteDB.Tests/Issues/Issue1651_Tests.cs | 3 +-
LiteDB.Tests/Issues/Issue1695_Tests.cs | 3 +-
LiteDB.Tests/Issues/Issue1701_Tests.cs | 3 +-
LiteDB.Tests/Issues/Issue1838_Tests.cs | 3 +-
LiteDB.Tests/Issues/Issue1860_Tests.cs | 7 ++--
LiteDB.Tests/Issues/Issue1865_Tests.cs | 3 +-
LiteDB.Tests/Issues/Issue2127_Tests.cs | 3 +-
LiteDB.Tests/Issues/Issue2129_Tests.cs | 3 +-
LiteDB.Tests/Issues/Issue2265_Tests.cs | 3 +-
LiteDB.Tests/Issues/Issue2298_Tests.cs | 37 ++++++++---------
LiteDB.Tests/Issues/Issue2458_Tests.cs | 7 ++--
LiteDB.Tests/Issues/Issue2471_Test.cs | 5 ++-
LiteDB.Tests/Issues/Issue2494_Tests.cs | 3 +-
LiteDB.Tests/Issues/Issue2570_Tests.cs | 5 ++-
LiteDB.Tests/Issues/Pull2468_Tests.cs | 5 ++-
LiteDB.Tests/Mapper/Mapper_Tests.cs | 3 +-
LiteDB.Tests/Query/Data/PersonQueryData.cs | 3 +-
LiteDB.Tests/Query/Select_Tests.cs | 3 +-
LiteDB.Tests/Utils/DatabaseFactory.cs | 40 +++++++++++++++++++
41 files changed, 159 insertions(+), 80 deletions(-)
create mode 100644 LiteDB.Tests/Utils/DatabaseFactory.cs
diff --git a/LiteDB.Tests/Database/AutoId_Tests.cs b/LiteDB.Tests/Database/AutoId_Tests.cs
index bd615d8d4..0e2eed9f8 100644
--- a/LiteDB.Tests/Database/AutoId_Tests.cs
+++ b/LiteDB.Tests/Database/AutoId_Tests.cs
@@ -2,6 +2,7 @@
using System.IO;
using System.Linq;
using LiteDB;
+using LiteDB.Tests.Utils;
using FluentAssertions;
using Xunit;
@@ -268,7 +269,7 @@ public void AutoId_No_Duplicate_After_Delete()
[Fact]
public void AutoId_Zero_Int()
{
- using (var db = new LiteDatabase(":memory:"))
+ using (var db = DatabaseFactory.Create())
{
var test = db.GetCollection("Test", BsonAutoId.Int32);
var doc = new BsonDocument() { ["_id"] = 0, ["p1"] = 1 };
diff --git a/LiteDB.Tests/Database/Create_Database_Tests.cs b/LiteDB.Tests/Database/Create_Database_Tests.cs
index 169ad6ebd..a8c945ade 100644
--- a/LiteDB.Tests/Database/Create_Database_Tests.cs
+++ b/LiteDB.Tests/Database/Create_Database_Tests.cs
@@ -3,6 +3,7 @@
using System.Linq;
using FluentAssertions;
using LiteDB.Engine;
+using LiteDB.Tests.Utils;
using Xunit;
namespace LiteDB.Tests.Database
@@ -17,7 +18,7 @@ public void Create_Database_With_Initial_Size()
using (var file = new TempFile())
{
- using (var db = new LiteDatabase("filename=" + file.Filename + ";initial size=" + initial))
+ using (var db = DatabaseFactory.Create(TestDatabaseType.Disk, "filename=" + file.Filename + ";initial size=" + initial))
{
var col = db.GetCollection("col");
diff --git a/LiteDB.Tests/Database/Database_Pragmas_Tests.cs b/LiteDB.Tests/Database/Database_Pragmas_Tests.cs
index e21041bfb..1d9092cbe 100644
--- a/LiteDB.Tests/Database/Database_Pragmas_Tests.cs
+++ b/LiteDB.Tests/Database/Database_Pragmas_Tests.cs
@@ -2,6 +2,7 @@
using System.IO;
using System.Linq;
using LiteDB;
+using LiteDB.Tests.Utils;
using FluentAssertions;
using Xunit;
using System.Globalization;
@@ -13,7 +14,7 @@ public class Database_Pragmas_Tests
[Fact]
public void Database_Pragmas_Get_Set()
{
- using (var db = new LiteDatabase(":memory:"))
+ using (var db = DatabaseFactory.Create())
{
db.Timeout.TotalSeconds.Should().Be(60.0);
db.UtcDate.Should().Be(false);
diff --git a/LiteDB.Tests/Database/DeleteMany_Tests.cs b/LiteDB.Tests/Database/DeleteMany_Tests.cs
index f7de356bd..cf80be741 100644
--- a/LiteDB.Tests/Database/DeleteMany_Tests.cs
+++ b/LiteDB.Tests/Database/DeleteMany_Tests.cs
@@ -2,6 +2,7 @@
using System.IO;
using System.Linq;
using LiteDB;
+using LiteDB.Tests.Utils;
using FluentAssertions;
using Xunit;
@@ -12,7 +13,7 @@ public class DeleteMany_Tests
[Fact]
public void DeleteMany_With_Arguments()
{
- using (var db = new LiteDatabase(":memory:"))
+ using (var db = DatabaseFactory.Create())
{
var c1 = db.GetCollection("Test");
diff --git a/LiteDB.Tests/Database/Delete_By_Name_Tests.cs b/LiteDB.Tests/Database/Delete_By_Name_Tests.cs
index ed206f7b0..053cbdb25 100644
--- a/LiteDB.Tests/Database/Delete_By_Name_Tests.cs
+++ b/LiteDB.Tests/Database/Delete_By_Name_Tests.cs
@@ -2,6 +2,7 @@
using System.IO;
using System.Linq;
using FluentAssertions;
+using LiteDB.Tests.Utils;
using Xunit;
namespace LiteDB.Tests.Database
@@ -22,7 +23,7 @@ public class Person
public void Delete_By_Name()
{
using (var f = new TempFile())
- using (var db = new LiteDatabase(f.Filename))
+ using (var db = DatabaseFactory.Create(TestDatabaseType.Disk, f.Filename))
{
var col = db.GetCollection("Person");
diff --git a/LiteDB.Tests/Database/Document_Size_Tests.cs b/LiteDB.Tests/Database/Document_Size_Tests.cs
index 6c25112cb..03dee85c4 100644
--- a/LiteDB.Tests/Database/Document_Size_Tests.cs
+++ b/LiteDB.Tests/Database/Document_Size_Tests.cs
@@ -4,6 +4,7 @@
using System.Linq;
using FluentAssertions;
using LiteDB.Engine;
+using LiteDB.Tests.Utils;
using Xunit;
namespace LiteDB.Tests.Database
@@ -16,7 +17,7 @@ public class Document_Size_Tests
public void Very_Large_Single_Document_Support_With_Partial_Load_Memory_Usage()
{
using (var file = new TempFile())
- using (var db = new LiteDatabase(file.Filename))
+ using (var db = DatabaseFactory.Create(TestDatabaseType.Disk, file.Filename))
{
var col = db.GetCollection("col");
diff --git a/LiteDB.Tests/Database/FindAll_Tests.cs b/LiteDB.Tests/Database/FindAll_Tests.cs
index 7f0f2b9a0..11b194ec9 100644
--- a/LiteDB.Tests/Database/FindAll_Tests.cs
+++ b/LiteDB.Tests/Database/FindAll_Tests.cs
@@ -2,6 +2,7 @@
using System.IO;
using System.Linq;
using FluentAssertions;
+using LiteDB.Tests.Utils;
using Xunit;
namespace LiteDB.Tests.Database
@@ -23,7 +24,7 @@ public void FindAll()
{
using (var f = new TempFile())
{
- using (var db = new LiteDatabase(f.Filename))
+ using (var db = DatabaseFactory.Create(TestDatabaseType.Disk, f.Filename))
{
var col = db.GetCollection("Person");
@@ -34,7 +35,7 @@ public void FindAll()
}
// close datafile
- using (var db = new LiteDatabase(f.Filename))
+ using (var db = DatabaseFactory.Create(TestDatabaseType.Disk, f.Filename))
{
var p = db.GetCollection("Person").Find(Query.All("Fullname", Query.Ascending));
diff --git a/LiteDB.Tests/Database/IndexSortAndFilter_Tests.cs b/LiteDB.Tests/Database/IndexSortAndFilter_Tests.cs
index 1e2f780fe..1d607803e 100644
--- a/LiteDB.Tests/Database/IndexSortAndFilter_Tests.cs
+++ b/LiteDB.Tests/Database/IndexSortAndFilter_Tests.cs
@@ -2,6 +2,7 @@
using System.Collections.Generic;
using System.Linq;
using FluentAssertions;
+using LiteDB.Tests.Utils;
using Xunit;
namespace LiteDB.Tests.Database
@@ -25,7 +26,7 @@ public class Item
public IndexSortAndFilterTest()
{
_tempFile = new TempFile();
- _database = new LiteDatabase(_tempFile.Filename);
+ _database = DatabaseFactory.Create(TestDatabaseType.Disk, _tempFile.Filename);
_collection = _database.GetCollection- ("items");
_collection.Upsert(new Item() { Id = "C", Value = "Value 1" });
diff --git a/LiteDB.Tests/Database/MultiKey_Mapper_Tests.cs b/LiteDB.Tests/Database/MultiKey_Mapper_Tests.cs
index b4d0ae131..b791bfbdb 100644
--- a/LiteDB.Tests/Database/MultiKey_Mapper_Tests.cs
+++ b/LiteDB.Tests/Database/MultiKey_Mapper_Tests.cs
@@ -2,6 +2,7 @@
using System.IO;
using System.Linq;
using FluentAssertions;
+using LiteDB.Tests.Utils;
using Xunit;
namespace LiteDB.Tests.Database
@@ -28,7 +29,7 @@ public class Customer
[Fact]
public void MultiKey_Mapper()
{
- using (var db = new LiteDatabase(":memory:"))
+ using (var db = DatabaseFactory.Create())
{
var col = db.GetCollection("col");
diff --git a/LiteDB.Tests/Database/NonIdPoco_Tests.cs b/LiteDB.Tests/Database/NonIdPoco_Tests.cs
index 4ba0b29d6..b792f956a 100644
--- a/LiteDB.Tests/Database/NonIdPoco_Tests.cs
+++ b/LiteDB.Tests/Database/NonIdPoco_Tests.cs
@@ -2,6 +2,7 @@
using System.IO;
using System.Linq;
using FluentAssertions;
+using LiteDB.Tests.Utils;
using Xunit;
namespace LiteDB.Tests.Database
@@ -22,7 +23,7 @@ public class MissingIdDoc
public void MissingIdDoc_Test()
{
using (var file = new TempFile())
- using (var db = new LiteDatabase(file.Filename))
+ using (var db = DatabaseFactory.Create(TestDatabaseType.Disk, file.Filename))
{
var col = db.GetCollection("col");
diff --git a/LiteDB.Tests/Database/Query_Min_Max_Tests.cs b/LiteDB.Tests/Database/Query_Min_Max_Tests.cs
index 46511594a..466e20b5c 100644
--- a/LiteDB.Tests/Database/Query_Min_Max_Tests.cs
+++ b/LiteDB.Tests/Database/Query_Min_Max_Tests.cs
@@ -2,6 +2,7 @@
using System.IO;
using System.Linq;
using FluentAssertions;
+using LiteDB.Tests.Utils;
using Xunit;
namespace LiteDB.Tests.Database
@@ -25,7 +26,7 @@ public class EntityMinMax
public void Query_Min_Max()
{
using (var f = new TempFile())
- using (var db = new LiteDatabase(f.Filename))
+ using (var db = DatabaseFactory.Create(TestDatabaseType.Disk, f.Filename))
{
var c = db.GetCollection("col");
diff --git a/LiteDB.Tests/Database/Site_Tests.cs b/LiteDB.Tests/Database/Site_Tests.cs
index 2ab05b6d2..19427ab88 100644
--- a/LiteDB.Tests/Database/Site_Tests.cs
+++ b/LiteDB.Tests/Database/Site_Tests.cs
@@ -3,6 +3,7 @@
using System.Linq;
using System.Security.Cryptography;
using FluentAssertions;
+using LiteDB.Tests.Utils;
using Xunit;
namespace LiteDB.Tests.Database
@@ -13,7 +14,7 @@ public class Site_Tests
public void Home_Example()
{
using (var f = new TempFile())
- using (var db = new LiteDatabase(f.Filename))
+ using (var db = DatabaseFactory.Create(TestDatabaseType.Disk, f.Filename))
{
// Get customer collection
var customers = db.GetCollection("customers");
diff --git a/LiteDB.Tests/Database/Snapshot_Upgrade_Tests.cs b/LiteDB.Tests/Database/Snapshot_Upgrade_Tests.cs
index 07ec94af3..1a8285dcf 100644
--- a/LiteDB.Tests/Database/Snapshot_Upgrade_Tests.cs
+++ b/LiteDB.Tests/Database/Snapshot_Upgrade_Tests.cs
@@ -2,6 +2,7 @@
using System.IO;
using System.Linq;
using FluentAssertions;
+using LiteDB.Tests.Utils;
using Xunit;
namespace LiteDB.Tests.Database
@@ -11,7 +12,7 @@ public class Snapshot_Upgrade_Tests
[Fact]
public void Transaction_Update_Upsert()
{
- using var db = new LiteDatabase(":memory:");
+ using var db = DatabaseFactory.Create();
var col = db.GetCollection("test");
bool transactionCreated = db.BeginTrans();
diff --git a/LiteDB.Tests/Database/Storage_Tests.cs b/LiteDB.Tests/Database/Storage_Tests.cs
index b54d5f395..69ef61ce3 100644
--- a/LiteDB.Tests/Database/Storage_Tests.cs
+++ b/LiteDB.Tests/Database/Storage_Tests.cs
@@ -3,6 +3,7 @@
using System.Linq;
using System.Security.Cryptography;
using FluentAssertions;
+using LiteDB.Tests.Utils;
using Xunit;
namespace LiteDB.Tests.Database
@@ -31,7 +32,7 @@ public Storage_Tests()
public void Storage_Upload_Download()
{
using (var f = new TempFile())
- using (var db = new LiteDatabase(f.Filename))
+ using (var db = DatabaseFactory.Create(TestDatabaseType.Disk, f.Filename))
//using (var db = new LiteDatabase(@"c:\temp\file.db"))
{
var fs = db.GetStorage("_files", "_chunks");
diff --git a/LiteDB.Tests/Database/Upgrade_Tests.cs b/LiteDB.Tests/Database/Upgrade_Tests.cs
index b1fd21761..49ac2441a 100644
--- a/LiteDB.Tests/Database/Upgrade_Tests.cs
+++ b/LiteDB.Tests/Database/Upgrade_Tests.cs
@@ -2,6 +2,7 @@
using System.IO;
using System.Linq;
using LiteDB;
+using LiteDB.Tests.Utils;
using FluentAssertions;
using Xunit;
using Microsoft.VisualStudio.TestPlatform.CommunicationUtilities.ObjectModel;
@@ -16,7 +17,7 @@ public void Migrage_From_V4()
// v5 upgrades only from v4!
using(var tempFile = new TempFile("../../../Resources/v4.db"))
{
- using (var db = new LiteDatabase($"filename={tempFile};upgrade=true"))
+ using (var db = DatabaseFactory.Create(TestDatabaseType.Disk, $"filename={tempFile};upgrade=true"))
{
// convert and open database
var col1 = db.GetCollection("col1");
@@ -24,7 +25,7 @@ public void Migrage_From_V4()
col1.Count().Should().Be(3);
}
- using (var db = new LiteDatabase($"filename={tempFile};upgrade=true"))
+ using (var db = DatabaseFactory.Create(TestDatabaseType.Disk, $"filename={tempFile};upgrade=true"))
{
// database already converted
var col1 = db.GetCollection("col1");
@@ -40,7 +41,7 @@ public void Migrage_From_V4_No_FileExtension()
// v5 upgrades only from v4!
using (var tempFile = new TempFile("../../../Resources/v4.db"))
{
- using (var db = new LiteDatabase($"filename={tempFile};upgrade=true"))
+ using (var db = DatabaseFactory.Create(TestDatabaseType.Disk, $"filename={tempFile};upgrade=true"))
{
// convert and open database
var col1 = db.GetCollection("col1");
@@ -48,7 +49,7 @@ public void Migrage_From_V4_No_FileExtension()
col1.Count().Should().Be(3);
}
- using (var db = new LiteDatabase($"filename={tempFile};upgrade=true"))
+ using (var db = DatabaseFactory.Create(TestDatabaseType.Disk, $"filename={tempFile};upgrade=true"))
{
// database already converted
var col1 = db.GetCollection("col1");
diff --git a/LiteDB.Tests/Database/Writing_While_Reading_Test.cs b/LiteDB.Tests/Database/Writing_While_Reading_Test.cs
index fc2c8f86d..69dda1db6 100644
--- a/LiteDB.Tests/Database/Writing_While_Reading_Test.cs
+++ b/LiteDB.Tests/Database/Writing_While_Reading_Test.cs
@@ -1,4 +1,5 @@
using System.IO;
+using LiteDB.Tests.Utils;
using Xunit;
namespace LiteDB.Tests.Database;
@@ -9,7 +10,7 @@ public class Writing_While_Reading_Test
public void Test()
{
using var f = new TempFile();
- using (var db = new LiteDatabase(f.Filename))
+ using (var db = DatabaseFactory.Create(TestDatabaseType.Disk, f.Filename))
{
var col = db.GetCollection("col");
col.Insert(new MyClass { Name = "John", Description = "Doe" });
@@ -18,7 +19,7 @@ public void Test()
}
- using (var db = new LiteDatabase(f.Filename))
+ using (var db = DatabaseFactory.Create(TestDatabaseType.Disk, f.Filename))
{
var col = db.GetCollection("col");
foreach (var item in col.FindAll())
@@ -31,7 +32,7 @@ public void Test()
}
- using (var db = new LiteDatabase(f.Filename))
+ using (var db = DatabaseFactory.Create(TestDatabaseType.Disk, f.Filename))
{
var col = db.GetCollection("col");
foreach (var item in col.FindAll())
diff --git a/LiteDB.Tests/Engine/DropCollection_Tests.cs b/LiteDB.Tests/Engine/DropCollection_Tests.cs
index 13be40d58..372b4ad0a 100644
--- a/LiteDB.Tests/Engine/DropCollection_Tests.cs
+++ b/LiteDB.Tests/Engine/DropCollection_Tests.cs
@@ -1,5 +1,6 @@
using System.Linq;
using FluentAssertions;
+using LiteDB.Tests.Utils;
using Xunit;
namespace LiteDB.Tests.Engine
@@ -10,7 +11,7 @@ public class DropCollection_Tests
public void DropCollection()
{
using (var file = new TempFile())
- using (var db = new LiteDatabase(file.Filename))
+ using (var db = DatabaseFactory.Create(TestDatabaseType.Disk, file.Filename))
{
db.GetCollectionNames().Should().NotContain("col");
@@ -31,7 +32,7 @@ public void InsertDropCollection()
{
using (var file = new TempFile())
{
- using (var db = new LiteDatabase(file.Filename))
+ using (var db = DatabaseFactory.Create(TestDatabaseType.Disk, file.Filename))
{
var col = db.GetCollection("test");
col.Insert(new BsonDocument { ["_id"] = 1 });
@@ -39,7 +40,7 @@ public void InsertDropCollection()
db.Rebuild();
}
- using (var db = new LiteDatabase(file.Filename))
+ using (var db = DatabaseFactory.Create(TestDatabaseType.Disk, file.Filename))
{
var col = db.GetCollection("test");
col.Insert(new BsonDocument { ["_id"] = 1 });
diff --git a/LiteDB.Tests/Engine/Index_Tests.cs b/LiteDB.Tests/Engine/Index_Tests.cs
index bdd468c17..16c522c1c 100644
--- a/LiteDB.Tests/Engine/Index_Tests.cs
+++ b/LiteDB.Tests/Engine/Index_Tests.cs
@@ -1,6 +1,7 @@
using System;
using System.Linq;
using FluentAssertions;
+using LiteDB.Tests.Utils;
using Xunit;
namespace LiteDB.Tests.Engine
@@ -10,7 +11,7 @@ public class Index_Tests
[Fact]
public void Index_With_No_Name()
{
- using (var db = new LiteDatabase("filename=:memory:"))
+ using (var db = DatabaseFactory.Create(connectionString: "filename=:memory:"))
{
var users = db.GetCollection("users");
var indexes = db.GetCollection("$indexes");
@@ -31,7 +32,7 @@ public void Index_With_No_Name()
[Fact]
public void Index_Order()
{
- using (var db = new LiteDatabase("filename=:memory:"))
+ using (var db = DatabaseFactory.Create(connectionString: "filename=:memory:"))
{
var col = db.GetCollection("col");
var indexes = db.GetCollection("$indexes");
@@ -68,7 +69,7 @@ public void Index_Order()
[Fact]
public void Index_With_Like()
{
- using (var db = new LiteDatabase("filename=:memory:"))
+ using (var db = DatabaseFactory.Create(connectionString: "filename=:memory:"))
{
var col = db.GetCollection("names", BsonAutoId.Int32);
@@ -118,7 +119,7 @@ public void Index_With_Like()
[Fact]
public void EnsureIndex_Invalid_Arguments()
{
- using var db = new LiteDatabase("filename=:memory:");
+ using var db = DatabaseFactory.Create(connectionString: "filename=:memory:");
var test = db.GetCollection("test");
// null name
@@ -143,7 +144,7 @@ public void EnsureIndex_Invalid_Arguments()
[Fact]
public void MultiKey_Index_Test()
{
- using var db = new LiteDatabase("filename=:memory:");
+ using var db = DatabaseFactory.Create(connectionString: "filename=:memory:");
var col = db.GetCollection("customers", BsonAutoId.Int32);
col.EnsureIndex("$.Phones[*].Type");
diff --git a/LiteDB.Tests/Engine/Rebuild_Tests.cs b/LiteDB.Tests/Engine/Rebuild_Tests.cs
index 1a32935be..9ab98c6cf 100644
--- a/LiteDB.Tests/Engine/Rebuild_Tests.cs
+++ b/LiteDB.Tests/Engine/Rebuild_Tests.cs
@@ -1,5 +1,6 @@
using FluentAssertions;
using LiteDB.Engine;
+using LiteDB.Tests.Utils;
using System;
using System.Collections.Generic;
using System.IO;
@@ -15,7 +16,7 @@ public class Rebuild_Tests
public void Rebuild_After_DropCollection()
{
using (var file = new TempFile())
- using (var db = new LiteDatabase(file.Filename))
+ using (var db = DatabaseFactory.Create(TestDatabaseType.Disk, file.Filename))
{
var col = db.GetCollection("zip");
@@ -47,7 +48,7 @@ void DoTest(ILiteDatabase db, ILiteCollection col)
using (var file = new TempFile())
{
- using (var db = new LiteDatabase(file.Filename))
+ using (var db = DatabaseFactory.Create(TestDatabaseType.Disk, file.Filename))
{
var col = db.GetCollection();
@@ -81,7 +82,7 @@ void DoTest(ILiteDatabase db, ILiteCollection col)
}
// re-open and rebuild again
- using (var db = new LiteDatabase(file.Filename))
+ using (var db = DatabaseFactory.Create(TestDatabaseType.Disk, file.Filename))
{
var col = db.GetCollection();
@@ -132,7 +133,7 @@ private static IEnumerable CreateSyntheticZipData(int totalCount, string su
public void Rebuild_Change_Culture_Error()
{
using (var file = new TempFile())
- using (var db = new LiteDatabase(file.Filename))
+ using (var db = DatabaseFactory.Create(TestDatabaseType.Disk, file.Filename))
{
// remove string comparer ignore case
db.Rebuild(new RebuildOptions { Collation = new Collation("en-US/None") });
diff --git a/LiteDB.Tests/Engine/Transactions_Tests.cs b/LiteDB.Tests/Engine/Transactions_Tests.cs
index dcab8e31f..47bbd638d 100644
--- a/LiteDB.Tests/Engine/Transactions_Tests.cs
+++ b/LiteDB.Tests/Engine/Transactions_Tests.cs
@@ -23,7 +23,7 @@ public async Task Transaction_Write_Lock_Timeout()
var data1 = DataGen.Person(1, 100).ToArray();
var data2 = DataGen.Person(101, 200).ToArray();
- using (var db = new LiteDatabase("filename=:memory:"))
+ using (var db = DatabaseFactory.Create(connectionString: "filename=:memory:"))
{
// configure the minimal pragma timeout and then override the engine to a few milliseconds
db.Pragma(Pragmas.TIMEOUT, 1);
diff --git a/LiteDB.Tests/Engine/UserVersion_Tests.cs b/LiteDB.Tests/Engine/UserVersion_Tests.cs
index 5ac937052..410dec974 100644
--- a/LiteDB.Tests/Engine/UserVersion_Tests.cs
+++ b/LiteDB.Tests/Engine/UserVersion_Tests.cs
@@ -1,4 +1,5 @@
using FluentAssertions;
+using LiteDB.Tests.Utils;
using Xunit;
namespace LiteDB.Tests.Engine
@@ -10,14 +11,14 @@ public void UserVersion_Get_Set()
{
using (var file = new TempFile())
{
- using (var db = new LiteDatabase(file.Filename))
+ using (var db = DatabaseFactory.Create(TestDatabaseType.Disk, file.Filename))
{
db.UserVersion.Should().Be(0);
db.UserVersion = 5;
db.Checkpoint();
}
- using (var db = new LiteDatabase(file.Filename))
+ using (var db = DatabaseFactory.Create(TestDatabaseType.Disk, file.Filename))
{
db.UserVersion.Should().Be(5);
}
diff --git a/LiteDB.Tests/Internals/ExtendedLength_Tests.cs b/LiteDB.Tests/Internals/ExtendedLength_Tests.cs
index 6e0b0fdb7..054ad19bb 100644
--- a/LiteDB.Tests/Internals/ExtendedLength_Tests.cs
+++ b/LiteDB.Tests/Internals/ExtendedLength_Tests.cs
@@ -1,6 +1,7 @@
using System;
using System.Collections.Generic;
using System.Text;
+using LiteDB.Tests.Utils;
using Xunit;
namespace LiteDB.Internals
@@ -22,7 +23,7 @@ public void ExtendedLengthHelper_Tests()
[Fact]
public void IndexExtendedLength_Tests()
{
- using var db = new LiteDatabase(":memory:");
+ using var db = DatabaseFactory.Create();
var col = db.GetCollection("customers", BsonAutoId.Int32);
col.EnsureIndex("$.Name");
col.Insert(new BsonDocument { ["Name"] = new string('A', 1010) });
diff --git a/LiteDB.Tests/Issues/Issue1651_Tests.cs b/LiteDB.Tests/Issues/Issue1651_Tests.cs
index 0b71dcce2..eccc545cc 100644
--- a/LiteDB.Tests/Issues/Issue1651_Tests.cs
+++ b/LiteDB.Tests/Issues/Issue1651_Tests.cs
@@ -2,6 +2,7 @@
using System.Collections.Generic;
using Xunit;
using System.Linq;
+using LiteDB.Tests.Utils;
namespace LiteDB.Tests.Issues
{
@@ -28,7 +29,7 @@ public void Find_ByRelationId_Success()
{
BsonMapper.Global.Entity().DbRef(order => order.Customer);
- using var _database = new LiteDatabase(":memory:");
+ using var _database = DatabaseFactory.Create();
var _orderCollection = _database.GetCollection("Order");
var _customerCollection = _database.GetCollection("Customer");
diff --git a/LiteDB.Tests/Issues/Issue1695_Tests.cs b/LiteDB.Tests/Issues/Issue1695_Tests.cs
index c4ee06122..de3504f04 100644
--- a/LiteDB.Tests/Issues/Issue1695_Tests.cs
+++ b/LiteDB.Tests/Issues/Issue1695_Tests.cs
@@ -4,6 +4,7 @@
using System.Linq;
using FluentAssertions;
using LiteDB.Engine;
+using LiteDB.Tests.Utils;
using Xunit;
namespace LiteDB.Tests.Issues
@@ -19,7 +20,7 @@ public class StateModel
[Fact]
public void ICollection_Parameter_Test()
{
- using var db = new LiteDatabase(":memory:");
+ using var db = DatabaseFactory.Create();
var col = db.GetCollection("col");
ICollection ids = new List();
diff --git a/LiteDB.Tests/Issues/Issue1701_Tests.cs b/LiteDB.Tests/Issues/Issue1701_Tests.cs
index edd2c609a..16db55ced 100644
--- a/LiteDB.Tests/Issues/Issue1701_Tests.cs
+++ b/LiteDB.Tests/Issues/Issue1701_Tests.cs
@@ -2,6 +2,7 @@
using System.Collections.Generic;
using Xunit;
using System.Linq;
+using LiteDB.Tests.Utils;
namespace LiteDB.Tests.Issues
{
@@ -10,7 +11,7 @@ public class Issue1701_Tests
[Fact]
public void Deleted_Index_Slot_Test()
{
- using var db = new LiteDatabase(":memory:");
+ using var db = DatabaseFactory.Create();
var col = db.GetCollection("col", BsonAutoId.Int32);
var id = col.Insert(new BsonDocument { ["attr1"] = "attr", ["attr2"] = "attr", ["attr3"] = "attr" });
diff --git a/LiteDB.Tests/Issues/Issue1838_Tests.cs b/LiteDB.Tests/Issues/Issue1838_Tests.cs
index 5d3c47507..7934bd07a 100644
--- a/LiteDB.Tests/Issues/Issue1838_Tests.cs
+++ b/LiteDB.Tests/Issues/Issue1838_Tests.cs
@@ -2,6 +2,7 @@
using System.Collections.Generic;
using Xunit;
using System.Linq;
+using LiteDB.Tests.Utils;
namespace LiteDB.Tests.Issues
{
@@ -10,7 +11,7 @@ public class Issue1838_Tests
[Fact]
public void Find_ByDatetime_Offset()
{
- using var db = new LiteDatabase(":memory:");
+ using var db = DatabaseFactory.Create();
var collection = db.GetCollection(nameof(TestType));
// sample data
diff --git a/LiteDB.Tests/Issues/Issue1860_Tests.cs b/LiteDB.Tests/Issues/Issue1860_Tests.cs
index f34f5cc67..a869960ce 100644
--- a/LiteDB.Tests/Issues/Issue1860_Tests.cs
+++ b/LiteDB.Tests/Issues/Issue1860_Tests.cs
@@ -2,6 +2,7 @@
using System.Collections.Generic;
using Xunit;
using System.Linq;
+using LiteDB.Tests.Utils;
namespace LiteDB.Tests.Issues
{
@@ -10,7 +11,7 @@ public class Issue1860_Tests
[Fact]
public void Constructor_has_enum_bsonctor()
{
- using var db = new LiteDatabase(":memory:");
+ using var db = DatabaseFactory.Create();
// Get a collection (or create, if doesn't exist)
var col1 = db.GetCollection("c1");
@@ -44,7 +45,7 @@ public void Constructor_has_enum_bsonctor()
[Fact]
public void Constructor_has_enum()
{
- using var db = new LiteDatabase(":memory:");
+ using var db = DatabaseFactory.Create();
// Get a collection (or create, if doesn't exist)
var col1 = db.GetCollection("c1");
@@ -78,7 +79,7 @@ public void Constructor_has_enum()
[Fact]
public void Constructor_has_enum_asint()
{
- using var db = new LiteDatabase(":memory:", new BsonMapper { EnumAsInteger = true });
+ using var db = DatabaseFactory.Create(mapper: new BsonMapper { EnumAsInteger = true });
// Get a collection (or create, if doesn't exist)
var col1 = db.GetCollection("c1");
diff --git a/LiteDB.Tests/Issues/Issue1865_Tests.cs b/LiteDB.Tests/Issues/Issue1865_Tests.cs
index 0d9ef62e3..90e53662e 100644
--- a/LiteDB.Tests/Issues/Issue1865_Tests.cs
+++ b/LiteDB.Tests/Issues/Issue1865_Tests.cs
@@ -3,6 +3,7 @@
using Xunit;
using System.Linq;
using System.Security.Cryptography;
+using LiteDB.Tests.Utils;
namespace LiteDB.Tests.Issues
{
@@ -37,7 +38,7 @@ public void Incluced_document_types_should_be_reald()
//BsonMapper.Global.ResolveCollectionName = (s) => "activity";
- using var _database = new LiteDatabase(":memory:");
+ using var _database = DatabaseFactory.Create();
var projectsCol = _database.GetCollection("activity");
var pointsCol = _database.GetCollection("activity");
diff --git a/LiteDB.Tests/Issues/Issue2127_Tests.cs b/LiteDB.Tests/Issues/Issue2127_Tests.cs
index 4a2320c5b..4ab1cd3ce 100644
--- a/LiteDB.Tests/Issues/Issue2127_Tests.cs
+++ b/LiteDB.Tests/Issues/Issue2127_Tests.cs
@@ -3,6 +3,7 @@
using System.IO;
using System.Text;
using System.Threading;
+using LiteDB.Tests.Utils;
namespace LiteDB.Tests.Issues
{
@@ -81,7 +82,7 @@ public ExampleItemRepository(string databasePath)
Connection = ConnectionType.Direct
};
- _liteDb = new LiteDatabase(connectionString);
+ _liteDb = DatabaseFactory.Create(TestDatabaseType.Disk, connectionString.ToString());
}
public void Insert(ExampleItem item)
diff --git a/LiteDB.Tests/Issues/Issue2129_Tests.cs b/LiteDB.Tests/Issues/Issue2129_Tests.cs
index 1dce48cd1..3779191bf 100644
--- a/LiteDB.Tests/Issues/Issue2129_Tests.cs
+++ b/LiteDB.Tests/Issues/Issue2129_Tests.cs
@@ -1,6 +1,7 @@
using System;
using System.Collections.Generic;
using System.Linq;
+using LiteDB.Tests.Utils;
using Xunit;
namespace LiteDB.Tests.Issues
@@ -10,7 +11,7 @@ public class Issue2129_Tests
[Fact]
public void TestInsertAfterDeleteAll()
{
- var db = new LiteDatabase(":memory:");
+ using var db = DatabaseFactory.Create();
var col = db.GetCollection(nameof(SwapChance));
col.EnsureIndex(x => x.Accounts1to2);
col.EnsureIndex(x => x.Accounts2to1);
diff --git a/LiteDB.Tests/Issues/Issue2265_Tests.cs b/LiteDB.Tests/Issues/Issue2265_Tests.cs
index 347753348..57257c12c 100644
--- a/LiteDB.Tests/Issues/Issue2265_Tests.cs
+++ b/LiteDB.Tests/Issues/Issue2265_Tests.cs
@@ -1,5 +1,6 @@
using System;
+using LiteDB.Tests.Utils;
using Xunit;
namespace LiteDB.Tests.Issues;
@@ -31,7 +32,7 @@ public Weights()
[Fact]
public void Test()
{
- using (var db = new LiteDatabase(":memory:"))
+ using (var db = DatabaseFactory.Create())
{
var c = db.GetCollection("weights");
Weights? w = c.FindOne(x => true);
diff --git a/LiteDB.Tests/Issues/Issue2298_Tests.cs b/LiteDB.Tests/Issues/Issue2298_Tests.cs
index c4d4c5a97..645abc001 100644
--- a/LiteDB.Tests/Issues/Issue2298_Tests.cs
+++ b/LiteDB.Tests/Issues/Issue2298_Tests.cs
@@ -5,6 +5,7 @@
using System.Text.Json;
using System.Threading.Tasks;
+using LiteDB.Tests.Utils;
using Xunit;
namespace LiteDB.Tests.Issues;
@@ -45,23 +46,23 @@ public static QuantityRange<Mass> MassRangeBuilder(BsonDocument document)
}
[Fact]
- public void We_Dont_Need_Ctor()
- {
- BsonMapper.Global.RegisterType<QuantityRange<Mass>>(
- serialize: (range) => new BsonDocument
- {
- { nameof(QuantityRange.Min), range.Min },
- { nameof(QuantityRange.Max), range.Max },
- { nameof(QuantityRange.Unit), range.Unit.ToString() }
- },
- deserialize: (document) => MassRangeBuilder(document as BsonDocument)
- );
+ public void We_Dont_Need_Ctor()
+ {
+ BsonMapper.Global.RegisterType<QuantityRange<Mass>>(
+ serialize: (range) => new BsonDocument
+ {
+ { nameof(QuantityRange.Min), range.Min },
+ { nameof(QuantityRange.Max), range.Max },
+ { nameof(QuantityRange.Unit), range.Unit.ToString() }
+ },
+ deserialize: (document) => MassRangeBuilder(document as BsonDocument)
+ );
- var range = new QuantityRange<Mass>(100, 500, Mass.Units.Pound);
- var filename = "Demo.DB";
- var DB = new LiteDatabase(filename);
- var collection = DB.GetCollection<QuantityRange<Mass>>("DEMO");
- collection.Insert(range);
- var restored = collection.FindAll().First();
- }
+ var range = new QuantityRange<Mass>(100, 500, Mass.Units.Pound);
+ using var filename = new TempFile();
+ using var db = DatabaseFactory.Create(TestDatabaseType.Disk, filename.Filename);
+ var collection = db.GetCollection<QuantityRange<Mass>>("DEMO");
+ collection.Insert(range);
+ var restored = collection.FindAll().First();
+ }
}
\ No newline at end of file
diff --git a/LiteDB.Tests/Issues/Issue2458_Tests.cs b/LiteDB.Tests/Issues/Issue2458_Tests.cs
index 113132f6c..e538e18e9 100644
--- a/LiteDB.Tests/Issues/Issue2458_Tests.cs
+++ b/LiteDB.Tests/Issues/Issue2458_Tests.cs
@@ -1,5 +1,6 @@
using System;
using System.IO;
+using LiteDB.Tests.Utils;
using Xunit;
namespace LiteDB.Tests.Issues;
@@ -9,7 +10,7 @@ public class Issue2458_Tests
[Fact]
public void NegativeSeekFails()
{
- using var db = new LiteDatabase(":memory:");
+ using var db = DatabaseFactory.Create();
var fs = db.FileStorage;
AddTestFile("test", 1, fs);
using Stream stream = fs.OpenRead("test");
@@ -21,7 +22,7 @@ public void NegativeSeekFails()
[Fact]
public void SeekPastFileSucceds()
{
- using var db = new LiteDatabase(":memory:");
+ using var db = DatabaseFactory.Create();
var fs = db.FileStorage;
AddTestFile("test", 1, fs);
using Stream stream = fs.OpenRead("test");
@@ -31,7 +32,7 @@ public void SeekPastFileSucceds()
[Fact]
public void SeekShortChunks()
{
- using var db = new LiteDatabase(":memory:");
+ using var db = DatabaseFactory.Create();
var fs = db.FileStorage;
using(Stream writeStream = fs.OpenWrite("test", "test"))
{
diff --git a/LiteDB.Tests/Issues/Issue2471_Test.cs b/LiteDB.Tests/Issues/Issue2471_Test.cs
index 1f50e1aff..1c7aa3ed2 100644
--- a/LiteDB.Tests/Issues/Issue2471_Test.cs
+++ b/LiteDB.Tests/Issues/Issue2471_Test.cs
@@ -8,6 +8,7 @@
using System.Threading;
using System.Threading.Tasks;
+using LiteDB.Tests.Utils;
using Xunit;
namespace LiteDB.Tests.Issues;
@@ -17,7 +18,7 @@ public class Issue2471_Test
[Fact]
public void TestFragmentDB_FindByIDException()
{
- using var db = new LiteDatabase(":memory:");
+ using var db = DatabaseFactory.Create();
var collection = db.GetCollection
diff --git a/LiteDB.RollbackRepro/Program.cs b/LiteDB.RollbackRepro/Program.cs
index c2b9f486e..779812685 100644
--- a/LiteDB.RollbackRepro/Program.cs
+++ b/LiteDB.RollbackRepro/Program.cs
@@ -9,6 +9,10 @@
namespace LiteDB.RollbackRepro;
+/// <summary>
+/// Repro of #2586
+/// To repro after the patch, set ````
+/// </summary>
internal static class Program
{
private const int HolderTransactionCount = 99;
@@ -230,8 +234,13 @@ private static void RunFailingTransaction(LiteDatabase db, ILiteCollection
Date: Tue, 23 Sep 2025 17:43:30 +0200
Subject: [PATCH 24/53] Fix testing
---
.github/workflows/ci.yml | 4 ++--
ConsoleApp1/Program.cs | 2 +-
LiteDB.Tests/Engine/Rebuild_Crash_Tests.cs | 2 +-
LiteDB/Engine/Disk/DiskReader.cs | 2 +-
LiteDB/Engine/Disk/DiskService.cs | 2 +-
LiteDB/Engine/EngineState.cs | 2 +-
LiteDB/Engine/LiteEngine.cs | 2 +-
LiteDB/Engine/Structures/PageBuffer.cs | 2 +-
LiteDB/Utils/Constants.cs | 4 ++--
9 files changed, 11 insertions(+), 11 deletions(-)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 2ef778838..6d9516bbe 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -22,8 +22,8 @@ jobs:
run: dotnet restore LiteDB.sln
- name: Build
- run: dotnet build LiteDB.sln --configuration Release --no-restore
+ run: dotnet build LiteDB.sln --configuration Release --no-restore /p:DefineConstants=TESTING
- name: Test
timeout-minutes: 5
- run: dotnet test LiteDB.sln --configuration Release --no-build --verbosity normal --settings tests.runsettings --logger "trx;LogFileName=TestResults.trx" --logger "console;verbosity=detailed"
+ run: dotnet test LiteDB.sln --configuration Release --no-build --verbosity normal --settings tests.runsettings --logger "trx;LogFileName=TestResults.trx" --logger "console;verbosity=detailed" /p:DefineConstants=TESTING
diff --git a/ConsoleApp1/Program.cs b/ConsoleApp1/Program.cs
index cfa7f4a6f..ef7bd993e 100644
--- a/ConsoleApp1/Program.cs
+++ b/ConsoleApp1/Program.cs
@@ -27,7 +27,7 @@
{
using (var db = new LiteEngine(settings))
{
-#if DEBUG
+#if DEBUG || TESTING
db.SimulateDiskWriteFail = (page) =>
{
var p = new BasePage(page);
diff --git a/LiteDB.Tests/Engine/Rebuild_Crash_Tests.cs b/LiteDB.Tests/Engine/Rebuild_Crash_Tests.cs
index 7bce95d7a..7c36438a3 100644
--- a/LiteDB.Tests/Engine/Rebuild_Crash_Tests.cs
+++ b/LiteDB.Tests/Engine/Rebuild_Crash_Tests.cs
@@ -6,7 +6,7 @@
using Xunit;
-#if DEBUG
+#if DEBUG || TESTING
namespace LiteDB.Tests.Engine
{
public class Rebuild_Crash_Tests
diff --git a/LiteDB/Engine/Disk/DiskReader.cs b/LiteDB/Engine/Disk/DiskReader.cs
index c419f16b4..adf0e4313 100644
--- a/LiteDB/Engine/Disk/DiskReader.cs
+++ b/LiteDB/Engine/Disk/DiskReader.cs
@@ -50,7 +50,7 @@ public PageBuffer ReadPage(long position, bool writable, FileOrigin origin)
_cache.GetWritablePage(position, origin, (pos, buf) => this.ReadStream(stream, pos, buf)) :
_cache.GetReadablePage(position, origin, (pos, buf) => this.ReadStream(stream, pos, buf));
-#if DEBUG
+#if DEBUG || TESTING
_state.SimulateDiskReadFail?.Invoke(page);
#endif
diff --git a/LiteDB/Engine/Disk/DiskService.cs b/LiteDB/Engine/Disk/DiskService.cs
index 73e7910b5..bd21d57fc 100644
--- a/LiteDB/Engine/Disk/DiskService.cs
+++ b/LiteDB/Engine/Disk/DiskService.cs
@@ -190,7 +190,7 @@ public int WriteLogDisk(IEnumerable<PageBuffer> pages)
// set log stream position to page
stream.Position = page.Position;
-#if DEBUG
+#if DEBUG || TESTING
_state.SimulateDiskWriteFail?.Invoke(page);
#endif
diff --git a/LiteDB/Engine/EngineState.cs b/LiteDB/Engine/EngineState.cs
index bd3dafac7..dfce2e984 100644
--- a/LiteDB/Engine/EngineState.cs
+++ b/LiteDB/Engine/EngineState.cs
@@ -18,7 +18,7 @@ internal class EngineState
private readonly LiteEngine _engine; // can be null for unit tests
private readonly EngineSettings _settings;
-#if DEBUG
+#if DEBUG || TESTING
public Action<PageBuffer> SimulateDiskReadFail = null;
public Action<PageBuffer> SimulateDiskWriteFail = null;
#endif
diff --git a/LiteDB/Engine/LiteEngine.cs b/LiteDB/Engine/LiteEngine.cs
index 59bb84b4c..2e36024c8 100644
--- a/LiteDB/Engine/LiteEngine.cs
+++ b/LiteDB/Engine/LiteEngine.cs
@@ -243,7 +243,7 @@ internal List<Exception> Close(Exception ex)
#endregion
-#if DEBUG
+#if DEBUG || TESTING
// exposes for unit tests
internal TransactionMonitor GetMonitor() => _monitor;
internal Action<PageBuffer> SimulateDiskReadFail { set => _state.SimulateDiskReadFail = value; }
diff --git a/LiteDB/Engine/Structures/PageBuffer.cs b/LiteDB/Engine/Structures/PageBuffer.cs
index d4b95c18f..7892c24d6 100644
--- a/LiteDB/Engine/Structures/PageBuffer.cs
+++ b/LiteDB/Engine/Structures/PageBuffer.cs
@@ -62,7 +62,7 @@ public void Release()
Interlocked.Decrement(ref this.ShareCounter);
}
-#if DEBUG
+#if DEBUG || TESTING
~PageBuffer()
{
ENSURE(this.ShareCounter == 0, $"share count must be 0 in destroy PageBuffer (current: {this.ShareCounter})");
diff --git a/LiteDB/Utils/Constants.cs b/LiteDB/Utils/Constants.cs
index e13ae9add..8f0d0eeb4 100644
--- a/LiteDB/Utils/Constants.cs
+++ b/LiteDB/Utils/Constants.cs
@@ -6,7 +6,7 @@
using System.Threading;
[assembly: InternalsVisibleTo("LiteDB.Tests")]
-#if DEBUG
+#if DEBUG || TESTING
[assembly: InternalsVisibleTo("ConsoleApp1")]
#endif
@@ -102,7 +102,7 @@ internal class Constants
/// <summary>
/// Initial seed for Random
/// </summary>
-#if DEBUG
+#if DEBUG || TESTING
public const int RANDOMIZER_SEED = 3131;
#else
public const int RANDOMIZER_SEED = 0;
From 8b02f19a89c84a6331b0042a162432523b92f3ae Mon Sep 17 00:00:00 2001
From: JKamsker <11245306+JKamsker@users.noreply.github.com>
Date: Tue, 23 Sep 2025 18:04:54 +0200
Subject: [PATCH 25/53] Fix dev build
---
.github/workflows/publish-prerelease.yml | 8 ++++----
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/.github/workflows/publish-prerelease.yml b/.github/workflows/publish-prerelease.yml
index 5d1f3ef61..19bee31a4 100644
--- a/.github/workflows/publish-prerelease.yml
+++ b/.github/workflows/publish-prerelease.yml
@@ -34,12 +34,12 @@ jobs:
- name: Restore
run: dotnet restore LiteDB.sln
- - name: Build
- run: dotnet build LiteDB.sln --configuration Release --no-restore
-
- name: Test
timeout-minutes: 5
- run: dotnet test LiteDB.sln --configuration Release --no-build --verbosity normal --settings tests.runsettings --logger "trx;LogFileName=TestResults.trx" --logger "console;verbosity=detailed"
+ run: dotnet test LiteDB.sln --configuration Release --verbosity normal --settings tests.runsettings --logger "trx;LogFileName=TestResults.trx" --logger "console;verbosity=detailed" /p:DefineConstants=TESTING
+
+ - name: Build
+ run: dotnet build LiteDB.sln --configuration Release --no-restore
- name: Pack
run: |
From bad727392b27da55f1e272c87a5caee4a51ee00e Mon Sep 17 00:00:00 2001
From: Jonas Kamsker <11245306+JKamsker@users.noreply.github.com>
Date: Tue, 23 Sep 2025 18:27:23 +0200
Subject: [PATCH 26/53] Simplify rebuild crash test setup
---
LiteDB.Tests/Engine/Rebuild_Crash_Tests.cs | 12 +++++++-----
1 file changed, 7 insertions(+), 5 deletions(-)
diff --git a/LiteDB.Tests/Engine/Rebuild_Crash_Tests.cs b/LiteDB.Tests/Engine/Rebuild_Crash_Tests.cs
index 7c36438a3..da3fd8a1c 100644
--- a/LiteDB.Tests/Engine/Rebuild_Crash_Tests.cs
+++ b/LiteDB.Tests/Engine/Rebuild_Crash_Tests.cs
@@ -15,7 +15,7 @@ public class Rebuild_Crash_Tests
[Fact]
public void Rebuild_Crash_IO_Write_Error()
{
- var N = 1_000;
+ var N = 200;
using (var file = new TempFile())
{
@@ -26,13 +26,15 @@ public void Rebuild_Crash_IO_Write_Error()
Password = "46jLz5QWd5fI3m4LiL2r"
};
+ var initial = new DateTime(2024, 1, 1);
+
var data = Enumerable.Range(1, N).Select(i => new BsonDocument
{
["_id"] = i,
- ["name"] = Faker.Fullname(),
- ["age"] = Faker.Age(),
- ["created"] = Faker.Birthday(),
- ["lorem"] = Faker.Lorem(5, 25)
+ ["name"] = $"user-{i:D4}",
+ ["age"] = 18 + (i % 60),
+ ["created"] = initial.AddDays(i),
+ ["lorem"] = new string((char)('a' + (i % 26)), 200)
}).ToArray();
try
From de7fd61fee2eaed29996a7b83d7184edeb217a1e Mon Sep 17 00:00:00 2001
From: Jonas Kamsker <11245306+JKamsker@users.noreply.github.com>
Date: Tue, 23 Sep 2025 18:41:40 +0200
Subject: [PATCH 27/53] Add timeouts to skipped query and rebuild tests
---
LiteDB.Tests/Engine/Rebuild_Crash_Tests.cs | 11 +++++---
LiteDB.Tests/Query/Where_Tests.cs | 33 ++++++++++++++--------
2 files changed, 29 insertions(+), 15 deletions(-)
diff --git a/LiteDB.Tests/Engine/Rebuild_Crash_Tests.cs b/LiteDB.Tests/Engine/Rebuild_Crash_Tests.cs
index da3fd8a1c..d5c5425f3 100644
--- a/LiteDB.Tests/Engine/Rebuild_Crash_Tests.cs
+++ b/LiteDB.Tests/Engine/Rebuild_Crash_Tests.cs
@@ -3,6 +3,7 @@
using System;
using System.IO;
using System.Linq;
+using System.Threading.Tasks;
using Xunit;
@@ -12,10 +13,10 @@ namespace LiteDB.Tests.Engine
public class Rebuild_Crash_Tests
{
- [Fact]
- public void Rebuild_Crash_IO_Write_Error()
+ [Fact(Timeout = 30000)]
+ public async Task Rebuild_Crash_IO_Write_Error()
{
- var N = 200;
+ var N = 1000;
using (var file = new TempFile())
{
@@ -34,7 +35,7 @@ public void Rebuild_Crash_IO_Write_Error()
["name"] = $"user-{i:D4}",
["age"] = 18 + (i % 60),
["created"] = initial.AddDays(i),
- ["lorem"] = new string((char)('a' + (i % 26)), 200)
+ ["lorem"] = new string((char)('a' + (i % 26)), 800)
}).ToArray();
try
@@ -88,6 +89,8 @@ public void Rebuild_Crash_IO_Write_Error()
errors.Should().Be(1);
}
+
+ await Task.CompletedTask;
}
}
}
diff --git a/LiteDB.Tests/Query/Where_Tests.cs b/LiteDB.Tests/Query/Where_Tests.cs
index 00869f139..45b5db202 100644
--- a/LiteDB.Tests/Query/Where_Tests.cs
+++ b/LiteDB.Tests/Query/Where_Tests.cs
@@ -1,5 +1,6 @@
using FluentAssertions;
using System.Linq;
+using System.Threading.Tasks;
using Xunit;
namespace LiteDB.Tests.QueryTest
@@ -12,8 +13,8 @@ class Entity
public int Size { get; set; }
}
- [Fact]
- public void Query_Where_With_Parameter()
+ [Fact(Timeout = 30000)]
+ public async Task Query_Where_With_Parameter()
{
using var db = new PersonQueryData();
var (collection, local) = db.GetData();
@@ -27,10 +28,12 @@ public void Query_Where_With_Parameter()
.ToArray();
AssertEx.ArrayEqual(r0, r1, true);
+
+ await Task.CompletedTask;
}
- [Fact]
- public void Query_Multi_Where_With_Like()
+ [Fact(Timeout = 30000)]
+ public async Task Query_Multi_Where_With_Like()
{
using var db = new PersonQueryData();
var (collection, local) = db.GetData();
@@ -46,10 +49,12 @@ public void Query_Multi_Where_With_Like()
.ToArray();
AssertEx.ArrayEqual(r0, r1, true);
+
+ await Task.CompletedTask;
}
- [Fact]
- public void Query_Single_Where_With_And()
+ [Fact(Timeout = 30000)]
+ public async Task Query_Single_Where_With_And()
{
using var db = new PersonQueryData();
var (collection, local) = db.GetData();
@@ -63,10 +68,12 @@ public void Query_Single_Where_With_And()
.ToArray();
AssertEx.ArrayEqual(r0, r1, true);
+
+ await Task.CompletedTask;
}
- [Fact]
- public void Query_Single_Where_With_Or_And_In()
+ [Fact(Timeout = 30000)]
+ public async Task Query_Single_Where_With_Or_And_In()
{
using var db = new PersonQueryData();
var (collection, local) = db.GetData();
@@ -85,10 +92,12 @@ public void Query_Single_Where_With_Or_And_In()
AssertEx.ArrayEqual(r0, r1, true);
AssertEx.ArrayEqual(r1, r2, true);
+
+ await Task.CompletedTask;
}
- [Fact]
- public void Query_With_Array_Ids()
+ [Fact(Timeout = 30000)]
+ public async Task Query_With_Array_Ids()
{
using var db = new PersonQueryData();
var (collection, local) = db.GetData();
@@ -104,6 +113,8 @@ public void Query_With_Array_Ids()
.ToArray();
AssertEx.ArrayEqual(r0, r1, true);
+
+ await Task.CompletedTask;
}
}
-}
\ No newline at end of file
+}
From 07378b76799d336d0b384efad17a9c410f564ef1 Mon Sep 17 00:00:00 2001
From: Jonas Kamsker <11245306+JKamsker@users.noreply.github.com>
Date: Tue, 23 Sep 2025 18:53:19 +0200
Subject: [PATCH 28/53] Log start and completion for long-running tests
---
LiteDB.Tests/Engine/Rebuild_Crash_Tests.cs | 122 ++++++++-------
LiteDB.Tests/Query/Where_Tests.cs | 170 ++++++++++++++-------
2 files changed, 186 insertions(+), 106 deletions(-)
diff --git a/LiteDB.Tests/Engine/Rebuild_Crash_Tests.cs b/LiteDB.Tests/Engine/Rebuild_Crash_Tests.cs
index d5c5425f3..138d9b402 100644
--- a/LiteDB.Tests/Engine/Rebuild_Crash_Tests.cs
+++ b/LiteDB.Tests/Engine/Rebuild_Crash_Tests.cs
@@ -6,92 +6,110 @@
using System.Threading.Tasks;
using Xunit;
+using Xunit.Abstractions;
#if DEBUG || TESTING
namespace LiteDB.Tests.Engine
{
public class Rebuild_Crash_Tests
{
+ private readonly ITestOutputHelper _output;
+
+ public Rebuild_Crash_Tests(ITestOutputHelper output)
+ {
+ _output = output;
+ }
[Fact(Timeout = 30000)]
public async Task Rebuild_Crash_IO_Write_Error()
{
- var N = 1000;
+ var testName = nameof(Rebuild_Crash_IO_Write_Error);
+
+ _output.WriteLine($"starting {testName}");
- using (var file = new TempFile())
+ try
{
- var settings = new EngineSettings
+ var N = 1000;
+
+ using (var file = new TempFile())
{
- AutoRebuild = true,
- Filename = file.Filename,
- Password = "46jLz5QWd5fI3m4LiL2r"
- };
+ var settings = new EngineSettings
+ {
+ AutoRebuild = true,
+ Filename = file.Filename,
+ Password = "46jLz5QWd5fI3m4LiL2r"
+ };
- var initial = new DateTime(2024, 1, 1);
+ var initial = new DateTime(2024, 1, 1);
- var data = Enumerable.Range(1, N).Select(i => new BsonDocument
- {
- ["_id"] = i,
- ["name"] = $"user-{i:D4}",
- ["age"] = 18 + (i % 60),
- ["created"] = initial.AddDays(i),
- ["lorem"] = new string((char)('a' + (i % 26)), 800)
- }).ToArray();
-
- try
- {
- using (var db = new LiteEngine(settings))
+ var data = Enumerable.Range(1, N).Select(i => new BsonDocument
+ {
+ ["_id"] = i,
+ ["name"] = $"user-{i:D4}",
+ ["age"] = 18 + (i % 60),
+ ["created"] = initial.AddDays(i),
+ ["lorem"] = new string((char)('a' + (i % 26)), 800)
+ }).ToArray();
+
+ try
{
- db.SimulateDiskWriteFail = (page) =>
+ using (var db = new LiteEngine(settings))
{
- var p = new BasePage(page);
-
- if (p.PageID == 28)
+ db.SimulateDiskWriteFail = (page) =>
{
- p.ColID.Should().Be(1);
- p.PageType.Should().Be(PageType.Data);
+ var p = new BasePage(page);
- page.Write((uint)123123123, 8192 - 4);
- }
- };
+ if (p.PageID == 28)
+ {
+ p.ColID.Should().Be(1);
+ p.PageType.Should().Be(PageType.Data);
- db.Pragma("USER_VERSION", 123);
+ page.Write((uint)123123123, 8192 - 4);
+ }
+ };
- db.EnsureIndex("col1", "idx_age", "$.age", false);
+ db.Pragma("USER_VERSION", 123);
- db.Insert("col1", data, BsonAutoId.Int32);
- db.Insert("col2", data, BsonAutoId.Int32);
+ db.EnsureIndex("col1", "idx_age", "$.age", false);
- db.Checkpoint();
+ db.Insert("col1", data, BsonAutoId.Int32);
+ db.Insert("col2", data, BsonAutoId.Int32);
- // will fail
- var col1 = db.Query("col1", Query.All()).ToList().Count;
+ db.Checkpoint();
+
+ // will fail
+ var col1 = db.Query("col1", Query.All()).ToList().Count;
- // never run here
- Assert.Fail("should get error in query");
+ // never run here
+ Assert.Fail("should get error in query");
+ }
+ }
+ catch (Exception ex)
+ {
+ Assert.True(ex is LiteException lex && lex.ErrorCode == 999);
}
- }
- catch (Exception ex)
- {
- Assert.True(ex is LiteException lex && lex.ErrorCode == 999);
- }
- //Console.WriteLine("Recovering database...");
+ //Console.WriteLine("Recovering database...");
- using (var db = new LiteEngine(settings))
- {
- var col1 = db.Query("col1", Query.All()).ToList().Count;
- var col2 = db.Query("col2", Query.All()).ToList().Count;
- var errors = db.Query("_rebuild_errors", Query.All()).ToList().Count;
+ using (var db = new LiteEngine(settings))
+ {
+ var col1 = db.Query("col1", Query.All()).ToList().Count;
+ var col2 = db.Query("col2", Query.All()).ToList().Count;
+ var errors = db.Query("_rebuild_errors", Query.All()).ToList().Count;
- col1.Should().Be(N - 1);
- col2.Should().Be(N);
- errors.Should().Be(1);
+ col1.Should().Be(N - 1);
+ col2.Should().Be(N);
+ errors.Should().Be(1);
+ }
}
await Task.CompletedTask;
}
+ finally
+ {
+ _output.WriteLine($"{testName} completed");
+ }
}
}
}
diff --git a/LiteDB.Tests/Query/Where_Tests.cs b/LiteDB.Tests/Query/Where_Tests.cs
index 45b5db202..8d83ceb27 100644
--- a/LiteDB.Tests/Query/Where_Tests.cs
+++ b/LiteDB.Tests/Query/Where_Tests.cs
@@ -1,12 +1,19 @@
-using FluentAssertions;
-using System.Linq;
+using System.Linq;
using System.Threading.Tasks;
using Xunit;
+using Xunit.Abstractions;
namespace LiteDB.Tests.QueryTest
{
public class Where_Tests : PersonQueryData
{
+ private readonly ITestOutputHelper _output;
+
+ public Where_Tests(ITestOutputHelper output)
+ {
+ _output = output;
+ }
+
class Entity
{
public string Name { get; set; }
@@ -16,18 +23,29 @@ class Entity
[Fact(Timeout = 30000)]
public async Task Query_Where_With_Parameter()
{
- using var db = new PersonQueryData();
- var (collection, local) = db.GetData();
+ var testName = nameof(Query_Where_With_Parameter);
- var r0 = local
- .Where(x => x.Address.State == "FL")
- .ToArray();
+ _output.WriteLine($"starting {testName}");
- var r1 = collection.Query()
- .Where(x => x.Address.State == "FL")
- .ToArray();
+ try
+ {
+ using var db = new PersonQueryData();
+ var (collection, local) = db.GetData();
- AssertEx.ArrayEqual(r0, r1, true);
+ var r0 = local
+ .Where(x => x.Address.State == "FL")
+ .ToArray();
+
+ var r1 = collection.Query()
+ .Where(x => x.Address.State == "FL")
+ .ToArray();
+
+ AssertEx.ArrayEqual(r0, r1, true);
+ }
+ finally
+ {
+ _output.WriteLine($"{testName} completed");
+ }
await Task.CompletedTask;
}
@@ -35,20 +53,31 @@ public async Task Query_Where_With_Parameter()
[Fact(Timeout = 30000)]
public async Task Query_Multi_Where_With_Like()
{
- using var db = new PersonQueryData();
- var (collection, local) = db.GetData();
+ var testName = nameof(Query_Multi_Where_With_Like);
+
+ _output.WriteLine($"starting {testName}");
- var r0 = local
- .Where(x => x.Age >= 10 && x.Age <= 40)
- .Where(x => x.Name.StartsWith("Ge"))
- .ToArray();
+ try
+ {
+ using var db = new PersonQueryData();
+ var (collection, local) = db.GetData();
- var r1 = collection.Query()
- .Where(x => x.Age >= 10 && x.Age <= 40)
- .Where(x => x.Name.StartsWith("Ge"))
- .ToArray();
+ var r0 = local
+ .Where(x => x.Age >= 10 && x.Age <= 40)
+ .Where(x => x.Name.StartsWith("Ge"))
+ .ToArray();
- AssertEx.ArrayEqual(r0, r1, true);
+ var r1 = collection.Query()
+ .Where(x => x.Age >= 10 && x.Age <= 40)
+ .Where(x => x.Name.StartsWith("Ge"))
+ .ToArray();
+
+ AssertEx.ArrayEqual(r0, r1, true);
+ }
+ finally
+ {
+ _output.WriteLine($"{testName} completed");
+ }
await Task.CompletedTask;
}
@@ -56,18 +85,29 @@ public async Task Query_Multi_Where_With_Like()
[Fact(Timeout = 30000)]
public async Task Query_Single_Where_With_And()
{
- using var db = new PersonQueryData();
- var (collection, local) = db.GetData();
+ var testName = nameof(Query_Single_Where_With_And);
+
+ _output.WriteLine($"starting {testName}");
- var r0 = local
- .Where(x => x.Age == 25 && x.Active)
- .ToArray();
+ try
+ {
+ using var db = new PersonQueryData();
+ var (collection, local) = db.GetData();
- var r1 = collection.Query()
- .Where("age = 25 AND active = true")
- .ToArray();
+ var r0 = local
+ .Where(x => x.Age == 25 && x.Active)
+ .ToArray();
- AssertEx.ArrayEqual(r0, r1, true);
+ var r1 = collection.Query()
+ .Where("age = 25 AND active = true")
+ .ToArray();
+
+ AssertEx.ArrayEqual(r0, r1, true);
+ }
+ finally
+ {
+ _output.WriteLine($"{testName} completed");
+ }
await Task.CompletedTask;
}
@@ -75,23 +115,34 @@ public async Task Query_Single_Where_With_And()
[Fact(Timeout = 30000)]
public async Task Query_Single_Where_With_Or_And_In()
{
- using var db = new PersonQueryData();
- var (collection, local) = db.GetData();
+ var testName = nameof(Query_Single_Where_With_Or_And_In);
+
+ _output.WriteLine($"starting {testName}");
- var r0 = local
- .Where(x => x.Age == 25 || x.Age == 26 || x.Age == 27)
- .ToArray();
+ try
+ {
+ using var db = new PersonQueryData();
+ var (collection, local) = db.GetData();
- var r1 = collection.Query()
- .Where("age = 25 OR age = 26 OR age = 27")
- .ToArray();
+ var r0 = local
+ .Where(x => x.Age == 25 || x.Age == 26 || x.Age == 27)
+ .ToArray();
- var r2 = collection.Query()
- .Where("age IN [25, 26, 27]")
- .ToArray();
+ var r1 = collection.Query()
+ .Where("age = 25 OR age = 26 OR age = 27")
+ .ToArray();
- AssertEx.ArrayEqual(r0, r1, true);
- AssertEx.ArrayEqual(r1, r2, true);
+ var r2 = collection.Query()
+ .Where("age IN [25, 26, 27]")
+ .ToArray();
+
+ AssertEx.ArrayEqual(r0, r1, true);
+ AssertEx.ArrayEqual(r1, r2, true);
+ }
+ finally
+ {
+ _output.WriteLine($"{testName} completed");
+ }
await Task.CompletedTask;
}
@@ -99,20 +150,31 @@ public async Task Query_Single_Where_With_Or_And_In()
[Fact(Timeout = 30000)]
public async Task Query_With_Array_Ids()
{
- using var db = new PersonQueryData();
- var (collection, local) = db.GetData();
+ var testName = nameof(Query_With_Array_Ids);
+
+ _output.WriteLine($"starting {testName}");
+
+ try
+ {
+ using var db = new PersonQueryData();
+ var (collection, local) = db.GetData();
- var ids = new int[] { 1, 2, 3 };
+ var ids = new int[] { 1, 2, 3 };
- var r0 = local
- .Where(x => ids.Contains(x.Id))
- .ToArray();
+ var r0 = local
+ .Where(x => ids.Contains(x.Id))
+ .ToArray();
- var r1 = collection.Query()
- .Where(x => ids.Contains(x.Id))
- .ToArray();
+ var r1 = collection.Query()
+ .Where(x => ids.Contains(x.Id))
+ .ToArray();
- AssertEx.ArrayEqual(r0, r1, true);
+ AssertEx.ArrayEqual(r0, r1, true);
+ }
+ finally
+ {
+ _output.WriteLine($"{testName} completed");
+ }
await Task.CompletedTask;
}
From 7be883d2a5a485f15c0091ab6739d6cba2dc1ce6 Mon Sep 17 00:00:00 2001
From: JKamsker <11245306+JKamsker@users.noreply.github.com>
Date: Tue, 23 Sep 2025 19:54:14 +0200
Subject: [PATCH 29/53] ifdef
---
LiteDB.Tests/Engine/Transactions_Tests.cs | 9 +++++++--
1 file changed, 7 insertions(+), 2 deletions(-)
diff --git a/LiteDB.Tests/Engine/Transactions_Tests.cs b/LiteDB.Tests/Engine/Transactions_Tests.cs
index fa8582056..31375e952 100644
--- a/LiteDB.Tests/Engine/Transactions_Tests.cs
+++ b/LiteDB.Tests/Engine/Transactions_Tests.cs
@@ -1,4 +1,5 @@
-using System.IO;
+using System.Diagnostics;
+using System.IO;
using System.Linq;
using System.Reflection;
using System.Threading;
@@ -74,7 +75,7 @@ public async Task Transaction_Write_Lock_Timeout()
}
}
-
+
[CpuBoundFact(MIN_CPU_COUNT)]
public async Task Transaction_Avoid_Dirty_Read()
{
@@ -134,6 +135,7 @@ public async Task Transaction_Avoid_Dirty_Read()
await Task.WhenAll(ta, tb);
}
}
+
[CpuBoundFact(MIN_CPU_COUNT)]
public async Task Transaction_Read_Version()
@@ -231,6 +233,7 @@ public void Test_Transaction_States()
}
}
+#if DEBUG || TESTING
[Fact]
public void Transaction_Rollback_Should_Skip_ReadOnly_Buffers_From_Safepoint()
{
@@ -332,6 +335,8 @@ public void Transaction_Rollback_Should_Discard_Writable_Dirty_Pages()
collection.Count().Should().Be(0);
}
+#endif
+
private class BlockingStream : MemoryStream
{
public readonly AutoResetEvent Blocked = new AutoResetEvent(false);
From ebc999978f3d672292c2fb832f8bd282ce1f8ec6 Mon Sep 17 00:00:00 2001
From: JKamsker <11245306+JKamsker@users.noreply.github.com>
Date: Sun, 28 Sep 2025 00:58:28 +0200
Subject: [PATCH 30/53] Switch to gitversion
---
.config/dotnet-tools.json | 13 +++
.github/workflows/publish-prerelease.yml | 76 +++++++++-----
.github/workflows/publish-release.yml | 78 ++++++++++----
.github/workflows/tag-version.yml | 125 +++++++++++++++++++++++
Directory.Build.props | 19 +++-
GitVersion.yml | 39 +++++++
docs/versioning.md | 54 ++++++++++
scripts/gitver/gitversion.ps1 | 101 ++++++++++++++++++
scripts/gitver/gitversion.sh | 77 ++++++++++++++
9 files changed, 533 insertions(+), 49 deletions(-)
create mode 100644 .config/dotnet-tools.json
create mode 100644 .github/workflows/tag-version.yml
create mode 100644 GitVersion.yml
create mode 100644 docs/versioning.md
create mode 100644 scripts/gitver/gitversion.ps1
create mode 100644 scripts/gitver/gitversion.sh
diff --git a/.config/dotnet-tools.json b/.config/dotnet-tools.json
new file mode 100644
index 000000000..38d3dc968
--- /dev/null
+++ b/.config/dotnet-tools.json
@@ -0,0 +1,13 @@
+{
+ "version": 1,
+ "isRoot": true,
+ "tools": {
+ "gitversion.tool": {
+ "version": "6.4.0",
+ "commands": [
+ "dotnet-gitversion"
+ ],
+ "rollForward": false
+ }
+ }
+}
\ No newline at end of file
diff --git a/.github/workflows/publish-prerelease.yml b/.github/workflows/publish-prerelease.yml
index 19bee31a4..ffd7f99e6 100644
--- a/.github/workflows/publish-prerelease.yml
+++ b/.github/workflows/publish-prerelease.yml
@@ -22,14 +22,35 @@ jobs:
with:
dotnet-version: 8.0.x
- - name: Set version with padded build number
- id: version
+ - name: Restore .NET tools
+ run: dotnet tool restore
+
+ - name: Compute semantic version
+ id: gitversion
+ shell: bash
run: |
- # Pad the run number with leading zeros (4 digits)
- PADDED_BUILD_NUMBER=$(printf "%04d" ${{ github.run_number }})
- PACKAGE_VERSION="6.0.0-prerelease.${PADDED_BUILD_NUMBER}"
- echo "package_version=${PACKAGE_VERSION}" >> "$GITHUB_OUTPUT"
- echo "Version set to $PACKAGE_VERSION"
+ set -euo pipefail
+ JSON=$(dotnet tool run dotnet-gitversion /output json)
+ echo "$JSON"
+
+ NUGET_VERSION=$(echo "$JSON" | jq -r '.NuGetVersion')
+ FULL_SEMVER=$(echo "$JSON" | jq -r '.FullSemVer')
+ SHORT_SHA=$(echo "$JSON" | jq -r '.ShortSha')
+ MAJOR_MINOR_PATCH=$(echo "$JSON" | jq -r '.MajorMinorPatch')
+ PR_LABEL=$(echo "$JSON" | jq -r '.PreReleaseLabel')
+ PR_NUMBER=$(echo "$JSON" | jq -r '.PreReleaseNumber')
+
+ if [[ "$PR_LABEL" != "" && "$PR_LABEL" != "null" ]]; then
+ printf -v PR_PADDED '%04d' "$PR_NUMBER"
+ RELEASE_VERSION_PADDED="${MAJOR_MINOR_PATCH}-${PR_LABEL}.${PR_PADDED}"
+ else
+ RELEASE_VERSION_PADDED="$MAJOR_MINOR_PATCH"
+ fi
+
+ echo "nugetVersion=$NUGET_VERSION" >> "$GITHUB_OUTPUT"
+ echo "fullSemVer=$FULL_SEMVER" >> "$GITHUB_OUTPUT"
+ echo "releaseVersionPadded=$RELEASE_VERSION_PADDED" >> "$GITHUB_OUTPUT"
+ echo "informational=${NUGET_VERSION}+${SHORT_SHA}" >> "$GITHUB_OUTPUT"
- name: Restore
run: dotnet restore LiteDB.sln
@@ -38,23 +59,30 @@ jobs:
timeout-minutes: 5
run: dotnet test LiteDB.sln --configuration Release --verbosity normal --settings tests.runsettings --logger "trx;LogFileName=TestResults.trx" --logger "console;verbosity=detailed" /p:DefineConstants=TESTING
+ - name: Upload test results
+ uses: actions/upload-artifact@v4
+ if: always()
+ with:
+ name: test-results
+ path: "**/*TestResults*.trx"
+
+ - name: Upload hang dumps (if any)
+ uses: actions/upload-artifact@v4
+ if: always()
+ with:
+ name: hangdumps
+ path: |
+ hangdumps
+ **/TestResults/**/*.dmp
+ if-no-files-found: ignore
+
- name: Build
- run: dotnet build LiteDB.sln --configuration Release --no-restore
+ if: success()
+ run: dotnet build LiteDB/LiteDB.csproj --configuration Release --no-restore /p:ContinuousIntegrationBuild=true
- name: Pack
- run: |
- dotnet pack LiteDB/LiteDB.csproj --configuration Release --no-build -o artifacts -p:PackageVersion=${{ steps.version.outputs.package_version }} -p:Version=${{ steps.version.outputs.package_version }}
-
- - name: Publish GitHub prerelease
- uses: softprops/action-gh-release@v2
- with:
- tag_name: v${{ steps.version.outputs.package_version }}
- name: LiteDB ${{ steps.version.outputs.package_version }}
- generate_release_notes: true
- prerelease: true
- files: artifacts/*.nupkg
- env:
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ if: success()
+ run: dotnet pack LiteDB/LiteDB.csproj --configuration Release --no-build -o artifacts /p:ContinuousIntegrationBuild=true
- name: Retrieve secrets from Bitwarden
uses: bitwarden/sm-action@v2
@@ -65,7 +93,9 @@ jobs:
265b2fb6-2cf0-4859-9bc8-b24c00ab4378 > NUGET_API_KEY
- name: Push package to NuGet
+ if: success()
+ env:
+ PACKAGE_VERSION: ${{ steps.gitversion.outputs.nugetVersion }}
run: |
+ echo "Pushing LiteDB version $PACKAGE_VERSION"
dotnet nuget push "artifacts/*.nupkg" --api-key "$NUGET_API_KEY" --source https://api.nuget.org/v3/index.json --skip-duplicate
-
-
diff --git a/.github/workflows/publish-release.yml b/.github/workflows/publish-release.yml
index baf394b85..d6d61310d 100644
--- a/.github/workflows/publish-release.yml
+++ b/.github/workflows/publish-release.yml
@@ -1,15 +1,23 @@
name: Publish release
on:
- push:
- branches:
- - main
- tags:
- - v*
+ workflow_dispatch:
+ inputs:
+ ref:
+ description: Branch, tag, or SHA to release from
+ default: master
+ required: true
+ publish_nuget:
+ description: Push packages to NuGet
+ type: boolean
+ default: false
+ publish_github:
+ description: Create or update GitHub release
+ type: boolean
+ default: false
jobs:
publish:
- if: startsWith(github.ref, 'refs/tags/v')
runs-on: ubuntu-latest
permissions:
contents: write
@@ -19,36 +27,58 @@ jobs:
uses: actions/checkout@v4
with:
fetch-depth: 0
+ ref: ${{ inputs.ref }}
- name: Set up .NET SDK
uses: actions/setup-dotnet@v4
with:
dotnet-version: 8.0.x
+ - name: Restore .NET tools
+ run: dotnet tool restore
+
+ - name: Compute semantic version
+ id: gitversion
+ shell: bash
+ run: |
+ set -euo pipefail
+ JSON=$(dotnet tool run dotnet-gitversion /output json)
+ echo "$JSON"
+
+ NUGET_VERSION=$(echo "$JSON" | jq -r '.NuGetVersion')
+ FULL_SEMVER=$(echo "$JSON" | jq -r '.FullSemVer')
+ SHORT_SHA=$(echo "$JSON" | jq -r '.ShortSha')
+ MAJOR_MINOR_PATCH=$(echo "$JSON" | jq -r '.MajorMinorPatch')
+ PR_LABEL=$(echo "$JSON" | jq -r '.PreReleaseLabel')
+ PR_NUMBER=$(echo "$JSON" | jq -r '.PreReleaseNumber')
+
+ if [[ "$PR_LABEL" != "" && "$PR_LABEL" != "null" ]]; then
+ printf -v PR_PADDED '%04d' "$PR_NUMBER"
+ RELEASE_VERSION_PADDED="${MAJOR_MINOR_PATCH}-${PR_LABEL}.${PR_PADDED}"
+ else
+ RELEASE_VERSION_PADDED="$MAJOR_MINOR_PATCH"
+ fi
+
+ echo "nugetVersion=$NUGET_VERSION" >> "$GITHUB_OUTPUT"
+ echo "fullSemVer=$FULL_SEMVER" >> "$GITHUB_OUTPUT"
+ echo "releaseVersionPadded=$RELEASE_VERSION_PADDED" >> "$GITHUB_OUTPUT"
+ echo "informational=${NUGET_VERSION}+${SHORT_SHA}" >> "$GITHUB_OUTPUT"
+
- name: Restore
run: dotnet restore LiteDB.sln
- name: Build
- run: dotnet build LiteDB.sln --configuration Release --no-restore
+ run: dotnet build LiteDB.sln --configuration Release --no-restore /p:ContinuousIntegrationBuild=true
- name: Test
- timeout-minutes: 5
+ timeout-minutes: 8
run: dotnet test LiteDB.sln --configuration Release --no-build --verbosity normal --settings tests.runsettings --logger "trx;LogFileName=TestResults.trx" --logger "console;verbosity=detailed"
- name: Pack
- run: |
- dotnet pack LiteDB/LiteDB.csproj --configuration Release --no-build -o artifacts
-
- - name: Capture package version
- id: version
- run: |
- PACKAGE_PATH=$(ls artifacts/LiteDB.*.nupkg | head -n 1)
- PACKAGE_FILENAME=$(basename "$PACKAGE_PATH")
- PACKAGE_VERSION=${PACKAGE_FILENAME#LiteDB.}
- PACKAGE_VERSION=${PACKAGE_VERSION%.nupkg}
- echo "package_version=${PACKAGE_VERSION}" >> "$GITHUB_OUTPUT"
+ run: dotnet pack LiteDB/LiteDB.csproj --configuration Release --no-build -o artifacts /p:ContinuousIntegrationBuild=true
- name: Retrieve secrets from Bitwarden
+ if: ${{ inputs.publish_nuget }}
uses: bitwarden/sm-action@v2
with:
access_token: ${{ secrets.BW_ACCESS_TOKEN }}
@@ -57,15 +87,21 @@ jobs:
265b2fb6-2cf0-4859-9bc8-b24c00ab4378 > NUGET_API_KEY
- name: Push package to NuGet
+ if: ${{ inputs.publish_nuget }}
+ env:
+ PACKAGE_VERSION: ${{ steps.gitversion.outputs.nugetVersion }}
run: |
+ echo "Pushing LiteDB version $PACKAGE_VERSION"
dotnet nuget push "artifacts/*.nupkg" --api-key "$NUGET_API_KEY" --source https://api.nuget.org/v3/index.json --skip-duplicate
- name: Publish GitHub release
+ if: ${{ inputs.publish_github }}
uses: softprops/action-gh-release@v2
with:
- tag_name: v${{ steps.version.outputs.package_version }}
- name: LiteDB ${{ steps.version.outputs.package_version }}
+ tag_name: v${{ steps.gitversion.outputs.releaseVersionPadded }}
+ name: LiteDB ${{ steps.gitversion.outputs.releaseVersionPadded }}
generate_release_notes: true
files: artifacts/*.nupkg
+ target_commitish: ${{ github.sha }}
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.github/workflows/tag-version.yml b/.github/workflows/tag-version.yml
new file mode 100644
index 000000000..651cfbeb9
--- /dev/null
+++ b/.github/workflows/tag-version.yml
@@ -0,0 +1,125 @@
+name: Tag version
+
+on:
+ workflow_dispatch:
+ inputs:
+ ref:
+ description: Branch or SHA to tag
+ default: master
+ required: true
+ bump:
+ description: Version component to increment
+ type: choice
+ options:
+ - patch
+ - minor
+ - major
+ default: patch
+
+jobs:
+ create-tag:
+ runs-on: ubuntu-latest
+ permissions:
+ contents: write
+
+ steps:
+ - name: Check out repository
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 0
+ ref: ${{ inputs.ref }}
+
+ - name: Set up .NET SDK
+ uses: actions/setup-dotnet@v4
+ with:
+ dotnet-version: 8.0.x
+
+ - name: Restore .NET tools
+ run: dotnet tool restore
+
+ - name: Determine current version
+ id: gitversion
+ shell: bash
+ run: |
+ set -euo pipefail
+ JSON=$(dotnet tool run dotnet-gitversion /output json)
+ echo "$JSON"
+
+ MAJOR_MINOR_PATCH=$(echo "$JSON" | jq -r '.MajorMinorPatch')
+ PR_LABEL=$(echo "$JSON" | jq -r '.PreReleaseLabel')
+ PR_NUMBER=$(echo "$JSON" | jq -r '.PreReleaseNumber')
+
+ if [[ "$PR_LABEL" != "" && "$PR_LABEL" != "null" ]]; then
+ printf -v PR_PADDED '%04d' "$PR_NUMBER"
+ SEMVER="${MAJOR_MINOR_PATCH}-${PR_LABEL}.${PR_PADDED}"
+ else
+ SEMVER="$MAJOR_MINOR_PATCH"
+ fi
+
+ echo "fullSemVer=$SEMVER" >> "$GITHUB_OUTPUT"
+ echo "major=$(echo "$JSON" | jq -r '.Major')" >> "$GITHUB_OUTPUT"
+ echo "minor=$(echo "$JSON" | jq -r '.Minor')" >> "$GITHUB_OUTPUT"
+ echo "patch=$(echo "$JSON" | jq -r '.Patch')" >> "$GITHUB_OUTPUT"
+
+ - name: Calculate next version
+ id: next
+ shell: bash
+ env:
+ BUMP: ${{ inputs.bump }}
+ MAJOR: ${{ steps.gitversion.outputs.major }}
+ MINOR: ${{ steps.gitversion.outputs.minor }}
+ PATCH: ${{ steps.gitversion.outputs.patch }}
+ run: |
+ set -euo pipefail
+ major=$MAJOR
+ minor=$MINOR
+ patch=$PATCH
+
+ case "$BUMP" in
+ major)
+ major=$((major + 1))
+ minor=0
+ patch=0
+ ;;
+ minor)
+ minor=$((minor + 1))
+ patch=0
+ ;;
+ patch)
+ patch=$((patch + 1))
+ ;;
+ esac
+
+ target="${major}.${minor}.${patch}"
+ echo "target=$target" >> "$GITHUB_OUTPUT"
+
+ - name: Configure git
+ run: |
+ git config user.name "github-actions[bot]"
+ git config user.email "github-actions[bot]@users.noreply.github.com"
+
+ - name: Create annotated tag
+ env:
+ TARGET: ${{ steps.next.outputs.target }}
+ run: |
+ set -euo pipefail
+ if git rev-parse -q --verify "refs/tags/v${TARGET}" >/dev/null; then
+ echo "Tag v${TARGET} already exists" >&2
+ exit 1
+ fi
+
+ git tag -a "v${TARGET}" -m "Release tag v${TARGET}"
+
+ - name: Push tag
+ env:
+ TARGET: ${{ steps.next.outputs.target }}
+ run: |
+ set -euo pipefail
+ git push origin "v${TARGET}"
+
+ - name: Summary
+ if: always()
+ env:
+ TARGET: ${{ steps.next.outputs.target }}
+ FULL: ${{ steps.gitversion.outputs.fullSemVer }}
+ run: echo "Created tag v${TARGET} (previous build was ${FULL})"
diff --git a/Directory.Build.props b/Directory.Build.props
index dbc5e80b9..06f762a7d 100644
--- a/Directory.Build.props
+++ b/Directory.Build.props
@@ -1,12 +1,21 @@
- v
-
- prerelease
- 6.0
+ true
+ false
+
+
+
+ <_GitVersionPreReleaseNumberPadded Condition="'$(GitVersion_PreReleaseNumber)' != ''">$([System.String]::Format("{0:0000}", $(GitVersion_PreReleaseNumber)))
+ <_GitVersionCalculatedSemVer Condition="'$(GitVersion_PreReleaseLabel)' != ''">$(GitVersion_MajorMinorPatch)-$(GitVersion_PreReleaseLabel).$(_GitVersionPreReleaseNumberPadded)
+ <_GitVersionCalculatedSemVer Condition="'$(GitVersion_PreReleaseLabel)' == ''">$(GitVersion_MajorMinorPatch)
+ $(_GitVersionCalculatedSemVer)
+ $(_GitVersionCalculatedSemVer)
+ $(GitVersion_AssemblySemVer)
+ $(GitVersion_AssemblySemFileVer)
+ $(_GitVersionCalculatedSemVer)+$(GitVersion_ShortSha)
-
+
diff --git a/GitVersion.yml b/GitVersion.yml
new file mode 100644
index 000000000..b532d865a
--- /dev/null
+++ b/GitVersion.yml
@@ -0,0 +1,39 @@
+branches:
+ main:
+ regex: ^master$|^main$
+ increment: Patch
+ label: ''
+ prevent-increment:
+ of-merged-branch: true
+ develop:
+ regex: ^dev(elop)?(ment)?$
+ increment: Patch
+ label: prerelease
+ source-branches:
+ - main
+ prevent-increment:
+ of-merged-branch: true
+ feature:
+ regex: ^(feature|bugfix|chore|refactor)/.*$
+ increment: Inherit
+ label: ''
+ source-branches:
+ - develop
+ - main
+ pull-request:
+ regex: ^(pull|pr)/.*$
+ increment: Inherit
+ label: pr
+ source-branches:
+ - develop
+ - main
+ release:
+ regex: ^release/.*$
+ increment: Patch
+ label: rc
+ source-branches:
+ - develop
+ - main
+commit-message-incrementing: Enabled
+commit-date-format: yyyy-MM-dd
+tag-prefix: 'v'
diff --git a/docs/versioning.md b/docs/versioning.md
new file mode 100644
index 000000000..fc94972b8
--- /dev/null
+++ b/docs/versioning.md
@@ -0,0 +1,54 @@
+# Versioning
+
+LiteDB uses GitVersion for semantic versioning across local builds and CI. The configuration lives in `GitVersion.yml` and is consumed by both MSBuild (via `GitVersion.MsBuild`) and the GitHub workflows.
+
+## Branch semantics
+
+- `master` is the mainline branch. Each direct commit or merge increments the patch number unless an annotated `v*` tag (or `+semver:` directive) requests a larger bump.
+- `dev` tracks the next patch version and produces prerelease builds like `6.0.1-prerelease.0003`. The numeric suffix is zero-padded for predictable ordering.
+- Feature branches (`feature/*`, `bugfix/*`, `chore/*`, `refactor/*`, `pr/*`) inherit their base version but do not publish artifacts. They exist purely for validation.
+
+The first prerelease that precedes the 6.0.0 release (commit `a0298891ddcaf7ba48c679f1052a6f442f6c094f`) remains the baseline for the prerelease numbering history.
+
+## GitHub workflows
+
+- `publish-prerelease.yml` runs on every push to `dev`. It resolves the semantic version with GitVersion, runs the full test suite, packs the library, and pushes the resulting prerelease package to NuGet. GitHub releases are intentionally skipped for now.
+- `publish-release.yml` is manual (`workflow_dispatch`). It computes the release version and can optionally push to NuGet and/or create a GitHub release via boolean inputs. GitHub releases use a zero-padded prerelease counter for predictable sorting in the UI, while NuGet publishing keeps the standard GitVersion output. By default it performs a dry run (build + pack only) so we keep the publishing path disabled until explicitly requested.
+- `tag-version.yml` lets you start a manual major/minor/patch bump. It tags the specified ref (defaults to `master`) with the next `v*` version so future builds pick up the new baseline. Use this after validating a release candidate.
+
+## Dry-running versions
+
+GitVersion is registered as a local dotnet tool. Restore the tool once (`dotnet tool restore`) and use one of the helpers:
+
+```powershell
+# PowerShell (Windows, macOS, Linux)
+./scripts/gitver/gitversion.ps1 # show version for HEAD
+./scripts/gitver/gitversion.ps1 dev~3 # inspect an arbitrary commit
+./scripts/gitver/gitversion.ps1 -Json # emit raw JSON
+```
+
+```bash
+# Bash (macOS, Linux, Git Bash on Windows)
+./scripts/gitver/gitversion.sh # show version for HEAD
+./scripts/gitver/gitversion.sh dev~3 # inspect an arbitrary commit
+./scripts/gitver/gitversion.sh --json # emit raw JSON
+```
+
+Both scripts resolve the git ref to a SHA, execute GitVersion with the repository configuration, and echo the key fields (FullSemVer, NuGetVersion, InformationalVersion, BranchName).
+
+## Manual bumps
+
+1. Merge the desired changes into `master`.
+2. Run the **Tag version** workflow from the Actions tab, pick `master`, and choose `patch`, `minor`, or `major`.
+3. The workflow creates and pushes the annotated `v*` tag. The next prerelease build from `dev` will increment accordingly, and the next stable run from `master` will match the tagged version.
+
+`+semver:` commit messages are still honoured. For example, including `+semver: minor` in a commit on `master` advances the minor version even without a tag.
+
+## Working locally
+
+- `dotnet build` / `dotnet pack` automatically consume the GitVersion-generated values; no extra parameters are required.
+- To bypass GitVersion temporarily (e.g., for experiments), pass `/p:DisableGitVersionTask=true` to the build command (the switch documented by GitVersion.MsBuild). Removing the property restores normal behaviour.
+- When you are ready to publish a prerelease, push to `dev` and let the workflow handle packing and the NuGet push.
+
+For historical reference, the `v6.0.0-prerelease.0001` tag remains anchored to commit `a0298891ddcaf7ba48c679f1052a6f442f6c094f`, ensuring version ordering continues correctly from the original timeline.
+
diff --git a/scripts/gitver/gitversion.ps1 b/scripts/gitver/gitversion.ps1
new file mode 100644
index 000000000..c28317b6d
--- /dev/null
+++ b/scripts/gitver/gitversion.ps1
@@ -0,0 +1,101 @@
+[CmdletBinding()]
+param(
+ [Parameter(Position = 0)]
+ [string]$Ref = 'HEAD',
+ [string]$Branch,
+ [switch]$Json,
+ [switch]$NoRestore
+)
+
+$ErrorActionPreference = 'Stop'
+
+$repoRoot = Resolve-Path -LiteralPath (Join-Path $PSScriptRoot '..' '..')
+$manifestPath = Join-Path $repoRoot '.config/dotnet-tools.json'
+$gitVersionConfig = Join-Path $repoRoot 'GitVersion.yml'
+
+if (-not (Test-Path $manifestPath)) {
+ throw "Tool manifest not found at $manifestPath"
+}
+
+if (-not (Test-Path $gitVersionConfig)) {
+ throw "GitVersion configuration not found at $gitVersionConfig"
+}
+
+$sha = ((& git -C $repoRoot rev-parse --verify --quiet "$Ref").Trim())
+if (-not $sha) {
+ throw "Unable to resolve git ref '$Ref'"
+}
+
+if (-not $Branch) {
+ $candidates = (& git -C $repoRoot branch --contains $sha) | ForEach-Object {
+ ($_ -replace '^[*+\s]+', '').Trim()
+ } | Where-Object { $_ }
+
+ foreach ($candidate in @('dev', 'develop', 'master', 'main')) {
+ if ($candidates -contains $candidate) {
+ $Branch = $candidate
+ break
+ }
+ }
+
+ if (-not $Branch -and $candidates) {
+ $Branch = $candidates[0]
+ }
+}
+
+if (-not $Branch) {
+ $Branch = "gv-temp-" + $sha.Substring(0, 7)
+}
+
+$tempRoot = Join-Path ([System.IO.Path]::GetTempPath()) ("litedb-gv-" + [Guid]::NewGuid().ToString('N'))
+
+try {
+ git clone --quiet --local --no-hardlinks "$repoRoot" "$tempRoot"
+
+ Push-Location -LiteralPath $tempRoot
+
+ git checkout -B "$Branch" "$sha" | Out-Null
+
+ if (-not (Test-Path '.config')) {
+ New-Item -ItemType Directory -Path '.config' | Out-Null
+ }
+
+ Copy-Item -Path $manifestPath -Destination '.config/dotnet-tools.json' -Force
+ Copy-Item -Path $gitVersionConfig -Destination 'GitVersion.yml' -Force
+
+ if (-not $NoRestore) {
+ dotnet tool restore | Out-Null
+ }
+
+ $jsonText = dotnet tool run dotnet-gitversion /output json
+
+ if ($LASTEXITCODE -ne 0) {
+ throw 'dotnet-gitversion failed'
+ }
+
+ if ($Json) {
+ $jsonText
+ return
+ }
+
+ $data = $jsonText | ConvertFrom-Json
+
+ $semVer = $data.MajorMinorPatch
+ if ([string]::IsNullOrEmpty($data.PreReleaseLabel) -eq $false) {
+ $preNumber = [int]$data.PreReleaseNumber
+ $semVer = '{0}-{1}.{2:0000}' -f $data.MajorMinorPatch, $data.PreReleaseLabel, $preNumber
+ }
+
+ $line = '{0,-22} {1}'
+ Write-Host ($line -f 'Resolved SHA:', $sha)
+ Write-Host ($line -f 'FullSemVer:', $semVer)
+ Write-Host ($line -f 'NuGetVersion:', $semVer)
+ Write-Host ($line -f 'Informational:', "$semVer+$($data.ShortSha)")
+ Write-Host ($line -f 'BranchName:', $data.BranchName)
+}
+finally {
+ Pop-Location -ErrorAction SilentlyContinue
+ if (Test-Path $tempRoot) {
+ Remove-Item -Recurse -Force $tempRoot
+ }
+}
diff --git a/scripts/gitver/gitversion.sh b/scripts/gitver/gitversion.sh
new file mode 100644
index 000000000..4b434a73e
--- /dev/null
+++ b/scripts/gitver/gitversion.sh
@@ -0,0 +1,77 @@
+#!/usr/bin/env bash
+set -euo pipefail
+
+ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
+REF="HEAD"
+JSON_OUTPUT=false
+NO_RESTORE=false
+WORKTREE=""
+TARGET="$ROOT"
+
+while [[ $# -gt 0 ]]; do
+ case "$1" in
+ --json)
+ JSON_OUTPUT=true
+ shift
+ ;;
+ --no-restore)
+ NO_RESTORE=true
+ shift
+ ;;
+ *)
+ REF="$1"
+ shift
+ ;;
+ esac
+done
+
+cleanup() {
+ if [[ -n "$WORKTREE" && -d "$WORKTREE" ]]; then
+ git -C "$ROOT" worktree remove --force "$WORKTREE" >/dev/null 2>&1 || true
+ fi
+}
+trap cleanup EXIT
+
+if [[ "$REF" != "HEAD" ]]; then
+ SHA=$(git -C "$ROOT" rev-parse --verify --quiet "$REF")
+ if [[ -z "$SHA" ]]; then
+ echo "Unable to resolve git ref '$REF'" >&2
+ exit 1
+ fi
+ WORKTREE="$(mktemp -d -t litedb-gv-XXXXXX)"
+ git -C "$ROOT" worktree add --detach "$WORKTREE" "$SHA" >/dev/null
+ TARGET="$WORKTREE"
+else
+ SHA=$(git -C "$ROOT" rev-parse HEAD)
+fi
+
+cd "$TARGET"
+
+if [[ "$NO_RESTORE" != "true" ]]; then
+ dotnet tool restore >/dev/null
+fi
+
+JSON=$(dotnet tool run dotnet-gitversion /output json)
+
+if [[ "$JSON_OUTPUT" == "true" ]]; then
+ printf '%s\n' "$JSON"
+ exit 0
+fi
+
+MAJOR_MINOR_PATCH=$(jq -r '.MajorMinorPatch' <<<"$JSON")
+PRE_LABEL=$(jq -r '.PreReleaseLabel' <<<"$JSON")
+PRE_NUMBER=$(jq -r '.PreReleaseNumber' <<<"$JSON")
+SHORT_SHA=$(jq -r '.ShortSha' <<<"$JSON")
+BRANCH=$(jq -r '.BranchName' <<<"$JSON")
+
+SEMVER="$MAJOR_MINOR_PATCH"
+if [[ "$PRE_LABEL" != "" && "$PRE_LABEL" != "null" ]]; then
+ printf -v PRE_PADDED '%04d' "$PRE_NUMBER"
+ SEMVER="${MAJOR_MINOR_PATCH}-${PRE_LABEL}.${PRE_PADDED}"
+fi
+
+printf '%-22s %s\n' "Resolved SHA:" "$SHA"
+printf '%-22s %s\n' "FullSemVer:" "$SEMVER"
+printf '%-22s %s\n' "NuGetVersion:" "$SEMVER"
+printf '%-22s %s\n' "Informational:" "${SEMVER}+${SHORT_SHA}"
+printf '%-22s %s\n' "BranchName:" "$BRANCH"
\ No newline at end of file
From c32a197a603eca09a643725db9c3532485a5621b Mon Sep 17 00:00:00 2001
From: JKamsker <11245306+JKamsker@users.noreply.github.com>
Date: Sun, 28 Sep 2025 01:18:31 +0200
Subject: [PATCH 31/53] Fix stuck unit tests
---
LiteDB.Stress/Test/TestExecution.cs | 10 +++-
LiteDB.Tests/Engine/Rebuild_Crash_Tests.cs | 51 +++++++++++--------
LiteDB.Tests/Engine/Recursion_Tests.cs | 1 +
LiteDB.Tests/Engine/Transactions_Tests.cs | 7 ++-
LiteDB.Tests/Issues/Issue2534_Tests.cs | 1 +
.../Shared/SharedDemoDatabaseCollection.cs | 43 ++++++++++++++++
tests.ci.runsettings | 18 +++++++
7 files changed, 105 insertions(+), 26 deletions(-)
create mode 100644 LiteDB.Tests/Shared/SharedDemoDatabaseCollection.cs
create mode 100644 tests.ci.runsettings
diff --git a/LiteDB.Stress/Test/TestExecution.cs b/LiteDB.Stress/Test/TestExecution.cs
index e2ad943c6..3af18f4a9 100644
--- a/LiteDB.Stress/Test/TestExecution.cs
+++ b/LiteDB.Stress/Test/TestExecution.cs
@@ -47,7 +47,10 @@ public void Execute()
this.CreateThreads();
// start report thread
- var t = new Thread(() => this.ReportThread());
+ var t = new Thread(() => this.ReportThread())
+ {
+ IsBackground = true
+ };
t.Name = "REPORT";
t.Start();
}
@@ -100,7 +103,10 @@ private void CreateThreads()
info.Counter++;
info.LastRun = DateTime.Now;
}
- });
+ })
+ {
+ IsBackground = true
+ };
_threads[thread.ManagedThreadId] = new ThreadInfo
{
diff --git a/LiteDB.Tests/Engine/Rebuild_Crash_Tests.cs b/LiteDB.Tests/Engine/Rebuild_Crash_Tests.cs
index 138d9b402..c5f7ea8af 100644
--- a/LiteDB.Tests/Engine/Rebuild_Crash_Tests.cs
+++ b/LiteDB.Tests/Engine/Rebuild_Crash_Tests.cs
@@ -1,14 +1,15 @@
-using FluentAssertions;
+using FluentAssertions;
using LiteDB.Engine;
using System;
using System.IO;
using System.Linq;
+using System.Threading;
using System.Threading.Tasks;
using Xunit;
using Xunit.Abstractions;
-#if DEBUG || TESTING
+#if DEBUG
namespace LiteDB.Tests.Engine
{
public class Rebuild_Crash_Tests
@@ -20,16 +21,13 @@ public Rebuild_Crash_Tests(ITestOutputHelper output)
_output = output;
}
- [Fact(Timeout = 30000)]
- public async Task Rebuild_Crash_IO_Write_Error()
+ [Fact]
+ public void Rebuild_Crash_IO_Write_Error()
{
- var testName = nameof(Rebuild_Crash_IO_Write_Error);
-
- _output.WriteLine($"starting {testName}");
-
+ _output.WriteLine("Running Rebuild_Crash_IO_Write_Error");
try
{
- var N = 1000;
+ var N = 1_000;
using (var file = new TempFile())
{
@@ -40,31 +38,40 @@ public async Task Rebuild_Crash_IO_Write_Error()
Password = "46jLz5QWd5fI3m4LiL2r"
};
- var initial = new DateTime(2024, 1, 1);
-
var data = Enumerable.Range(1, N).Select(i => new BsonDocument
{
["_id"] = i,
- ["name"] = $"user-{i:D4}",
- ["age"] = 18 + (i % 60),
- ["created"] = initial.AddDays(i),
- ["lorem"] = new string((char)('a' + (i % 26)), 800)
+ ["name"] = Faker.Fullname(),
+ ["age"] = Faker.Age(),
+ ["created"] = Faker.Birthday(),
+ ["lorem"] = Faker.Lorem(5, 25)
}).ToArray();
+ var faultInjected = 0;
+
try
{
using (var db = new LiteEngine(settings))
{
+ var writeHits = 0;
+
db.SimulateDiskWriteFail = (page) =>
{
var p = new BasePage(page);
- if (p.PageID == 28)
+ if (p.PageType == PageType.Data && p.ColID == 1)
{
- p.ColID.Should().Be(1);
- p.PageType.Should().Be(PageType.Data);
+ var hit = Interlocked.Increment(ref writeHits);
- page.Write((uint)123123123, 8192 - 4);
+ if (hit == 10)
+ {
+ p.PageType.Should().Be(PageType.Data);
+ p.ColID.Should().Be(1);
+
+ page.Write((uint)123123123, 8192 - 4);
+
+ Interlocked.Exchange(ref faultInjected, 1);
+ }
}
};
@@ -86,6 +93,8 @@ public async Task Rebuild_Crash_IO_Write_Error()
}
catch (Exception ex)
{
+ faultInjected.Should().Be(1, "the simulated disk write fault should have triggered");
+
Assert.True(ex is LiteException lex && lex.ErrorCode == 999);
}
@@ -103,12 +112,10 @@ public async Task Rebuild_Crash_IO_Write_Error()
}
}
-
- await Task.CompletedTask;
}
finally
{
- _output.WriteLine($"{testName} completed");
+ _output.WriteLine("Finished running Rebuild_Crash_IO_Write_Error");
}
}
}
diff --git a/LiteDB.Tests/Engine/Recursion_Tests.cs b/LiteDB.Tests/Engine/Recursion_Tests.cs
index 4a9fdb3c7..f4fa85d78 100644
--- a/LiteDB.Tests/Engine/Recursion_Tests.cs
+++ b/LiteDB.Tests/Engine/Recursion_Tests.cs
@@ -3,6 +3,7 @@
namespace LiteDB.Tests.Engine;
+[Collection("SharedDemoDatabase")]
public class Recursion_Tests
{
[Fact]
diff --git a/LiteDB.Tests/Engine/Transactions_Tests.cs b/LiteDB.Tests/Engine/Transactions_Tests.cs
index 31375e952..94b39dfde 100644
--- a/LiteDB.Tests/Engine/Transactions_Tests.cs
+++ b/LiteDB.Tests/Engine/Transactions_Tests.cs
@@ -370,7 +370,10 @@ public void Test_Transaction_ReleaseWhenFailToStart()
blockingStream.ShouldBlock = true;
db.Checkpoint();
db.Dispose();
- });
+ })
+ {
+ IsBackground = true
+ };
lockerThread.Start();
blockingStream.Blocked.WaitOne(200).Should().BeTrue();
Assert.Throws(() => db.GetCollection().Insert(new Person())).Message.Should().Contain("timeout");
@@ -418,4 +421,4 @@ private static void SetEngineTimeout(LiteDatabase database, TimeSpan timeout)
setter.Invoke(pragmas, new object[] { timeout });
}
}
-}
\ No newline at end of file
+}
diff --git a/LiteDB.Tests/Issues/Issue2534_Tests.cs b/LiteDB.Tests/Issues/Issue2534_Tests.cs
index 725a276d3..7abd9e88a 100644
--- a/LiteDB.Tests/Issues/Issue2534_Tests.cs
+++ b/LiteDB.Tests/Issues/Issue2534_Tests.cs
@@ -2,6 +2,7 @@
namespace LiteDB.Tests.Issues;
+[Collection("SharedDemoDatabase")]
public class Issue2534_Tests
{
[Fact]
diff --git a/LiteDB.Tests/Shared/SharedDemoDatabaseCollection.cs b/LiteDB.Tests/Shared/SharedDemoDatabaseCollection.cs
new file mode 100644
index 000000000..1112ce922
--- /dev/null
+++ b/LiteDB.Tests/Shared/SharedDemoDatabaseCollection.cs
@@ -0,0 +1,43 @@
+namespace LiteDB.Tests;
+
+using System;
+using System.IO;
+using Xunit;
+
+[CollectionDefinition("SharedDemoDatabase", DisableParallelization = true)]
+public sealed class SharedDemoDatabaseCollection : ICollectionFixture
+{
+}
+
+public sealed class SharedDemoDatabaseFixture : IDisposable
+{
+ private readonly string _filename;
+
+ public SharedDemoDatabaseFixture()
+ {
+ _filename = Path.GetFullPath("Demo.db");
+ TryDeleteFile();
+ }
+
+ public void Dispose()
+ {
+ TryDeleteFile();
+ }
+
+ private void TryDeleteFile()
+ {
+ try
+ {
+ if (File.Exists(_filename))
+ {
+ File.Delete(_filename);
+ }
+ }
+ catch (IOException)
+ {
+ }
+ catch (UnauthorizedAccessException)
+ {
+ }
+ }
+}
diff --git a/tests.ci.runsettings b/tests.ci.runsettings
new file mode 100644
index 000000000..b730669db
--- /dev/null
+++ b/tests.ci.runsettings
@@ -0,0 +1,18 @@
+
+
+
+
+
+ 180000
+ 60000
+
+
+ 1
+ true
+
+
+
+
+ false
+
+
\ No newline at end of file
From bf3987fbc4e2f5859b56d884a3e7d4923f54214b Mon Sep 17 00:00:00 2001
From: Jonas Kamsker <11245306+JKamsker@users.noreply.github.com>
Date: Sun, 28 Sep 2025 01:24:36 +0200
Subject: [PATCH 32/53] Ensure external streams checkpoint on commit (#2652)
* Fix checkpoint persistence for external streams
* Restore checkpoint pragma after stream sessions
---
.../Issues/IssueCheckpointFlush_Tests.cs | 104 ++++++++++++++++++
LiteDB/Client/Database/LiteDatabase.cs | 27 +++++
LiteDB/Engine/Engine/Transaction.cs | 4 +-
3 files changed, 133 insertions(+), 2 deletions(-)
create mode 100644 LiteDB.Tests/Issues/IssueCheckpointFlush_Tests.cs
diff --git a/LiteDB.Tests/Issues/IssueCheckpointFlush_Tests.cs b/LiteDB.Tests/Issues/IssueCheckpointFlush_Tests.cs
new file mode 100644
index 000000000..e8e66b8b2
--- /dev/null
+++ b/LiteDB.Tests/Issues/IssueCheckpointFlush_Tests.cs
@@ -0,0 +1,104 @@
+using System;
+using System.IO;
+using FluentAssertions;
+using LiteDB;
+using LiteDB.Tests;
+using Xunit;
+
+namespace LiteDB.Tests.Issues
+{
+ public class IssueCheckpointFlush_Tests
+ {
+ private class Entity
+ {
+ public int Id { get; set; }
+
+ public string Value { get; set; } = string.Empty;
+ }
+
+ [Fact]
+ public void CommittedChangesAreLostWhenClosingExternalStreamWithoutCheckpoint()
+ {
+ using var tempFile = new TempFile();
+
+ using (var createStream = new FileStream(tempFile.Filename, FileMode.Create, FileAccess.ReadWrite, FileShare.ReadWrite))
+ {
+ using var createDb = new LiteDatabase(createStream);
+ var collection = createDb.GetCollection("entities");
+
+ collection.Upsert(new Entity { Id = 1, Value = "initial" });
+
+ createDb.Commit();
+ createStream.Flush(true);
+ }
+
+ var updateStream = new FileStream(tempFile.Filename, FileMode.Open, FileAccess.ReadWrite, FileShare.ReadWrite);
+ var updateDb = new LiteDatabase(updateStream);
+ var updateCollection = updateDb.GetCollection("entities");
+
+ updateCollection.Upsert(new Entity { Id = 1, Value = "updated" });
+
+ updateDb.Commit();
+ updateStream.Flush(true);
+ updateStream.Dispose();
+ updateDb = null;
+
+ GC.Collect();
+ GC.WaitForPendingFinalizers();
+
+ using (var verifyStream = new FileStream(tempFile.Filename, FileMode.Open, FileAccess.ReadWrite, FileShare.ReadWrite))
+ using (var verifyDb = new LiteDatabase(verifyStream))
+ {
+ var document = verifyDb.GetCollection("entities").FindById(1);
+
+ document.Should().NotBeNull();
+ document!.Value.Should().Be("updated");
+ }
+ }
+
+ [Fact]
+ public void StreamConstructorRestoresCheckpointSizeAfterDisposal()
+ {
+ using var tempFile = new TempFile();
+
+ using (var fileDb = new LiteDatabase(tempFile.Filename))
+ {
+ fileDb.CheckpointSize.Should().Be(1000);
+ }
+
+ using (var stream = new FileStream(tempFile.Filename, FileMode.Open, FileAccess.ReadWrite, FileShare.ReadWrite))
+ using (var streamDb = new LiteDatabase(stream))
+ {
+ streamDb.CheckpointSize.Should().Be(1);
+ }
+
+ using (var reopened = new LiteDatabase(tempFile.Filename))
+ {
+ reopened.CheckpointSize.Should().Be(1000);
+ }
+ }
+
+ [Fact]
+ public void StreamConstructorAllowsReadOnlyStreams()
+ {
+ using var tempFile = new TempFile();
+
+ using (var setup = new LiteDatabase(tempFile.Filename))
+ {
+ var collection = setup.GetCollection("entities");
+
+ collection.Insert(new Entity { Id = 1, Value = "initial" });
+
+ setup.Checkpoint();
+ }
+
+ using var readOnlyStream = new FileStream(tempFile.Filename, FileMode.Open, FileAccess.Read, FileShare.ReadWrite);
+ using var readOnlyDb = new LiteDatabase(readOnlyStream);
+
+ var document = readOnlyDb.GetCollection("entities").FindById(1);
+
+ document.Should().NotBeNull();
+ document!.Value.Should().Be("initial");
+ }
+ }
+}
diff --git a/LiteDB/Client/Database/LiteDatabase.cs b/LiteDB/Client/Database/LiteDatabase.cs
index 812ae629c..d301d54da 100644
--- a/LiteDB/Client/Database/LiteDatabase.cs
+++ b/LiteDB/Client/Database/LiteDatabase.cs
@@ -19,6 +19,7 @@ public partial class LiteDatabase : ILiteDatabase
private readonly ILiteEngine _engine;
private readonly BsonMapper _mapper;
private readonly bool _disposeOnClose;
+ private readonly int? _checkpointOverride;
///
/// Get current instance of BsonMapper used in this database instance (can be BsonMapper.Global)
@@ -66,6 +67,27 @@ public LiteDatabase(Stream stream, BsonMapper mapper = null, Stream logStream =
_engine = new LiteEngine(settings);
_mapper = mapper ?? BsonMapper.Global;
_disposeOnClose = true;
+
+ if (logStream == null && stream is not MemoryStream)
+ {
+ if (!stream.CanWrite)
+ {
+ // Read-only streams cannot participate in eager checkpointing because the process
+ // writes pages back to the underlying data stream immediately.
+ }
+ else
+ {
+ // Without a dedicated log stream the WAL lives purely in memory; force
+ // checkpointing to ensure commits reach the underlying data stream.
+ var originalCheckpointSize = _engine.Pragma(Pragmas.CHECKPOINT);
+
+ if (originalCheckpointSize != 1)
+ {
+ _engine.Pragma(Pragmas.CHECKPOINT, 1);
+ _checkpointOverride = originalCheckpointSize;
+ }
+ }
+ }
}
///
@@ -373,6 +395,11 @@ protected virtual void Dispose(bool disposing)
{
if (disposing && _disposeOnClose)
{
+ if (_checkpointOverride.HasValue)
+ {
+ _engine.Pragma(Pragmas.CHECKPOINT, _checkpointOverride.Value);
+ }
+
_engine.Dispose();
}
}
diff --git a/LiteDB/Engine/Engine/Transaction.cs b/LiteDB/Engine/Engine/Transaction.cs
index 4db9f57b3..b88977069 100644
--- a/LiteDB/Engine/Engine/Transaction.cs
+++ b/LiteDB/Engine/Engine/Transaction.cs
@@ -112,8 +112,8 @@ private void CommitAndReleaseTransaction(TransactionService transaction)
_monitor.ReleaseTransaction(transaction);
// try checkpoint when finish transaction and log file are bigger than checkpoint pragma value (in pages)
- if (_header.Pragmas.Checkpoint > 0 &&
- _disk.GetFileLength(FileOrigin.Log) > (_header.Pragmas.Checkpoint * PAGE_SIZE))
+ if (_header.Pragmas.Checkpoint > 0 &&
+ _disk.GetFileLength(FileOrigin.Log) >= (_header.Pragmas.Checkpoint * PAGE_SIZE))
{
_walIndex.TryCheckpoint();
}
From 30624bca9b57850d45a353eac766916fc37b78d5 Mon Sep 17 00:00:00 2001
From: Jonas Kamsker <11245306+JKamsker@users.noreply.github.com>
Date: Sun, 28 Sep 2025 10:04:50 +0200
Subject: [PATCH 33/53] Add cleanup step for detached NuGet packages and
enhance GitVersion configuration
---
.github/workflows/publish-prerelease.yml | 5 +++++
.github/workflows/publish-release.yml | 5 +++++
Directory.Build.props | 18 ++++++++++++++++--
docs/versioning.md | 3 +++
4 files changed, 29 insertions(+), 2 deletions(-)
diff --git a/.github/workflows/publish-prerelease.yml b/.github/workflows/publish-prerelease.yml
index ffd7f99e6..17795175b 100644
--- a/.github/workflows/publish-prerelease.yml
+++ b/.github/workflows/publish-prerelease.yml
@@ -84,6 +84,11 @@ jobs:
if: success()
run: dotnet pack LiteDB/LiteDB.csproj --configuration Release --no-build -o artifacts /p:ContinuousIntegrationBuild=true
+ # Delete any leftover NuGet packages named ``0.0.0-detached.nupkg`` (produced by detached builds)
+ - name: Clean up detached packages
+ run: |
+ find artifacts -name "*0.0.0-detached.nupkg" -delete
+
- name: Retrieve secrets from Bitwarden
uses: bitwarden/sm-action@v2
with:
diff --git a/.github/workflows/publish-release.yml b/.github/workflows/publish-release.yml
index d6d61310d..3703a1210 100644
--- a/.github/workflows/publish-release.yml
+++ b/.github/workflows/publish-release.yml
@@ -77,6 +77,11 @@ jobs:
- name: Pack
run: dotnet pack LiteDB/LiteDB.csproj --configuration Release --no-build -o artifacts /p:ContinuousIntegrationBuild=true
+ # Delete any leftover NuGet packages named ``0.0.0-detached.nupkg`` (produced by detached builds)
+ - name: Clean up detached packages
+ run: |
+ find artifacts -name "*0.0.0-detached.nupkg" -delete
+
- name: Retrieve secrets from Bitwarden
if: ${{ inputs.publish_nuget }}
uses: bitwarden/sm-action@v2
diff --git a/Directory.Build.props b/Directory.Build.props
index 06f762a7d..ee57a7a76 100644
--- a/Directory.Build.props
+++ b/Directory.Build.props
@@ -1,10 +1,16 @@
+ <_RootGitHeadPath>$([System.IO.Path]::Combine('$(MSBuildThisFileDirectory)', '.git', 'HEAD'))
+ true
+ false
+
+
+
true
false
-
+
<_GitVersionPreReleaseNumberPadded Condition="'$(GitVersion_PreReleaseNumber)' != ''">$([System.String]::Format("{0:0000}", $(GitVersion_PreReleaseNumber)))
<_GitVersionCalculatedSemVer Condition="'$(GitVersion_PreReleaseLabel)' != ''">$(GitVersion_MajorMinorPatch)-$(GitVersion_PreReleaseLabel).$(_GitVersionPreReleaseNumberPadded)
<_GitVersionCalculatedSemVer Condition="'$(GitVersion_PreReleaseLabel)' == ''">$(GitVersion_MajorMinorPatch)
@@ -15,7 +21,15 @@
$(_GitVersionCalculatedSemVer)+$(GitVersion_ShortSha)
-
+
+ 0.0.0-detached
+ 0.0.0-detached
+ 0.0.0.0
+ 0.0.0.0
+ 0.0.0-detached
+
+
+
diff --git a/docs/versioning.md b/docs/versioning.md
index fc94972b8..661c1b600 100644
--- a/docs/versioning.md
+++ b/docs/versioning.md
@@ -2,6 +2,9 @@
LiteDB uses GitVersion for semantic versioning across local builds and CI. The configuration lives in `GitVersion.yml` and is consumed by both MSBuild (via `GitVersion.MsBuild`) and the GitHub workflows.
+> [!NOTE]
+> Environments that expose the repository as a detached worktree or otherwise hide the `.git/HEAD` sentinel (for example, some test harnesses) automatically fall back to a static `0.0.0-detached` version so builds can proceed without GitVersion.
+
## Branch semantics
- `master` is the mainline branch. Each direct commit or merge increments the patch number unless an annotated `v*` tag (or `+semver:` directive) requests a larger bump.
From d9d8a8d4d57f7d10c1f60f28a143b5a92d4f9c93 Mon Sep 17 00:00:00 2001
From: Jonas Kamsker <11245306+JKamsker@users.noreply.github.com>
Date: Sun, 28 Sep 2025 10:49:11 +0200
Subject: [PATCH 34/53] Disable GitVersion in shallow clones (#2656)
---
Directory.Build.props | 6 ++++--
1 file changed, 4 insertions(+), 2 deletions(-)
diff --git a/Directory.Build.props b/Directory.Build.props
index ee57a7a76..b8ded0683 100644
--- a/Directory.Build.props
+++ b/Directory.Build.props
@@ -1,7 +1,9 @@
- <_RootGitHeadPath>$([System.IO.Path]::Combine('$(MSBuildThisFileDirectory)', '.git', 'HEAD'))
- true
+ <_RootGitDirectory>$([System.IO.Path]::Combine('$(MSBuildThisFileDirectory)', '.git'))
+ <_RootGitHeadPath>$([System.IO.Path]::Combine('$(_RootGitDirectory)', 'HEAD'))
+ <_RootGitShallowPath>$([System.IO.Path]::Combine('$(_RootGitDirectory)', 'shallow'))
+ true
false
From 66ec615a1b03058a84e2841caf0a72b2a4a2ae1d Mon Sep 17 00:00:00 2001
From: Jonas Kamsker <11245306+JKamsker@users.noreply.github.com>
Date: Tue, 30 Sep 2025 09:58:42 +0200
Subject: [PATCH 35/53] Implement vectors (#2678)
Introduces vector search capabilities for AI/ML applications like semantic search and RAG.
- Adds a new `BsonVector` type (`float[]`) and an HNSW-inspired index for fast Approximate Nearest Neighbor (ANN) search.
- Supports Cosine, Euclidean, and Dot Product distance metrics.
- Exposes a new fluent query API via `TopKNear()` and `WhereNear()` extensions.
- Adds SQL/`BsonExpression` support with the `VECTOR_SIM()` function.
- Includes a new demo project (`LiteDB.Demo.Tools.VectorSearch`) for a complete, end-to-end example using Google Gemini embeddings.
---
.../Queries/QueryWithVectorSimilarity.cs | 76 +
LiteDB.Benchmarks/Models/FileMetaBase.cs | 2 +
.../Models/Generators/FileMetaGenerator.cs | 4 +-
.../Commands/IngestCommand.cs | 249 ++
.../Commands/SearchCommand.cs | 165 +
.../Commands/VectorSearchCommandSettings.cs | 125 +
.../Configuration/GeminiEmbeddingOptions.cs | 95 +
.../Embedding/GeminiEmbeddingService.cs | 190 +
.../Embedding/IEmbeddingService.cs | 10 +
.../LiteDB.Demo.Tools.VectorSearch.csproj | 19 +
.../Models/IndexedDocument.cs | 27 +
.../Models/IndexedDocumentChunk.cs | 19 +
LiteDB.Demo.Tools.VectorSearch/Program.cs | 40 +
LiteDB.Demo.Tools.VectorSearch/Readme.md | 39 +
.../Services/DocumentStore.cs | 156 +
.../Utilities/TextUtilities.cs | 176 +
.../Utilities/VectorMath.cs | 50 +
LiteDB.Tests/BsonValue/BsonVector_Tests.cs | 323 ++
LiteDB.Tests/Document/Decimal_Tests.cs | 4 +-
LiteDB.Tests/Document/Implicit_Tests.cs | 8 +-
LiteDB.Tests/Document/Json_Tests.cs | 13 +
LiteDB.Tests/Engine/DropCollection_Tests.cs | 305 +-
LiteDB.Tests/LiteDB.Tests.csproj | 18 +-
.../Query/VectorExtensionSurface_Tests.cs | 52 +
LiteDB.Tests/Query/VectorIndex_Tests.cs | 977 ++++++
.../Resources/ingest-20250922-234735.json | 3090 +++++++++++++++++
LiteDB.sln | 58 +
LiteDB/Client/Database/Collections/Index.cs | 66 +-
LiteDB/Client/Database/ILiteQueryable.cs | 3 +-
LiteDB/Client/Database/LiteQueryable.cs | 120 +
LiteDB/Client/Database/LiteRepository.cs | 57 +
LiteDB/Client/Shared/SharedEngine.cs | 6 +
.../Vector/LiteCollectionVectorExtensions.cs | 46 +
.../Vector/LiteQueryableVectorExtensions.cs | 63 +
.../Vector/LiteRepositoryVectorExtensions.cs | 46 +
LiteDB/Client/Vector/VectorDistanceMetric.cs | 12 +
LiteDB/Client/Vector/VectorIndexOptions.cs | 31 +
LiteDB/Document/BsonType.cs | 5 +-
LiteDB/Document/BsonValue.cs | 46 +
LiteDB/Document/BsonVector.cs | 28 +
LiteDB/Document/Expression/Methods/Vector.cs | 55 +
.../Parser/BsonExpressionFunctions.cs | 5 +
.../Parser/BsonExpressionOperators.cs | 8 +-
.../Expression/Parser/BsonExpressionParser.cs | 14 +-
.../Expression/Parser/BsonExpressionType.cs | 5 +-
LiteDB/Document/Json/JsonWriter.cs | 6 +
LiteDB/Engine/Disk/Serializer/BufferReader.cs | 26 +-
LiteDB/Engine/Disk/Serializer/BufferWriter.cs | 19 +
LiteDB/Engine/Engine/Delete.cs | 6 +
LiteDB/Engine/Engine/Index.cs | 83 +-
LiteDB/Engine/Engine/Insert.cs | 12 +-
LiteDB/Engine/Engine/Rebuild.cs | 24 +-
LiteDB/Engine/Engine/Update.cs | 11 +-
LiteDB/Engine/Engine/Upsert.cs | 5 +-
LiteDB/Engine/FileReader/FileReaderV8.cs | 49 +-
LiteDB/Engine/FileReader/IndexInfo.cs | 2 +
LiteDB/Engine/ILiteEngine.cs | 2 +
LiteDB/Engine/Pages/BasePage.cs | 4 +-
LiteDB/Engine/Pages/CollectionPage.cs | 87 +-
LiteDB/Engine/Pages/VectorIndexPage.cs | 55 +
.../Query/IndexQuery/VectorIndexQuery.cs | 87 +
LiteDB/Engine/Query/Query.cs | 43 +-
LiteDB/Engine/Query/QueryOptimization.cs | 261 +-
LiteDB/Engine/Services/SnapShot.cs | 82 +-
LiteDB/Engine/Services/VectorIndexService.cs | 978 ++++++
.../Engine/Structures/VectorIndexMetadata.cs | 79 +
LiteDB/Engine/Structures/VectorIndexNode.cs | 293 ++
.../Utils/Extensions/BufferSliceExtensions.cs | 35 +
LiteDB/Utils/Tokenizer.cs | 3 +-
README.md | 6 +-
70 files changed, 9047 insertions(+), 117 deletions(-)
create mode 100644 LiteDB.Benchmarks/Benchmarks/Queries/QueryWithVectorSimilarity.cs
create mode 100644 LiteDB.Demo.Tools.VectorSearch/Commands/IngestCommand.cs
create mode 100644 LiteDB.Demo.Tools.VectorSearch/Commands/SearchCommand.cs
create mode 100644 LiteDB.Demo.Tools.VectorSearch/Commands/VectorSearchCommandSettings.cs
create mode 100644 LiteDB.Demo.Tools.VectorSearch/Configuration/GeminiEmbeddingOptions.cs
create mode 100644 LiteDB.Demo.Tools.VectorSearch/Embedding/GeminiEmbeddingService.cs
create mode 100644 LiteDB.Demo.Tools.VectorSearch/Embedding/IEmbeddingService.cs
create mode 100644 LiteDB.Demo.Tools.VectorSearch/LiteDB.Demo.Tools.VectorSearch.csproj
create mode 100644 LiteDB.Demo.Tools.VectorSearch/Models/IndexedDocument.cs
create mode 100644 LiteDB.Demo.Tools.VectorSearch/Models/IndexedDocumentChunk.cs
create mode 100644 LiteDB.Demo.Tools.VectorSearch/Program.cs
create mode 100644 LiteDB.Demo.Tools.VectorSearch/Readme.md
create mode 100644 LiteDB.Demo.Tools.VectorSearch/Services/DocumentStore.cs
create mode 100644 LiteDB.Demo.Tools.VectorSearch/Utilities/TextUtilities.cs
create mode 100644 LiteDB.Demo.Tools.VectorSearch/Utilities/VectorMath.cs
create mode 100644 LiteDB.Tests/BsonValue/BsonVector_Tests.cs
create mode 100644 LiteDB.Tests/Query/VectorExtensionSurface_Tests.cs
create mode 100644 LiteDB.Tests/Query/VectorIndex_Tests.cs
create mode 100644 LiteDB.Tests/Resources/ingest-20250922-234735.json
create mode 100644 LiteDB/Client/Vector/LiteCollectionVectorExtensions.cs
create mode 100644 LiteDB/Client/Vector/LiteQueryableVectorExtensions.cs
create mode 100644 LiteDB/Client/Vector/LiteRepositoryVectorExtensions.cs
create mode 100644 LiteDB/Client/Vector/VectorDistanceMetric.cs
create mode 100644 LiteDB/Client/Vector/VectorIndexOptions.cs
create mode 100644 LiteDB/Document/BsonVector.cs
create mode 100644 LiteDB/Document/Expression/Methods/Vector.cs
create mode 100644 LiteDB/Engine/Pages/VectorIndexPage.cs
create mode 100644 LiteDB/Engine/Query/IndexQuery/VectorIndexQuery.cs
create mode 100644 LiteDB/Engine/Services/VectorIndexService.cs
create mode 100644 LiteDB/Engine/Structures/VectorIndexMetadata.cs
create mode 100644 LiteDB/Engine/Structures/VectorIndexNode.cs
diff --git a/LiteDB.Benchmarks/Benchmarks/Queries/QueryWithVectorSimilarity.cs b/LiteDB.Benchmarks/Benchmarks/Queries/QueryWithVectorSimilarity.cs
new file mode 100644
index 000000000..de44989c9
--- /dev/null
+++ b/LiteDB.Benchmarks/Benchmarks/Queries/QueryWithVectorSimilarity.cs
@@ -0,0 +1,76 @@
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using BenchmarkDotNet.Attributes;
+using LiteDB;
+using LiteDB.Benchmarks.Models;
+using LiteDB.Benchmarks.Models.Generators;
+using LiteDB.Vector;
+
namespace LiteDB.Benchmarks.Benchmarks.Queries
{
    /// <summary>
    /// Benchmarks vector-similarity queries (<c>WhereNear</c> filtering and <c>TopKNear</c>)
    /// against two collections holding identical data: one with a vector index and one without,
    /// so timings differ only by index usage.
    /// </summary>
    [BenchmarkCategory(Constants.Categories.QUERIES)]
    public class QueryWithVectorSimilarity : BenchmarkBase
    {
        // Embedding dimensionality; must match the vectors produced by FileMetaGenerator.
        private const int Dimensions = 128;

        private ILiteCollection<FileMetaBase> _fileMetaCollection;   // vector-indexed collection
        private ILiteCollection<FileMetaBase> _unindexedCollection;  // full-scan baseline
        private float[] _queryVector;

        [GlobalSetup]
        public void GlobalSetup()
        {
            File.Delete(DatabasePath);

            DatabaseInstance = new LiteDatabase(ConnectionString());
            _fileMetaCollection = DatabaseInstance.GetCollection<FileMetaBase>("withIndex");
            _unindexedCollection = DatabaseInstance.GetCollection<FileMetaBase>("withoutIndex");

            _fileMetaCollection.EnsureIndex(fileMeta => fileMeta.ShouldBeShown);
            _unindexedCollection.EnsureIndex(fileMeta => fileMeta.ShouldBeShown);
            _fileMetaCollection.EnsureIndex(fileMeta => fileMeta.Vectors, new VectorIndexOptions(Dimensions));

            var rnd = new Random();
            var data = FileMetaGenerator<FileMetaBase>.GenerateList(DatasetSize); // executed once per each N value

            // Insert the same dataset into both collections.
            _fileMetaCollection.Insert(data);
            _unindexedCollection.Insert(data);

            _queryVector = Enumerable.Range(0, Dimensions).Select(_ => (float)rnd.NextDouble()).ToArray();

            DatabaseInstance.Checkpoint();
        }

        [Benchmark]
        public List<FileMetaBase> WhereNear_Filter() =>
            _unindexedCollection.Query()
                .WhereNear(x => x.Vectors, _queryVector, maxDistance: 0.5)
                .ToList();

        [Benchmark]
        public List<FileMetaBase> WhereNear_Filter_Indexed() =>
            _fileMetaCollection.Query()
                .WhereNear(x => x.Vectors, _queryVector, maxDistance: 0.5)
                .ToList();

        [Benchmark]
        public List<FileMetaBase> TopKNear_OrderLimit() =>
            _unindexedCollection.Query()
                .TopKNear(x => x.Vectors, _queryVector, k: 10)
                .ToList();

        [Benchmark]
        public List<FileMetaBase> TopKNear_OrderLimit_Indexed() =>
            _fileMetaCollection.Query()
                .TopKNear(x => x.Vectors, _queryVector, k: 10)
                .ToList();
    }
}
\ No newline at end of file
diff --git a/LiteDB.Benchmarks/Models/FileMetaBase.cs b/LiteDB.Benchmarks/Models/FileMetaBase.cs
index ca2f7e2c7..15b7f63aa 100644
--- a/LiteDB.Benchmarks/Models/FileMetaBase.cs
+++ b/LiteDB.Benchmarks/Models/FileMetaBase.cs
@@ -28,6 +28,8 @@ public class FileMetaBase
public bool ShouldBeShown { get; set; }
+ public float[] Vectors { get; set; }
+
public virtual bool IsValid => ValidFrom == null || ValidFrom <= DateTimeOffset.UtcNow && ValidTo == null || ValidTo > DateTimeOffset.UtcNow;
}
}
\ No newline at end of file
diff --git a/LiteDB.Benchmarks/Models/Generators/FileMetaGenerator.cs b/LiteDB.Benchmarks/Models/Generators/FileMetaGenerator.cs
index 6a551b502..903597882 100644
--- a/LiteDB.Benchmarks/Models/Generators/FileMetaGenerator.cs
+++ b/LiteDB.Benchmarks/Models/Generators/FileMetaGenerator.cs
@@ -1,5 +1,6 @@
using System;
using System.Collections.Generic;
+using System.Linq;
namespace LiteDB.Benchmarks.Models.Generators
{
@@ -18,7 +19,8 @@ private static T Generate()
Title = $"Document-{docGuid}",
MimeType = "application/pdf",
IsFavorite = _random.Next(10) >= 9,
- ShouldBeShown = _random.Next(10) >= 7
+ ShouldBeShown = _random.Next(10) >= 7,
+ Vectors = Enumerable.Range(0, 128).Select(_ => (float)_random.NextDouble()).ToArray()
};
if (_random.Next(10) >= 5)
diff --git a/LiteDB.Demo.Tools.VectorSearch/Commands/IngestCommand.cs b/LiteDB.Demo.Tools.VectorSearch/Commands/IngestCommand.cs
new file mode 100644
index 000000000..788528100
--- /dev/null
+++ b/LiteDB.Demo.Tools.VectorSearch/Commands/IngestCommand.cs
@@ -0,0 +1,249 @@
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using System.Threading;
+using System.Threading.Tasks;
+using LiteDB.Demo.Tools.VectorSearch.Configuration;
+using LiteDB.Demo.Tools.VectorSearch.Embedding;
+using LiteDB.Demo.Tools.VectorSearch.Models;
+using LiteDB.Demo.Tools.VectorSearch.Services;
+using LiteDB.Demo.Tools.VectorSearch.Utilities;
+using Spectre.Console;
+using Spectre.Console.Cli;
+using ValidationResult = Spectre.Console.ValidationResult;
+
+namespace LiteDB.Demo.Tools.VectorSearch.Commands
+{
+ internal sealed class IngestCommand : AsyncCommand
+ {
+ public override async Task ExecuteAsync(CommandContext context, IngestCommandSettings settings)
+ {
+ if (!Directory.Exists(settings.SourceDirectory))
+ {
+ throw new InvalidOperationException($"Source directory '{settings.SourceDirectory}' does not exist.");
+ }
+
+ var embeddingOptions = settings.CreateEmbeddingOptions();
+
+ using var documentStore = new DocumentStore(settings.DatabasePath);
+ using var embeddingService = await GeminiEmbeddingService.CreateAsync(embeddingOptions, CancellationToken.None);
+
+ var searchOption = settings.Recursive ? SearchOption.AllDirectories : SearchOption.TopDirectoryOnly;
+ var files = Directory.EnumerateFiles(settings.SourceDirectory, "*", searchOption)
+ .Where(TextUtilities.IsSupportedDocument)
+ .OrderBy(x => x)
+ .Select(Path.GetFullPath)
+ .ToArray();
+
+ if (files.Length == 0)
+ {
+ AnsiConsole.MarkupLine("[yellow]No supported text documents were found. Nothing to ingest.[/]");
+ return 0;
+ }
+
+ var skipUnchanged = !settings.Force;
+ var processed = 0;
+ var skipped = 0;
+ var errors = new List<(string Path, string Error)>();
+
+ await AnsiConsole.Progress()
+ .Columns(new ProgressColumn[]
+ {
+ new TaskDescriptionColumn(),
+ new ProgressBarColumn(),
+ new PercentageColumn(),
+ new ElapsedTimeColumn(),
+ new RemainingTimeColumn()
+ })
+ .StartAsync(async ctx =>
+ {
+ var task = ctx.AddTask("Embedding documents", maxValue: files.Length);
+
+ foreach (var path in files)
+ {
+ try
+ {
+ var info = new FileInfo(path);
+ var rawContent = TextUtilities.ReadDocument(path);
+ var contentHash = TextUtilities.ComputeContentHash(rawContent);
+
+ var existing = documentStore.FindByPath(path);
+ if (existing != null && skipUnchanged && string.Equals(existing.ContentHash, contentHash, StringComparison.Ordinal))
+ {
+ skipped++;
+ continue;
+ }
+
+ var chunkRecords = new List();
+ var chunkIndex = 0;
+ var ensuredIndex = false;
+
+ foreach (var chunk in TextUtilities.SplitIntoChunks(rawContent, settings.ChunkLength, settings.ChunkOverlap))
+ {
+ var normalizedChunk = TextUtilities.NormalizeForEmbedding(chunk, embeddingOptions.MaxInputLength);
+ if (string.IsNullOrWhiteSpace(normalizedChunk))
+ {
+ chunkIndex++;
+ continue;
+ }
+
+ var embedding = await embeddingService.EmbedAsync(normalizedChunk, CancellationToken.None);
+
+ if (!ensuredIndex)
+ {
+ documentStore.EnsureChunkVectorIndex(embedding.Length);
+ ensuredIndex = true;
+ }
+
+ chunkRecords.Add(new IndexedDocumentChunk
+ {
+ Path = path,
+ ChunkIndex = chunkIndex,
+ Snippet = chunk.Trim(),
+ Embedding = embedding
+ });
+
+ chunkIndex++;
+ }
+
+ var record = existing ?? new IndexedDocument();
+ record.Path = path;
+ record.Title = Path.GetFileName(path);
+ record.Preview = TextUtilities.BuildPreview(rawContent, settings.PreviewLength);
+ record.Embedding = Array.Empty();
+ record.LastModifiedUtc = info.LastWriteTimeUtc;
+ record.SizeBytes = info.Length;
+ record.ContentHash = contentHash;
+ record.IngestedUtc = DateTime.UtcNow;
+
+ if (chunkRecords.Count == 0)
+ {
+ documentStore.Upsert(record);
+ documentStore.ReplaceDocumentChunks(path, Array.Empty());
+ skipped++;
+ continue;
+ }
+
+ documentStore.Upsert(record);
+ documentStore.ReplaceDocumentChunks(path, chunkRecords);
+ processed++;
+ }
+ catch (Exception ex)
+ {
+ errors.Add((path, ex.Message));
+ }
+ finally
+ {
+ task.Increment(1);
+ }
+ }
+ });
+
+ if (settings.PruneMissing)
+ {
+ var indexedPaths = documentStore.GetTrackedPaths();
+ var currentPaths = new HashSet(files, StringComparer.OrdinalIgnoreCase);
+ var missing = indexedPaths.Where(path => !currentPaths.Contains(path)).ToArray();
+
+ if (missing.Length > 0)
+ {
+ documentStore.RemoveMissingDocuments(missing);
+ AnsiConsole.MarkupLine($"[yellow]Removed {missing.Length} documents that no longer exist on disk.[/]");
+ }
+ }
+
+ var summary = new Table().Border(TableBorder.Rounded);
+ summary.AddColumn("Metric");
+ summary.AddColumn("Value");
+ summary.AddRow("Processed", processed.ToString());
+ summary.AddRow("Skipped", skipped.ToString());
+ summary.AddRow("Errors", errors.Count.ToString());
+
+ AnsiConsole.Write(summary);
+
+ if (errors.Count > 0)
+ {
+ var errorTable = new Table().Border(TableBorder.Rounded);
+ errorTable.AddColumn("File");
+ errorTable.AddColumn("Error");
+
+ foreach (var (path, message) in errors)
+ {
+ errorTable.AddRow(Markup.Escape(path), Markup.Escape(message));
+ }
+
+ AnsiConsole.Write(errorTable);
+ return 1;
+ }
+
+ return 0;
+ }
+ }
+
+ internal sealed class IngestCommandSettings : VectorSearchCommandSettings
+ {
+ [CommandOption("-s|--source ")]
+ public string SourceDirectory { get; set; } = string.Empty;
+
+ [CommandOption("--preview-length ")]
+ public int PreviewLength { get; set; } = 240;
+
+ [CommandOption("--no-recursive")]
+ public bool NoRecursive { get; set; }
+
+ [CommandOption("--force")]
+ public bool Force { get; set; }
+
+ [CommandOption("--prune-missing")]
+ public bool PruneMissing { get; set; }
+
+ [CommandOption("--chunk-length ")]
+ public int ChunkLength { get; set; } = 600;
+
+ [CommandOption("--chunk-overlap ")]
+ public int ChunkOverlap { get; set; } = 100;
+
+ public bool Recursive => !NoRecursive;
+
+ public override ValidationResult Validate()
+ {
+ var baseResult = base.Validate();
+ if (!baseResult.Successful)
+ {
+ return baseResult;
+ }
+
+ if (string.IsNullOrWhiteSpace(SourceDirectory))
+ {
+ return ValidationResult.Error("A source directory is required.");
+ }
+
+ if (PreviewLength <= 0)
+ {
+ return ValidationResult.Error("--preview-length must be greater than zero.");
+ }
+
+ if (ChunkLength <= 0)
+ {
+ return ValidationResult.Error("--chunk-length must be greater than zero.");
+ }
+
+ if (ChunkOverlap < 0)
+ {
+ return ValidationResult.Error("--chunk-overlap must be zero or greater.");
+ }
+
+ if (ChunkOverlap >= ChunkLength)
+ {
+ return ValidationResult.Error("--chunk-overlap must be smaller than --chunk-length.");
+ }
+
+ return ValidationResult.Success();
+ }
+ }
+}
+
+
+
+
diff --git a/LiteDB.Demo.Tools.VectorSearch/Commands/SearchCommand.cs b/LiteDB.Demo.Tools.VectorSearch/Commands/SearchCommand.cs
new file mode 100644
index 000000000..b5c778a86
--- /dev/null
+++ b/LiteDB.Demo.Tools.VectorSearch/Commands/SearchCommand.cs
@@ -0,0 +1,165 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Threading;
+using System.Threading.Tasks;
+using LiteDB.Demo.Tools.VectorSearch.Embedding;
+using LiteDB.Demo.Tools.VectorSearch.Models;
+using LiteDB.Demo.Tools.VectorSearch.Services;
+using LiteDB.Demo.Tools.VectorSearch.Utilities;
+using Spectre.Console;
+using Spectre.Console.Cli;
+using ValidationResult = Spectre.Console.ValidationResult;
+
+namespace LiteDB.Demo.Tools.VectorSearch.Commands
+{
+ internal sealed class SearchCommand : AsyncCommand
+ {
+ public override async Task ExecuteAsync(CommandContext context, SearchCommandSettings settings)
+ {
+ using var documentStore = new DocumentStore(settings.DatabasePath);
+
+ var embeddingOptions = settings.CreateEmbeddingOptions();
+ using var embeddingService = await GeminiEmbeddingService.CreateAsync(embeddingOptions, CancellationToken.None);
+
+ var queryText = settings.Query;
+ if (string.IsNullOrWhiteSpace(queryText))
+ {
+ queryText = AnsiConsole.Ask("Enter a search prompt:");
+ }
+
+ if (string.IsNullOrWhiteSpace(queryText))
+ {
+ AnsiConsole.MarkupLine("[red]A non-empty query is required.[/]");
+ return 1;
+ }
+
+ var normalized = TextUtilities.NormalizeForEmbedding(queryText, embeddingOptions.MaxInputLength);
+ var queryEmbedding = await embeddingService.EmbedAsync(normalized, CancellationToken.None);
+
+ var chunkResults = documentStore.TopNearestChunks(queryEmbedding, settings.Top)
+ .Select(chunk => new SearchHit(chunk, VectorMath.CosineSimilarity(chunk.Embedding, queryEmbedding)))
+ .ToList();
+
+ if (settings.MaxDistance.HasValue)
+ {
+ chunkResults = chunkResults
+ .Where(hit => VectorMath.CosineDistance(hit.Chunk.Embedding, queryEmbedding) <= settings.MaxDistance.Value)
+ .ToList();
+ }
+
+ if (chunkResults.Count == 0)
+ {
+ AnsiConsole.MarkupLine("[yellow]No matching documents were found.[/]");
+ return 0;
+ }
+
+ chunkResults.Sort((left, right) => right.Similarity.CompareTo(left.Similarity));
+
+ var table = new Table().Border(TableBorder.Rounded);
+ table.AddColumn("#");
+ table.AddColumn("Score");
+ table.AddColumn("Document");
+ if (!settings.HidePath)
+ {
+ table.AddColumn("Path");
+ }
+ table.AddColumn("Snippet");
+
+ var rank = 1;
+ var documentCache = new Dictionary(StringComparer.OrdinalIgnoreCase);
+
+ foreach (var hit in chunkResults)
+ {
+ var snippet = hit.Chunk.Snippet;
+ if (snippet.Length > settings.PreviewLength)
+ {
+ snippet = snippet[..settings.PreviewLength] + "\u2026";
+ }
+
+ if (!documentCache.TryGetValue(hit.Chunk.Path, out var parentDocument))
+ {
+ parentDocument = documentStore.FindByPath(hit.Chunk.Path);
+ documentCache[hit.Chunk.Path] = parentDocument;
+ }
+
+ var chunkNumber = hit.Chunk.ChunkIndex + 1;
+ var documentLabel = parentDocument != null
+ ? $"{parentDocument.Title} (Chunk {chunkNumber})"
+ : $"Chunk {chunkNumber}";
+
+ var rowData = new List
+ {
+ Markup.Escape(rank.ToString()),
+ Markup.Escape(hit.Similarity.ToString("F3")),
+ Markup.Escape(documentLabel)
+ };
+
+ if (!settings.HidePath)
+ {
+ var pathValue = parentDocument?.Path ?? hit.Chunk.Path;
+ rowData.Add(Markup.Escape(pathValue));
+ }
+
+ rowData.Add(Markup.Escape(snippet));
+
+ table.AddRow(rowData.ToArray());
+
+ rank++;
+ }
+
+ AnsiConsole.Write(table);
+ return 0;
+ }
+
+ private sealed record SearchHit(IndexedDocumentChunk Chunk, double Similarity);
+ }
+
+ internal sealed class SearchCommandSettings : VectorSearchCommandSettings
+ {
+ [CommandOption("-q|--query ")]
+ public string? Query { get; set; }
+
+ [CommandOption("--top ")]
+ public int Top { get; set; } = 5;
+
+ [CommandOption("--max-distance ")]
+ public double? MaxDistance { get; set; }
+
+ [CommandOption("--preview-length ")]
+ public int PreviewLength { get; set; } = 160;
+
+ [CommandOption("--hide-path")]
+ public bool HidePath { get; set; }
+
+ public override ValidationResult Validate()
+ {
+ var baseResult = base.Validate();
+ if (!baseResult.Successful)
+ {
+ return baseResult;
+ }
+
+ if (Top <= 0)
+ {
+ return ValidationResult.Error("--top must be greater than zero.");
+ }
+
+ if (MaxDistance.HasValue && MaxDistance <= 0)
+ {
+ return ValidationResult.Error("--max-distance must be greater than zero when specified.");
+ }
+
+ if (PreviewLength <= 0)
+ {
+ return ValidationResult.Error("--preview-length must be greater than zero.");
+ }
+
+ return ValidationResult.Success();
+ }
+ }
+}
+
+
+
+
diff --git a/LiteDB.Demo.Tools.VectorSearch/Commands/VectorSearchCommandSettings.cs b/LiteDB.Demo.Tools.VectorSearch/Commands/VectorSearchCommandSettings.cs
new file mode 100644
index 000000000..3ef95ad43
--- /dev/null
+++ b/LiteDB.Demo.Tools.VectorSearch/Commands/VectorSearchCommandSettings.cs
@@ -0,0 +1,125 @@
+using System;
+using Spectre.Console.Cli;
+using LiteDB.Demo.Tools.VectorSearch.Configuration;
+using ValidationResult = Spectre.Console.ValidationResult;
+
+namespace LiteDB.Demo.Tools.VectorSearch.Commands
+{
+ internal abstract class VectorSearchCommandSettings : CommandSettings
+ {
+ private const string DefaultModel = "gemini-embedding-001";
+ private const string DefaultLocation = "us-central1";
+ private const string ApiKeyEnvironmentVariable = "GOOGLE_VERTEX_API_KEY";
+ private const string ApiKeyFallbackEnvironmentVariable = "GOOGLE_API_KEY";
+
+ [CommandOption("-d|--database ")]
+ public string DatabasePath { get; set; } = "vector-search.db";
+
+ [CommandOption("--project-id ")]
+ public string? ProjectId { get; set; }
+
+ [CommandOption("--location ")]
+ public string? Location { get; set; }
+
+ [CommandOption("--model ")]
+ public string? Model { get; set; }
+
+ [CommandOption("--api-key ")]
+ public string? ApiKey { get; set; }
+
+ [CommandOption("--max-input-length ")]
+ public int MaxInputLength { get; set; } = 7000;
+
+ public GeminiEmbeddingOptions CreateEmbeddingOptions()
+ {
+ var model = ResolveModel();
+ var apiKey = ResolveApiKey();
+
+ if (!string.IsNullOrWhiteSpace(apiKey))
+ {
+ return GeminiEmbeddingOptions.ForApiKey(apiKey!, model, MaxInputLength);
+ }
+
+ var projectId = ResolveProjectIdOrNull();
+ if (string.IsNullOrWhiteSpace(projectId))
+ {
+ throw new InvalidOperationException("Provide --api-key/GOOGLE_VERTEX_API_KEY or --project-id/GOOGLE_PROJECT_ID to configure Gemini embeddings.");
+ }
+
+ var location = ResolveLocation();
+ return GeminiEmbeddingOptions.ForServiceAccount(projectId!, location, model, MaxInputLength);
+ }
+
+ public override ValidationResult Validate()
+ {
+ if (MaxInputLength <= 0)
+ {
+ return ValidationResult.Error("--max-input-length must be greater than zero.");
+ }
+
+ if (string.IsNullOrWhiteSpace(DatabasePath))
+ {
+ return ValidationResult.Error("A database path must be provided.");
+ }
+
+ var hasApiKey = !string.IsNullOrWhiteSpace(ResolveApiKey());
+ var hasProject = !string.IsNullOrWhiteSpace(ResolveProjectIdOrNull());
+
+ if (!hasApiKey && !hasProject)
+ {
+ return ValidationResult.Error("Authentication required. Supply --api-key (or GOOGLE_VERTEX_API_KEY/GOOGLE_API_KEY) or --project-id (or GOOGLE_PROJECT_ID).");
+ }
+
+ return ValidationResult.Success();
+ }
+
+ private string? ResolveProjectIdOrNull()
+ {
+ if (!string.IsNullOrWhiteSpace(ProjectId))
+ {
+ return ProjectId;
+ }
+
+ var fromEnv = Environment.GetEnvironmentVariable("GOOGLE_PROJECT_ID");
+ return string.IsNullOrWhiteSpace(fromEnv) ? null : fromEnv;
+ }
+
+ private string ResolveLocation()
+ {
+ if (!string.IsNullOrWhiteSpace(Location))
+ {
+ return Location;
+ }
+
+ var fromEnv = Environment.GetEnvironmentVariable("GOOGLE_VERTEX_LOCATION");
+ return string.IsNullOrWhiteSpace(fromEnv) ? DefaultLocation : fromEnv;
+ }
+
+ private string ResolveModel()
+ {
+ if (!string.IsNullOrWhiteSpace(Model))
+ {
+ return Model;
+ }
+
+ var fromEnv = Environment.GetEnvironmentVariable("GOOGLE_VERTEX_EMBEDDING_MODEL");
+ return string.IsNullOrWhiteSpace(fromEnv) ? DefaultModel : fromEnv;
+ }
+
+ private string? ResolveApiKey()
+ {
+ if (!string.IsNullOrWhiteSpace(ApiKey))
+ {
+ return ApiKey;
+ }
+
+ var fromEnv = Environment.GetEnvironmentVariable(ApiKeyEnvironmentVariable);
+ if (string.IsNullOrWhiteSpace(fromEnv))
+ {
+ fromEnv = Environment.GetEnvironmentVariable(ApiKeyFallbackEnvironmentVariable);
+ }
+
+ return string.IsNullOrWhiteSpace(fromEnv) ? null : fromEnv;
+ }
+ }
+}
diff --git a/LiteDB.Demo.Tools.VectorSearch/Configuration/GeminiEmbeddingOptions.cs b/LiteDB.Demo.Tools.VectorSearch/Configuration/GeminiEmbeddingOptions.cs
new file mode 100644
index 000000000..47a981045
--- /dev/null
+++ b/LiteDB.Demo.Tools.VectorSearch/Configuration/GeminiEmbeddingOptions.cs
@@ -0,0 +1,95 @@
+using System;
+
namespace LiteDB.Demo.Tools.VectorSearch.Configuration
{
    /// <summary>
    /// Immutable configuration for the Gemini embedding service. Supports two
    /// authentication modes: API key (Generative Language API) and service account
    /// (Vertex AI, requiring project id + location). The model name is stored
    /// without the "models/" prefix; endpoint helpers re-add it where required.
    /// </summary>
    internal sealed class GeminiEmbeddingOptions
    {
        private const string ApiModelPrefix = "models/";

        private GeminiEmbeddingOptions(string? projectId, string? location, string model, int maxInputLength, string? apiKey)
        {
            // Validate the range first so callers get the right exception regardless of model value.
            if (maxInputLength <= 0)
            {
                throw new ArgumentOutOfRangeException(nameof(maxInputLength));
            }

            Model = TrimModelPrefix(model);
            ProjectId = projectId;
            Location = location;
            MaxInputLength = maxInputLength;
            ApiKey = string.IsNullOrWhiteSpace(apiKey) ? null : apiKey;
        }

        /// <summary>Creates options for Vertex AI service-account authentication.</summary>
        /// <exception cref="ArgumentNullException">When projectId or location is missing.</exception>
        public static GeminiEmbeddingOptions ForServiceAccount(string projectId, string location, string model, int maxInputLength)
        {
            if (string.IsNullOrWhiteSpace(projectId))
            {
                throw new ArgumentNullException(nameof(projectId));
            }

            if (string.IsNullOrWhiteSpace(location))
            {
                throw new ArgumentNullException(nameof(location));
            }

            return new GeminiEmbeddingOptions(projectId, location, model, maxInputLength, apiKey: null);
        }

        /// <summary>Creates options for API-key authentication.</summary>
        /// <exception cref="ArgumentNullException">When apiKey is missing or blank.</exception>
        public static GeminiEmbeddingOptions ForApiKey(string apiKey, string model, int maxInputLength)
        {
            if (string.IsNullOrWhiteSpace(apiKey))
            {
                throw new ArgumentNullException(nameof(apiKey));
            }

            return new GeminiEmbeddingOptions(projectId: null, location: null, model, maxInputLength, apiKey);
        }

        public string? ProjectId { get; }

        public string? Location { get; }

        /// <summary>Model name without the "models/" prefix.</summary>
        public string Model { get; }

        /// <summary>Maximum input length (in characters) passed to text normalization.</summary>
        public int MaxInputLength { get; }

        public string? ApiKey { get; }

        /// <summary>True when API-key authentication is configured.</summary>
        public bool UseApiKey => !string.IsNullOrWhiteSpace(ApiKey);

        /// <summary>Prediction endpoint for the Vertex AI (service-account) mode.</summary>
        /// <exception cref="InvalidOperationException">When project id or location is not configured.</exception>
        public string GetVertexEndpoint()
        {
            if (string.IsNullOrWhiteSpace(ProjectId) || string.IsNullOrWhiteSpace(Location))
            {
                throw new InvalidOperationException("Vertex endpoint requires both project id and location.");
            }

            return $"https://{Location}-aiplatform.googleapis.com/v1/projects/{ProjectId}/locations/{Location}/publishers/google/models/{Model}:predict";
        }

        /// <summary>embedContent endpoint for the Generative Language (API-key) mode.</summary>
        public string GetApiEndpoint()
        {
            return $"https://generativelanguage.googleapis.com/v1beta/{GetApiModelIdentifier()}:embedContent";
        }

        /// <summary>Model identifier in the "models/{name}" form the API expects.</summary>
        public string GetApiModelIdentifier()
        {
            // Model never carries the prefix after TrimModelPrefix, but guard consistently
            // (same OrdinalIgnoreCase comparison) in case that invariant ever changes.
            return Model.StartsWith(ApiModelPrefix, StringComparison.OrdinalIgnoreCase)
                ? Model
                : $"{ApiModelPrefix}{Model}";
        }

        private static string TrimModelPrefix(string model)
        {
            if (string.IsNullOrWhiteSpace(model))
            {
                throw new ArgumentNullException(nameof(model));
            }

            return model.StartsWith(ApiModelPrefix, StringComparison.OrdinalIgnoreCase)
                ? model.Substring(ApiModelPrefix.Length)
                : model;
        }
    }
}
diff --git a/LiteDB.Demo.Tools.VectorSearch/Embedding/GeminiEmbeddingService.cs b/LiteDB.Demo.Tools.VectorSearch/Embedding/GeminiEmbeddingService.cs
new file mode 100644
index 000000000..d475228b9
--- /dev/null
+++ b/LiteDB.Demo.Tools.VectorSearch/Embedding/GeminiEmbeddingService.cs
@@ -0,0 +1,190 @@
+using System;
+using System.Net.Http;
+using System.Net.Http.Headers;
+using System.Text;
+using System.Text.Json;
+using System.Threading;
+using System.Threading.Tasks;
+using Google.Apis.Auth.OAuth2;
+using LiteDB.Demo.Tools.VectorSearch.Configuration;
+
+namespace LiteDB.Demo.Tools.VectorSearch.Embedding
+{
+ internal sealed class GeminiEmbeddingService : IEmbeddingService, IDisposable
+ {
+ private static readonly JsonSerializerOptions SerializerOptions = new JsonSerializerOptions
+ {
+ PropertyNamingPolicy = JsonNamingPolicy.CamelCase
+ };
+
+ private readonly HttpClient _httpClient;
+ private readonly GeminiEmbeddingOptions _options;
+ private readonly ITokenAccess? _tokenAccessor;
+ private bool _disposed;
+
+ private GeminiEmbeddingService(HttpClient httpClient, GeminiEmbeddingOptions options, ITokenAccess? tokenAccessor)
+ {
+ _httpClient = httpClient ?? throw new ArgumentNullException(nameof(httpClient));
+ _options = options ?? throw new ArgumentNullException(nameof(options));
+ _tokenAccessor = tokenAccessor;
+ }
+
+ public static async Task CreateAsync(GeminiEmbeddingOptions options, CancellationToken cancellationToken)
+ {
+ if (options == null)
+ {
+ throw new ArgumentNullException(nameof(options));
+ }
+
+ ITokenAccess? tokenAccessor = null;
+
+ if (!options.UseApiKey)
+ {
+ var credential = await GoogleCredential.GetApplicationDefaultAsync(cancellationToken);
+ credential = credential.CreateScoped("https://www.googleapis.com/auth/cloud-platform");
+ tokenAccessor = credential;
+ }
+
+ var httpClient = new HttpClient();
+ return new GeminiEmbeddingService(httpClient, options, tokenAccessor);
+ }
+
+ public async Task EmbedAsync(string text, CancellationToken cancellationToken)
+ {
+ if (string.IsNullOrWhiteSpace(text))
+ {
+ throw new ArgumentException("Text must be provided for embedding.", nameof(text));
+ }
+
+ EnsureNotDisposed();
+
+ var normalized = text.Length <= _options.MaxInputLength
+ ? text
+ : text[.._options.MaxInputLength];
+
+ var endpoint = _options.UseApiKey ? _options.GetApiEndpoint() : _options.GetVertexEndpoint();
+ object payload = _options.UseApiKey
+ ? new
+ {
+ model = _options.GetApiModelIdentifier(),
+ content = new
+ {
+ parts = new[]
+ {
+ new
+ {
+ text = normalized
+ }
+ }
+ }
+ }
+ : new
+ {
+ instances = new[]
+ {
+ new
+ {
+ content = new
+ {
+ parts = new[]
+ {
+ new
+ {
+ text = normalized
+ }
+ }
+ }
+ }
+ }
+ };
+
+ using var request = new HttpRequestMessage(HttpMethod.Post, endpoint);
+ var json = System.Text.Json.JsonSerializer.Serialize(payload, SerializerOptions);
+ var content = new StringContent(json, Encoding.UTF8);
+ content.Headers.ContentType = new MediaTypeHeaderValue("application/json");
+ request.Content = content;
+
+ if (_options.UseApiKey)
+ {
+ request.Headers.TryAddWithoutValidation("x-goog-api-key", _options.ApiKey);
+ }
+ else
+ {
+ if (_tokenAccessor == null)
+ {
+ throw new InvalidOperationException("Google credentials are required when no API key is provided.");
+ }
+
+ var token = await _tokenAccessor.GetAccessTokenForRequestAsync(cancellationToken: cancellationToken);
+ request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", token);
+ }
+
+ using var response = await _httpClient.SendAsync(request, cancellationToken);
+
+ if (!response.IsSuccessStatusCode)
+ {
+ var details = await response.Content.ReadAsStringAsync(cancellationToken);
+ throw new InvalidOperationException($"Embedding request failed ({response.StatusCode}). {details}");
+ }
+
+ using var stream = await response.Content.ReadAsStreamAsync(cancellationToken);
+ using var document = await JsonDocument.ParseAsync(stream, cancellationToken: cancellationToken);
+
+ if (TryReadValues(document.RootElement, out var values))
+ {
+ return values;
+ }
+
+ throw new InvalidOperationException("Embedding response did not contain any vector values.");
+ }
+
+ private static bool TryReadValues(JsonElement root, out float[] values)
+ {
+ if (root.TryGetProperty("predictions", out var predictions) && predictions.GetArrayLength() > 0)
+ {
+ var embeddings = predictions[0].GetProperty("embeddings").GetProperty("values");
+ values = ReadFloatArray(embeddings);
+ return true;
+ }
+
+ if (root.TryGetProperty("embedding", out var embedding) && embedding.TryGetProperty("values", out var apiValues))
+ {
+ values = ReadFloatArray(apiValues);
+ return true;
+ }
+
+ values = Array.Empty();
+ return false;
+ }
+
+ private static float[] ReadFloatArray(JsonElement element)
+ {
+ var array = new float[element.GetArrayLength()];
+ for (var i = 0; i < array.Length; i++)
+ {
+ array[i] = (float)element[i].GetDouble();
+ }
+
+ return array;
+ }
+
+ private void EnsureNotDisposed()
+ {
+ if (_disposed)
+ {
+ throw new ObjectDisposedException(nameof(GeminiEmbeddingService));
+ }
+ }
+
+ public void Dispose()
+ {
+ if (_disposed)
+ {
+ return;
+ }
+
+ _httpClient.Dispose();
+ _disposed = true;
+ }
+ }
+}
diff --git a/LiteDB.Demo.Tools.VectorSearch/Embedding/IEmbeddingService.cs b/LiteDB.Demo.Tools.VectorSearch/Embedding/IEmbeddingService.cs
new file mode 100644
index 000000000..2b0f4ff17
--- /dev/null
+++ b/LiteDB.Demo.Tools.VectorSearch/Embedding/IEmbeddingService.cs
@@ -0,0 +1,10 @@
+using System.Threading;
+using System.Threading.Tasks;
+
+namespace LiteDB.Demo.Tools.VectorSearch.Embedding
+{
+ internal interface IEmbeddingService
+ {
+ Task EmbedAsync(string text, CancellationToken cancellationToken);
+ }
+}
diff --git a/LiteDB.Demo.Tools.VectorSearch/LiteDB.Demo.Tools.VectorSearch.csproj b/LiteDB.Demo.Tools.VectorSearch/LiteDB.Demo.Tools.VectorSearch.csproj
new file mode 100644
index 000000000..97e2ef33d
--- /dev/null
+++ b/LiteDB.Demo.Tools.VectorSearch/LiteDB.Demo.Tools.VectorSearch.csproj
@@ -0,0 +1,19 @@
+
+
+
+ Exe
+ net8.0
+ enable
+ enable
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/LiteDB.Demo.Tools.VectorSearch/Models/IndexedDocument.cs b/LiteDB.Demo.Tools.VectorSearch/Models/IndexedDocument.cs
new file mode 100644
index 000000000..deb4c5c67
--- /dev/null
+++ b/LiteDB.Demo.Tools.VectorSearch/Models/IndexedDocument.cs
@@ -0,0 +1,27 @@
+using System;
+using LiteDB;
+
+namespace LiteDB.Demo.Tools.VectorSearch.Models
+{
+ public sealed class IndexedDocument
+ {
+ public ObjectId Id { get; set; } = ObjectId.Empty;
+
+ public string Path { get; set; } = string.Empty;
+
+ public string Title { get; set; } = string.Empty;
+
+ public string Preview { get; set; } = string.Empty;
+
+ public float[] Embedding { get; set; } = Array.Empty();
+
+ public DateTime LastModifiedUtc { get; set; }
+
+ public long SizeBytes { get; set; }
+
+ public string ContentHash { get; set; } = string.Empty;
+
+ public DateTime IngestedUtc { get; set; }
+ }
+}
+
diff --git a/LiteDB.Demo.Tools.VectorSearch/Models/IndexedDocumentChunk.cs b/LiteDB.Demo.Tools.VectorSearch/Models/IndexedDocumentChunk.cs
new file mode 100644
index 000000000..a4b6e9ce9
--- /dev/null
+++ b/LiteDB.Demo.Tools.VectorSearch/Models/IndexedDocumentChunk.cs
@@ -0,0 +1,19 @@
+using System;
+using LiteDB;
+
+namespace LiteDB.Demo.Tools.VectorSearch.Models
+{
+ public sealed class IndexedDocumentChunk
+ {
+ public ObjectId Id { get; set; } = ObjectId.Empty;
+
+ public string Path { get; set; } = string.Empty;
+
+ public int ChunkIndex { get; set; }
+
+ public string Snippet { get; set; } = string.Empty;
+
+ public float[] Embedding { get; set; } = Array.Empty();
+ }
+}
+
diff --git a/LiteDB.Demo.Tools.VectorSearch/Program.cs b/LiteDB.Demo.Tools.VectorSearch/Program.cs
new file mode 100644
index 000000000..ec54e8c56
--- /dev/null
+++ b/LiteDB.Demo.Tools.VectorSearch/Program.cs
@@ -0,0 +1,40 @@
+using System.Diagnostics;
+using System.Threading.Tasks;
+using LiteDB.Demo.Tools.VectorSearch.Commands;
+using Spectre.Console;
+using Spectre.Console.Cli;
+
+namespace LiteDB.Demo.Tools.VectorSearch
+{
+ public static class Program
+ {
+ public static async Task Main(string[] args)
+ {
+ var app = new CommandApp();
+
+ app.Configure(config =>
+ {
+ config.SetApplicationName("litedb-vector-search");
+ config.SetExceptionHandler(ex =>
+ {
+ AnsiConsole.WriteException(ex, ExceptionFormats.ShortenEverything);
+ return -1;
+ });
+
+ config.AddCommand("ingest")
+ .WithDescription("Embed text documents from a folder into LiteDB for vector search.");
+
+ config.AddCommand("search")
+ .WithDescription("Search previously embedded documents using vector similarity.");
+
+ if (Debugger.IsAttached)
+ {
+ config.PropagateExceptions();
+ }
+ });
+
+ return await app.RunAsync(args);
+ }
+ }
+}
+
diff --git a/LiteDB.Demo.Tools.VectorSearch/Readme.md b/LiteDB.Demo.Tools.VectorSearch/Readme.md
new file mode 100644
index 000000000..e21692d7a
--- /dev/null
+++ b/LiteDB.Demo.Tools.VectorSearch/Readme.md
@@ -0,0 +1,39 @@
+## Vector Search Demo CLI
+
+`LiteDB.Demo.Tools.VectorSearch` showcases the new vector index APIs with an end-to-end ingestion and query experience. It embeds text documents using Google Gemini embeddings and persists metadata plus the resulting vectors in LiteDB.
+
+### Requirements
+
+- Supply Gemini credentials using **one** of the following approaches:
+ - API key with `--api-key`, `GOOGLE_VERTEX_API_KEY`, or `GOOGLE_API_KEY` (Get from [AI Studio](https://aistudio.google.com/api-keys))
+ - Service account credentials via `GOOGLE_APPLICATION_CREDENTIALS` (or other default `GoogleCredential` providers) together with project metadata.
+- When targeting Vertex AI with a service account, the following settings apply (optionally via command options):
+ - `GOOGLE_PROJECT_ID`
+ - `GOOGLE_VERTEX_LOCATION` (defaults to `us-central1`)
+- Model selection is controlled with `--model` or `GOOGLE_VERTEX_EMBEDDING_MODEL` and defaults to `gemini-embedding-001`.
+
+### Usage
+
+Restore and build the demo project:
+
+```bash
+dotnet build LiteDB.Demo.Tools.VectorSearch.csproj -c Release
+```
+
+Index a folder of `.txt`/`.md` files (API key example):
+
+```bash
dotnet run --project LiteDB.Demo.Tools.VectorSearch.csproj -- ingest --source ./docs --database vector.db --api-key "$GOOGLE_VERTEX_API_KEY"
+```
+
+Run a semantic search over the ingested content (Vertex AI example):
+
+```bash
+dotnet run --project LiteDB.Demo.Tools.VectorSearch.csproj -- search --database vector.db --query "Explain document storage guarantees"
+```
+
+Use `--help` on either command to list all supported options (preview length, pruning behaviour, auth mode, custom model identifiers, etc.).
+
+## License
+
+[MIT](http://opensource.org/licenses/MIT)
\ No newline at end of file
diff --git a/LiteDB.Demo.Tools.VectorSearch/Services/DocumentStore.cs b/LiteDB.Demo.Tools.VectorSearch/Services/DocumentStore.cs
new file mode 100644
index 000000000..db9db0c0e
--- /dev/null
+++ b/LiteDB.Demo.Tools.VectorSearch/Services/DocumentStore.cs
@@ -0,0 +1,156 @@
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using LiteDB;
+using LiteDB.Demo.Tools.VectorSearch.Models;
+using LiteDB.Vector;
+
+namespace LiteDB.Demo.Tools.VectorSearch.Services
+{
+ internal sealed class DocumentStore : IDisposable
+ {
+ private const string DocumentCollectionName = "documents";
+ private const string ChunkCollectionName = "chunks";
+
+ private readonly LiteDatabase _database;
+ private readonly ILiteCollection _documents;
+ private readonly ILiteCollection _chunks;
+ private ushort? _chunkVectorDimensions;
+
+ public DocumentStore(string databasePath)
+ {
+ if (string.IsNullOrWhiteSpace(databasePath))
+ {
+ throw new ArgumentException("Database path must be provided.", nameof(databasePath));
+ }
+
+ var fullPath = Path.GetFullPath(databasePath);
+ _database = new LiteDatabase(fullPath);
+ _documents = _database.GetCollection(DocumentCollectionName);
+ _documents.EnsureIndex(x => x.Path, true);
+
+ _chunks = _database.GetCollection(ChunkCollectionName);
+ _chunks.EnsureIndex(x => x.Path);
+ _chunks.EnsureIndex(x => x.ChunkIndex);
+ }
+
+ public IndexedDocument? FindByPath(string absolutePath)
+ {
+ if (string.IsNullOrWhiteSpace(absolutePath))
+ {
+ return null;
+ }
+
+ return _documents.FindOne(x => x.Path == absolutePath);
+ }
+
+ public void EnsureChunkVectorIndex(int dimensions)
+ {
+ if (dimensions <= 0)
+ {
+ throw new ArgumentOutOfRangeException(nameof(dimensions), dimensions, "Vector dimensions must be positive.");
+ }
+
+ var targetDimensions = (ushort)dimensions;
+ if (_chunkVectorDimensions == targetDimensions)
+ {
+ return;
+ }
+
+ _chunks.EnsureIndex(x => x.Embedding, new VectorIndexOptions(targetDimensions, VectorDistanceMetric.Cosine));
+ _chunkVectorDimensions = targetDimensions;
+ }
+
+ public void Upsert(IndexedDocument document)
+ {
+ if (document == null)
+ {
+ throw new ArgumentNullException(nameof(document));
+ }
+
+ _documents.Upsert(document);
+ }
+
+ public void ReplaceDocumentChunks(string documentPath, IEnumerable chunks)
+ {
+ if (string.IsNullOrWhiteSpace(documentPath))
+ {
+ throw new ArgumentException("Document path must be provided.", nameof(documentPath));
+ }
+
+ _chunks.DeleteMany(chunk => chunk.Path == documentPath);
+
+ if (chunks == null)
+ {
+ return;
+ }
+
+ foreach (var chunk in chunks)
+ {
+ if (chunk == null)
+ {
+ continue;
+ }
+
+ chunk.Path = documentPath;
+
+ if (chunk.Id == ObjectId.Empty)
+ {
+ chunk.Id = ObjectId.NewObjectId();
+ }
+
+ _chunks.Insert(chunk);
+ }
+ }
+
+ public IEnumerable TopNearestChunks(float[] embedding, int count)
+ {
+ if (embedding == null)
+ {
+ throw new ArgumentNullException(nameof(embedding));
+ }
+
+ if (count <= 0)
+ {
+ throw new ArgumentOutOfRangeException(nameof(count), count, "Quantity must be positive.");
+ }
+
+ EnsureChunkVectorIndex(embedding.Length);
+
+ return _chunks.Query()
+ .TopKNear(x => x.Embedding, embedding, count)
+ .ToEnumerable();
+ }
+
+ public IReadOnlyCollection GetTrackedPaths()
+ {
+ return _documents.FindAll()
+ .Select(doc => doc.Path)
+ .Where(path => !string.IsNullOrWhiteSpace(path))
+ .Distinct(StringComparer.OrdinalIgnoreCase)
+ .ToArray();
+ }
+
+ public void RemoveMissingDocuments(IEnumerable existingDocumentPaths)
+ {
+ if (existingDocumentPaths == null)
+ {
+ return;
+ }
+
+ var keep = new HashSet(existingDocumentPaths, StringComparer.OrdinalIgnoreCase);
+
+ foreach (var doc in _documents.FindAll().Where(doc => !keep.Contains(doc.Path)))
+ {
+ _documents.Delete(doc.Id);
+ _chunks.DeleteMany(chunk => chunk.Path == doc.Path);
+ }
+ }
+
+ public void Dispose()
+ {
+ _database.Dispose();
+ }
+ }
+}
diff --git a/LiteDB.Demo.Tools.VectorSearch/Utilities/TextUtilities.cs b/LiteDB.Demo.Tools.VectorSearch/Utilities/TextUtilities.cs
new file mode 100644
index 000000000..1ead98ed9
--- /dev/null
+++ b/LiteDB.Demo.Tools.VectorSearch/Utilities/TextUtilities.cs
@@ -0,0 +1,176 @@
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Security.Cryptography;
+using System.Text;
+
+namespace LiteDB.Demo.Tools.VectorSearch.Utilities
+{
+ internal static class TextUtilities
+ {
+ private static readonly HashSet _supportedExtensions = new(StringComparer.OrdinalIgnoreCase)
+ {
+ ".txt",
+ ".md",
+ ".markdown",
+ ".mdown"
+ };
+
+ private static readonly char[] _chunkBreakCharacters = { '\n', ' ', '\t' };
+
+ public static bool IsSupportedDocument(string path)
+ {
+ if (string.IsNullOrWhiteSpace(path))
+ {
+ return false;
+ }
+
+ var extension = Path.GetExtension(path);
+ return !string.IsNullOrEmpty(extension) && _supportedExtensions.Contains(extension);
+ }
+
+ public static string ReadDocument(string path)
+ {
+ return File.ReadAllText(path);
+ }
+
+ public static string NormalizeForEmbedding(string content, int maxLength)
+ {
+ if (string.IsNullOrWhiteSpace(content))
+ {
+ return string.Empty;
+ }
+
+ if (maxLength <= 0)
+ {
+ return string.Empty;
+ }
+
+ var normalized = content.Replace("\r\n", "\n", StringComparison.Ordinal)
+ .Replace('\r', '\n')
+ .Trim();
+
+ if (normalized.Length <= maxLength)
+ {
+ return normalized;
+ }
+
+ return normalized[..maxLength];
+ }
+
+ public static string BuildPreview(string content, int maxLength)
+ {
+ if (string.IsNullOrWhiteSpace(content) || maxLength <= 0)
+ {
+ return string.Empty;
+ }
+
+ var collapsed = new StringBuilder(Math.Min(content.Length, maxLength));
+ var previousWhitespace = false;
+
+ foreach (var ch in content)
+ {
+ if (char.IsControl(ch) && ch != '\n' && ch != '\t')
+ {
+ continue;
+ }
+
+ if (char.IsWhiteSpace(ch))
+ {
+ if (!previousWhitespace)
+ {
+ collapsed.Append(' ');
+ }
+
+ previousWhitespace = true;
+ }
+ else
+ {
+ previousWhitespace = false;
+ collapsed.Append(ch);
+ }
+
+ if (collapsed.Length >= maxLength)
+ {
+ break;
+ }
+ }
+
+ var preview = collapsed.ToString().Trim();
+ return preview.Length <= maxLength ? preview : preview[..maxLength];
+ }
+
+ public static string ComputeContentHash(string content)
+ {
+ if (content == null)
+ {
+ return string.Empty;
+ }
+
+ using var sha256 = SHA256.Create();
+ var bytes = Encoding.UTF8.GetBytes(content);
+ var hash = sha256.ComputeHash(bytes);
+
+ return Convert.ToHexString(hash);
+ }
+
+ public static IEnumerable SplitIntoChunks(string content, int chunkLength, int chunkOverlap)
+ {
+ if (string.IsNullOrWhiteSpace(content))
+ {
+ yield break;
+ }
+
+ if (chunkLength <= 0)
+ {
+ yield break;
+ }
+
+ if (chunkOverlap < 0 || chunkOverlap >= chunkLength)
+ {
+ throw new ArgumentOutOfRangeException(nameof(chunkOverlap), chunkOverlap, "Chunk overlap must be non-negative and smaller than the chunk length.");
+ }
+
+ var normalized = content.Replace("\r\n", "\n", StringComparison.Ordinal)
+ .Replace('\r', '\n');
+
+ var step = chunkLength - chunkOverlap;
+ var position = 0;
+
+ if (step <= 0)
+ {
+ yield break;
+ }
+
+ while (position < normalized.Length)
+ {
+ var remaining = normalized.Length - position;
+ var take = Math.Min(chunkLength, remaining);
+ var window = normalized.Substring(position, take);
+
+ if (take == chunkLength && position + take < normalized.Length)
+ {
+ var lastBreak = window.LastIndexOfAny(_chunkBreakCharacters);
+ if (lastBreak >= step)
+ {
+ window = window[..lastBreak];
+ take = window.Length;
+ }
+ }
+
+ var chunk = window.Trim();
+ if (!string.IsNullOrWhiteSpace(chunk))
+ {
+ yield return chunk;
+ }
+
+ if (position + take >= normalized.Length)
+ {
+ yield break;
+ }
+
+ position += step;
+ }
+ }
+ }
+}
diff --git a/LiteDB.Demo.Tools.VectorSearch/Utilities/VectorMath.cs b/LiteDB.Demo.Tools.VectorSearch/Utilities/VectorMath.cs
new file mode 100644
index 000000000..b3609896f
--- /dev/null
+++ b/LiteDB.Demo.Tools.VectorSearch/Utilities/VectorMath.cs
@@ -0,0 +1,50 @@
+using System;
+using System.Collections.Generic;
+
+namespace LiteDB.Demo.Tools.VectorSearch.Utilities
+{
+ internal static class VectorMath
+ {
+ public static double CosineSimilarity(IReadOnlyList left, IReadOnlyList right)
+ {
+ if (left == null || right == null)
+ {
+ return 0d;
+ }
+
+ var length = Math.Min(left.Count, right.Count);
+
+ if (length == 0)
+ {
+ return 0d;
+ }
+
+ double dot = 0d;
+ double leftMagnitude = 0d;
+ double rightMagnitude = 0d;
+
+ for (var i = 0; i < length; i++)
+ {
+ var l = left[i];
+ var r = right[i];
+
+ dot += l * r;
+ leftMagnitude += l * l;
+ rightMagnitude += r * r;
+ }
+
+ if (leftMagnitude <= double.Epsilon || rightMagnitude <= double.Epsilon)
+ {
+ return 0d;
+ }
+
+ return dot / (Math.Sqrt(leftMagnitude) * Math.Sqrt(rightMagnitude));
+ }
+
+ public static double CosineDistance(IReadOnlyList left, IReadOnlyList right)
+ {
+ var similarity = CosineSimilarity(left, right);
+ return 1d - similarity;
+ }
+ }
+}
diff --git a/LiteDB.Tests/BsonValue/BsonVector_Tests.cs b/LiteDB.Tests/BsonValue/BsonVector_Tests.cs
new file mode 100644
index 000000000..19dd0916a
--- /dev/null
+++ b/LiteDB.Tests/BsonValue/BsonVector_Tests.cs
@@ -0,0 +1,323 @@
+using System.Collections.Generic;
+using System.Linq;
+using FluentAssertions;
+using LiteDB.Vector;
+using Xunit;
+
+namespace LiteDB.Tests.BsonValue_Types;
+
/// <summary>
/// Exercises BsonVector serialization, ordering, and the VECTOR_SIM /
/// WhereNear / TopKNear query surface. Generic arguments lost in the patch
/// text (GetCollection&lt;VectorDoc&gt;, List&lt;BsonValue&gt;) are restored.
/// </summary>
public class BsonVector_Tests
{
    private static readonly Collation _collation = Collation.Binary;
    private static readonly BsonDocument _root = new BsonDocument();

    [Fact]
    public void BsonVector_RoundTrip_Success()
    {
        var original = new BsonDocument
        {
            ["vec"] = new BsonVector(new float[] { 1.0f, 2.5f, -3.75f })
        };

        var bytes = BsonSerializer.Serialize(original);
        var deserialized = BsonSerializer.Deserialize(bytes);

        var vec = deserialized["vec"].AsVector;
        Assert.Equal(3, vec.Length);
        Assert.Equal(1.0f, vec[0]);
        Assert.Equal(2.5f, vec[1]);
        Assert.Equal(-3.75f, vec[2]);
    }

    [Fact]
    public void BsonVector_RoundTrip_UInt16Limit()
    {
        // Vector length is stored as ushort; the maximum length must round-trip.
        var values = Enumerable.Range(0, ushort.MaxValue).Select(i => (float)(i % 32)).ToArray();

        var original = new BsonDocument
        {
            ["vec"] = new BsonVector(values)
        };

        var bytes = BsonSerializer.Serialize(original);
        var deserialized = BsonSerializer.Deserialize(bytes);

        deserialized["vec"].AsVector.Should().Equal(values);
    }

    private class VectorDoc
    {
        public int Id { get; set; }
        public float[] Embedding { get; set; }
    }

    [Fact]
    public void VectorSim_Query_ReturnsExpectedNearest()
    {
        using var db = new LiteDatabase(":memory:");
        var col = db.GetCollection<VectorDoc>("vectors");

        // Insert vectorized documents
        col.Insert(new VectorDoc { Id = 1, Embedding = new float[] { 1.0f, 0.0f } });
        col.Insert(new VectorDoc { Id = 2, Embedding = new float[] { 0.0f, 1.0f } });
        col.Insert(new VectorDoc { Id = 3, Embedding = new float[] { 1.0f, 1.0f } });

        // Create index on the embedding field (if applicable to your implementation)
        col.EnsureIndex("Embedding", "Embedding");

        // Query: Find vectors nearest to [1, 0]
        var target = new float[] { 1.0f, 0.0f };
        var results = col.Query()
            .WhereNear(r => r.Embedding, [1.0f, 0.0f], maxDistance: .28)
            .ToList();

        results.Should().NotBeEmpty();
        results.Select(x => x.Id).Should().Contain(1);
        results.Select(x => x.Id).Should().NotContain(2);
        results.Select(x => x.Id).Should().NotContain(3); // too far away
    }

    [Fact]
    public void VectorSim_Query_WhereVectorSimilar_AppliesAlias()
    {
        using var db = new LiteDatabase(":memory:");
        var col = db.GetCollection<VectorDoc>("vectors");

        col.Insert(new VectorDoc { Id = 1, Embedding = new float[] { 1.0f, 0.0f } });
        col.Insert(new VectorDoc { Id = 2, Embedding = new float[] { 0.0f, 1.0f } });
        col.Insert(new VectorDoc { Id = 3, Embedding = new float[] { 1.0f, 1.0f } });

        var target = new float[] { 1.0f, 0.0f };

        var nearResults = col.Query()
            .WhereNear(r => r.Embedding, target, maxDistance: .28)
            .ToList()
            .Select(r => r.Id)
            .OrderBy(id => id)
            .ToList();

        var similarResults = col.Query()
            .WhereNear(r => r.Embedding, target, maxDistance: .28)
            .ToList()
            .Select(r => r.Id)
            .OrderBy(id => id)
            .ToList();

        similarResults.Should().Equal(nearResults);
    }

    [Fact]
    public void VectorSim_Query_BsonExpressionOverload_ReturnsExpectedNearest()
    {
        using var db = new LiteDatabase(":memory:");
        var col = db.GetCollection<VectorDoc>("vectors");

        col.Insert(new VectorDoc { Id = 1, Embedding = new float[] { 1.0f, 0.0f } });
        col.Insert(new VectorDoc { Id = 2, Embedding = new float[] { 0.0f, 1.0f } });
        col.Insert(new VectorDoc { Id = 3, Embedding = new float[] { 1.0f, 1.0f } });

        var target = new float[] { 1.0f, 0.0f };
        var fieldExpr = BsonExpression.Create("$.Embedding");

        var results = col.Query()
            .WhereNear(fieldExpr, target, maxDistance: .28)
            .ToList();

        results.Select(x => x.Id).Should().ContainSingle(id => id == 1);
    }

    [Fact]
    public void VectorSim_ExpressionQuery_WorksViaSQL()
    {
        using var db = new LiteDatabase(":memory:");
        var col = db.GetCollection("vectors");

        col.Insert(new BsonDocument
        {
            ["_id"] = 1,
            ["Embedding"] = new BsonVector(new float[] { 1.0f, 0.0f })
        });
        col.Insert(new BsonDocument
        {
            ["_id"] = 2,
            ["Embedding"] = new BsonVector(new float[] { 0.0f, 1.0f })
        });
        col.Insert(new BsonDocument
        {
            ["_id"] = 3,
            ["Embedding"] = new BsonVector(new float[] { 1.0f, 1.0f })
        });

        var query = "SELECT * FROM vectors WHERE $.Embedding VECTOR_SIM [1.0, 0.0] <= 0.25";
        var rawResults = db.Execute(query).ToList();

        // Results may arrive either as plain documents or wrapped in an "expr" array.
        var docs = rawResults
            .Where(r => r.IsDocument)
            .SelectMany(r =>
            {
                var doc = r.AsDocument;
                if (doc.TryGetValue("expr", out var expr) && expr.IsArray)
                {
                    return expr.AsArray
                        .Where(x => x.IsDocument)
                        .Select(x => x.AsDocument);
                }

                return new[] { doc };
            })
            .ToList();

        docs.Select(d => d["_id"].AsInt32).Should().Contain(1);
        docs.Select(d => d["_id"].AsInt32).Should().NotContain(2);
        docs.Select(d => d["_id"].AsInt32).Should().NotContain(3); // cosine ~ 0.293
    }

    [Fact]
    public void VectorSim_InfixExpression_ParsesAndEvaluates()
    {
        var expr = BsonExpression.Create("$.Embedding VECTOR_SIM [1.0, 0.0]");

        expr.Type.Should().Be(BsonExpressionType.VectorSim);

        var doc = new BsonDocument
        {
            ["Embedding"] = new BsonArray { 1.0, 0.0 }
        };

        var result = expr.ExecuteScalar(doc);

        result.IsDouble.Should().BeTrue();
        result.AsDouble.Should().BeApproximately(0.0, 1e-6);
    }

    [Fact]
    public void VectorSim_FunctionCall_ParsesAndEvaluates()
    {
        var expr = BsonExpression.Create("VECTOR_SIM($.Embedding, [1.0, 0.0])");

        expr.Type.Should().Be(BsonExpressionType.VectorSim);

        var doc = new BsonDocument
        {
            ["Embedding"] = new BsonArray { 1.0, 0.0 }
        };

        var result = expr.ExecuteScalar(doc);

        result.IsDouble.Should().BeTrue();
        result.AsDouble.Should().BeApproximately(0.0, 1e-6);
    }

    [Fact]
    public void VectorSim_ReturnsZero_ForIdenticalVectors()
    {
        var left = new BsonArray { 1.0, 0.0 };
        var right = new BsonVector(new float[] { 1.0f, 0.0f });

        var result = BsonExpressionMethods.VECTOR_SIM(left, right);

        Assert.NotNull(result);
        Assert.True(result.IsDouble);
        Assert.Equal(0.0, result.AsDouble, 6); // Cosine distance = 0.0
    }

    [Fact]
    public void VectorSim_ReturnsOne_ForOrthogonalVectors()
    {
        var left = new BsonArray { 1.0, 0.0 };
        var right = new BsonVector(new float[] { 0.0f, 1.0f });

        var result = BsonExpressionMethods.VECTOR_SIM(left, right);

        Assert.NotNull(result);
        Assert.True(result.IsDouble);
        Assert.Equal(1.0, result.AsDouble, 6); // Cosine distance = 1.0
    }

    [Fact]
    public void VectorSim_ReturnsNull_ForInvalidInput()
    {
        var left = new BsonArray { "a", "b" };
        var right = new BsonVector(new float[] { 1.0f, 0.0f });

        var result = BsonExpressionMethods.VECTOR_SIM(left, right);

        Assert.True(result.IsNull);
    }

    [Fact]
    public void VectorSim_ReturnsNull_ForMismatchedLengths()
    {
        var left = new BsonArray { 1.0, 2.0, 3.0 };
        var right = new BsonVector(new float[] { 1.0f, 2.0f });

        var result = BsonExpressionMethods.VECTOR_SIM(left, right);

        Assert.True(result.IsNull);
    }

    [Fact]
    public void VectorSim_TopK_ReturnsCorrectOrder()
    {
        using var db = new LiteDatabase(":memory:");
        var col = db.GetCollection<VectorDoc>("vectors");

        col.Insert(new VectorDoc { Id = 1, Embedding = new float[] { 1.0f, 0.0f } }); // sim = 0.0
        col.Insert(new VectorDoc { Id = 2, Embedding = new float[] { 0.0f, 1.0f } }); // sim = 1.0
        col.Insert(new VectorDoc { Id = 3, Embedding = new float[] { 1.0f, 1.0f } }); // sim ≈ 0.293

        var target = new float[] { 1.0f, 0.0f };

        var results = col.Query()
            .TopKNear(x => x.Embedding, target, 2)
            .ToList();

        var ids = results.Select(r => r.Id).ToList();
        ids.Should().BeEquivalentTo(new[] { 1, 3 }, options => options.WithStrictOrdering());
    }

    [Fact]
    public void BsonVector_CompareTo_SortsLexicographically()
    {
        var values = new List<BsonValue>
        {
            new BsonVector(new float[] { 1.0f }),
            new BsonVector(new float[] { 0.0f, 2.0f }),
            new BsonVector(new float[] { 0.0f, 1.0f, 0.5f }),
            new BsonVector(new float[] { 0.0f, 1.0f })
        };

        values.Sort();

        values.Should().Equal(
            new BsonVector(new float[] { 0.0f, 1.0f }),
            new BsonVector(new float[] { 0.0f, 1.0f, 0.5f }),
            new BsonVector(new float[] { 0.0f, 2.0f }),
            new BsonVector(new float[] { 1.0f }));
    }

    [Fact]
    public void BsonVector_Index_OrderIsDeterministic()
    {
        using var db = new LiteDatabase(":memory:");
        var col = db.GetCollection<VectorDoc>("vectors");

        var docs = new[]
        {
            new VectorDoc { Id = 1, Embedding = new float[] { 0.0f, 1.0f } },
            new VectorDoc { Id = 2, Embedding = new float[] { 0.0f, 1.0f, 0.5f } },
            new VectorDoc { Id = 3, Embedding = new float[] { 0.0f, 2.0f } },
            new VectorDoc { Id = 4, Embedding = new float[] { 1.0f } }
        };

        col.InsertBulk(docs);

        col.EnsureIndex(x => x.Embedding);

        var ordered = col.Query().OrderBy(x => x.Embedding).ToList();

        ordered.Select(x => x.Id).Should().Equal(1, 2, 3, 4);
    }
}
diff --git a/LiteDB.Tests/Document/Decimal_Tests.cs b/LiteDB.Tests/Document/Decimal_Tests.cs
index c29d2a063..682712f40 100644
--- a/LiteDB.Tests/Document/Decimal_Tests.cs
+++ b/LiteDB.Tests/Document/Decimal_Tests.cs
@@ -10,8 +10,8 @@ public void BsonValue_New_Decimal_Type()
{
var d0 = 0m;
var d1 = 1m;
- var dmin = new BsonValue(decimal.MinValue);
- var dmax = new BsonValue(decimal.MaxValue);
+ var dmin = new LiteDB.BsonValue(decimal.MinValue);
+ var dmax = new LiteDB.BsonValue(decimal.MaxValue);
JsonSerializer.Serialize(d0).Should().Be("{\"$numberDecimal\":\"0\"}");
JsonSerializer.Serialize(d1).Should().Be("{\"$numberDecimal\":\"1\"}");
diff --git a/LiteDB.Tests/Document/Implicit_Tests.cs b/LiteDB.Tests/Document/Implicit_Tests.cs
index b81468e3c..51ce3b9a6 100644
--- a/LiteDB.Tests/Document/Implicit_Tests.cs
+++ b/LiteDB.Tests/Document/Implicit_Tests.cs
@@ -13,9 +13,9 @@ public void BsonValue_Implicit_Convert()
long l = long.MaxValue;
ulong u = ulong.MaxValue;
- BsonValue bi = i;
- BsonValue bl = l;
- BsonValue bu = u;
+ LiteDB.BsonValue bi = i;
+ LiteDB.BsonValue bl = l;
+ LiteDB.BsonValue bu = u;
bi.IsInt32.Should().BeTrue();
bl.IsInt64.Should().BeTrue();
@@ -35,7 +35,7 @@ public void BsonDocument_Inner()
customer["CreateDate"] = DateTime.Now;
customer["Phones"] = new BsonArray { "8000-0000", "9000-000" };
customer["IsActive"] = true;
- customer["IsAdmin"] = new BsonValue(true);
+ customer["IsAdmin"] = new LiteDB.BsonValue(true);
customer["Address"] = new BsonDocument
{
["Street"] = "Av. Protasio Alves"
diff --git a/LiteDB.Tests/Document/Json_Tests.cs b/LiteDB.Tests/Document/Json_Tests.cs
index 154e79532..2bf162caf 100644
--- a/LiteDB.Tests/Document/Json_Tests.cs
+++ b/LiteDB.Tests/Document/Json_Tests.cs
@@ -94,5 +94,18 @@ public void Json_DoubleNaN_Tests()
Assert.False(double.IsNegativeInfinity(bson["doubleNegativeInfinity"].AsDouble));
Assert.False(double.IsPositiveInfinity(bson["doublePositiveInfinity"].AsDouble));
}
+
+ [Fact]
+ public void Json_Writes_BsonVector_As_Array()
+ {
+ var document = new BsonDocument
+ {
+ ["Embedding"] = new BsonVector(new float[] { 1.0f, 2.5f, -3.75f })
+ };
+
+ var json = JsonSerializer.Serialize(document);
+
+ json.Should().Contain("\"Embedding\":[1.0,2.5,-3.75]");
+ }
}
}
\ No newline at end of file
diff --git a/LiteDB.Tests/Engine/DropCollection_Tests.cs b/LiteDB.Tests/Engine/DropCollection_Tests.cs
index 0d81ef06b..ded4d9435 100644
--- a/LiteDB.Tests/Engine/DropCollection_Tests.cs
+++ b/LiteDB.Tests/Engine/DropCollection_Tests.cs
@@ -1,12 +1,31 @@
-using System.Linq;
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using System.Reflection;
using FluentAssertions;
+using LiteDB;
+using LiteDB.Engine;
using LiteDB.Tests.Utils;
+using LiteDB.Vector;
using Xunit;
namespace LiteDB.Tests.Engine
{
public class DropCollection_Tests
{
        // Minimal POCO used by the vector-index drop tests below.
        private class VectorDocument
        {
            public int Id { get; set; }

            // Embedding payload targeted by the vector index under test.
            public float[] Embedding { get; set; }
        }
+
+ private const string VectorIndexName = "embedding_idx";
+
+ private static readonly FieldInfo EngineField = typeof(LiteDatabase).GetField("_engine", BindingFlags.NonPublic | BindingFlags.Instance);
+ private static readonly MethodInfo AutoTransactionMethod = typeof(LiteEngine).GetMethod("AutoTransaction", BindingFlags.NonPublic | BindingFlags.Instance);
+
[Fact]
public void DropCollection()
{
@@ -16,7 +35,7 @@ public void DropCollection()
var col = db.GetCollection("col");
- col.Insert(new BsonDocument {["a"] = 1});
+ col.Insert(new BsonDocument { ["a"] = 1 });
db.GetCollectionNames().Should().Contain("col");
@@ -46,5 +65,285 @@ public void InsertDropCollection()
}
}
}
+
+ [Fact]
+ public void DropCollection_WithVectorIndex_ReclaimsPages_ByCounting()
+ {
+ using var file = new TempFile();
+
+ const ushort dimensions = 6;
+
+ using (var db = DatabaseFactory.Create(TestDatabaseType.Disk, file.Filename))
+ {
+ var collection = db.GetCollection("docs");
+
+ collection.EnsureIndex(
+ VectorIndexName,
+ BsonExpression.Create("$.embedding"),
+ new VectorIndexOptions(dimensions, VectorDistanceMetric.Cosine));
+
+ for (var i = 0; i < 8; i++)
+ {
+ var embedding = new BsonArray(Enumerable.Range(0, dimensions)
+ .Select(j => new BsonValue(i + (j * 0.1))));
+
+ collection.Insert(new BsonDocument
+ {
+ ["_id"] = i + 1,
+ ["embedding"] = embedding
+ });
+ }
+
+ db.Checkpoint();
+ }
+
+ var beforeCounts = CountPagesByType(file.Filename);
+ beforeCounts.TryGetValue(PageType.VectorIndex, out var vectorPagesBefore);
+ vectorPagesBefore.Should().BeGreaterThan(0, "creating a vector index should allocate vector pages");
+
+ var drop = () =>
+ {
+ using var db = DatabaseFactory.Create(TestDatabaseType.Disk, file.Filename);
+ db.DropCollection("docs");
+ db.Checkpoint();
+ };
+
+ drop.Should().NotThrow();
+
+ var afterCounts = CountPagesByType(file.Filename);
+ afterCounts.TryGetValue(PageType.VectorIndex, out var vectorPagesAfter);
+ vectorPagesAfter.Should().BeLessThan(vectorPagesBefore, "dropping the collection should reclaim vector pages");
+ }
+
+ [Fact]
+ public void DropCollection_WithVectorIndex_ReclaimsPages_SimpleVectors()
+ {
+ using var file = new TempFile();
+
+ using (var db = DatabaseFactory.Create(TestDatabaseType.Disk, file.Filename))
+ {
+ var collection = db.GetCollection<VectorDocument>("vectors");
+ var options = new VectorIndexOptions(8, VectorDistanceMetric.Cosine);
+
+ collection.Insert(new List<VectorDocument>
+ {
+ new VectorDocument { Id = 1, Embedding = new[] { 1f, 0.5f, -0.25f, 0.75f, 1.5f, -0.5f, 0.25f, -1f } },
+ new VectorDocument { Id = 2, Embedding = new[] { -0.5f, 0.25f, 0.75f, -1.5f, 1f, 0.5f, -0.25f, 0.125f } },
+ new VectorDocument { Id = 3, Embedding = new[] { 0.5f, -0.75f, 1.25f, 0.875f, -0.375f, 0.625f, -1.125f, 0.25f } }
+ });
+
+ collection.EnsureIndex(VectorIndexName, x => x.Embedding, options);
+
+ db.Checkpoint();
+
+ Action drop = () => db.DropCollection("vectors");
+
+ drop.Should().NotThrow(
+ "dropping a collection with vector indexes should release vector index pages instead of treating them like skip-list indexes");
+
+ db.Checkpoint();
+ }
+
+ using (var reopened = DatabaseFactory.Create(TestDatabaseType.Disk, file.Filename))
+ {
+ reopened.GetCollectionNames().Should().NotContain("vectors");
+ }
+ }
+
+ [Fact]
+ public void DropCollection_WithVectorIndex_ReclaimsTrackedPages()
+ {
+ using var file = new TempFile();
+
+ HashSet<uint> vectorPages;
+ HashSet<uint> vectorDataPages;
+
+ var dimensions = (DataService.MAX_DATA_BYTES_PER_PAGE / sizeof(float)) + 64;
+ dimensions.Should().BeLessThan(ushort.MaxValue);
+
+ using (var db = DatabaseFactory.Create(TestDatabaseType.Disk, file.Filename))
+ {
+ var collection = db.GetCollection<VectorDocument>("docs");
+ var documents = Enumerable.Range(1, 6)
+ .Select(i => new VectorDocument
+ {
+ Id = i,
+ Embedding = CreateLargeVector(i, dimensions)
+ })
+ .ToList();
+
+ collection.Insert(documents);
+
+ var indexOptions = new VectorIndexOptions((ushort)dimensions, VectorDistanceMetric.Euclidean);
+ collection.EnsureIndex(VectorIndexName, BsonExpression.Create("$.Embedding"), indexOptions);
+
+ (vectorPages, vectorDataPages) = CollectVectorPageUsage(db, "docs");
+
+ vectorPages.Should().NotBeEmpty();
+ vectorDataPages.Should().NotBeEmpty();
+
+ db.Checkpoint();
+ }
+
+ Action drop = () =>
+ {
+ using var db = DatabaseFactory.Create(TestDatabaseType.Disk, file.Filename);
+ db.DropCollection("docs");
+ db.Checkpoint();
+ };
+
+ drop.Should().NotThrow();
+
+ using (var db = DatabaseFactory.Create(TestDatabaseType.Disk, file.Filename))
+ {
+ var vectorPageTypes = GetPageTypes(db, vectorPages);
+ foreach (var kvp in vectorPageTypes)
+ {
+ kvp.Value.Should().Be(PageType.Empty, $"vector index page {kvp.Key} should be reclaimed after dropping the collection");
+ }
+
+ var dataPageTypes = GetPageTypes(db, vectorDataPages);
+ foreach (var kvp in dataPageTypes)
+ {
+ kvp.Value.Should().Be(PageType.Empty, $"vector data page {kvp.Key} should be reclaimed after dropping the collection");
+ }
+
+ db.GetCollectionNames().Should().NotContain("docs");
+ }
+ }
+
+ private static Dictionary<PageType, int> CountPagesByType(string filename)
+ {
+ var counts = new Dictionary<PageType, int>();
+ var buffer = new byte[Constants.PAGE_SIZE];
+
+ using var stream = File.OpenRead(filename);
+
+ while (stream.Read(buffer, 0, buffer.Length) == buffer.Length)
+ {
+ var pageType = (PageType)buffer[BasePage.P_PAGE_TYPE];
+ counts.TryGetValue(pageType, out var current);
+ counts[pageType] = current + 1;
+ }
+
+ return counts;
+ }
+
+ private static T ExecuteInTransaction<T>(LiteDatabase db, Func<TransactionService, T> action)
+ {
+ var engine = (LiteEngine)EngineField.GetValue(db);
+ var method = AutoTransactionMethod.MakeGenericMethod(typeof(T));
+ return (T)method.Invoke(engine, new object[] { action });
+ }
+
+ private static T InspectVectorIndex<T>(LiteDatabase db, string collection, Func<Snapshot, VectorIndexMetadata, T> selector)
+ {
+ return ExecuteInTransaction(db, transaction =>
+ {
+ var snapshot = transaction.CreateSnapshot(LockMode.Read, collection, false);
+ var metadata = snapshot.CollectionPage.GetVectorIndexMetadata(VectorIndexName);
+
+ if (metadata == null)
+ {
+ return default;
+ }
+
+ return selector(snapshot, metadata);
+ });
+ }
+
+ private static (HashSet<uint> VectorPages, HashSet<uint> DataPages) CollectVectorPageUsage(LiteDatabase db, string collection)
+ {
+ var (vectorPages, dataPages) = InspectVectorIndex(db, collection, (snapshot, metadata) =>
+ {
+ var trackedVectorPages = new HashSet<uint>();
+ var trackedDataPages = new HashSet<uint>();
+
+ if (metadata.Root.IsEmpty)
+ {
+ return (trackedVectorPages, trackedDataPages);
+ }
+
+ var queue = new Queue<PageAddress>();
+ var visited = new HashSet<PageAddress>();
+ queue.Enqueue(metadata.Root);
+
+ while (queue.Count > 0)
+ {
+ var address = queue.Dequeue();
+ if (!visited.Add(address))
+ {
+ continue;
+ }
+
+ var page = snapshot.GetPage(address.PageID);
+ var node = page.GetNode(address.Index);
+
+ trackedVectorPages.Add(page.PageID);
+
+ for (var level = 0; level < node.LevelCount; level++)
+ {
+ foreach (var neighbor in node.GetNeighbors(level))
+ {
+ if (!neighbor.IsEmpty)
+ {
+ queue.Enqueue(neighbor);
+ }
+ }
+ }
+
+ if (!node.HasInlineVector)
+ {
+ var block = node.ExternalVector;
+ while (!block.IsEmpty)
+ {
+ trackedDataPages.Add(block.PageID);
+
+ var dataPage = snapshot.GetPage(block.PageID);
+ var dataBlock = dataPage.GetBlock(block.Index);
+ block = dataBlock.NextBlock;
+ }
+ }
+ }
+
+ return (trackedVectorPages, trackedDataPages);
+ });
+
+ if (vectorPages == null || dataPages == null)
+ {
+ return (new HashSet<uint>(), new HashSet<uint>());
+ }
+
+ return (vectorPages, dataPages);
+ }
+
+ private static Dictionary<uint, PageType> GetPageTypes(LiteDatabase db, IEnumerable<uint> pageIds)
+ {
+ return ExecuteInTransaction(db, transaction =>
+ {
+ var snapshot = transaction.CreateSnapshot(LockMode.Read, "$", false);
+ var map = new Dictionary<uint, PageType>();
+
+ foreach (var pageID in pageIds.Distinct())
+ {
+ var page = snapshot.GetPage(pageID);
+ map[pageID] = page.PageType;
+ }
+
+ return map;
+ });
+ }
+
+ private static float[] CreateLargeVector(int seed, int dimensions)
+ {
+ var vector = new float[dimensions];
+
+ for (var i = 0; i < dimensions; i++)
+ {
+ vector[i] = (float)Math.Sin((seed * 0.37) + (i * 0.11));
+ }
+
+ return vector;
+ }
}
-}
\ No newline at end of file
+}
diff --git a/LiteDB.Tests/LiteDB.Tests.csproj b/LiteDB.Tests/LiteDB.Tests.csproj
index 2c0dec1d3..75e9ef4cf 100644
--- a/LiteDB.Tests/LiteDB.Tests.csproj
+++ b/LiteDB.Tests/LiteDB.Tests.csproj
@@ -11,11 +11,14 @@
1701;1702;1705;1591;0618
-
-
- PreserveNewest
-
-
+
+
+ PreserveNewest
+
+
+ PreserveNewest
+
+
@@ -27,8 +30,9 @@
-
-
+
+
+
all
diff --git a/LiteDB.Tests/Query/VectorExtensionSurface_Tests.cs b/LiteDB.Tests/Query/VectorExtensionSurface_Tests.cs
new file mode 100644
index 000000000..80af3bad8
--- /dev/null
+++ b/LiteDB.Tests/Query/VectorExtensionSurface_Tests.cs
@@ -0,0 +1,52 @@
+using FluentAssertions;
+using LiteDB;
+using LiteDB.Vector;
+using Xunit;
+
+namespace LiteDB.Tests.QueryTest
+{
+ public class VectorExtensionSurface_Tests
+ {
+ private class VectorDocument
+ {
+ public int Id { get; set; }
+
+ public float[] Embedding { get; set; }
+ }
+
+ [Fact]
+ public void Collection_Extension_Produces_Vector_Index_Plan()
+ {
+ using var db = new LiteDatabase(":memory:");
+ var collection = db.GetCollection<VectorDocument>("vectors");
+
+ collection.Insert(new VectorDocument { Id = 1, Embedding = new[] { 1f, 0f } });
+ collection.Insert(new VectorDocument { Id = 2, Embedding = new[] { 0f, 1f } });
+
+ collection.EnsureIndex(x => x.Embedding, new VectorIndexOptions(2));
+
+ var plan = collection.Query()
+ .WhereNear(x => x.Embedding, new[] { 1f, 0f }, maxDistance: 0.25)
+ .GetPlan();
+
+ plan["index"]["mode"].AsString.Should().Be("VECTOR INDEX SEARCH");
+ plan["index"]["expr"].AsString.Should().Be("$.Embedding");
+ }
+
+ [Fact]
+ public void Repository_Extension_Delegates_To_Vector_Index_Implementation()
+ {
+ using var db = new LiteDatabase(":memory:");
+ ILiteRepository repository = new LiteRepository(db);
+
+ repository.EnsureIndex(x => x.Embedding, new VectorIndexOptions(2));
+
+ var plan = repository.Query<VectorDocument>()
+ .WhereNear(x => x.Embedding, new[] { 1f, 0f }, maxDistance: 0.25)
+ .GetPlan();
+
+ plan["index"]["mode"].AsString.Should().Be("VECTOR INDEX SEARCH");
+ plan["index"]["expr"].AsString.Should().Be("$.Embedding");
+ }
+ }
+}
diff --git a/LiteDB.Tests/Query/VectorIndex_Tests.cs b/LiteDB.Tests/Query/VectorIndex_Tests.cs
new file mode 100644
index 000000000..ca5b61238
--- /dev/null
+++ b/LiteDB.Tests/Query/VectorIndex_Tests.cs
@@ -0,0 +1,977 @@
+using FluentAssertions;
+using LiteDB;
+using LiteDB.Engine;
+using LiteDB.Tests;
+using LiteDB.Vector;
+using MathNet.Numerics.LinearAlgebra;
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using System.Reflection;
+using System.Text.Json;
+using Xunit;
+using LiteDB.Tests.Utils;
+
+namespace LiteDB.Tests.QueryTest
+{
+ public class VectorIndex_Tests
+ {
+ private class VectorDocument
+ {
+ public int Id { get; set; }
+ public float[] Embedding { get; set; }
+ public bool Flag { get; set; }
+ }
+
+ private static readonly FieldInfo EngineField = typeof(LiteDatabase).GetField("_engine", BindingFlags.NonPublic | BindingFlags.Instance);
+ private static readonly FieldInfo HeaderField = typeof(LiteEngine).GetField("_header", BindingFlags.NonPublic | BindingFlags.Instance);
+ private static readonly MethodInfo AutoTransactionMethod = typeof(LiteEngine).GetMethod("AutoTransaction", BindingFlags.NonPublic | BindingFlags.Instance);
+ private static readonly MethodInfo ReadExternalVectorMethod = typeof(VectorIndexService).GetMethod("ReadExternalVector", BindingFlags.NonPublic | BindingFlags.Instance);
+
+ private static T InspectVectorIndex<T>(LiteDatabase db, string collection, Func<Snapshot, Collation, VectorIndexMetadata, T> selector)
+ {
+ var engine = (LiteEngine)EngineField.GetValue(db);
+ var header = (HeaderPage)HeaderField.GetValue(engine);
+ var collation = header.Pragmas.Collation;
+ var method = AutoTransactionMethod.MakeGenericMethod(typeof(T));
+
+ return (T)method.Invoke(engine, new object[]
+ {
+ new Func<TransactionService, T>(transaction =>
+ {
+ var snapshot = transaction.CreateSnapshot(LockMode.Read, collection, false);
+ var metadata = snapshot.CollectionPage.GetVectorIndexMetadata("embedding_idx");
+
+ return metadata == null ? default : selector(snapshot, collation, metadata);
+ })
+ });
+ }
+
+ private static int CountNodes(Snapshot snapshot, PageAddress root)
+ {
+ if (root.IsEmpty)
+ {
+ return 0;
+ }
+
+ var visited = new HashSet<PageAddress>();
+ var queue = new Queue<PageAddress>();
+ queue.Enqueue(root);
+
+ var count = 0;
+
+ while (queue.Count > 0)
+ {
+ var address = queue.Dequeue();
+ if (!visited.Add(address))
+ {
+ continue;
+ }
+
+ var node = snapshot.GetPage(address.PageID).GetNode(address.Index);
+ count++;
+
+ for (var level = 0; level < node.LevelCount; level++)
+ {
+ foreach (var neighbor in node.GetNeighbors(level))
+ {
+ if (!neighbor.IsEmpty)
+ {
+ queue.Enqueue(neighbor);
+ }
+ }
+ }
+ }
+
+ return count;
+ }
+
+ private static float[] CreateVector(Random random, int dimensions)
+ {
+ var vector = new float[dimensions];
+ var hasNonZero = false;
+
+ for (var i = 0; i < dimensions; i++)
+ {
+ var value = (float)(random.NextDouble() * 2d - 1d);
+ vector[i] = value;
+
+ if (!hasNonZero && Math.Abs(value) > 1e-6f)
+ {
+ hasNonZero = true;
+ }
+ }
+
+ if (!hasNonZero)
+ {
+ vector[random.Next(dimensions)] = 1f;
+ }
+
+ return vector;
+ }
+
+ private static float[] ReadExternalVector(DataService dataService, PageAddress start, int dimensions, out int blocksRead)
+ {
+ var totalBytes = dimensions * sizeof(float);
+ var bytesCopied = 0;
+ var vector = new float[dimensions];
+ blocksRead = 0;
+
+ foreach (var slice in dataService.Read(start))
+ {
+ blocksRead++;
+
+ if (bytesCopied >= totalBytes)
+ {
+ break;
+ }
+
+ var available = Math.Min(slice.Count, totalBytes - bytesCopied);
+ Buffer.BlockCopy(slice.Array, slice.Offset, vector, bytesCopied, available);
+ bytesCopied += available;
+ }
+
+ if (bytesCopied != totalBytes)
+ {
+ throw new InvalidOperationException("Vector data block is incomplete.");
+ }
+
+ return vector;
+ }
+
+ private static (double Distance, double Similarity) ComputeReferenceMetrics(float[] candidate, float[] target, VectorDistanceMetric metric)
+ {
+ var builder = Vector<double>.Build;
+ var candidateVector = builder.DenseOfEnumerable(candidate.Select(v => (double)v));
+ var targetVector = builder.DenseOfEnumerable(target.Select(v => (double)v));
+
+ switch (metric)
+ {
+ case VectorDistanceMetric.Cosine:
+ var candidateNorm = candidateVector.L2Norm();
+ var targetNorm = targetVector.L2Norm();
+
+ if (candidateNorm == 0d || targetNorm == 0d)
+ {
+ return (double.NaN, double.NaN);
+ }
+
+ var cosineSimilarity = candidateVector.DotProduct(targetVector) / (candidateNorm * targetNorm);
+ return (1d - cosineSimilarity, double.NaN);
+
+ case VectorDistanceMetric.Euclidean:
+ return ((candidateVector - targetVector).L2Norm(), double.NaN);
+
+ case VectorDistanceMetric.DotProduct:
+ var dot = candidateVector.DotProduct(targetVector);
+ return (-dot, dot);
+
+ default:
+ throw new ArgumentOutOfRangeException(nameof(metric), metric, null);
+ }
+ }
+
+ private static List<(int Id, double Distance, double Similarity)> ComputeExpectedRanking(
+ IEnumerable documents,
+ float[] target,
+ VectorDistanceMetric metric,
+ int? limit = null)
+ {
+ var ordered = documents
+ .Select(doc =>
+ {
+ var (distance, similarity) = ComputeReferenceMetrics(doc.Embedding, target, metric);
+ return (doc.Id, Distance: distance, Similarity: similarity);
+ })
+ .Where(result => metric == VectorDistanceMetric.DotProduct
+ ? !double.IsNaN(result.Similarity)
+ : !double.IsNaN(result.Distance))
+ .OrderBy(result => metric == VectorDistanceMetric.DotProduct ? -result.Similarity : result.Distance)
+ .ThenBy(result => result.Id)
+ .ToList();
+
+ if (limit.HasValue)
+ {
+ ordered = ordered.Take(limit.Value).ToList();
+ }
+
+ return ordered;
+ }
+
+
+
+
+ [Fact]
+ public void EnsureVectorIndex_CreatesAndReuses()
+ {
+ using var db = new LiteDatabase(":memory:");
+ var collection = db.GetCollection<VectorDocument>("vectors");
+
+ collection.Insert(new[]
+ {
+ new VectorDocument { Id = 1, Embedding = new[] { 1f, 0f }, Flag = true },
+ new VectorDocument { Id = 2, Embedding = new[] { 0f, 1f }, Flag = false }
+ });
+
+ var expression = BsonExpression.Create("$.Embedding");
+ var options = new VectorIndexOptions(2, VectorDistanceMetric.Cosine);
+
+ collection.EnsureIndex("embedding_idx", expression, options).Should().BeTrue();
+ collection.EnsureIndex("embedding_idx", expression, options).Should().BeFalse();
+
+ Action conflicting = () => collection.EnsureIndex("embedding_idx", expression, new VectorIndexOptions(2, VectorDistanceMetric.Euclidean));
+
+ conflicting.Should().Throw();
+ }
+
+ [Fact]
+ public void EnsureVectorIndex_PreservesEnumerableExpressionsForVectorIndexes()
+ {
+ using var db = new LiteDatabase(":memory:");
+ var collection = db.GetCollection<VectorDocument>("documents");
+
+ var resourcePath = Path.Combine(AppContext.BaseDirectory, "Resources", "ingest-20250922-234735.json");
+ var json = File.ReadAllText(resourcePath);
+
+ using var parsed = JsonDocument.Parse(json);
+ var embedding = parsed.RootElement
+ .GetProperty("Embedding")
+ .EnumerateArray()
+ .Select(static value => value.GetSingle())
+ .ToArray();
+
+ var options = new VectorIndexOptions((ushort)embedding.Length, VectorDistanceMetric.Cosine);
+
+ collection.EnsureIndex(x => x.Embedding, options);
+
+ var document = new VectorDocument
+ {
+ Id = 1,
+ Embedding = embedding,
+ Flag = false
+ };
+
+ Action act = () => collection.Upsert(document);
+
+ act.Should().NotThrow();
+
+ var stored = collection.FindById(1);
+
+ stored.Should().NotBeNull();
+ stored.Embedding.Should().Equal(embedding);
+
+ var storesInline = InspectVectorIndex(db, "documents", (snapshot, collation, metadata) =>
+ {
+ if (metadata.Root.IsEmpty)
+ {
+ return true;
+ }
+
+ var page = snapshot.GetPage(metadata.Root.PageID);
+ var node = page.GetNode(metadata.Root.Index);
+ return node.HasInlineVector;
+ });
+
+ storesInline.Should().BeFalse();
+ }
+
+ [Fact]
+ public void WhereNear_UsesVectorIndex_WhenAvailable()
+ {
+ using var db = new LiteDatabase(":memory:");
+ var collection = db.GetCollection<VectorDocument>("vectors");
+
+ collection.Insert(new[]
+ {
+ new VectorDocument { Id = 1, Embedding = new[] { 1f, 0f }, Flag = true },
+ new VectorDocument { Id = 2, Embedding = new[] { 0f, 1f }, Flag = false },
+ new VectorDocument { Id = 3, Embedding = new[] { 1f, 1f }, Flag = false }
+ });
+
+ collection.EnsureIndex("embedding_idx", BsonExpression.Create("$.Embedding"), new VectorIndexOptions(2, VectorDistanceMetric.Cosine));
+
+ var query = collection.Query()
+ .WhereNear(x => x.Embedding, new[] { 1f, 0f }, maxDistance: 0.25);
+
+ var plan = query.GetPlan();
+
+ plan["index"]["mode"].AsString.Should().Be("VECTOR INDEX SEARCH");
+ plan["index"]["expr"].AsString.Should().Be("$.Embedding");
+ plan.ContainsKey("filters").Should().BeFalse();
+
+ var results = query.ToArray();
+
+ results.Select(x => x.Id).Should().Equal(new[] { 1 });
+ }
+
+ [Fact]
+ public void WhereNear_FallsBack_WhenNoVectorIndexExists()
+ {
+ using var db = new LiteDatabase(":memory:");
+ var collection = db.GetCollection<VectorDocument>("vectors");
+
+ collection.Insert(new[]
+ {
+ new VectorDocument { Id = 1, Embedding = new[] { 1f, 0f }, Flag = true },
+ new VectorDocument { Id = 2, Embedding = new[] { 0f, 1f }, Flag = false }
+ });
+
+ var query = collection.Query()
+ .WhereNear(x => x.Embedding, new[] { 1f, 0f }, maxDistance: 0.25);
+
+ var plan = query.GetPlan();
+
+ plan["index"]["mode"].AsString.Should().StartWith("FULL INDEX SCAN");
+ plan["index"]["name"].AsString.Should().Be("_id");
+ plan["filters"].AsArray.Count.Should().Be(1);
+
+ var results = query.ToArray();
+
+ results.Select(x => x.Id).Should().Equal(new[] { 1 });
+ }
+
+ [Fact]
+ public void WhereNear_FallsBack_WhenDimensionMismatch()
+ {
+ using var db = new LiteDatabase(":memory:");
+ var collection = db.GetCollection<VectorDocument>("vectors");
+
+ collection.Insert(new[]
+ {
+ new VectorDocument { Id = 1, Embedding = new[] { 1f, 0f, 0f }, Flag = true },
+ new VectorDocument { Id = 2, Embedding = new[] { 0f, 1f, 0f }, Flag = false }
+ });
+
+ collection.EnsureIndex("embedding_idx", BsonExpression.Create("$.Embedding"), new VectorIndexOptions(3, VectorDistanceMetric.Cosine));
+
+ var query = collection.Query()
+ .WhereNear(x => x.Embedding, new[] { 1f, 0f }, maxDistance: 0.25);
+
+ var plan = query.GetPlan();
+
+ plan["index"]["mode"].AsString.Should().StartWith("FULL INDEX SCAN");
+ plan["index"]["name"].AsString.Should().Be("_id");
+
+ query.ToArray();
+ }
+
+ [Fact]
+ public void TopKNear_UsesVectorIndex()
+ {
+ using var db = new LiteDatabase(":memory:");
+ var collection = db.GetCollection<VectorDocument>("vectors");
+
+ collection.Insert(new[]
+ {
+ new VectorDocument { Id = 1, Embedding = new[] { 1f, 0f }, Flag = true },
+ new VectorDocument { Id = 2, Embedding = new[] { 0f, 1f }, Flag = false },
+ new VectorDocument { Id = 3, Embedding = new[] { 1f, 1f }, Flag = false }
+ });
+
+ collection.EnsureIndex("embedding_idx", BsonExpression.Create("$.Embedding"), new VectorIndexOptions(2, VectorDistanceMetric.Cosine));
+
+ var query = collection.Query()
+ .TopKNear(x => x.Embedding, new[] { 1f, 0f }, k: 2);
+
+ var plan = query.GetPlan();
+
+ plan["index"]["mode"].AsString.Should().Be("VECTOR INDEX SEARCH");
+ plan.ContainsKey("orderBy").Should().BeFalse();
+
+ var results = query.ToArray();
+
+ results.Select(x => x.Id).Should().Equal(new[] { 1, 3 });
+ }
+
+ [Fact]
+ public void OrderBy_VectorSimilarity_WithCompositeOrdering_UsesVectorIndex()
+ {
+ using var db = new LiteDatabase(":memory:");
+ var collection = db.GetCollection<VectorDocument>("vectors");
+
+ collection.Insert(new[]
+ {
+ new VectorDocument { Id = 1, Embedding = new[] { 1f, 0f }, Flag = true },
+ new VectorDocument { Id = 2, Embedding = new[] { 1f, 0f }, Flag = false },
+ new VectorDocument { Id = 3, Embedding = new[] { 0f, 1f }, Flag = true }
+ });
+
+ collection.EnsureIndex(
+ "embedding_idx",
+ BsonExpression.Create("$.Embedding"),
+ new VectorIndexOptions(2, VectorDistanceMetric.Cosine));
+
+ var similarity = BsonExpression.Create("VECTOR_SIM($.Embedding, [1.0, 0.0])");
+
+ var query = (LiteQueryable<VectorDocument>)collection.Query()
+ .OrderBy(similarity, Query.Ascending)
+ .ThenBy(x => x.Flag);
+
+ var queryField = typeof(LiteQueryable<VectorDocument>).GetField("_query", BindingFlags.NonPublic | BindingFlags.Instance);
+ var definition = (Query)queryField.GetValue(query);
+
+ definition.OrderBy.Should().HaveCount(2);
+ definition.OrderBy[0].Expression.Type.Should().Be(BsonExpressionType.VectorSim);
+
+ definition.VectorField = "$.Embedding";
+ definition.VectorTarget = new[] { 1f, 0f };
+ definition.VectorMaxDistance = double.MaxValue;
+
+ var plan = query.GetPlan();
+
+ plan["index"]["mode"].AsString.Should().Be("VECTOR INDEX SEARCH");
+ plan["index"]["expr"].AsString.Should().Be("$.Embedding");
+ plan.ContainsKey("orderBy").Should().BeFalse();
+
+ var results = query.ToArray();
+
+ results.Should().HaveCount(3);
+ results.Select(x => x.Id).Should().BeEquivalentTo(new[] { 1, 2, 3 });
+ }
+
+ [Fact]
+ public void WhereNear_DotProductHonorsMinimumSimilarity()
+ {
+ using var db = new LiteDatabase(":memory:");
+ var collection = db.GetCollection<VectorDocument>("vectors");
+
+ collection.Insert(new[]
+ {
+ new VectorDocument { Id = 1, Embedding = new[] { 1f, 0f } },
+ new VectorDocument { Id = 2, Embedding = new[] { 0.6f, 0.6f } },
+ new VectorDocument { Id = 3, Embedding = new[] { 0f, 1f } }
+ });
+
+ collection.EnsureIndex(
+ "embedding_idx",
+ BsonExpression.Create("$.Embedding"),
+ new VectorIndexOptions(2, VectorDistanceMetric.DotProduct));
+
+ var highThreshold = collection.Query()
+ .WhereNear(x => x.Embedding, new[] { 1f, 0f }, maxDistance: 0.75)
+ .ToArray();
+
+ highThreshold.Select(x => x.Id).Should().Equal(new[] { 1 });
+
+ var mediumThreshold = collection.Query()
+ .WhereNear(x => x.Embedding, new[] { 1f, 0f }, maxDistance: 0.4)
+ .ToArray();
+
+ mediumThreshold.Select(x => x.Id).Should().Equal(new[] { 1, 2 });
+ }
+
+ [Fact]
+ public void VectorIndex_Search_Prunes_Node_Visits()
+ {
+ using var db = new LiteDatabase(":memory:");
+ var collection = db.GetCollection<VectorDocument>("vectors");
+
+ const int nearClusterSize = 64;
+ const int farClusterSize = 64;
+
+ var documents = new List<VectorDocument>();
+
+ for (var i = 0; i < nearClusterSize; i++)
+ {
+ documents.Add(new VectorDocument
+ {
+ Id = i + 1,
+ Embedding = new[] { 1f, i / 100f },
+ Flag = true
+ });
+ }
+
+ for (var i = 0; i < farClusterSize; i++)
+ {
+ documents.Add(new VectorDocument
+ {
+ Id = i + nearClusterSize + 1,
+ Embedding = new[] { -1f, 2f + i / 100f },
+ Flag = false
+ });
+ }
+
+ collection.Insert(documents);
+ collection.Count().Should().Be(documents.Count);
+
+ collection.EnsureIndex(
+ "embedding_idx",
+ BsonExpression.Create("$.Embedding"),
+ new VectorIndexOptions(2, VectorDistanceMetric.Euclidean));
+
+ var stats = InspectVectorIndex(
+ db,
+ "vectors",
+ (snapshot, collation, metadata) =>
+ {
+ var service = new VectorIndexService(snapshot, collation);
+ var matches = service.Search(metadata, new[] { 1f, 0f }, maxDistance: 0.25, limit: 5).ToList();
+ var total = CountNodes(snapshot, metadata.Root);
+
+ return (Visited: service.LastVisitedCount, Total: total, Matches: matches.Select(x => x.Document["Id"].AsInt32).ToArray());
+ });
+
+ stats.Total.Should().BeGreaterThan(stats.Visited);
+ stats.Total.Should().BeGreaterOrEqualTo(nearClusterSize);
+ stats.Matches.Should().OnlyContain(id => id <= nearClusterSize);
+ }
+
+ [Fact]
+ public void VectorIndex_PersistsNodes_WhenDocumentsChange()
+ {
+ using var db = new LiteDatabase(":memory:");
+ var collection = db.GetCollection<VectorDocument>("vectors");
+
+ collection.Insert(new[]
+ {
+ new VectorDocument { Id = 1, Embedding = new[] { 1f, 0f }, Flag = true },
+ new VectorDocument { Id = 2, Embedding = new[] { 0f, 1f }, Flag = false },
+ new VectorDocument { Id = 3, Embedding = new[] { 1f, 1f }, Flag = true }
+ });
+
+ collection.EnsureIndex("embedding_idx", BsonExpression.Create("$.Embedding"), new VectorIndexOptions(2, VectorDistanceMetric.Euclidean));
+
+ InspectVectorIndex(db, "vectors", (snapshot, collation, metadata) =>
+ {
+ metadata.Root.IsEmpty.Should().BeFalse();
+
+ var service = new VectorIndexService(snapshot, collation);
+ var target = new[] { 1f, 1f };
+ service.Search(metadata, target, double.MaxValue, null).Count().Should().Be(3);
+
+ return 0;
+ });
+
+ collection.Update(new VectorDocument { Id = 2, Embedding = new[] { 1f, 2f }, Flag = false });
+
+ InspectVectorIndex(db, "vectors", (snapshot, collation, metadata) =>
+ {
+ var service = new VectorIndexService(snapshot, collation);
+ var target = new[] { 1f, 1f };
+ service.Search(metadata, target, double.MaxValue, null).Count().Should().Be(3);
+
+ return 0;
+ });
+
+ collection.Update(new VectorDocument { Id = 3, Embedding = null, Flag = true });
+
+ InspectVectorIndex(db, "vectors", (snapshot, collation, metadata) =>
+ {
+ var service = new VectorIndexService(snapshot, collation);
+ var target = new[] { 1f, 1f };
+ service.Search(metadata, target, double.MaxValue, null).Select(x => x.Document["_id"].AsInt32).Should().BeEquivalentTo(new[] { 1, 2 });
+
+ return 0;
+ });
+
+ collection.DeleteMany(x => x.Id == 1);
+
+ InspectVectorIndex(db, "vectors", (snapshot, collation, metadata) =>
+ {
+ var service = new VectorIndexService(snapshot, collation);
+ var target = new[] { 1f, 1f };
+ var results = service.Search(metadata, target, double.MaxValue, null).ToArray();
+
+ results.Select(x => x.Document["_id"].AsInt32).Should().BeEquivalentTo(new[] { 2 });
+ metadata.Root.IsEmpty.Should().BeFalse();
+
+ return 0;
+ });
+
+ collection.DeleteAll();
+
+ InspectVectorIndex(db, "vectors", (snapshot, collation, metadata) =>
+ {
+ var service = new VectorIndexService(snapshot, collation);
+ var target = new[] { 1f, 1f };
+ service.Search(metadata, target, double.MaxValue, null).Should().BeEmpty();
+ metadata.Root.IsEmpty.Should().BeTrue();
+ metadata.Reserved.Should().Be(uint.MaxValue);
+
+ return 0;
+ });
+ }
+
+ [Theory]
+ [InlineData(VectorDistanceMetric.Cosine)]
+ [InlineData(VectorDistanceMetric.Euclidean)]
+ [InlineData(VectorDistanceMetric.DotProduct)]
+ public void VectorDistance_Computation_MatchesMathNet(VectorDistanceMetric metric)
+ {
+ var random = new Random(1789);
+ const int dimensions = 6;
+
+ for (var i = 0; i < 20; i++)
+ {
+ var candidate = CreateVector(random, dimensions);
+ var target = CreateVector(random, dimensions);
+
+ var distance = VectorIndexService.ComputeDistance(candidate, target, metric, out var similarity);
+ var (expectedDistance, expectedSimilarity) = ComputeReferenceMetrics(candidate, target, metric);
+
+ if (double.IsNaN(expectedDistance))
+ {
+ double.IsNaN(distance).Should().BeTrue();
+ }
+ else
+ {
+ distance.Should().BeApproximately(expectedDistance, 1e-6);
+ }
+
+ if (double.IsNaN(expectedSimilarity))
+ {
+ double.IsNaN(similarity).Should().BeTrue();
+ }
+ else
+ {
+ similarity.Should().BeApproximately(expectedSimilarity, 1e-6);
+ }
+ }
+
+ if (metric == VectorDistanceMetric.Cosine)
+ {
+ var zero = new float[dimensions];
+ var other = CreateVector(random, dimensions);
+
+ var distance = VectorIndexService.ComputeDistance(zero, other, metric, out var similarity);
+
+ double.IsNaN(distance).Should().BeTrue();
+ double.IsNaN(similarity).Should().BeTrue();
+ }
+ }
+
+ [Theory]
+ [InlineData(VectorDistanceMetric.Cosine)]
+ [InlineData(VectorDistanceMetric.Euclidean)]
+ [InlineData(VectorDistanceMetric.DotProduct)]
+ public void VectorIndex_Search_MatchesReferenceRanking(VectorDistanceMetric metric)
+ {
+ using var db = new LiteDatabase(":memory:");
+ var collection = db.GetCollection<VectorDocument>("vectors");
+
+ var random = new Random(4242);
+ const int dimensions = 6;
+
+ var documents = Enumerable.Range(1, 32)
+ .Select(i => new VectorDocument
+ {
+ Id = i,
+ Embedding = CreateVector(random, dimensions),
+ Flag = i % 2 == 0
+ })
+ .ToList();
+
+ collection.Insert(documents);
+
+ collection.EnsureIndex(
+ "embedding_idx",
+ BsonExpression.Create("$.Embedding"),
+ new VectorIndexOptions((ushort)dimensions, metric));
+
+ var target = CreateVector(random, dimensions);
+ foreach (var limit in new[] { 5, 12 })
+ {
+ var expectedTop = ComputeExpectedRanking(documents, target, metric, limit);
+
+ var actual = InspectVectorIndex(db, "vectors", (snapshot, collation, metadata) =>
+ {
+ var service = new VectorIndexService(snapshot, collation);
+ return service.Search(metadata, target, double.MaxValue, limit)
+ .Select(result =>
+ {
+ var mapped = BsonMapper.Global.ToObject(result.Document);
+ return (Id: mapped.Id, Score: result.Distance);
+ })
+ .ToList();
+ });
+
+ actual.Should().HaveCount(expectedTop.Count);
+
+ for (var i = 0; i < expectedTop.Count; i++)
+ {
+ actual[i].Id.Should().Be(expectedTop[i].Id);
+
+ if (metric == VectorDistanceMetric.DotProduct)
+ {
+ actual[i].Score.Should().BeApproximately(expectedTop[i].Similarity, 1e-6);
+ }
+ else
+ {
+ actual[i].Score.Should().BeApproximately(expectedTop[i].Distance, 1e-6);
+ }
+ }
+ }
+ }
+
+ [Theory]
+ [InlineData(VectorDistanceMetric.Cosine)]
+ [InlineData(VectorDistanceMetric.Euclidean)]
+ [InlineData(VectorDistanceMetric.DotProduct)]
+ public void WhereNear_MatchesReferenceOrdering(VectorDistanceMetric metric)
+ {
+ using var db = new LiteDatabase(":memory:");
+ var collection = db.GetCollection<VectorDocument>("vectors");
+
+ var random = new Random(9182);
+ const int dimensions = 6;
+
+ var documents = Enumerable.Range(1, 40)
+ .Select(i => new VectorDocument
+ {
+ Id = i,
+ Embedding = CreateVector(random, dimensions),
+ Flag = i % 3 == 0
+ })
+ .ToList();
+
+ collection.Insert(documents);
+
+ collection.EnsureIndex(
+ "embedding_idx",
+ BsonExpression.Create("$.Embedding"),
+ new VectorIndexOptions((ushort)dimensions, metric));
+
+ var target = CreateVector(random, dimensions);
+ const int limit = 12;
+
+ var query = collection.Query()
+ .WhereNear(x => x.Embedding, target, double.MaxValue)
+ .Limit(limit);
+
+ var plan = query.GetPlan();
+ plan["index"]["mode"].AsString.Should().Be("VECTOR INDEX SEARCH");
+
+ var results = query.ToArray();
+
+ results.Should().HaveCount(limit);
+
+ var searchIds = InspectVectorIndex(db, "vectors", (snapshot, collation, metadata) =>
+ {
+ var service = new VectorIndexService(snapshot, collation);
+ return service.Search(metadata, target, double.MaxValue, limit)
+ .Select(result => BsonMapper.Global.ToObject<VectorDocument>(result.Document).Id)
+ .ToArray();
+ });
+
+ results.Select(x => x.Id).Should().Equal(searchIds);
+ }
+
+ [Theory]
+ [InlineData(VectorDistanceMetric.Cosine)]
+ [InlineData(VectorDistanceMetric.Euclidean)]
+ [InlineData(VectorDistanceMetric.DotProduct)]
+ public void TopKNear_MatchesReferenceOrdering(VectorDistanceMetric metric)
+ {
+ using var db = new LiteDatabase(":memory:");
+ var collection = db.GetCollection<VectorDocument>("vectors");
+
+ var random = new Random(5461);
+ const int dimensions = 6;
+
+ var documents = Enumerable.Range(1, 48)
+ .Select(i => new VectorDocument
+ {
+ Id = i,
+ Embedding = CreateVector(random, dimensions),
+ Flag = i % 4 == 0
+ })
+ .ToList();
+
+ collection.Insert(documents);
+
+ collection.EnsureIndex(
+ "embedding_idx",
+ BsonExpression.Create("$.Embedding"),
+ new VectorIndexOptions((ushort)dimensions, metric));
+
+ var target = CreateVector(random, dimensions);
+ const int limit = 7;
+ var expected = ComputeExpectedRanking(documents, target, metric, limit);
+
+ var results = collection.Query()
+ .TopKNear(x => x.Embedding, target, limit)
+ .ToArray();
+
+ results.Should().HaveCount(expected.Count);
+ results.Select(x => x.Id).Should().Equal(expected.Select(x => x.Id));
+ }
+
+ [Fact]
+ public void VectorIndex_HandlesVectorsSpanningMultipleDataBlocks_PersistedUpdate()
+ {
+ using var file = new TempFile();
+
+ var dimensions = ((DataService.MAX_DATA_BYTES_PER_PAGE / sizeof(float)) * 10) + 16;
+ dimensions.Should().BeLessThan(ushort.MaxValue);
+
+ var random = new Random(7321);
+ var originalDocuments = Enumerable.Range(1, 12)
+ .Select(i => new VectorDocument
+ {
+ Id = i,
+ Embedding = CreateVector(random, dimensions),
+ Flag = i % 2 == 0
+ })
+ .ToList();
+
+ var updateRandom = new Random(9813);
+ var documents = originalDocuments
+ .Select(doc => new VectorDocument
+ {
+ Id = doc.Id,
+ Embedding = CreateVector(updateRandom, dimensions),
+ Flag = doc.Flag
+ })
+ .ToList();
+
+ using (var setup = new LiteDatabase(file.Filename))
+ {
+ var setupCollection = setup.GetCollection<VectorDocument>("vectors");
+ setupCollection.Insert(originalDocuments);
+
+ var indexOptions = new VectorIndexOptions((ushort)dimensions, VectorDistanceMetric.Euclidean);
+ setupCollection.EnsureIndex("embedding_idx", BsonExpression.Create("$.Embedding"), indexOptions);
+
+ foreach (var doc in documents)
+ {
+ setupCollection.Update(doc);
+ }
+
+ setup.Checkpoint();
+ }
+
+ using var db = new LiteDatabase(file.Filename);
+ var collection = db.GetCollection<VectorDocument>("vectors");
+
+ var (inlineDetected, mismatches) = InspectVectorIndex(db, "vectors", (snapshot, collation, metadata) =>
+ {
+ metadata.Should().NotBeNull();
+ metadata.Dimensions.Should().Be((ushort)dimensions);
+
+ var dataService = new DataService(snapshot, uint.MaxValue);
+ var queue = new Queue<PageAddress>();
+ var visited = new HashSet<PageAddress>();
+ var collectedMismatches = new List<int>();
+ var inlineSeen = false;
+
+ if (!metadata.Root.IsEmpty)
+ {
+ queue.Enqueue(metadata.Root);
+ }
+
+ while (queue.Count > 0)
+ {
+ var address = queue.Dequeue();
+ if (!visited.Add(address))
+ {
+ continue;
+ }
+
+ var node = snapshot.GetPage(address.PageID).GetNode(address.Index);
+ inlineSeen |= node.HasInlineVector;
+
+ for (var level = 0; level < node.LevelCount; level++)
+ {
+ foreach (var neighbor in node.GetNeighbors(level))
+ {
+ if (!neighbor.IsEmpty)
+ {
+ queue.Enqueue(neighbor);
+ }
+ }
+ }
+
+ var storedVector = node.HasInlineVector
+ ? node.ReadVector()
+ : ReadExternalVector(dataService, node.ExternalVector, metadata.Dimensions);
+
+ using var reader = new BufferReader(dataService.Read(node.DataBlock));
+ var document = reader.ReadDocument().GetValue();
+ var typed = BsonMapper.Global.ToObject<VectorDocument>(document);
+
+ var expected = documents.Single(d => d.Id == typed.Id).Embedding;
+ if (!VectorsMatch(expected, storedVector))
+ {
+ collectedMismatches.Add(typed.Id);
+ }
+ }
+
+ return (inlineSeen, collectedMismatches);
+ });
+
+ Assert.False(inlineDetected);
+ mismatches.Should().BeEmpty();
+
+ foreach (var doc in documents)
+ {
+ var persisted = collection.FindById(doc.Id);
+ Assert.NotNull(persisted);
+ Assert.True(VectorsMatch(doc.Embedding, persisted.Embedding));
+
+ var result = InspectVectorIndex(db, "vectors", (snapshot, collation, metadata) =>
+ {
+ var service = new VectorIndexService(snapshot, collation);
+ return service.Search(metadata, doc.Embedding, double.MaxValue, 1).FirstOrDefault();
+ });
+
+ Assert.NotNull(result.Document);
+ var mapped = BsonMapper.Global.ToObject<VectorDocument>(result.Document);
+ mapped.Id.Should().Be(doc.Id);
+ result.Distance.Should().BeApproximately(0d, 1e-6);
+ }
+ }
+
+ private static bool VectorsMatch(float[] expected, float[] actual)
+ {
+ if (expected == null || actual == null)
+ {
+ return false;
+ }
+
+ if (expected.Length != actual.Length)
+ {
+ return false;
+ }
+
+ for (var i = 0; i < expected.Length; i++)
+ {
+ if (BitConverter.SingleToInt32Bits(expected[i]) != BitConverter.SingleToInt32Bits(actual[i]))
+ {
+ return false;
+ }
+ }
+
+ return true;
+ }
+
+ private static float[] ReadExternalVector(DataService dataService, PageAddress start, int dimensions)
+ {
+ Assert.False(start.IsEmpty);
+ Assert.True(dimensions > 0);
+
+ var totalBytes = dimensions * sizeof(float);
+ var vector = new float[dimensions];
+ var bytesCopied = 0;
+
+ foreach (var slice in dataService.Read(start))
+ {
+ if (bytesCopied >= totalBytes)
+ {
+ break;
+ }
+
+ var available = Math.Min(slice.Count, totalBytes - bytesCopied);
+ Assert.Equal(0, available % sizeof(float));
+
+ Buffer.BlockCopy(slice.Array, slice.Offset, vector, bytesCopied, available);
+ bytesCopied += available;
+ }
+
+ Assert.Equal(totalBytes, bytesCopied);
+
+ return vector;
+ }
+
+ }
+}
diff --git a/LiteDB.Tests/Resources/ingest-20250922-234735.json b/LiteDB.Tests/Resources/ingest-20250922-234735.json
new file mode 100644
index 000000000..1bf07fb4b
--- /dev/null
+++ b/LiteDB.Tests/Resources/ingest-20250922-234735.json
@@ -0,0 +1,3090 @@
+{
+ "Id": {
+ "Timestamp": 0,
+ "Machine": 0,
+ "Pid": 0,
+ "Increment": 0,
+ "CreationTime": "1970-01-01T00:00:00Z"
+ },
+ "Path": "C:\\Users\\W31rd0\\source\\repos\\External\\LiteDB\\docs-md\\bsondocument\\index.md",
+ "Title": "index.md",
+ "Preview": "BsonDocument - LiteDB :: A .NET embedded NoSQL database [Fork me on GitHub](https://github.com/mbdavid/litedb) * [HOME](/) * [DOCS](/docs/) * [API](/api/) * [DOWNLOAD](https://www.nuget.org/packages/LiteDB/) [ = "LiteDB.Benchmarks", "LiteDB
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "LiteDB.Stress", "LiteDB.Stress\LiteDB.Stress.csproj", "{FFBC5669-DA32-4907-8793-7B414279DA3B}"
EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "LiteDB.Demo.Tools.VectorSearch", "LiteDB.Demo.Tools.VectorSearch\LiteDB.Demo.Tools.VectorSearch.csproj", "{64EEF08C-CE83-4929-B5E4-583BBC332941}"
+EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "ConsoleApp1", "ConsoleApp1\ConsoleApp1.csproj", "{E8763934-E46A-4AAF-A2B5-E812016DAF84}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "LiteDB.RollbackRepro", "LiteDB.RollbackRepro\LiteDB.RollbackRepro.csproj", "{BE1D6CA2-134A-404A-8F1A-C48E4E240159}"
@@ -22,29 +24,85 @@ EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
+ Debug|x64 = Debug|x64
+ Debug|x86 = Debug|x86
Release|Any CPU = Release|Any CPU
+ Release|x64 = Release|x64
+ Release|x86 = Release|x86
EndGlobalSection
GlobalSection(ProjectConfigurationPlatforms) = postSolution
{9497DA19-1FCA-4C2E-A1AB-8DFAACBC76E1}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{9497DA19-1FCA-4C2E-A1AB-8DFAACBC76E1}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {9497DA19-1FCA-4C2E-A1AB-8DFAACBC76E1}.Debug|x64.ActiveCfg = Debug|Any CPU
+ {9497DA19-1FCA-4C2E-A1AB-8DFAACBC76E1}.Debug|x64.Build.0 = Debug|Any CPU
+ {9497DA19-1FCA-4C2E-A1AB-8DFAACBC76E1}.Debug|x86.ActiveCfg = Debug|Any CPU
+ {9497DA19-1FCA-4C2E-A1AB-8DFAACBC76E1}.Debug|x86.Build.0 = Debug|Any CPU
{9497DA19-1FCA-4C2E-A1AB-8DFAACBC76E1}.Release|Any CPU.ActiveCfg = Release|Any CPU
{9497DA19-1FCA-4C2E-A1AB-8DFAACBC76E1}.Release|Any CPU.Build.0 = Release|Any CPU
+ {9497DA19-1FCA-4C2E-A1AB-8DFAACBC76E1}.Release|x64.ActiveCfg = Release|Any CPU
+ {9497DA19-1FCA-4C2E-A1AB-8DFAACBC76E1}.Release|x64.Build.0 = Release|Any CPU
+ {9497DA19-1FCA-4C2E-A1AB-8DFAACBC76E1}.Release|x86.ActiveCfg = Release|Any CPU
+ {9497DA19-1FCA-4C2E-A1AB-8DFAACBC76E1}.Release|x86.Build.0 = Release|Any CPU
{74E32E43-2A57-4A38-BD8C-9108B0DCAEAA}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{74E32E43-2A57-4A38-BD8C-9108B0DCAEAA}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {74E32E43-2A57-4A38-BD8C-9108B0DCAEAA}.Debug|x64.ActiveCfg = Debug|Any CPU
+ {74E32E43-2A57-4A38-BD8C-9108B0DCAEAA}.Debug|x64.Build.0 = Debug|Any CPU
+ {74E32E43-2A57-4A38-BD8C-9108B0DCAEAA}.Debug|x86.ActiveCfg = Debug|Any CPU
+ {74E32E43-2A57-4A38-BD8C-9108B0DCAEAA}.Debug|x86.Build.0 = Debug|Any CPU
{74E32E43-2A57-4A38-BD8C-9108B0DCAEAA}.Release|Any CPU.ActiveCfg = Release|Any CPU
{74E32E43-2A57-4A38-BD8C-9108B0DCAEAA}.Release|Any CPU.Build.0 = Release|Any CPU
+ {74E32E43-2A57-4A38-BD8C-9108B0DCAEAA}.Release|x64.ActiveCfg = Release|Any CPU
+ {74E32E43-2A57-4A38-BD8C-9108B0DCAEAA}.Release|x64.Build.0 = Release|Any CPU
+ {74E32E43-2A57-4A38-BD8C-9108B0DCAEAA}.Release|x86.ActiveCfg = Release|Any CPU
+ {74E32E43-2A57-4A38-BD8C-9108B0DCAEAA}.Release|x86.Build.0 = Release|Any CPU
{99887C89-CAE4-4A8D-AC4B-87E28B9B1F87}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{99887C89-CAE4-4A8D-AC4B-87E28B9B1F87}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {99887C89-CAE4-4A8D-AC4B-87E28B9B1F87}.Debug|x64.ActiveCfg = Debug|Any CPU
+ {99887C89-CAE4-4A8D-AC4B-87E28B9B1F87}.Debug|x64.Build.0 = Debug|Any CPU
+ {99887C89-CAE4-4A8D-AC4B-87E28B9B1F87}.Debug|x86.ActiveCfg = Debug|Any CPU
+ {99887C89-CAE4-4A8D-AC4B-87E28B9B1F87}.Debug|x86.Build.0 = Debug|Any CPU
{99887C89-CAE4-4A8D-AC4B-87E28B9B1F87}.Release|Any CPU.ActiveCfg = Release|Any CPU
{99887C89-CAE4-4A8D-AC4B-87E28B9B1F87}.Release|Any CPU.Build.0 = Release|Any CPU
+ {99887C89-CAE4-4A8D-AC4B-87E28B9B1F87}.Release|x64.ActiveCfg = Release|Any CPU
+ {99887C89-CAE4-4A8D-AC4B-87E28B9B1F87}.Release|x64.Build.0 = Release|Any CPU
+ {99887C89-CAE4-4A8D-AC4B-87E28B9B1F87}.Release|x86.ActiveCfg = Release|Any CPU
+ {99887C89-CAE4-4A8D-AC4B-87E28B9B1F87}.Release|x86.Build.0 = Release|Any CPU
{DF9C82C1-446F-458A-AA50-78E58BA17273}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{DF9C82C1-446F-458A-AA50-78E58BA17273}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {DF9C82C1-446F-458A-AA50-78E58BA17273}.Debug|x64.ActiveCfg = Debug|Any CPU
+ {DF9C82C1-446F-458A-AA50-78E58BA17273}.Debug|x64.Build.0 = Debug|Any CPU
+ {DF9C82C1-446F-458A-AA50-78E58BA17273}.Debug|x86.ActiveCfg = Debug|Any CPU
+ {DF9C82C1-446F-458A-AA50-78E58BA17273}.Debug|x86.Build.0 = Debug|Any CPU
{DF9C82C1-446F-458A-AA50-78E58BA17273}.Release|Any CPU.ActiveCfg = Release|Any CPU
{DF9C82C1-446F-458A-AA50-78E58BA17273}.Release|Any CPU.Build.0 = Release|Any CPU
+ {DF9C82C1-446F-458A-AA50-78E58BA17273}.Release|x64.ActiveCfg = Release|Any CPU
+ {DF9C82C1-446F-458A-AA50-78E58BA17273}.Release|x64.Build.0 = Release|Any CPU
+ {DF9C82C1-446F-458A-AA50-78E58BA17273}.Release|x86.ActiveCfg = Release|Any CPU
+ {DF9C82C1-446F-458A-AA50-78E58BA17273}.Release|x86.Build.0 = Release|Any CPU
{FFBC5669-DA32-4907-8793-7B414279DA3B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{FFBC5669-DA32-4907-8793-7B414279DA3B}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {FFBC5669-DA32-4907-8793-7B414279DA3B}.Debug|x64.ActiveCfg = Debug|Any CPU
+ {FFBC5669-DA32-4907-8793-7B414279DA3B}.Debug|x64.Build.0 = Debug|Any CPU
+ {FFBC5669-DA32-4907-8793-7B414279DA3B}.Debug|x86.ActiveCfg = Debug|Any CPU
+ {FFBC5669-DA32-4907-8793-7B414279DA3B}.Debug|x86.Build.0 = Debug|Any CPU
{FFBC5669-DA32-4907-8793-7B414279DA3B}.Release|Any CPU.ActiveCfg = Release|Any CPU
{FFBC5669-DA32-4907-8793-7B414279DA3B}.Release|Any CPU.Build.0 = Release|Any CPU
+ {FFBC5669-DA32-4907-8793-7B414279DA3B}.Release|x64.ActiveCfg = Release|Any CPU
+ {FFBC5669-DA32-4907-8793-7B414279DA3B}.Release|x64.Build.0 = Release|Any CPU
+ {FFBC5669-DA32-4907-8793-7B414279DA3B}.Release|x86.ActiveCfg = Release|Any CPU
+ {FFBC5669-DA32-4907-8793-7B414279DA3B}.Release|x86.Build.0 = Release|Any CPU
+ {64EEF08C-CE83-4929-B5E4-583BBC332941}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {64EEF08C-CE83-4929-B5E4-583BBC332941}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {64EEF08C-CE83-4929-B5E4-583BBC332941}.Debug|x64.ActiveCfg = Debug|Any CPU
+ {64EEF08C-CE83-4929-B5E4-583BBC332941}.Debug|x64.Build.0 = Debug|Any CPU
+ {64EEF08C-CE83-4929-B5E4-583BBC332941}.Debug|x86.ActiveCfg = Debug|Any CPU
+ {64EEF08C-CE83-4929-B5E4-583BBC332941}.Debug|x86.Build.0 = Debug|Any CPU
+ {64EEF08C-CE83-4929-B5E4-583BBC332941}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {64EEF08C-CE83-4929-B5E4-583BBC332941}.Release|Any CPU.Build.0 = Release|Any CPU
+ {64EEF08C-CE83-4929-B5E4-583BBC332941}.Release|x64.ActiveCfg = Release|Any CPU
+ {64EEF08C-CE83-4929-B5E4-583BBC332941}.Release|x64.Build.0 = Release|Any CPU
+ {64EEF08C-CE83-4929-B5E4-583BBC332941}.Release|x86.ActiveCfg = Release|Any CPU
+ {64EEF08C-CE83-4929-B5E4-583BBC332941}.Release|x86.Build.0 = Release|Any CPU
{E8763934-E46A-4AAF-A2B5-E812016DAF84}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{E8763934-E46A-4AAF-A2B5-E812016DAF84}.Debug|Any CPU.Build.0 = Debug|Any CPU
{E8763934-E46A-4AAF-A2B5-E812016DAF84}.Release|Any CPU.ActiveCfg = Release|Any CPU
diff --git a/LiteDB/Client/Database/Collections/Index.cs b/LiteDB/Client/Database/Collections/Index.cs
index aac550567..0db9c9900 100644
--- a/LiteDB/Client/Database/Collections/Index.cs
+++ b/LiteDB/Client/Database/Collections/Index.cs
@@ -3,6 +3,7 @@
using System.Linq;
using System.Linq.Expressions;
using System.Text.RegularExpressions;
+using LiteDB.Vector;
using static LiteDB.Constants;
namespace LiteDB
@@ -23,6 +24,21 @@ public bool EnsureIndex(string name, BsonExpression expression, bool unique = fa
return _engine.EnsureIndex(_collection, name, expression, unique);
}
+ internal bool EnsureVectorIndex(string name, BsonExpression expression, VectorIndexOptions options)
+ {
+ if (string.IsNullOrEmpty(name)) throw new ArgumentNullException(nameof(name));
+ if (expression == null) throw new ArgumentNullException(nameof(expression));
+ if (options == null) throw new ArgumentNullException(nameof(options));
+
+ return _engine.EnsureVectorIndex(_collection, name, expression, options);
+ }
+
+ [Obsolete("Add `using LiteDB.Vector;` and call LiteCollectionVectorExtensions.EnsureIndex instead.")]
+ public bool EnsureIndex(string name, BsonExpression expression, VectorIndexOptions options)
+ {
+ return this.EnsureVectorIndex(name, expression, options);
+ }
+
///
/// Create a new permanent index in all documents inside this collections if index not exists already. Returns true if index was created or false if already exits
///
@@ -37,6 +53,22 @@ public bool EnsureIndex(BsonExpression expression, bool unique = false)
return this.EnsureIndex(name, expression, unique);
}
+ internal bool EnsureVectorIndex(BsonExpression expression, VectorIndexOptions options)
+ {
+ if (expression == null) throw new ArgumentNullException(nameof(expression));
+ if (options == null) throw new ArgumentNullException(nameof(options));
+
+ var name = Regex.Replace(expression.Source, @"[^a-z0-9]", "", RegexOptions.IgnoreCase | RegexOptions.Compiled);
+
+ return this.EnsureVectorIndex(name, expression, options);
+ }
+
+ [Obsolete("Add `using LiteDB.Vector;` and call LiteCollectionVectorExtensions.EnsureIndex instead.")]
+ public bool EnsureIndex(BsonExpression expression, VectorIndexOptions options)
+ {
+ return this.EnsureVectorIndex(expression, options);
+ }
+
///
/// Create a new permanent index in all documents inside this collections if index not exists already.
///
@@ -49,6 +81,21 @@ public bool EnsureIndex(Expression> keySelector, bool unique = fal
return this.EnsureIndex(expression, unique);
}
+ internal bool EnsureVectorIndex<K>(Expression<Func<T, K>> keySelector, VectorIndexOptions options)
+ {
+ if (options == null) throw new ArgumentNullException(nameof(options));
+
+ var expression = this.GetIndexExpression(keySelector, convertEnumerableToMultiKey: false);
+
+ return this.EnsureVectorIndex(expression, options);
+ }
+
+ [Obsolete("Add `using LiteDB.Vector;` and call LiteCollectionVectorExtensions.EnsureIndex instead.")]
+ public bool EnsureIndex<K>(Expression<Func<T, K>> keySelector, VectorIndexOptions options)
+ {
+ return this.EnsureVectorIndex(keySelector, options);
+ }
+
///
/// Create a new permanent index in all documents inside this collections if index not exists already.
///
@@ -62,14 +109,29 @@ public bool EnsureIndex(string name, Expression> keySelector, bool
return this.EnsureIndex(name, expression, unique);
}
+ internal bool EnsureVectorIndex<K>(string name, Expression<Func<T, K>> keySelector, VectorIndexOptions options)
+ {
+ if (options == null) throw new ArgumentNullException(nameof(options));
+
+ var expression = this.GetIndexExpression(keySelector, convertEnumerableToMultiKey: false);
+
+ return this.EnsureVectorIndex(name, expression, options);
+ }
+
+ [Obsolete("Add `using LiteDB.Vector;` and call LiteCollectionVectorExtensions.EnsureIndex instead.")]
+ public bool EnsureIndex<K>(string name, Expression<Func<T, K>> keySelector, VectorIndexOptions options)
+ {
+ return this.EnsureVectorIndex(name, keySelector, options);
+ }
+
///
/// Get index expression based on LINQ expression. Convert IEnumerable in MultiKey indexes
///
- private BsonExpression GetIndexExpression<K>(Expression<Func<T, K>> keySelector)
+ private BsonExpression GetIndexExpression<K>(Expression<Func<T, K>> keySelector, bool convertEnumerableToMultiKey = true)
{
var expression = _mapper.GetIndexExpression(keySelector);
- if (typeof(K).IsEnumerable() && expression.IsScalar == true)
+ if (convertEnumerableToMultiKey && typeof(K).IsEnumerable() && expression.IsScalar == true)
{
if (expression.Type == BsonExpressionType.Path)
{
diff --git a/LiteDB/Client/Database/ILiteQueryable.cs b/LiteDB/Client/Database/ILiteQueryable.cs
index 46de8e413..27c044772 100644
--- a/LiteDB/Client/Database/ILiteQueryable.cs
+++ b/LiteDB/Client/Database/ILiteQueryable.cs
@@ -1,4 +1,4 @@
-using System;
+using System;
using System.Collections.Generic;
using System.Linq;
using System.Linq.Expressions;
@@ -30,6 +30,7 @@ public interface ILiteQueryable : ILiteQueryableResult
ILiteQueryableResult Select(BsonExpression selector);
ILiteQueryableResult Select(Expression> selector);
+
}
public interface ILiteQueryableResult
diff --git a/LiteDB/Client/Database/LiteQueryable.cs b/LiteDB/Client/Database/LiteQueryable.cs
index 07624b34b..9783654b4 100644
--- a/LiteDB/Client/Database/LiteQueryable.cs
+++ b/LiteDB/Client/Database/LiteQueryable.cs
@@ -225,6 +225,126 @@ public ILiteQueryableResult Select(Expression> selector)
return new LiteQueryable(_engine, _mapper, _collection, _query);
}
+ private static void ValidateVectorArguments(float[] target, double maxDistance)
+ {
+ if (target == null || target.Length == 0) throw new ArgumentException("Target vector must be provided.", nameof(target));
+ // Dot-product queries interpret "maxDistance" as a minimum similarity score and may therefore pass negative values.
+ if (double.IsNaN(maxDistance)) throw new ArgumentOutOfRangeException(nameof(maxDistance), "Similarity threshold must be a valid number.");
+ }
+
+ private static BsonExpression CreateVectorSimilarityFilter(BsonExpression fieldExpr, float[] target, double maxDistance)
+ {
+ if (fieldExpr == null) throw new ArgumentNullException(nameof(fieldExpr));
+
+ ValidateVectorArguments(target, maxDistance);
+
+ var targetArray = new BsonArray(target.Select(v => new BsonValue(v)));
+ return BsonExpression.Create($"{fieldExpr.Source} VECTOR_SIM @0 <= @1", targetArray, new BsonValue(maxDistance));
+ }
+
+ internal ILiteQueryable<T> VectorWhereNear(string vectorField, float[] target, double maxDistance)
+ {
+ if (string.IsNullOrWhiteSpace(vectorField)) throw new ArgumentNullException(nameof(vectorField));
+
+ var fieldExpr = BsonExpression.Create($"$.{vectorField}");
+ return this.VectorWhereNear(fieldExpr, target, maxDistance);
+ }
+
+ internal ILiteQueryable<T> VectorWhereNear(BsonExpression fieldExpr, float[] target, double maxDistance)
+ {
+ var filter = CreateVectorSimilarityFilter(fieldExpr, target, maxDistance);
+
+ _query.Where.Add(filter);
+
+ _query.VectorField = fieldExpr.Source;
+ _query.VectorTarget = target?.ToArray();
+ _query.VectorMaxDistance = maxDistance;
+
+ return this;
+ }
+
+ internal ILiteQueryable<T> VectorWhereNear(Expression<Func<T, float[]>> field, float[] target, double maxDistance)
+ {
+ if (field == null) throw new ArgumentNullException(nameof(field));
+
+ var fieldExpr = _mapper.GetExpression(field);
+ return this.VectorWhereNear(fieldExpr, target, maxDistance);
+ }
+
+ internal ILiteQueryableResult<T> VectorTopKNear(Expression<Func<T, float[]>> field, float[] target, int k)
+ {
+ var fieldExpr = _mapper.GetExpression(field);
+ return this.VectorTopKNear(fieldExpr, target, k);
+ }
+
+ internal ILiteQueryableResult<T> VectorTopKNear(string field, float[] target, int k)
+ {
+ var fieldExpr = BsonExpression.Create($"$.{field}");
+ return this.VectorTopKNear(fieldExpr, target, k);
+ }
+
+ internal ILiteQueryableResult<T> VectorTopKNear(BsonExpression fieldExpr, float[] target, int k)
+ {
+ if (fieldExpr == null) throw new ArgumentNullException(nameof(fieldExpr));
+ if (target == null || target.Length == 0) throw new ArgumentException("Target vector must be provided.", nameof(target));
+ if (k <= 0) throw new ArgumentOutOfRangeException(nameof(k), "Top-K must be greater than zero.");
+
+ var targetArray = new BsonArray(target.Select(v => new BsonValue(v)));
+
+ // Build VECTOR_SIM as order clause
+ var simExpr = BsonExpression.Create($"VECTOR_SIM({fieldExpr.Source}, @0)", targetArray);
+
+ _query.VectorField = fieldExpr.Source;
+ _query.VectorTarget = target?.ToArray();
+ _query.VectorMaxDistance = double.MaxValue;
+
+ return this
+ .OrderBy(simExpr, Query.Ascending)
+ .Limit(k);
+ }
+
+ [Obsolete("Add `using LiteDB.Vector;` and call the LiteQueryableVectorExtensions.WhereNear extension instead.")]
+ public ILiteQueryable<T> WhereNear(string vectorField, float[] target, double maxDistance)
+ {
+ return this.VectorWhereNear(vectorField, target, maxDistance);
+ }
+
+ [Obsolete("Add `using LiteDB.Vector;` and call the LiteQueryableVectorExtensions.WhereNear extension instead.")]
+ public ILiteQueryable<T> WhereNear(BsonExpression fieldExpr, float[] target, double maxDistance)
+ {
+ return this.VectorWhereNear(fieldExpr, target, maxDistance);
+ }
+
+ [Obsolete("Add `using LiteDB.Vector;` and call the LiteQueryableVectorExtensions.WhereNear extension instead.")]
+ public ILiteQueryable<T> WhereNear(Expression<Func<T, float[]>> field, float[] target, double maxDistance)
+ {
+ return this.VectorWhereNear(field, target, maxDistance);
+ }
+
+ [Obsolete("Add `using LiteDB.Vector;` and call the LiteQueryableVectorExtensions.FindNearest extension instead.")]
+ public IEnumerable<T> FindNearest(string vectorField, float[] target, double maxDistance)
+ {
+ return this.VectorWhereNear(vectorField, target, maxDistance).ToEnumerable();
+ }
+
+ [Obsolete("Add `using LiteDB.Vector;` and call the LiteQueryableVectorExtensions.TopKNear extension instead.")]
+ public ILiteQueryableResult<T> TopKNear(Expression<Func<T, float[]>> field, float[] target, int k)
+ {
+ return this.VectorTopKNear(field, target, k);
+ }
+
+ [Obsolete("Add `using LiteDB.Vector;` and call the LiteQueryableVectorExtensions.TopKNear extension instead.")]
+ public ILiteQueryableResult<T> TopKNear(string field, float[] target, int k)
+ {
+ return this.VectorTopKNear(field, target, k);
+ }
+
+ [Obsolete("Add `using LiteDB.Vector;` and call the LiteQueryableVectorExtensions.TopKNear extension instead.")]
+ public ILiteQueryableResult<T> TopKNear(BsonExpression fieldExpr, float[] target, int k)
+ {
+ return this.VectorTopKNear(fieldExpr, target, k);
+ }
+
#endregion
#region Offset/Limit/ForUpdate
diff --git a/LiteDB/Client/Database/LiteRepository.cs b/LiteDB/Client/Database/LiteRepository.cs
index 91a659bef..4d5008ae7 100644
--- a/LiteDB/Client/Database/LiteRepository.cs
+++ b/LiteDB/Client/Database/LiteRepository.cs
@@ -3,6 +3,7 @@
using System.IO;
using System.Linq;
using System.Linq.Expressions;
+using LiteDB.Vector;
using static LiteDB.Constants;
namespace LiteDB
@@ -16,6 +17,18 @@ public class LiteRepository : ILiteRepository
private readonly ILiteDatabase _db = null;
+ private LiteCollection<T> GetLiteCollection<T>(string collectionName)
+ {
+ var collection = _db.GetCollection<T>(collectionName);
+
+ if (collection is LiteCollection<T> liteCollection)
+ {
+ return liteCollection;
+ }
+
+ throw new InvalidOperationException("The current collection implementation does not support vector operations.");
+ }
+
///
/// Get database instance
///
@@ -173,6 +186,17 @@ public bool EnsureIndex(string name, BsonExpression expression, bool unique =
return _db.GetCollection(collectionName).EnsureIndex(name, expression, unique);
}
+ internal bool EnsureVectorIndex<T>(string name, BsonExpression expression, VectorIndexOptions options, string collectionName = null)
+ {
+ return this.GetLiteCollection<T>(collectionName).EnsureVectorIndex(name, expression, options);
+ }
+
+ [Obsolete("Add `using LiteDB.Vector;` and call LiteRepositoryVectorExtensions.EnsureIndex instead.")]
+ public bool EnsureIndex<T>(string name, BsonExpression expression, VectorIndexOptions options, string collectionName = null)
+ {
+ return this.EnsureVectorIndex