diff --git a/.github/actions/ci/action.yml b/.github/actions/ci/action.yml index 20b0ba5b..107fbc71 100644 --- a/.github/actions/ci/action.yml +++ b/.github/actions/ci/action.yml @@ -15,6 +15,18 @@ inputs: description: 'The verbosity level of the test output.' required: false default: 'normal' + run_consul: + description: 'Run an instance of Consul' + required: false + default: false + run_dynamodb: + description: 'Run an instance of DynamoDB' + required: false + default: false + run_redis: + description: 'Run an instance of Redis' + required: false + default: false runs: using: composite @@ -26,6 +38,24 @@ runs: 8.0 7.0 + - name: Setup Consul if needed + uses: launchdarkly/gh-actions/actions/persistent-stores@persistent-stores-v0.1.0 + if: inputs.run_consul == 'true' + with: + consul: true + + - name: Setup DynamoDB if needed + uses: launchdarkly/gh-actions/actions/persistent-stores@persistent-stores-v0.1.0 + if: inputs.run_dynamodb == 'true' + with: + dynamodb: true + + - name: Setup Redis if needed + uses: launchdarkly/gh-actions/actions/persistent-stores@persistent-stores-v0.1.0 + if: inputs.run_redis == 'true' + with: + redis: true + - name: Copy global.json in place shell: bash run: cp global.example.json global.json diff --git a/.github/actions/full-release/action.yml b/.github/actions/full-release/action.yml index 0058e76e..3eeb004e 100644 --- a/.github/actions/full-release/action.yml +++ b/.github/actions/full-release/action.yml @@ -25,6 +25,18 @@ inputs: token: description: 'The GitHub token to use for publishing documentation.' required: true + run_consul: + description: 'Run an instance of Consul' + required: false + default: false + run_dynamodb: + description: 'Run an instance of DynamoDB' + required: false + default: false + run_redis: + description: 'Run an instance of Redis' + required: false + default: false outputs: hashes: description: sha256sum hashes of built artifacts @@ -38,6 +50,9 @@ runs: with: project_file: ${{ inputs.project_file }} test_project_file: ${{ inputs.test_project_file }} + run_consul: ${{ inputs.run_consul }} + run_dynamodb: ${{ inputs.run_dynamodb }} + run_redis: ${{ inputs.run_redis }} - uses: launchdarkly/gh-actions/actions/release-secrets@release-secrets-v1.2.0 name: Get secrets diff --git a/.github/workflows/release-please.yml b/.github/workflows/release-please.yml index 909f7892..be6bcb98 100644 --- a/.github/workflows/release-please.yml +++ b/.github/workflows/release-please.yml @@ -10,14 +10,20 @@ jobs: runs-on: ubuntu-22.04 outputs: + package-sdk-client-released: ${{ steps.release.outputs['pkgs/sdk/client--release_created'] }} + package-sdk-client-tag_name: ${{ steps.release.outputs['pkgs/sdk/client--tag_name'] }} + package-sdk-server-ai-released: ${{ steps.release.outputs['pkgs/sdk/server-ai--release_created'] }} + package-sdk-server-ai-tag_name: ${{ steps.release.outputs['pkgs/sdk/server-ai--tag_name'] }} + package-sdk-server-consul-released: ${{ steps.release.outputs['pkgs/dotnet-server-sdk-consul--release_created'] }} + package-sdk-server-consul-tag_name: ${{ steps.release.outputs['pkgs/dotnet-server-sdk-consul--tag_name'] }} + package-sdk-server-dynamodb-released: ${{ steps.release.outputs['pkgs/dotnet-server-sdk-dynamodb--release_created'] }} + package-sdk-server-dynamodb-tag_name: ${{ steps.release.outputs['pkgs/dotnet-server-sdk-dynamodb--tag_name'] }} + package-sdk-server-redis-released: ${{ steps.release.outputs['pkgs/dotnet-server-sdk-redis--release_created'] }} + package-sdk-server-redis-tag_name: ${{ 
steps.release.outputs['pkgs/dotnet-server-sdk-redis--tag_name'] }} package-sdk-server-released: ${{ steps.release.outputs['pkgs/sdk/server--release_created'] }} package-sdk-server-tag_name: ${{ steps.release.outputs['pkgs/sdk/server--tag_name'] }} package-sdk-server-telemetry-released: ${{ steps.release.outputs['pkgs/telemetry--release_created'] }} package-sdk-server-telemetry-tag_name: ${{ steps.release.outputs['pkgs/telemetry--tag_name'] }} - package-sdk-server-ai-released: ${{ steps.release.outputs['pkgs/sdk/server-ai--release_created'] }} - package-sdk-server-ai-tag_name: ${{ steps.release.outputs['pkgs/sdk/server-ai--tag_name'] }} - package-sdk-client-released: ${{ steps.release.outputs['pkgs/sdk/client--release_created'] }} - package-sdk-client-tag_name: ${{ steps.release.outputs['pkgs/sdk/client--tag_name'] }} tag_name: ${{ steps.release.outputs.tag_name }} steps: @@ -27,136 +33,85 @@ jobs: token: ${{ secrets.GITHUB_TOKEN }} target-branch: ${{ github.ref_name }} - release-sdk-server: - runs-on: ubuntu-22.04 + # this job calls to the release-sdk-client workflow because the client SDK has to be built on macos + release-sdk-client: needs: release-please + if: ${{ needs.release-please.outputs.package-sdk-client-released == 'true'}} + uses: ./.github/workflows/release-sdk-client.yml + with: + dry_run: false + + # Client SDK provenance job (since it uses a different workflow) + release-sdk-client-provenance: + needs: ['release-please', 'release-sdk-client'] permissions: + actions: read id-token: write contents: write - pull-requests: write - if: ${{ needs.release-please.outputs.package-sdk-server-released == 'true'}} - outputs: - hashes: ${{ steps.full-release.outputs.hashes }} - steps: - - uses: actions/checkout@v4 - - - name: Setup Env from project's Env file - shell: bash - run: echo "$(cat pkgs/sdk/server/github_actions.env)" >> $GITHUB_ENV + uses: slsa-framework/slsa-github-generator/.github/workflows/generator_generic_slsa3.yml@v2.1.0 + with: + base64-subjects: "${{ needs.release-sdk-client.outputs.hashes }}" + upload-assets: true + upload-tag-name: ${{ needs.release-please.outputs.package-sdk-client-tag_name }} + provenance-name: ${{ format('LaunchDarkly.ClientSdk-{0}_provenance.intoto.jsonl', needs.release-please.outputs.package-sdk-client-tag_name) }} - - uses: ./.github/actions/full-release - id: full-release - with: - workspace_path: ${{ env.WORKSPACE_PATH }} - project_file: ${{ env.PROJECT_FILE }} - build_output_path: ${{ env.BUILD_OUTPUT_PATH }} - test_project_file: ${{ env.TEST_PROJECT_FILE }} - dll_name: ${{ env.BUILD_OUTPUT_DLL_NAME }} - dry_run: false - aws_role: ${{ vars.AWS_ROLE_ARN }} - token: ${{ secrets.GITHUB_TOKEN }} + # Server packages using the shared release workflow + release-sdk-server: + needs: release-please + if: ${{ needs.release-please.outputs.package-sdk-server-released == 'true'}} + uses: ./.github/workflows/release.yml + with: + package_path: pkgs/sdk/server + dry_run: false + generate_provenance: true + tag_name: ${{ needs.release-please.outputs.package-sdk-server-tag_name }} release-sdk-server-ai: - runs-on: ubuntu-22.04 needs: release-please - permissions: - id-token: write - contents: write - pull-requests: write if: ${{ needs.release-please.outputs.package-sdk-server-ai-released == 'true'}} - outputs: - hashes: ${{ steps.full-release.outputs.hashes }} - steps: - - uses: actions/checkout@v4 - - - name: Setup Env from project's Env file - shell: bash - run: echo "$(cat pkgs/sdk/server-ai/github_actions.env)" >> $GITHUB_ENV - - - uses: 
./.github/actions/full-release - id: full-release - with: - workspace_path: ${{ env.WORKSPACE_PATH }} - project_file: ${{ env.PROJECT_FILE }} - build_output_path: ${{ env.BUILD_OUTPUT_PATH }} - test_project_file: ${{ env.TEST_PROJECT_FILE }} - dll_name: ${{ env.BUILD_OUTPUT_DLL_NAME }} - dry_run: false - aws_role: ${{ vars.AWS_ROLE_ARN }} - token: ${{ secrets.GITHUB_TOKEN }} + uses: ./.github/workflows/release.yml + with: + package_path: pkgs/sdk/server-ai + dry_run: false + generate_provenance: true + tag_name: ${{ needs.release-please.outputs.package-sdk-server-ai-tag_name }} release-telemetry: - runs-on: ubuntu-22.04 needs: release-please - permissions: - id-token: write - contents: write - pull-requests: write if: ${{ needs.release-please.outputs.package-sdk-server-telemetry-released == 'true'}} - outputs: - hashes: ${{ steps.full-release.outputs.hashes }} - steps: - - uses: actions/checkout@v4 - - - name: Setup Env from project's Env file - shell: bash - run: echo "$(cat pkgs/telemetry/github_actions.env)" >> $GITHUB_ENV - - - uses: ./.github/actions/full-release - id: full-release - with: - workspace_path: ${{ env.WORKSPACE_PATH }} - project_file: ${{ env.PROJECT_FILE }} - build_output_path: ${{ env.BUILD_OUTPUT_PATH }} - test_project_file: ${{ env.TEST_PROJECT_FILE }} - dll_name: ${{ env.BUILD_OUTPUT_DLL_NAME }} - dry_run: false - aws_role: ${{ vars.AWS_ROLE_ARN }} - token: ${{ secrets.GITHUB_TOKEN }} - - # this job calls to the release-sdk-client workflow because the client SDK has to be built on macos - release-sdk-client: - needs: release-please - if: ${{ needs.release-please.outputs.package-sdk-client-released == 'true'}} - uses: ./.github/workflows/release-sdk-client.yml + uses: ./.github/workflows/release.yml with: + package_path: pkgs/telemetry dry_run: false + generate_provenance: true + tag_name: ${{ needs.release-please.outputs.package-sdk-server-telemetry-tag_name }} - release-sdk-server-provenance: - needs: ['release-please', 'release-sdk-server'] - permissions: - actions: read - id-token: write - contents: write - uses: slsa-framework/slsa-github-generator/.github/workflows/generator_generic_slsa3.yml@v2.1.0 + release-sdk-server-redis: + needs: release-please + if: ${{ needs.release-please.outputs.package-sdk-server-redis-released == 'true'}} + uses: ./.github/workflows/release.yml with: - base64-subjects: "${{ needs.release-sdk-server.outputs.hashes }}" - upload-assets: true - upload-tag-name: ${{ needs.release-please.outputs.package-sdk-server-tag_name }} - provenance-name: ${{ format('LaunchDarkly.ServerSdk-{0}_provenance.intoto.jsonl', needs.release-please.outputs.package-sdk-server-tag_name) }} + package_path: pkgs/dotnet-server-sdk-redis + dry_run: false + generate_provenance: true + tag_name: ${{ needs.release-please.outputs.package-sdk-server-redis-tag_name }} - release-telemetry-provenance: - needs: ['release-please', 'release-telemetry'] - permissions: - actions: read - id-token: write - contents: write - uses: slsa-framework/slsa-github-generator/.github/workflows/generator_generic_slsa3.yml@v2.1.0 + release-sdk-server-consul: + needs: release-please + if: ${{ needs.release-please.outputs.package-sdk-server-consul-released == 'true'}} + uses: ./.github/workflows/release.yml with: - base64-subjects: "${{ needs.release-telemetry.outputs.hashes }}" - upload-assets: true - upload-tag-name: ${{ needs.release-please.outputs.package-sdk-server-telemetry-tag_name }} - provenance-name: ${{ format('LaunchDarkly.ServerSdk.Telemetry-{0}_provenance.intoto.jsonl', 
needs.release-please.outputs.package-sdk-server-telemetry-tag_name) }} + package_path: pkgs/dotnet-server-sdk-consul + dry_run: false + generate_provenance: true + tag_name: ${{ needs.release-please.outputs.package-sdk-server-consul-tag_name }} - release-sdk-client-provenance: - needs: ['release-please', 'release-sdk-client'] - permissions: - actions: read - id-token: write - contents: write - uses: slsa-framework/slsa-github-generator/.github/workflows/generator_generic_slsa3.yml@v2.1.0 + release-sdk-server-dynamodb: + needs: release-please + if: ${{ needs.release-please.outputs.package-sdk-server-dynamodb-released == 'true'}} + uses: ./.github/workflows/release.yml with: - base64-subjects: "${{ needs.release-sdk-client.outputs.hashes }}" - upload-assets: true - upload-tag-name: ${{ needs.release-please.outputs.package-sdk-client-tag_name }} - provenance-name: ${{ format('LaunchDarkly.ClientSdk-{0}_provenance.intoto.jsonl', needs.release-please.outputs.package-sdk-client-tag_name) }} + package_path: pkgs/dotnet-server-sdk-dynamodb + dry_run: false + generate_provenance: true + tag_name: ${{ needs.release-please.outputs.package-sdk-server-dynamodb-tag_name }} diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 346ec3c4..43828e57 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -1,42 +1,62 @@ -name: Publish Package +name: Release Server Package on: workflow_dispatch: inputs: - pkg_workspace: + package_path: description: 'The package to publish' - required: true type: choice + required: true options: - # pkgs/sdk/client is omitted here since this workflow is for ubuntu and the client sdk build process requires macos. Use release-sdk-client for client sdk. + # pkgs/sdk/client is omitted here since this workflow is for ubuntu and the client sdk build process requires macos + - pkgs/dotnet-server-sdk-consul + - pkgs/dotnet-server-sdk-dynamodb + - pkgs/dotnet-server-sdk-redis - pkgs/sdk/server - - pkgs/telemetry - pkgs/sdk/server-ai + - pkgs/telemetry dry_run: description: 'Is this a dry run. If so no package will be published.' type: boolean required: true generate_provenance: - description: 'Whether or not to generate provenance for this publish. Default behavior: generate only on main branch.' - type: choice - options: - - Default - - Generate - - Do not generate + description: 'Whether to generate provenance for this publish. Note: Workflow must be run from a tag (not a branch) for provenance generation to succeed.' + type: boolean + required: true + default: true + workflow_call: + inputs: + package_path: + description: 'Path to the package directory (e.g. pkgs/dotnet-server-sdk-redis)' + type: string + required: true + dry_run: + description: 'Is this a dry run. If so no package will be published.' + type: boolean + required: true + generate_provenance: + description: 'Whether to generate provenance for this publish.' 
+ type: boolean + default: true + tag_name: + description: 'The tag name to use for the provenance file' + type: string + required: true jobs: - build: + release: runs-on: ubuntu-22.04 permissions: id-token: write contents: write + pull-requests: write outputs: - full-release-hashes: ${{ steps.full-release.outputs.hashes }} + hashes: ${{ steps.full-release.outputs.hashes }} steps: - uses: actions/checkout@v4 - name: Setup Env from project's Env file shell: bash - run: echo "$(cat ${{ inputs.pkg_workspace }}/github_actions.env)" >> $GITHUB_ENV + run: echo "$(cat ${{ inputs.package_path }}/github_actions.env)" >> $GITHUB_ENV - uses: ./.github/actions/full-release id: full-release @@ -49,17 +69,20 @@ jobs: dry_run: ${{ inputs.dry_run }} aws_role: ${{ vars.AWS_ROLE_ARN }} token: ${{ secrets.GITHUB_TOKEN }} + run_consul: ${{ inputs.package_path == 'pkgs/dotnet-server-sdk-consul' }} + run_dynamodb: ${{ inputs.package_path == 'pkgs/dotnet-server-sdk-dynamodb' }} + run_redis: ${{ inputs.package_path == 'pkgs/dotnet-server-sdk-redis' }} release-provenance: - needs: ['build'] + needs: ['release'] permissions: actions: read id-token: write contents: write uses: slsa-framework/slsa-github-generator/.github/workflows/generator_generic_slsa3.yml@v2.1.0 - if: | - (inputs.generate_provenance == 'Generate' || (inputs.generate_provenance == 'Default' && github.ref_name == 'main')) + if: inputs.generate_provenance with: - base64-subjects: "${{ needs.build.outputs.full-release-hashes }}" + base64-subjects: "${{ needs.release.outputs.hashes }}" upload-assets: true - provenance-name: ${{inputs.pkg_workspace}}_provenance.intoto.jsonl + upload-tag-name: ${{ inputs.tag_name }} + provenance-name: ${{ format('{0}_provenance.intoto.jsonl', inputs.tag_name) }} diff --git a/.github/workflows/server-sdk-consul-ci.yml b/.github/workflows/server-sdk-consul-ci.yml new file mode 100644 index 00000000..9d5bdb0e --- /dev/null +++ b/.github/workflows/server-sdk-consul-ci.yml @@ -0,0 +1,49 @@ +name: LaunchDarkly.ServerSdk.Consul CI + +on: + push: + branches: [ main, 'feat/**' ] + paths: + - '.github/**' + - 'global.example.json' + - 'pkgs/dotnet-server-sdk-consul/**' + - '!**.md' + pull_request: + branches: [ main, 'feat/**' ] + paths: + - '.github/**' + - 'global.example.json' + - 'pkgs/dotnet-server-sdk-consul/**' + - '!**.md' + +jobs: + build-and-test: + strategy: + matrix: + include: + - os: ubuntu-latest + framework: netstandard2.0 + test_framework: net8.0 + - os: windows-latest + framework: net462 + test_framework: net462 + + runs-on: ${{ matrix.os }} + + defaults: + run: + shell: ${{ matrix.os == 'windows-latest' && 'powershell' || 'bash' }} + + steps: + - uses: actions/checkout@v4 + + - name: Setup Env from project's Env file + shell: bash + run: echo "$(cat pkgs/dotnet-server-sdk-consul/github_actions.env)" >> $GITHUB_ENV + + - uses: ./.github/actions/ci + with: + project_file: ${{ env.PROJECT_FILE }} + test_project_file: ${{ env.TEST_PROJECT_FILE }} + target_test_framework: ${{ matrix.test_framework }} + run_consul: true \ No newline at end of file diff --git a/.github/workflows/server-sdk-dynamodb-ci.yml b/.github/workflows/server-sdk-dynamodb-ci.yml new file mode 100644 index 00000000..e18a1d89 --- /dev/null +++ b/.github/workflows/server-sdk-dynamodb-ci.yml @@ -0,0 +1,49 @@ +name: LaunchDarkly.ServerSdk.DynamoDB CI + +on: + push: + branches: [ main, 'feat/**' ] + paths: + - '.github/**' + - 'global.example.json' + - 'pkgs/dotnet-server-sdk-dynamodb/**' + - '!**.md' + pull_request: + branches: [ main, 'feat/**' ] + 
paths: + - '.github/**' + - 'global.example.json' + - 'pkgs/dotnet-server-sdk-dynamodb/**' + - '!**.md' + +jobs: + build-and-test: + strategy: + matrix: + include: + - os: ubuntu-latest + framework: netstandard2.0 + test_framework: net8.0 + - os: windows-latest + framework: net462 + test_framework: net462 + + runs-on: ${{ matrix.os }} + + defaults: + run: + shell: ${{ matrix.os == 'windows-latest' && 'powershell' || 'bash' }} + + steps: + - uses: actions/checkout@v4 + + - name: Setup Env from project's Env file + shell: bash + run: echo "$(cat pkgs/dotnet-server-sdk-dynamodb/github_actions.env)" >> $GITHUB_ENV + + - uses: ./.github/actions/ci + with: + project_file: ${{ env.PROJECT_FILE }} + test_project_file: ${{ env.TEST_PROJECT_FILE }} + target_test_framework: ${{ matrix.test_framework }} + run_dynamodb: true \ No newline at end of file diff --git a/.github/workflows/server-sdk-redis-ci.yml b/.github/workflows/server-sdk-redis-ci.yml new file mode 100644 index 00000000..aed38afa --- /dev/null +++ b/.github/workflows/server-sdk-redis-ci.yml @@ -0,0 +1,49 @@ +name: LaunchDarkly.ServerSdk.Redis CI + +on: + push: + branches: [ main, 'feat/**' ] + paths: + - '.github/**' + - 'global.example.json' + - 'pkgs/dotnet-server-sdk-redis/**' + - '!**.md' + pull_request: + branches: [ main, 'feat/**' ] + paths: + - '.github/**' + - 'global.example.json' + - 'pkgs/dotnet-server-sdk-redis/**' + - '!**.md' + +jobs: + build-and-test: + strategy: + matrix: + include: + - os: ubuntu-latest + framework: netstandard2.0 + test_framework: net8.0 + - os: windows-latest + framework: net462 + test_framework: net462 + + runs-on: ${{ matrix.os }} + + defaults: + run: + shell: ${{ matrix.os == 'windows-latest' && 'powershell' || 'bash' }} + + steps: + - uses: actions/checkout@v4 + + - name: Setup Env from project's Env file + shell: bash + run: echo "$(cat pkgs/dotnet-server-sdk-redis/github_actions.env)" >> $GITHUB_ENV + + - uses: ./.github/actions/ci + with: + project_file: ${{ env.PROJECT_FILE }} + test_project_file: ${{ env.TEST_PROJECT_FILE }} + target_test_framework: ${{ matrix.test_framework }} + run_redis: true \ No newline at end of file diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 41d72a40..5fb1fc8d 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,4 +1,7 @@ { + "pkgs/dotnet-server-sdk-consul": "5.0.0", + "pkgs/dotnet-server-sdk-dynamodb": "4.0.0", + "pkgs/dotnet-server-sdk-redis": "5.0.0", "pkgs/sdk/server": "8.8.0", "pkgs/sdk/client": "5.4.0", "pkgs/telemetry": "1.1.0", diff --git a/pkgs/dotnet-server-sdk-consul/CHANGELOG.md b/pkgs/dotnet-server-sdk-consul/CHANGELOG.md new file mode 100644 index 00000000..3043bc8a --- /dev/null +++ b/pkgs/dotnet-server-sdk-consul/CHANGELOG.md @@ -0,0 +1,54 @@ +# Change log + +All notable changes to the LaunchDarkly .NET SDK Consul integration will be documented in this file. This project adheres to [Semantic Versioning](http://semver.org). + +## [5.0.0] - 2023-10-16 +### Changed: +- This release requires the `8.0.0` release of the `LaunchDarkly.ServerSdk` + +## [4.0.0] - 2022-12-07 +This release corresponds to the 7.0.0 release of the LaunchDarkly server-side .NET SDK. Any application code that is being updated to use the 7.0.0 SDK, and was using a 3.x version of `LaunchDarkly.ServerSdk.Consul`, should now use a 4.x version instead. 
+ +There are no functional differences in the behavior of the Consul integration; the differences are only related to changes in the usage of interface types for configuration in the SDK. + +### Changed: +- In `DataStoreBuilder`, the method `CreatePersistentDataStore` has been renamed to `Build`, corresponding to changes in how the SDK uses interface types for configuration. Application code would not normally reference this method. + +## [3.0.0] - 2022-10-24 +This release updates the integration to use the current 1.x stable version of the [Consul.NET](https://www.nuget.org/packages/Consul) client package. Previously, that package had a different maintainer and did not have a stable version. + +Because Consul.NET does not support .NET Framework 4.5.2, this integration now has a minimum .NET Framework version of 4.6.1 (which is the reason for the 3.0.0 major version increment in this release). Its functionality is otherwise unchanged. + +## [2.0.0] - 2021-06-09 +This release is for use with versions 6.0.0 and higher of [`LaunchDarkly.ServerSdk`](https://github.com/launchdarkly/dotnet-server-sdk). + +For more information about changes in the SDK database integrations, see the [5.x to 6.0 migration guide](https://docs-stg.launchdarkly.com/252/sdk/server-side/dotnet/migration-5-to-6). + +### Added: +- Added an overload of `ConsulDataStoreBuilder.Address` that takes a `string` rather than a `Uri`. + +### Changed: +- The namespace is now `LaunchDarkly.Sdk.Server.Integrations`. +- The entry point is now `LaunchDarkly.Sdk.Server.Integrations.Consul` rather than `LaunchDarkly.Client.Integrations.Consul` (or, in earlier versions, `LaunchDarkly.Client.Consul.ConsulComponents`). +- If you pass in an existing Consul client instance with `ConsulDataStoreBuilder.ExistingClient`, the SDK will no longer dispose of the client on shutdown; you are responsible for its lifecycle. +- The logger name is now `LaunchDarkly.Sdk.DataStore.Consul` rather than `LaunchDarkly.Client.Consul.ConsulFeatureStoreCore`. + +### Removed: +- Removed the deprecated `ConsulComponents` entry point and `ConsulFeatureStoreBuilder`. +- The package no longer has a dependency on `Common.Logging` but instead integrates with the SDK's logging mechanism. + + +## [1.1.0] - 2021-01-26 +### Added: +- New classes `LaunchDarkly.Client.Integrations.Consul` and `LaunchDarkly.Client.Integrations.ConsulDataStoreBuilder`, which serve the same purpose as the previous classes but are designed to work with the newer persistent data store API introduced in .NET SDK 5.14.0. + +### Deprecated: +- The old API in the `LaunchDarkly.Client.Consul` namespace. + +## [1.0.1] - 2019-05-10 +### Changed: +- Corresponding to the SDK package name change from `LaunchDarkly.Client` to `LaunchDarkly.ServerSdk`, this package is now called `LaunchDarkly.ServerSdk.Consul`. The functionality of the package, including the namespaces and class names, has not changed. + +## [1.0.0] - 2019-01-11 + +Initial release. diff --git a/pkgs/dotnet-server-sdk-consul/CONTRIBUTING.md b/pkgs/dotnet-server-sdk-consul/CONTRIBUTING.md new file mode 100644 index 00000000..e063dfc9 --- /dev/null +++ b/pkgs/dotnet-server-sdk-consul/CONTRIBUTING.md @@ -0,0 +1,63 @@ +# Contributing to the LaunchDarkly SDK Consul Integration + +The source code for this library is [here](https://github.com/launchdarkly/dotnet-core/tree/main/pkgs/dotnet-server-sdk-consul). We encourage pull-requests and other contributions from the community. 
Since this library is meant to be used in conjunction with the LaunchDarkly .NET Server SDK, you may want to look at the [.NET Server SDK source code](https://github.com/launchdarkly/dotnet-core/tree/main/pkgs/sdk/server) and our [SDK contributor's guide](https://docs.launchdarkly.com/sdk/concepts/contributors-guide). + +## Submitting bug reports and feature requests + +The LaunchDarkly SDK team monitors the [issue tracker](https://github.com/launchdarkly/dotnet-core/issues) in this repository. Bug reports and feature requests specific to this package should be filed in the issue tracker. The SDK team will respond to all newly filed issues within two business days. + +## Submitting pull requests + +We encourage pull requests and other contributions from the community. Before submitting pull requests, ensure that all temporary or unintended code is removed. Don't worry about adding reviewers to the pull request; the LaunchDarkly SDK team will add themselves. The SDK team will acknowledge all pull requests within two business days. + +## Build instructions + +### Prerequisites + +To set up your SDK build time environment, you must [download .NET development tools and follow the instructions](https://dotnet.microsoft.com/download). .NET 8.0 is preferred, since the .NET 8.0 tools are able to build for all supported target platforms. + +The project has a package dependency on `Consul`. The dependency version is intended to be the _minimum_ compatible version; applications are expected to override this with their own dependency on some higher version. + +The unit test project uses code from the `dotnet-server-sdk-shared-tests` project. See the [README.md](../shared/dotnet-server-sdk-shared-tests/README.md) file in that directory for more information. + +### Building + +To install all required packages: + +``` +dotnet restore +``` + +To build all targets of the project without running any tests: + +``` +dotnet build src/LaunchDarkly.ServerSdk.Consul +``` + +Or, to build only one target (in this case .NET Standard 2.0): + +``` +dotnet build src/LaunchDarkly.ServerSdk.Consul -f netstandard2.0 +``` + +Building the code locally in the default Debug configuration does not sign the assembly and does not require a key file. + +### Testing + +To run all unit tests, for all targets (this includes .NET Framework, so you can only do this in Windows): + +``` +dotnet test test/LaunchDarkly.ServerSdk.Consul.Tests +``` + +Or, to run tests only for one target (in this case .NET 8.0): + +``` +dotnet test test/LaunchDarkly.ServerSdk.Consul.Tests -f net8.0 +``` + +The tests expect you to have Consul running locally on the default port, 8500. One way to do this is with Docker: + +```bash +docker run -p 8500:8500 hashicorp/consul +``` diff --git a/pkgs/dotnet-server-sdk-consul/LICENSE b/pkgs/dotnet-server-sdk-consul/LICENSE new file mode 100644 index 00000000..fd10303d --- /dev/null +++ b/pkgs/dotnet-server-sdk-consul/LICENSE @@ -0,0 +1,13 @@ +Copyright 2018 Catamorphic, Co. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
\ No newline at end of file diff --git a/pkgs/dotnet-server-sdk-consul/LaunchDarkly.ServerSdk.Consul.sln b/pkgs/dotnet-server-sdk-consul/LaunchDarkly.ServerSdk.Consul.sln new file mode 100644 index 00000000..61a4ce28 --- /dev/null +++ b/pkgs/dotnet-server-sdk-consul/LaunchDarkly.ServerSdk.Consul.sln @@ -0,0 +1,37 @@ + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio 15 +VisualStudioVersion = 15.0.26730.16 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "LaunchDarkly.ServerSdk.Consul", "src\LaunchDarkly.ServerSdk.Consul.csproj", "{C497533B-8F43-4E5A-A6F4-F50880AA549C}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "LaunchDarkly.ServerSdk.Consul.Tests", "test\LaunchDarkly.ServerSdk.Consul.Tests.csproj", "{2F11CC6A-3427-423E-AF63-E3600FE63627}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "LaunchDarkly.ServerSdk.SharedTests", "..\shared\dotnet-server-sdk-shared-tests\src\LaunchDarkly.ServerSdk.SharedTests.csproj", "{60651D1E-E7F9-49E2-9B12-C409A7F3B9FC}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Release|Any CPU = Release|Any CPU + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {C497533B-8F43-4E5A-A6F4-F50880AA549C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {C497533B-8F43-4E5A-A6F4-F50880AA549C}.Debug|Any CPU.Build.0 = Debug|Any CPU + {C497533B-8F43-4E5A-A6F4-F50880AA549C}.Release|Any CPU.ActiveCfg = Release|Any CPU + {C497533B-8F43-4E5A-A6F4-F50880AA549C}.Release|Any CPU.Build.0 = Release|Any CPU + {2F11CC6A-3427-423E-AF63-E3600FE63627}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {2F11CC6A-3427-423E-AF63-E3600FE63627}.Debug|Any CPU.Build.0 = Debug|Any CPU + {2F11CC6A-3427-423E-AF63-E3600FE63627}.Release|Any CPU.ActiveCfg = Release|Any CPU + {2F11CC6A-3427-423E-AF63-E3600FE63627}.Release|Any CPU.Build.0 = Release|Any CPU + {60651D1E-E7F9-49E2-9B12-C409A7F3B9FC}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {60651D1E-E7F9-49E2-9B12-C409A7F3B9FC}.Debug|Any CPU.Build.0 = Debug|Any CPU + {60651D1E-E7F9-49E2-9B12-C409A7F3B9FC}.Release|Any CPU.ActiveCfg = Release|Any CPU + {60651D1E-E7F9-49E2-9B12-C409A7F3B9FC}.Release|Any CPU.Build.0 = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection + GlobalSection(ExtensibilityGlobals) = postSolution + SolutionGuid = {C92D0169-FDC9-4B5A-A3FA-70CD9609660D} + EndGlobalSection +EndGlobal diff --git a/pkgs/dotnet-server-sdk-consul/README.md b/pkgs/dotnet-server-sdk-consul/README.md new file mode 100644 index 00000000..97f4d675 --- /dev/null +++ b/pkgs/dotnet-server-sdk-consul/README.md @@ -0,0 +1,46 @@ +# LaunchDarkly Server-Side SDK for .NET - Consul integration + +[![NuGet](https://img.shields.io/nuget/v/LaunchDarkly.ServerSdk.Consul.svg?style=flat-square)](https://www.nuget.org/packages/LaunchDarkly.ServerSdk.Consul/) +[![CircleCI](https://circleci.com/gh/launchdarkly/dotnet-server-sdk-consul.svg?style=shield)](https://circleci.com/gh/launchdarkly/dotnet-server-sdk-consul) +[![Documentation](https://img.shields.io/static/v1?label=GitHub+Pages&message=API+reference&color=00add8)](https://launchdarkly.github.io/dotnet-server-sdk-consul) + +This library provides a Consul-backed persistence mechanism (data store) for the [LaunchDarkly server-side .NET SDK](https://github.com/launchdarkly/dotnet-server-sdk), replacing the default in-memory data store. 
It uses the open-source [Consul.NET](https://www.nuget.org/packages/Consul) package. + +For more information, see also: [Using Consul as a persistent feature store](https://docs.launchdarkly.com/sdk/features/storing-data/consul#net-server-side). + +Version 5.0.0 and above of this library works with version 8.0.0 and above of the LaunchDarkly .NET SDK. For earlier versions of the SDK, see the changelog for which version of this library to use. + +For full usage details and examples, see the [API reference](launchdarkly.github.io/dotnet-server-sdk-consul). + +## Supported .NET versions + +This version of the library is built for the following targets: + +* .NET Framework 4.6.2: works in .NET Framework of that version or higher. +* .NET Standard 2.0: works in .NET Core 3.1, .NET 6 or higher, or in a library targeted to .NET Standard 2.x. + +The .NET build tools should automatically load the most appropriate build of the library for whatever platform your application or library is targeted to. + +It has a dependency on version 1.6.1.1 of [Consul.NET](https://www.nuget.org/packages/Consul). If you are using a higher version of that package, you should install it explicitly as a dependency in your application to override this minimum version. + +## Signing + +The published version of this assembly is digitally signed with Authenticode and [strong-named](https://docs.microsoft.com/en-us/dotnet/framework/app-domains/strong-named-assemblies). Building the code locally in the default Debug configuration does not use strong-naming and does not require a key file. + +## Contributing + +We encourage pull requests and other contributions from the community. Check out our [contributing guidelines](CONTRIBUTING.md) for instructions on how to contribute to this project. + +## About LaunchDarkly + +* LaunchDarkly is a continuous delivery platform that provides feature flags as a service and allows developers to iterate quickly and safely. We allow you to easily flag your features and manage them from the LaunchDarkly dashboard. With LaunchDarkly, you can: + * Roll out a new feature to a subset of your users (like a group of users who opt-in to a beta tester group), gathering feedback and bug reports from real-world use cases. + * Gradually roll out a feature to an increasing percentage of users, and track the effect that the feature has on key metrics (for instance, how likely is a user to complete a purchase if they have feature A versus feature B?). + * Turn off a feature that you realize is causing performance problems in production, without needing to re-deploy, or even restart the application with a changed configuration file. + * Grant access to certain features based on user attributes, like payment plan (eg: users on the ‘gold’ plan get access to more features than users in the ‘silver’ plan). Disable parts of your application to facilitate maintenance, without taking everything offline. +* LaunchDarkly provides feature flag SDKs for a wide variety of languages and technologies. Read [our documentation](https://docs.launchdarkly.com/sdk) for a complete list. 
+* Explore LaunchDarkly + * [launchdarkly.com](https://www.launchdarkly.com/ "LaunchDarkly Main Website") for more information + * [docs.launchdarkly.com](https://docs.launchdarkly.com/ "LaunchDarkly Documentation") for our documentation and SDK reference guides + * [apidocs.launchdarkly.com](https://apidocs.launchdarkly.com/ "LaunchDarkly API Documentation") for our API documentation + * [blog.launchdarkly.com](https://blog.launchdarkly.com/ "LaunchDarkly Blog Documentation") for the latest product updates diff --git a/pkgs/dotnet-server-sdk-consul/SECURITY.md b/pkgs/dotnet-server-sdk-consul/SECURITY.md new file mode 100644 index 00000000..10f1d1ac --- /dev/null +++ b/pkgs/dotnet-server-sdk-consul/SECURITY.md @@ -0,0 +1,5 @@ +# Reporting and Fixing Security Issues + +Please report all security issues to the LaunchDarkly security team by submitting a bug bounty report to our [HackerOne program](https://hackerone.com/launchdarkly?type=team). LaunchDarkly will triage and address all valid security issues following the response targets defined in our program policy. Valid security issues may be eligible for a bounty. + +Please do not open issues or pull requests for security issues. This makes the problem immediately visible to everyone, including potentially malicious actors. diff --git a/pkgs/dotnet-server-sdk-consul/docfx.json b/pkgs/dotnet-server-sdk-consul/docfx.json new file mode 100644 index 00000000..ceaca94a --- /dev/null +++ b/pkgs/dotnet-server-sdk-consul/docfx.json @@ -0,0 +1,48 @@ +{ + "metadata": [ + { + "src": [ + { + "src": "./src", + "files": [ + "**/*.csproj", + "**/bin/**/**LaunchDarkly**.dll" + ] + } + ], + "dest": "./api", + "properties" : { + "Configuration": "Debug" + } + } + ], + "build": { + "content": [ + { + "files": [ + "**/*.{md,yml}" + ], + "exclude": [ + "docs/**" + ] + } + ], + "resource": [ + { + "files": [ + "images/**" + ] + } + ], + "output": "docs", + "template": [ + "default" + ], + "globalMetadata": { + "_appName": "Consul integration for the LaunchDarkly SDK for Server-side .NET ", + "_appTitle": "Consul integration for the LaunchDarkly SDK for Server-side .NET ", + "_enableSearch": true, + "pdf": false + } + } +} diff --git a/pkgs/dotnet-server-sdk-consul/github_actions.env b/pkgs/dotnet-server-sdk-consul/github_actions.env new file mode 100644 index 00000000..24f6bb9e --- /dev/null +++ b/pkgs/dotnet-server-sdk-consul/github_actions.env @@ -0,0 +1,5 @@ +WORKSPACE_PATH=pkgs/dotnet-server-sdk-consul +PROJECT_FILE=pkgs/dotnet-server-sdk-consul/src/LaunchDarkly.ServerSdk.Consul.csproj +BUILD_OUTPUT_PATH=pkgs/dotnet-server-sdk-consul/src/bin/Release/ +BUILD_OUTPUT_DLL_NAME=LaunchDarkly.ServerSdk.Consul.dll +TEST_PROJECT_FILE=pkgs/dotnet-server-sdk-consul/test/LaunchDarkly.ServerSdk.Consul.Tests.csproj \ No newline at end of file diff --git a/pkgs/dotnet-server-sdk-consul/index.md b/pkgs/dotnet-server-sdk-consul/index.md new file mode 100644 index 00000000..635972e6 --- /dev/null +++ b/pkgs/dotnet-server-sdk-consul/index.md @@ -0,0 +1,50 @@ +The [`LaunchDarkly.ServerSdk.Consul`](https://nuget.org/packages/LaunchDarkly.ServerSdk.Consul) package provides a Consul-backed persistence mechanism (data store) for the [LaunchDarkly .NET SDK](https://github.com/launchdarkly/dotnet-server-sdk), replacing the default in-memory data store. The underlying Consul client implementation is https://github.com/PlayFab/consuldotnet. 
+ +For more information, see also: [Using Consul as a persistent feature store](https://docs.launchdarkly.com/sdk/features/storing-data/consul#net-server-side). + +Version 2.0.0 and above of this library works with version 6.0.0 and above of the LaunchDarkly .NET SDK. For earlier versions of the SDK, use the latest 1.x release of this library. + +The entry point for using this integration is the **** class in . + +## Quick setup + +This assumes that you have already installed the LaunchDarkly .NET SDK. + +1. Add the NuGet package [`LaunchDarkly.ServerSdk.Consul`](https://nuget.org/packages/LaunchDarkly.ServerSdk.Consul) to your project. + +2. Import the package (note that the namespace is different from the package name): + +```csharp + using LaunchDarkly.Sdk.Server.Integrations; +``` + +3. When configuring your `LdClient`, add the Consul data store as a `PersistentDataStore`. You may specify any custom Consul options using the methods of `ConsulDataStoreBuilder`. For instance, to customize the Consul host address: + +```csharp + var ldConfig = Configuration.Default("YOUR_SDK_KEY") + .DataStore( + Components.PersistentDataStore( + Consul.DataStore().Address("http://my-consul-host:8500") + ) + ) + .Build(); + var ldClient = new LdClient(ldConfig); +``` + +By default, the store will try to connect to a local Consul instance on port 8500. + +## Caching behavior + +The LaunchDarkly SDK has a standard caching mechanism for any persistent data store, to reduce database traffic. This is configured through the SDK's `PersistentDataStoreBuilder` class as described in the SDK documentation. For instance, to specify a cache TTL of 5 minutes: + +```csharp + var config = Configuration.Default("YOUR_SDK_KEY") + .DataStore( + Components.PersistentDataStore( + Consul.DataStore().Address("http://my-consul-host:8500") + ).CacheTime(TimeSpan.FromMinutes(5)) + ) + .Build(); +``` + +By default, the store will try to connect to a local Consul instance on port 8500. diff --git a/pkgs/dotnet-server-sdk-consul/src/Consul.cs b/pkgs/dotnet-server-sdk-consul/src/Consul.cs new file mode 100644 index 00000000..a0d1af15 --- /dev/null +++ b/pkgs/dotnet-server-sdk-consul/src/Consul.cs @@ -0,0 +1,54 @@ +using LaunchDarkly.Sdk.Server.Subsystems; + +namespace LaunchDarkly.Sdk.Server.Integrations +{ + /// + /// Integration between the LaunchDarkly SDK and Consul. + /// + public static class Consul + { + /// + /// The default value for . + /// + public static readonly string DefaultPrefix = "launchdarkly"; + + /// + /// Returns a builder object for creating a Consul-backed persistent data store. + /// + /// + /// + /// You can use methods of the builder to specify any non-default Consul options + /// you may want, before passing the builder to + /// . + /// In this example, the store is configured with only the host address: + /// + /// + /// var config = Configuration.Builder("sdk-key") + /// .DataStore( + /// Components.PersistentDataStore( + /// Consul.DataStore().Address("http://my-consul-host:8500") + /// ) + /// ) + /// .Build(); + /// + /// + /// Note that the SDK also has its own options related to data storage that are configured + /// at a different level, because they are independent of what database is being used. 
For + /// instance, the builder returned by + /// has options for caching: + /// + /// + /// var config = Configuration.Builder("sdk-key") + /// .DataStore( + /// Components.PersistentDataStore( + /// Consul.DataStore().Address("http://my-consul-host:8500") + /// ).CacheSeconds(15) + /// ) + /// .Build(); + /// + /// + /// a data store configuration object + public static ConsulDataStoreBuilder DataStore() => + new ConsulDataStoreBuilder(); + } +} diff --git a/pkgs/dotnet-server-sdk-consul/src/ConsulDataStoreBuilder.cs b/pkgs/dotnet-server-sdk-consul/src/ConsulDataStoreBuilder.cs new file mode 100644 index 00000000..01c31fd8 --- /dev/null +++ b/pkgs/dotnet-server-sdk-consul/src/ConsulDataStoreBuilder.cs @@ -0,0 +1,157 @@ +using System; +using System.Collections.Generic; +using Consul; +using LaunchDarkly.Sdk.Server.Subsystems; + +namespace LaunchDarkly.Sdk.Server.Integrations +{ + /// + /// A builder for configuring the Consul-based persistent data store. + /// + /// + /// + /// Obtain an instance of this class by calling . After calling its methods + /// to specify any desired custom settings, wrap it in a + /// by calling , then pass + /// the result into the SDK configuration with . + /// You do not need to call yourself to build + /// the actual data store; that will be done by the SDK. + /// + /// + /// The Consul client has many configuration options. This class has shortcut methods for + /// some of them, but if you need more sophisticated control over the Consul client, use + /// or + /// . + /// + /// + /// Builder calls can be chained, for example: + /// + /// + /// var config = Configuration.Builder("sdk-key") + /// .DataStore( + /// Components.PersistentDataStore( + /// Consul.DataStore() + /// .Address("http://my-consul-host:8500") + /// .Prefix("app1") + /// ) + /// .CacheSeconds(15) + /// ) + /// .Build(); + /// + /// + public sealed class ConsulDataStoreBuilder : IComponentConfigurer, + IDiagnosticDescription + { + private ConsulClient _existingClient; + private List> _configActions = new List>(); + private Uri _address; + private string _prefix = Consul.DefaultPrefix; + + internal ConsulDataStoreBuilder() { } + + /// + /// Shortcut for calling with a string. + /// + /// the URI of the Consul host as a string + /// the builder + /// + public ConsulDataStoreBuilder Address(string address) => Address(new Uri(address)); + + /// + /// Specifies the Consul agent's location. + /// + /// the URI of the Consul host + /// the builder + /// /// + public ConsulDataStoreBuilder Address(Uri address) + { + _address = address; + return this; + } + + /// + /// Specifies custom steps for configuring the Consul client. Your action may modify the + /// object in any way. + /// + /// an action for modifying the configuration + /// the builder + public ConsulDataStoreBuilder ConsulConfigChanges(Action configAction) + { + if (configAction != null) + { + _configActions.Add(configAction); + } + return this; + } + + /// + /// Specifies an existing, already-configured Consul client instance that the data store + /// should use rather than creating one of its own. + /// + /// + /// + /// If you specify an existing client, then the other builder methods for configuring Consul are ignored. + /// + /// + /// Note that the LaunchDarkly code will not take ownership of the lifecycle of this + /// object: in other words, it will not call Dispose() on the ConsulClient when + /// you dispose of the SDK client, as it would if it had created the ConsulClient itself. 
+ /// It is your responsibility to call Dispose() on the ConsulClient when you are + /// done with it. + /// + /// + /// an existing Consul client instance + /// the builder + public ConsulDataStoreBuilder ExistingClient(ConsulClient client) + { + _existingClient = client; + return this; + } + + /// + /// Sets an optional namespace prefix for all keys stored in Consul. + /// + /// + /// Use this if you are sharing the same database table between multiple clients that are for + /// different LaunchDarkly environments, to avoid key collisions. + /// + /// the namespace prefix, or null to use + /// the builder + public ConsulDataStoreBuilder Prefix(string prefix) + { + _prefix = string.IsNullOrEmpty(prefix) ? Consul.DefaultPrefix : prefix; + return this; + } + + /// + public IPersistentDataStoreAsync Build(LdClientContext context) + { + var client = _existingClient; + if (client is null) + { + client = new ConsulClient(config => + { + if (_address != null) + { + config.Address = _address; + } + foreach (var action in _configActions) + { + action.Invoke(config); + } + }); + } + + return new ConsulDataStoreImpl( + client, + _existingClient != null, + _prefix, + context.Logger.SubLogger("DataStore.Consul") + ); + } + + /// + public LdValue DescribeConfiguration(LdClientContext context) => + LdValue.Of("Consul"); + } +} diff --git a/pkgs/dotnet-server-sdk-consul/src/ConsulDataStoreImpl.cs b/pkgs/dotnet-server-sdk-consul/src/ConsulDataStoreImpl.cs new file mode 100644 index 00000000..0f569bd0 --- /dev/null +++ b/pkgs/dotnet-server-sdk-consul/src/ConsulDataStoreImpl.cs @@ -0,0 +1,214 @@ +using System; +using System.Collections.Generic; +using System.Text; +using System.Threading.Tasks; +using Consul; +using LaunchDarkly.Logging; +using LaunchDarkly.Sdk.Server.Subsystems; + +using static LaunchDarkly.Sdk.Server.Subsystems.DataStoreTypes; + +namespace LaunchDarkly.Sdk.Server.Integrations +{ + /// + /// Internal implementation of the Consul data store. + /// + /// + /// + /// Implementation notes: + /// + /// + /// Feature flags, segments, and any other kind of entity the LaunchDarkly client may wish + /// to store, are stored as individual items with the key "{prefix}/features/{flag-key}", + /// "{prefix}/segments/{segment-key}", etc. + /// The special key "{prefix}/$inited" indicates that the store contains a complete data set. + /// Since Consul has limited support for transactions(they can't contain more than 64 + /// operations), the Init method-- which replaces the entire data store-- is not guaranteed to + /// be atomic, so there can be a race condition if another process is adding new data via + /// Upsert. To minimize this, we don't delete all the data at the start; instead, we update + /// the items we've received, and then delete all other items. That could potentially result in + /// deleting new data from another process, but that would be the case anyway if the Init + /// happened to execute later than the Upsert; we are relying on the fact that normally the + /// process that did the Init will also receive the new data shortly and do its own Upsert. 
+ /// + /// + internal sealed class ConsulDataStoreImpl : IPersistentDataStoreAsync + { + private readonly ConsulClient _client; + private readonly bool _wasExistingClient; + private readonly string _prefix; + private readonly Logger _log; + + internal ConsulDataStoreImpl( + ConsulClient client, + bool wasExistingClient, + string prefix, + Logger log + ) + { + _client = client; + _wasExistingClient = wasExistingClient; + _prefix = String.IsNullOrEmpty(prefix) ? "" : (prefix + "/"); + _log = log; + _log.Info("Using Consul data store at {0} with prefix \"{1}\"", + client.Config.Address, prefix); + } + + public async Task InitializedAsync() + { + var result = await _client.KV.Get(InitedKey); + return result.Response != null; + } + + public async Task InitAsync(FullDataSet allData) + { + // Start by reading the existing keys; we will later delete any of these that weren't in allData. + var keysResult = await _client.KV.Keys(_prefix); + var unusedOldKeys = keysResult.Response == null ? new HashSet() : + new HashSet(keysResult.Response); + + var ops = new List(); + var numItems = 0; + + // Insert or update every provided item + foreach (var collection in allData.Data) + { + var kind = collection.Key; + foreach (var keyAndItem in collection.Value.Items) + { + var key = ItemKey(kind, keyAndItem.Key); + var op = new KVTxnOp(key, KVTxnVerb.Set) + { + Value = Encoding.UTF8.GetBytes(keyAndItem.Value.SerializedItem) + }; + ops.Add(op); + unusedOldKeys.Remove(key); + numItems++; + } + } + + // Now delete any previously existing items whose keys were not in the current data + foreach (var oldKey in unusedOldKeys) + { + ops.Add(new KVTxnOp(oldKey, KVTxnVerb.Delete)); + } + + // Now set the special key that we check in InitializedInternalAsync() + var initedOp = new KVTxnOp(InitedKey, KVTxnVerb.Set) + { + Value = new byte[0] + }; + ops.Add(initedOp); + + await BatchOperationsAsync(ops); + + _log.Info("Initialized data store with {0} items", numItems); + } + + public async Task GetAsync(DataKind kind, string key) + { + var result = await _client.KV.Get(ItemKey(kind, key)); + return result.Response == null ? (SerializedItemDescriptor?)null : + new SerializedItemDescriptor(0, false, Encoding.UTF8.GetString(result.Response.Value)); + } + + public async Task> GetAllAsync(DataKind kind) + { + var items = new List>(); + var baseKey = KindKey(kind); + var result = await _client.KV.List(baseKey); + foreach (var pair in result.Response) + { + var itemKey = pair.Key.Substring(baseKey.Length + 1); + items.Add(new KeyValuePair(itemKey, + new SerializedItemDescriptor(0, false, Encoding.UTF8.GetString(pair.Value)))); + } + return new KeyedItems(items); + } + + public async Task UpsertAsync(DataKind kind, string key, SerializedItemDescriptor newItem) + { + var fullKey = ItemKey(kind, key); + + // We will potentially keep retrying indefinitely until someone's write succeeds + while (true) + { + var oldValue = (await _client.KV.Get(fullKey)).Response; + var oldVersion = oldValue is null ? 0 : + kind.Deserialize(Encoding.UTF8.GetString(oldValue.Value)).Version; + + // Check whether the item is stale. If so, don't do the update (and return the existing item to + // FeatureStoreWrapper so it can be cached) + if (oldVersion >= newItem.Version) + { + return false; + } + + // Otherwise, try to write. We will do a compare-and-set operation, so the write will only succeed if + // the key's ModifyIndex is still equal to the previous value returned by getEvenIfDeleted. 
If the + // previous ModifyIndex was zero, it means the key did not previously exist and the write will only + // succeed if it still doesn't exist. + var modifyIndex = oldValue == null ? 0 : oldValue.ModifyIndex; + var pair = new KVPair(fullKey) + { + Value = Encoding.UTF8.GetBytes(newItem.SerializedItem), + ModifyIndex = modifyIndex + }; + var result = await _client.KV.CAS(pair); + if (result.Response) + { + return true; + } + + // If we failed, retry the whole shebang + _log.Debug("Concurrent modification detected, retrying"); + } + } + + public async Task IsStoreAvailableAsync() + { + try + { + await InitializedAsync(); // don't care about the return value, just that it doesn't throw an exception + return true; + } + catch + { // don't care about exception class, since any exception means the Consul request couldn't be made + return false; + } + } + + public void Dispose() + { + Dispose(true); + GC.SuppressFinalize(this); + } + + private void Dispose(bool disposing) + { + if (disposing) + { + if (!_wasExistingClient) + { + _client.Dispose(); + } + } + } + + private string ItemKey(DataKind kind, string key) => KindKey(kind) + "/" + key; + + private string KindKey(DataKind kind) => _prefix + kind.Name; + + private string InitedKey => _prefix + "$inited"; + + private async Task BatchOperationsAsync(List ops) + { + int batchSize = 64; // Consul can only do this many at a time + for (int i = 0; i < ops.Count; i += batchSize) + { + var batch = ops.GetRange(i, Math.Min(batchSize, ops.Count - i)); + await _client.KV.Txn(batch); + } + } + } +} diff --git a/pkgs/dotnet-server-sdk-consul/src/LaunchDarkly.ServerSdk.Consul.csproj b/pkgs/dotnet-server-sdk-consul/src/LaunchDarkly.ServerSdk.Consul.csproj new file mode 100644 index 00000000..fa42e5aa --- /dev/null +++ b/pkgs/dotnet-server-sdk-consul/src/LaunchDarkly.ServerSdk.Consul.csproj @@ -0,0 +1,40 @@ + + + 5.0.0 + netstandard2.0;net462 + LaunchDarkly.ServerSdk.Consul + LaunchDarkly.ServerSdk.Consul + Library + 7.3 + LaunchDarkly .NET Client Consul Integration + LaunchDarkly + LaunchDarkly + LaunchDarkly + Copyright 2018 Catamorphic, Co. 
+ Apache-2.0 + https://github.com/launchdarkly/dotnet-server-sdk-consul + https://github.com/launchdarkly/dotnet-server-sdk-consul + main + true + snupkg + LaunchDarkly.Sdk.Server.Integrations + + + + + + + + + + + + + bin\$(Configuration)\$(TargetFramework)\LaunchDarkly.ServerSdk.Consul.xml + + + + true + ../../../LaunchDarkly.Consul.snk + + diff --git a/pkgs/dotnet-server-sdk-consul/test/ConsulDataStoreTest.cs b/pkgs/dotnet-server-sdk-consul/test/ConsulDataStoreTest.cs new file mode 100644 index 00000000..ac04f8b9 --- /dev/null +++ b/pkgs/dotnet-server-sdk-consul/test/ConsulDataStoreTest.cs @@ -0,0 +1,64 @@ +using System.Threading; +using System.Threading.Tasks; +using Consul; +using LaunchDarkly.Logging; +using LaunchDarkly.Sdk.Server.Subsystems; +using LaunchDarkly.Sdk.Server.SharedTests.DataStore; +using Xunit; +using Xunit.Abstractions; + +namespace LaunchDarkly.Sdk.Server.Integrations +{ + public class ConsulDataStoreTest : PersistentDataStoreBaseTests + { + private static readonly TaskFactory _taskFactory = new TaskFactory(CancellationToken.None, + TaskCreationOptions.None, TaskContinuationOptions.None, TaskScheduler.Default); + + protected override PersistentDataStoreTestConfig Configuration => + new PersistentDataStoreTestConfig + { + StoreAsyncFactoryFunc = MakeStoreFactory, + ClearDataAction = ClearAllData + }; + + public ConsulDataStoreTest(ITestOutputHelper testOutput) : base(testOutput) { } + + private IComponentConfigurer MakeStoreFactory(string prefix) => + Consul.DataStore().Prefix(prefix); + + private async Task ClearAllData(string prefix) + { + using (var client = new ConsulClient()) + { + var keysResult = await client.KV.Keys(prefix ?? Consul.DefaultPrefix); + if (keysResult.Response != null) + { + foreach (var key in keysResult.Response) + { + await client.KV.Delete(key); + } + } + } + } + + [Fact] + public void LogMessageAtStartup() + { + var logCapture = Logs.Capture(); + var logger = logCapture.Logger("BaseLoggerName"); // in real life, the SDK will provide its own base log name + var context = new LdClientContext("", null, null, null, logger, false, null); + using (Consul.DataStore().Address("http://localhost:8500").Prefix("my-prefix") + .Build(context)) + { + Assert.Collection(logCapture.GetMessages(), + m => + { + Assert.Equal(LaunchDarkly.Logging.LogLevel.Info, m.Level); + Assert.Equal("BaseLoggerName.DataStore.Consul", m.LoggerName); + Assert.Equal("Using Consul data store at http://localhost:8500/ with prefix \"my-prefix\"", + m.Text); + }); + } + } + } +} diff --git a/pkgs/dotnet-server-sdk-consul/test/LaunchDarkly.ServerSdk.Consul.Tests.csproj b/pkgs/dotnet-server-sdk-consul/test/LaunchDarkly.ServerSdk.Consul.Tests.csproj new file mode 100644 index 00000000..c77c891a --- /dev/null +++ b/pkgs/dotnet-server-sdk-consul/test/LaunchDarkly.ServerSdk.Consul.Tests.csproj @@ -0,0 +1,20 @@ + + + net8.0 + $(TargetFrameworks);net462 + false + true + LaunchDarkly.Client.Integrations + + + + + + + + + + + + + diff --git a/pkgs/dotnet-server-sdk-dynamodb/CHANGELOG.md b/pkgs/dotnet-server-sdk-dynamodb/CHANGELOG.md new file mode 100644 index 00000000..ad1f99a4 --- /dev/null +++ b/pkgs/dotnet-server-sdk-dynamodb/CHANGELOG.md @@ -0,0 +1,56 @@ +# Change log + +All notable changes to the LaunchDarkly .NET SDK DynamoDB integration will be documented in this file. This project adheres to [Semantic Versioning](http://semver.org). + +## [4.0.0] - 2023-10-16 +### Changed: +- This release requires the `8.0.0` release of the `LaunchDarkly.ServerSdk`. 
+ +## [3.0.0] - 2022-12-07 +This release corresponds to the 7.0.0 release of the LaunchDarkly server-side .NET SDK. Any application code that is being updated to use the 7.0.0 SDK, and was using a 2.x version of `LaunchDarkly.ServerSdk.DynamoDb`, should now use a 3.x version instead. + +There are no functional differences in the behavior of the DynamoDB integration; the differences are only related to changes in the usage of interface types for configuration in the SDK. + +### Added: +- `DynamoDb.BigSegmentStore()`, which creates a configuration builder for use with Big Segments. Previously, the `DynamoDb.DataStore()` builder was used for both regular data stores and Big Segment stores. + +### Changed: +- The type `DynamoDbDataStoreBuilder` has been removed, replaced by a generic type `DynamoDbStoreBuilder`. Application code would not normally need to reference these types by name, but if necessary, use either `DynamoDbStoreBuilder` or `DynamoDbStoreBuilder` depending on whether you are configuring a regular data store or a Big Segment store. + +## [2.1.1] - 2022-04-19 +### Fixed: +- If the SDK attempts to store a feature flag or segment whose total data size is over the 400KB limit for DynamoDB items, this integration will now log (at Error level) a message like `The item "my-flag-key" in "features" was too large to store in DynamoDB and was dropped` but will still process all other data updates. Previously, it would cause the SDK to enter an error state in which the oversized item would be pointlessly retried and other updates might be lost. + +## [2.1.0] - 2021-07-22 +### Added: +- Added support for Big Segments. An Early Access Program for creating and syncing Big Segments from customer data platforms is available to enterprise customers. + +## [2.0.0] - 2021-06-09 +This release is for use with versions 6.0.0 and higher of [`LaunchDarkly.ServerSdk`](https://github.com/launchdarkly/dotnet-server-sdk). + +For more information about changes in the SDK database integrations, see the [5.x to 6.0 migration guide](https://docs-stg.launchdarkly.com/252/sdk/server-side/dotnet/migration-5-to-6). + +### Changed: +- The namespace is now `LaunchDarkly.Sdk.Server.Integrations`. +- The entry point is now `LaunchDarkly.Sdk.Server.Integrations.DynamoDB` rather than `LaunchDarkly.Client.Integrations.DynamoDB` (or, in earlier versions, `LaunchDarkly.Client.DynamoDB.DynamoDBComponents`). +- If you pass in an existing DynamoDB client instance with `DynamoDBDataStoreBuilder.ExistingClient`, the SDK will no longer dispose of the client on shutdown; you are responsible for its lifecycle. +- The logger name is now `LaunchDarkly.Sdk.DataStore.DynamoDB` rather than `LaunchDarkly.Client.DynamoDB.DynamoDBFeatureStoreCore`. + +### Removed: +- Removed the deprecated `DynamoDBComponents` entry point and `DynamoDBFeatureStoreBuilder`. +- The package no longer has a dependency on `Common.Logging` but instead integrates with the SDK's logging mechanism. + +## [1.1.0] - 2021-01-26 +### Added: +- New classes `LaunchDarkly.Client.Integrations.DynamoDB` and `LaunchDarkly.Client.Integrations.DynamoDBStoreBuilder`, which serve the same purpose as the previous classes but are designed to work with the newer persistent data store API introduced in .NET SDK 5.14.0. + +### Deprecated: +- The old API in the `LaunchDarkly.Client.DynamoDB` namespace. 
+ +## [1.0.1] - 2019-05-10 +### Changed: +- Corresponding to the SDK package name change from `LaunchDarkly.Client` to `LaunchDarkly.ServerSdk`, this package is now called `LaunchDarkly.ServerSdk.DynamoDB`. The functionality of the package, including the namespaces and class names, has not changed. + +## [1.0.0] - 2019-01-11 + +Initial release. diff --git a/pkgs/dotnet-server-sdk-dynamodb/CONTRIBUTING.md b/pkgs/dotnet-server-sdk-dynamodb/CONTRIBUTING.md new file mode 100644 index 00000000..eebddc5a --- /dev/null +++ b/pkgs/dotnet-server-sdk-dynamodb/CONTRIBUTING.md @@ -0,0 +1,63 @@ +# Contributing to the LaunchDarkly SDK DynamoDB Integration + +The source code for this library is [here](https://github.com/launchdarkly/dotnet-core/tree/main/pkgs/dotnet-server-sdk-dynamodb). We encourage pull-requests and other contributions from the community. Since this library is meant to be used in conjunction with the LaunchDarkly .NET Server SDK, you may want to look at the [.NET Server SDK source code](https://github.com/launchdarkly/dotnet-core/tree/main/pkgs/sdk/server) and our [SDK contributor's guide](https://docs.launchdarkly.com/sdk/concepts/contributors-guide). + +## Submitting bug reports and feature requests + +The LaunchDarkly SDK team monitors the [issue tracker](https://github.com/launchdarkly/dotnet-core/issues) in this repository. Bug reports and feature requests specific to this package should be filed in the issue tracker. The SDK team will respond to all newly filed issues within two business days. + +## Submitting pull requests + +We encourage pull requests and other contributions from the community. Before submitting pull requests, ensure that all temporary or unintended code is removed. Don't worry about adding reviewers to the pull request; the LaunchDarkly SDK team will add themselves. The SDK team will acknowledge all pull requests within two business days. + +## Build instructions + +### Prerequisites + +To set up your SDK build time environment, you must [download .NET development tools and follow the instructions](https://dotnet.microsoft.com/download). .NET 8.0 is preferred, since the .NET 8.0 tools are able to build for all supported target platforms. + +The project has a package dependency on `AWSSDK.DynamoDBv2`. The dependency version is intended to be the _minimum_ compatible version; applications are expected to override this with their own dependency on some higher version. + +The unit test project uses code from the `dotnet-server-sdk-shared-tests` project. See the [README.md](../shared/dotnet-server-sdk-shared-tests/README.md) file in that directory for more information. + +### Building + +To install all required packages: + +``` +dotnet restore +``` + +To build all targets of the project without running any tests: + +``` +dotnet build src/LaunchDarkly.ServerSdk.DynamoDB +``` + +Or, to build only one target (in this case .NET Standard 2.0): + +``` +dotnet build src/LaunchDarkly.ServerSdk.DynamoDB -f netstandard2.0 +``` + +Building the code locally in the default Debug configuration does not sign the assembly and does not require a key file. + +### Testing + +To run all unit tests, for all targets (this includes .NET Framework, so you can only do this in Windows): + +``` +dotnet test test/LaunchDarkly.ServerSdk.DynamoDB.Tests +``` + +Or, to run tests only for one target (in this case .NET 8.0): + +``` +dotnet test test/LaunchDarkly.ServerSdk.DynamoDB.Tests -f net8.0 +``` + +The tests expect you to have a local DynamoDB instance running on port 8000. 
One way to do this is with Docker: + +```bash +docker run -p 8000:8000 amazon/dynamodb-local +``` diff --git a/pkgs/dotnet-server-sdk-dynamodb/LICENSE b/pkgs/dotnet-server-sdk-dynamodb/LICENSE new file mode 100644 index 00000000..fd10303d --- /dev/null +++ b/pkgs/dotnet-server-sdk-dynamodb/LICENSE @@ -0,0 +1,13 @@ +Copyright 2018 Catamorphic, Co. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. \ No newline at end of file diff --git a/pkgs/dotnet-server-sdk-dynamodb/LaunchDarkly.ServerSdk.DynamoDB.sln b/pkgs/dotnet-server-sdk-dynamodb/LaunchDarkly.ServerSdk.DynamoDB.sln new file mode 100644 index 00000000..1804cfa0 --- /dev/null +++ b/pkgs/dotnet-server-sdk-dynamodb/LaunchDarkly.ServerSdk.DynamoDB.sln @@ -0,0 +1,37 @@ + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio 15 +VisualStudioVersion = 15.0.26730.16 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "LaunchDarkly.ServerSdk.DynamoDB", "src\LaunchDarkly.ServerSdk.DynamoDB.csproj", "{C497533B-8F43-4E5A-A6F4-F50880AA549C}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "LaunchDarkly.ServerSdk.DynamoDB.Tests", "test\LaunchDarkly.ServerSdk.DynamoDB.Tests.csproj", "{2F11CC6A-3427-423E-AF63-E3600FE63627}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "LaunchDarkly.ServerSdk.SharedTests", "..\shared\dotnet-server-sdk-shared-tests\src\LaunchDarkly.ServerSdk.SharedTests.csproj", "{60651D1E-E7F9-49E2-9B12-C409A7F3B9FC}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Release|Any CPU = Release|Any CPU + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {C497533B-8F43-4E5A-A6F4-F50880AA549C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {C497533B-8F43-4E5A-A6F4-F50880AA549C}.Debug|Any CPU.Build.0 = Debug|Any CPU + {C497533B-8F43-4E5A-A6F4-F50880AA549C}.Release|Any CPU.ActiveCfg = Release|Any CPU + {C497533B-8F43-4E5A-A6F4-F50880AA549C}.Release|Any CPU.Build.0 = Release|Any CPU + {2F11CC6A-3427-423E-AF63-E3600FE63627}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {2F11CC6A-3427-423E-AF63-E3600FE63627}.Debug|Any CPU.Build.0 = Debug|Any CPU + {2F11CC6A-3427-423E-AF63-E3600FE63627}.Release|Any CPU.ActiveCfg = Release|Any CPU + {2F11CC6A-3427-423E-AF63-E3600FE63627}.Release|Any CPU.Build.0 = Release|Any CPU + {60651D1E-E7F9-49E2-9B12-C409A7F3B9FC}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {60651D1E-E7F9-49E2-9B12-C409A7F3B9FC}.Debug|Any CPU.Build.0 = Debug|Any CPU + {60651D1E-E7F9-49E2-9B12-C409A7F3B9FC}.Release|Any CPU.ActiveCfg = Release|Any CPU + {60651D1E-E7F9-49E2-9B12-C409A7F3B9FC}.Release|Any CPU.Build.0 = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection + GlobalSection(ExtensibilityGlobals) = postSolution + SolutionGuid = {C92D0169-FDC9-4B5A-A3FA-70CD9609660D} + EndGlobalSection +EndGlobal diff --git a/pkgs/dotnet-server-sdk-dynamodb/README.md b/pkgs/dotnet-server-sdk-dynamodb/README.md 
new file mode 100644 index 00000000..59501382 --- /dev/null +++ b/pkgs/dotnet-server-sdk-dynamodb/README.md @@ -0,0 +1,60 @@ +# LaunchDarkly Server-Side SDK for .NET - DynamoDB integration + +[![NuGet](https://img.shields.io/nuget/v/LaunchDarkly.ServerSdk.DynamoDB.svg?style=flat-square)](https://www.nuget.org/packages/LaunchDarkly.ServerSdk.DynamoDB/) +[![Build and Test](https://github.com/launchdarkly/dotnet-server-sdk-dynamodb/actions/workflows/ci.yml/badge.svg)](https://github.com/launchdarkly/dotnet-server-sdk-dynamodb/actions/workflows/ci.yml) +[![Documentation](https://img.shields.io/static/v1?label=GitHub+Pages&message=API+reference&color=00add8)](https://launchdarkly.github.io/dotnet-server-sdk-dynamodb) + +This library provides a DynamoDB-backed persistence mechanism (data store) for the [LaunchDarkly server-side .NET SDK](https://github.com/launchdarkly/dotnet-server-sdk), replacing the default in-memory data store. It uses the [AWS SDK for .NET](https://aws.amazon.com/sdk-for-net/). + +For more information, see also: [Using DynamoDB as a persistent feature store](https://docs.launchdarkly.com/sdk/features/storing-data/dynamodb#net). + +Version 4.0.0 and above of this library works with version 8.0.0 and above of the LaunchDarkly .NET SDK. For earlier versions of the SDK, see the changelog for which version of this library to use. + +For full usage details and examples, see the [API reference](launchdarkly.github.io/dotnet-server-sdk-dynamodb). + +## .NET platform compatibility + +This version of the library is built for the following targets: + +* .NET Framework 4.6.2: works in .NET Framework of that version or higher. +* .NET Standard 2.0: works in .NET Core 3.x, .NET 6.x, or in a library targeted to .NET Standard 2.x. + +The .NET build tools should automatically load the most appropriate build of the library for whatever platform your application or library is targeted to. + +## Data size limitation + +DynamoDB has [a 400KB limit](https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/ServiceQuotas.html#limits-items) on the size of any data item. For the LaunchDarkly SDK, a data item consists of the JSON representation of an individual feature flag or segment configuration, plus a few smaller attributes. You can see the format and size of these representations by querying `https://sdk.launchdarkly.com/flags/latest-all` and setting the `Authorization` header to your SDK key. + +Most flags and segments won't be nearly as big as 400KB, but they could be if for instance you have a long list of user keys for individual user targeting. If the flag or segment representation is too large, it cannot be stored in DynamoDB. To avoid disrupting storage and evaluation of other unrelated feature flags, the SDK will simply skip storing that individual flag or segment, and will log a message (at ERROR level) describing the problem. For example: + +``` + The item "my-flag-key" in "features" was too large to store in DynamoDB and was dropped +``` + +If caching is enabled in your configuration, the flag or segment may still be available in the SDK from the in-memory cache, but do not rely on this. If you see this message, consider redesigning your flag/segment configurations, or else do not use DynamoDB for the environment that contains this data item. + +This limitation does not apply to target lists in [Big Segments](https://docs.launchdarkly.com/home/users/big-segments/). 
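+
+If you want to audit an environment for items that are close to this limit, here is a minimal sketch of one way to do it (illustrative only, not part of this package; it assumes the standard `System.Net.Http` and `System.Text.Json` APIs, and that the payload groups items by kind such as "flags" and "segments"; check the actual response shape before relying on it):
+
+```csharp
+using System;
+using System.Net.Http;
+using System.Text;
+using System.Text.Json;
+using System.Threading.Tasks;
+
+class FlagSizeReport
+{
+    static async Task Main()
+    {
+        using var http = new HttpClient();
+        // The SDK key is sent in the Authorization header, as described above.
+        http.DefaultRequestHeaders.TryAddWithoutValidation("Authorization", "YOUR_SDK_KEY");
+        var json = await http.GetStringAsync("https://sdk.launchdarkly.com/flags/latest-all");
+        using var doc = JsonDocument.Parse(json);
+        foreach (var section in doc.RootElement.EnumerateObject())
+        {
+            // Assumes each top-level section is an object keyed by item key;
+            // adjust this if the response shape differs for your account.
+            if (section.Value.ValueKind != JsonValueKind.Object) continue;
+            foreach (var item in section.Value.EnumerateObject())
+            {
+                var bytes = Encoding.UTF8.GetByteCount(item.Value.GetRawText());
+                Console.WriteLine($"{section.Name}/{item.Name}: {bytes} bytes");
+            }
+        }
+    }
+}
+```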
+ +A future version of the LaunchDarkly DynamoDB integration may use different strategies to work around this limitation, such as compressing the data or dividing it into multiple items. However, this integration is required to be interoperable with the DynamoDB integrations used by all the other LaunchDarkly SDKs and by the Relay Proxy, so any such change will only be made as part of a larger cross-platform release. + +## Signing + +The published version of this assembly is digitally signed with Authenticode and [strong-named](https://docs.microsoft.com/en-us/dotnet/framework/app-domains/strong-named-assemblies). Building the code locally in the default Debug configuration does not use strong-naming and does not require a key file. + +## Contributing + +We encourage pull requests and other contributions from the community. Check out our [contributing guidelines](CONTRIBUTING.md) for instructions on how to contribute to this project. + +## About LaunchDarkly + +* LaunchDarkly is a continuous delivery platform that provides feature flags as a service and allows developers to iterate quickly and safely. We allow you to easily flag your features and manage them from the LaunchDarkly dashboard. With LaunchDarkly, you can: + * Roll out a new feature to a subset of your users (like a group of users who opt-in to a beta tester group), gathering feedback and bug reports from real-world use cases. + * Gradually roll out a feature to an increasing percentage of users, and track the effect that the feature has on key metrics (for instance, how likely is a user to complete a purchase if they have feature A versus feature B?). + * Turn off a feature that you realize is causing performance problems in production, without needing to re-deploy, or even restart the application with a changed configuration file. + * Grant access to certain features based on user attributes, like payment plan (eg: users on the ‘gold’ plan get access to more features than users in the ‘silver’ plan). Disable parts of your application to facilitate maintenance, without taking everything offline. +* LaunchDarkly provides feature flag SDKs for a wide variety of languages and technologies. Read [our documentation](https://docs.launchdarkly.com/sdk) for a complete list. +* Explore LaunchDarkly + * [launchdarkly.com](https://www.launchdarkly.com/ "LaunchDarkly Main Website") for more information + * [docs.launchdarkly.com](https://docs.launchdarkly.com/ "LaunchDarkly Documentation") for our documentation and SDK reference guides + * [apidocs.launchdarkly.com](https://apidocs.launchdarkly.com/ "LaunchDarkly API Documentation") for our API documentation + * [blog.launchdarkly.com](https://blog.launchdarkly.com/ "LaunchDarkly Blog Documentation") for the latest product updates diff --git a/pkgs/dotnet-server-sdk-dynamodb/SECURITY.md b/pkgs/dotnet-server-sdk-dynamodb/SECURITY.md new file mode 100644 index 00000000..10f1d1ac --- /dev/null +++ b/pkgs/dotnet-server-sdk-dynamodb/SECURITY.md @@ -0,0 +1,5 @@ +# Reporting and Fixing Security Issues + +Please report all security issues to the LaunchDarkly security team by submitting a bug bounty report to our [HackerOne program](https://hackerone.com/launchdarkly?type=team). LaunchDarkly will triage and address all valid security issues following the response targets defined in our program policy. Valid security issues may be eligible for a bounty. + +Please do not open issues or pull requests for security issues. 
This makes the problem immediately visible to everyone, including potentially malicious actors. diff --git a/pkgs/dotnet-server-sdk-dynamodb/docfx.json b/pkgs/dotnet-server-sdk-dynamodb/docfx.json new file mode 100644 index 00000000..d489f337 --- /dev/null +++ b/pkgs/dotnet-server-sdk-dynamodb/docfx.json @@ -0,0 +1,48 @@ +{ + "metadata": [ + { + "src": [ + { + "src": "./src", + "files": [ + "**/*.csproj", + "**/bin/**/**LaunchDarkly**.dll" + ] + } + ], + "dest": "./api", + "properties" : { + "Configuration": "Debug" + } + } + ], + "build": { + "content": [ + { + "files": [ + "**/*.{md,yml}" + ], + "exclude": [ + "docs/**" + ] + } + ], + "resource": [ + { + "files": [ + "images/**" + ] + } + ], + "output": "docs", + "template": [ + "default" + ], + "globalMetadata": { + "_appName": "DynamoDB integration for the LaunchDarkly SDK for Server-side .NET ", + "_appTitle": "DynamoDB integration for the LaunchDarkly SDK for Server-side .NET ", + "_enableSearch": true, + "pdf": false + } + } +} diff --git a/pkgs/dotnet-server-sdk-dynamodb/github_actions.env b/pkgs/dotnet-server-sdk-dynamodb/github_actions.env new file mode 100644 index 00000000..8b160dc0 --- /dev/null +++ b/pkgs/dotnet-server-sdk-dynamodb/github_actions.env @@ -0,0 +1,5 @@ +WORKSPACE_PATH=pkgs/dotnet-server-sdk-dynamodb +PROJECT_FILE=pkgs/dotnet-server-sdk-dynamodb/src/LaunchDarkly.ServerSdk.DynamoDB.csproj +BUILD_OUTPUT_PATH=pkgs/dotnet-server-sdk-dynamodb/src/bin/Release/ +BUILD_OUTPUT_DLL_NAME=LaunchDarkly.ServerSdk.DynamoDB.dll +TEST_PROJECT_FILE=pkgs/dotnet-server-sdk-dynamodb/test/LaunchDarkly.ServerSdk.DynamoDB.Tests.csproj \ No newline at end of file diff --git a/pkgs/dotnet-server-sdk-dynamodb/index.md b/pkgs/dotnet-server-sdk-dynamodb/index.md new file mode 100644 index 00000000..efb4b492 --- /dev/null +++ b/pkgs/dotnet-server-sdk-dynamodb/index.md @@ -0,0 +1,48 @@ +The [`LaunchDarkly.ServerSdk.DynamoDb`](https://nuget.org/packages/LaunchDarkly.ServerSdk.DynamoDB) package provides a DynamoDB-backed persistence mechanism (data store) for the [LaunchDarkly .NET SDK](https://github.com/launchdarkly/dotnet-server-sdk), replacing the default in-memory data store. It uses the [AWS SDK for .NET](https://aws.amazon.com/sdk-for-net/). + +For more information, see also: [Using DynamoDB as a persistent feature store](https://docs.launchdarkly.com/sdk/features/storing-data/dynamodb#net). + +Version 2.0.0 and above of this library works with version 6.0.0 and above of the LaunchDarkly .NET SDK. For earlier versions of the SDK, use the latest 1.x release of this library. + +The entry point for using this integration is the **DynamoDB** class in the **LaunchDarkly.Sdk.Server.Integrations** namespace. + +## Quick setup + +This assumes that you have already installed the LaunchDarkly .NET SDK. + +1. In DynamoDB, create a table which has the following schema: a partition key called **"namespace"** and a sort key called **"key"**, both with a string type. The LaunchDarkly library does not create the table automatically, because it has no way of knowing what additional properties (such as permissions and throughput) you would want it to have. + +2. Add the NuGet package [`LaunchDarkly.ServerSdk.DynamoDB`](https://nuget.org/packages/LaunchDarkly.ServerSdk.DynamoDB) to your project. + +3. Import the package (note that the namespace is different from the package name): + +```csharp + using LaunchDarkly.Sdk.Server.Integrations; +``` + +4. When configuring your `LdClient`, add the DynamoDB data store as a `PersistentDataStore`.
You may specify any custom DynamoDB options using the methods of `DynamoDBDataStoreBuilder`. For instance, if you are passing in your AWS credentials programmatically from a variable called `myCredentials`: + +```csharp + var ldConfig = Configuration.Default("YOUR_SDK_KEY") + .DataStore( + Components.PersistentDataStore( + DynamoDB.DataStore("my-table-name").Credentials(myCredentials) + ) + ) + .Build(); + var ldClient = new LdClient(ldConfig); +``` + +## Caching behavior + +The LaunchDarkly SDK has a standard caching mechanism for any persistent data store, to reduce database traffic. This is configured through the SDK's `PersistentDataStoreBuilder` class as described in the SDK documentation. For instance, to specify a cache TTL of 5 minutes: + +```csharp + var config = Configuration.Default("YOUR_SDK_KEY") + .DataStore( + Components.PersistentDataStore( + DynamoDB.DataStore("my-table-name").Credentials(myCredentials) + ).CacheTime(TimeSpan.FromMinutes(5)) + ) + .Build(); +``` diff --git a/pkgs/dotnet-server-sdk-dynamodb/src/AssemblyInfo.cs b/pkgs/dotnet-server-sdk-dynamodb/src/AssemblyInfo.cs new file mode 100644 index 00000000..1ac91460 --- /dev/null +++ b/pkgs/dotnet-server-sdk-dynamodb/src/AssemblyInfo.cs @@ -0,0 +1,6 @@ +using System.Runtime.CompilerServices; + +#if DEBUG +// Allow unit tests to see internal classes +[assembly: InternalsVisibleTo("LaunchDarkly.ServerSdk.DynamoDB.Tests")] +#endif diff --git a/pkgs/dotnet-server-sdk-dynamodb/src/DynamoDB.cs b/pkgs/dotnet-server-sdk-dynamodb/src/DynamoDB.cs new file mode 100644 index 00000000..89319499 --- /dev/null +++ b/pkgs/dotnet-server-sdk-dynamodb/src/DynamoDB.cs @@ -0,0 +1,106 @@ +using LaunchDarkly.Sdk.Server.Subsystems; + +namespace LaunchDarkly.Sdk.Server.Integrations +{ + /// + /// Integration between the LaunchDarkly SDK and DynamoDB. + /// + public static class DynamoDB + { + /// + /// Name of the partition key that the data store's table must have. You must specify + /// this when you create the table. The key type must be String. + /// + public const string DataStorePartitionKey = "namespace"; + + /// + /// Name of the sort key that the data store's table must have. You must specify this + /// when you create the table. The key type must be String. + /// + public const string DataStoreSortKey = "key"; + + /// + /// Returns a builder object for creating a Redis-backed persistent data store. + /// + /// + /// + /// This is for the main data store that holds feature flag data. To configure a + /// Big Segment store, use instead. + /// + /// + /// You can use methods of the builder to specify any non-default DynamoDB options + /// you may want, before passing the builder to + /// . + /// In this example, the store is configured to use a table called "table1": + /// + /// + /// var config = Configuration.Builder("sdk-key") + /// .DataStore( + /// Components.PersistentDataStore( + /// DynamoDB.DataStore("table1") + /// ) + /// ) + /// .Build(); + /// + /// + /// Note that the SDK also has its own options related to data storage that are configured + /// at a different level, because they are independent of what database is being used. 
For + /// instance, the builder returned by + /// has options for caching: + /// + /// + /// var config = Configuration.Builder("sdk-key") + /// .DataStore( + /// Components.PersistentDataStore( + /// DynamoDB.DataStore("table1") + /// ).CacheSeconds(15) + /// ) + /// .Build(); + /// + /// + /// the DynamoDB table name; this table must already exist + /// a data store configuration object + public static DynamoDBStoreBuilder DataStore(string tableName) => + new BuilderForDataStore(tableName); + + /// + /// Returns a builder object for creating a DynamoDB-backed Big Segment store. + /// + /// + /// + /// You can use methods of the builder to specify any non-default DynamoDB options + /// you may want, before passing the builder to + /// . + /// In this example, the store is configured to use a table called "table2": + /// + /// + /// var config = Configuration.Builder("sdk-key") + /// .DataStore( + /// Components.BigSegments( + /// DynamoDB.BigSegmentStore("table2") + /// ) + /// ) + /// .Build(); + /// + /// + /// Note that the SDK also has its own options related to Big Segments that are configured + /// at a different level, because they are independent of what database is being used. For + /// instance, the builder returned by + /// has an option for the status polling interval: + /// + /// + /// var config = Configuration.Builder("sdk-key") + /// .DataStore( + /// Components.BigSegments( + /// DynamoDB.BigSegmentStore("table2") + /// ).StatusPollInterval(TimeSpan.FromSeconds(30)) + /// ) + /// .Build(); + /// + /// + /// the DynamoDB table name; this table must already exist + /// a Big Segment store configuration object + public static DynamoDBStoreBuilder BigSegmentStore(string tableName) => + new BuilderForBigSegments(tableName); + } +} diff --git a/pkgs/dotnet-server-sdk-dynamodb/src/DynamoDBBigSegmentStoreImpl.cs b/pkgs/dotnet-server-sdk-dynamodb/src/DynamoDBBigSegmentStoreImpl.cs new file mode 100644 index 00000000..c025c210 --- /dev/null +++ b/pkgs/dotnet-server-sdk-dynamodb/src/DynamoDBBigSegmentStoreImpl.cs @@ -0,0 +1,71 @@ +using System; +using System.Collections.Generic; +using System.Threading.Tasks; +using Amazon.DynamoDBv2; +using Amazon.DynamoDBv2.Model; +using LaunchDarkly.Logging; +using LaunchDarkly.Sdk.Server.Subsystems; + +using static LaunchDarkly.Sdk.Server.Subsystems.BigSegmentStoreTypes; + +namespace LaunchDarkly.Sdk.Server.Integrations +{ + internal sealed class DynamoDBBigSegmentStoreImpl : DynamoDBStoreImplBase, IBigSegmentStore + { + internal const string MembershipKey = "big_segments_user"; + internal const string IncludedAttr = "included"; + internal const string ExcludedAttr = "excluded"; + + internal const string MetadataKey = "big_segments_metadata"; + internal const string SyncTimeAttr = "synchronizedOn"; + + internal DynamoDBBigSegmentStoreImpl( + AmazonDynamoDBClient client, + bool wasExistingClient, + string tableName, + string prefix, + Logger log + ) : base(client, wasExistingClient, tableName, prefix, log) + { } + + public async Task GetMembershipAsync(string userHash) + { + var namespaceKey = PrefixedNamespace(MembershipKey); + var request = new GetItemRequest(_tableName, MakeKeysMap(namespaceKey, userHash), true); + var result = await _client.GetItemAsync(request); + if (result.Item is null || result.Item.Count == 0) + { + return null; + } + var includedRefs = GetStringListFromSetAttr(result.Item, IncludedAttr); + var excludedRefs = GetStringListFromSetAttr(result.Item, ExcludedAttr); + return NewMembershipFromSegmentRefs(includedRefs, 
excludedRefs); + } + + private static IEnumerable GetStringListFromSetAttr( + Dictionary attrs, + string attrName + ) => + attrs.TryGetValue(attrName, out var attr) ? attr.SS : null; + + public async Task GetMetadataAsync() + { + var key = PrefixedNamespace(MetadataKey); + var request = new GetItemRequest(_tableName, MakeKeysMap(key, key), true); + var result = await _client.GetItemAsync(request); + if (result.Item is null || result.Item.Count == 0) + { + return null; + } + if (!result.Item.TryGetValue(SyncTimeAttr, out var syncTimeValue) || string.IsNullOrEmpty(syncTimeValue.N)) + { + return new StoreMetadata { LastUpToDate = null }; + } + if (!long.TryParse(syncTimeValue.N, out var milliseconds)) + { + throw new InvalidOperationException("Invalid data in DynamoDB: non-numeric timestamp"); + } + return new StoreMetadata { LastUpToDate = UnixMillisecondTime.OfMillis(milliseconds) }; + } + } +} diff --git a/pkgs/dotnet-server-sdk-dynamodb/src/DynamoDBDataStoreImpl.cs b/pkgs/dotnet-server-sdk-dynamodb/src/DynamoDBDataStoreImpl.cs new file mode 100644 index 00000000..14c8878d --- /dev/null +++ b/pkgs/dotnet-server-sdk-dynamodb/src/DynamoDBDataStoreImpl.cs @@ -0,0 +1,298 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Threading.Tasks; +using Amazon.DynamoDBv2; +using Amazon.DynamoDBv2.Model; +using LaunchDarkly.Logging; +using LaunchDarkly.Sdk.Server.Subsystems; + +using static LaunchDarkly.Sdk.Server.Subsystems.DataStoreTypes; + +namespace LaunchDarkly.Sdk.Server.Integrations +{ + /// + /// Internal implementation of the DynamoDB feature store. + /// + /// Implementation notes: + /// + /// * The AWS SDK methods are asynchronous; currently none of the LaunchDarkly SDK code is + /// asynchronous. Therefore, this implementation is async and we're relying on an adapter + /// that is part of CachingStoreWrapper to allow us to be called from synchronous code. If + /// our SDK is changed to use async code in the future, we should not have to change anything + /// in this class. + /// + /// * Feature flags, segments, and any other kind of entity the LaunchDarkly client may wish + /// to store, are all put in the same table. The only two required attributes are "key" (which + /// is present in all storeable entities) and "namespace" (a parameter from the client that is + /// used to disambiguate between flags and segments). + /// + /// * Because of DynamoDB's restrictions on attribute values (e.g. empty strings are not + /// allowed), the standard DynamoDB marshaling mechanism with one attribute per object property + /// is not used. Instead, the entire object is serialized to JSON and stored in a single + /// attribute, "item". The "version" property is also stored as a separate attribute since it + /// is used for updates. + /// + /// * Since DynamoDB doesn't have transactions, the Init method - which replaces the entire data + /// store - is not atomic, so there can be a race condition if another process is adding new data + /// via Upsert. To minimize this, we don't delete all the data at the start; instead, we update + /// the items we've received, and then delete all other items. That could potentially result in + /// deleting new data from another process, but that would be the case anyway if the Init + /// happened to execute later than the upsert(); we are relying on the fact that normally the + /// process that did the init() will also receive the new data shortly and do its own Upsert. 
+ /// + /// * DynamoDB has a maximum item size of 400KB. Since each feature flag or user segment is + /// stored as a single item, this mechanism will not work for extremely large flags or segments. + /// + internal sealed class DynamoDBDataStoreImpl : DynamoDBStoreImplBase, IPersistentDataStoreAsync + { + // These attribute names aren't public because application code should never access them directly + private const string VersionAttribute = "version"; + private const string SerializedItemAttribute = "item"; + private const string DeletedItemPlaceholder = "null"; // DynamoDB does not allow empty strings + + // We won't try to store items whose total size exceeds this. The DynamoDB documentation says + // only "400KB", which probably means 400*1024, but to avoid any chance of trying to store a + // too-large item we are rounding it down. + const int DynamoDBMaxItemSize = 400000; + + internal DynamoDBDataStoreImpl( + AmazonDynamoDBClient client, + bool wasExistingClient, + string tableName, + string prefix, + Logger log + ) : base(client, wasExistingClient, tableName, prefix, log) + { } + + public async Task InitializedAsync() + { + var resp = await GetItemByKeys(InitedKey, InitedKey); + return resp.Item != null && resp.Item.Count > 0; + } + + public async Task InitAsync(FullDataSet allData) + { + // Start by reading the existing keys; we will later delete any of these that weren't in allData. + var unusedOldKeys = await ReadExistingKeys(allData.Data.Select(collection => collection.Key)); + + var requests = new List(); + var numItems = 0; + + // Insert or update every provided item + foreach (var collection in allData.Data) + { + var kind = collection.Key; + foreach (var keyAndItem in collection.Value.Items) + { + var encodedItem = MarshalItem(kind, keyAndItem.Key, keyAndItem.Value); + if (!CheckSizeLimit(encodedItem)) + { + continue; + } + requests.Add(new WriteRequest(new PutRequest(encodedItem))); + + var combinedKey = new Tuple(NamespaceForKind(kind), keyAndItem.Key); + unusedOldKeys.Remove(combinedKey); + + numItems++; + } + } + + // Now delete any previously existing items whose keys were not in the current data + foreach (var combinedKey in unusedOldKeys) + { + if (combinedKey.Item1 != InitedKey) + { + var keys = MakeKeysMap(combinedKey.Item1, combinedKey.Item2); + requests.Add(new WriteRequest(new DeleteRequest(keys))); + } + } + + // Now set the special key that we check in initializedInternal() + var initedItem = MakeKeysMap(InitedKey, InitedKey); + requests.Add(new WriteRequest(new PutRequest(initedItem))); + + await DynamoDBHelpers.BatchWriteRequestsAsync(_client, _tableName, requests); + + _log.Info("Initialized data store with {0} items", numItems); + } + + public async Task GetAsync(DataKind kind, String key) + { + var resp = await GetItemByKeys(NamespaceForKind(kind), key); + return UnmarshalItem(kind, resp.Item); + } + + public async Task> GetAllAsync(DataKind kind) + { + var ret = new List>(); + var req = MakeQueryForKind(kind); + await DynamoDBHelpers.IterateQuery(_client, req, + item => + { + var itemOut = UnmarshalItem(kind, item); + if (itemOut.HasValue) + { + var itemKey = item[DynamoDB.DataStoreSortKey].S; + ret.Add(new KeyValuePair(itemKey, itemOut.Value)); + } + }); + return new KeyedItems(ret); + } + + public async Task UpsertAsync(DataKind kind, string key, SerializedItemDescriptor newItem) + { + var encodedItem = MarshalItem(kind, key, newItem); + if (!CheckSizeLimit(encodedItem)) + { + return false; + } + + try + { + var request = new 
PutItemRequest(_tableName, encodedItem); + request.ConditionExpression = "attribute_not_exists(#namespace) or attribute_not_exists(#key) or :version > #version"; + request.ExpressionAttributeNames = new Dictionary() + { + { "#namespace", DynamoDB.DataStorePartitionKey }, + { "#key", DynamoDB.DataStoreSortKey }, + { "#version", VersionAttribute } + }; + request.ExpressionAttributeValues = new Dictionary() + { + { ":version", new AttributeValue() { N = Convert.ToString(newItem.Version) } } + }; + await _client.PutItemAsync(request); + } + catch (ConditionalCheckFailedException) + { + return false; + } + + return true; + } + + public async Task IsStoreAvailableAsync() + { + try + { + await InitializedAsync(); // don't care about the return value, just that it doesn't throw an exception + return true; + } + catch + { // don't care about exception class, since any exception means the DynamoDB request couldn't be made + return false; + } + } + + private string NamespaceForKind(DataKind kind) => + PrefixedNamespace(kind.Name); + + private string InitedKey => PrefixedNamespace("$inited"); + + private QueryRequest MakeQueryForKind(DataKind kind) + { + Condition cond = new Condition() + { + ComparisonOperator = ComparisonOperator.EQ, + AttributeValueList = new List() + { + new AttributeValue(NamespaceForKind(kind)) + } + }; + return new QueryRequest(_tableName) + { + KeyConditions = new Dictionary() + { + { DynamoDB.DataStorePartitionKey, cond } + }, + ConsistentRead = true + }; + } + + private Task GetItemByKeys(string ns, string key) + { + var req = new GetItemRequest(_tableName, MakeKeysMap(ns, key), true); + return _client.GetItemAsync(req); + } + + private async Task>> ReadExistingKeys(IEnumerable kinds) + { + var keys = new HashSet>(); + foreach (var kind in kinds) + { + var req = MakeQueryForKind(kind); + req.ProjectionExpression = "#namespace, #key"; + req.ExpressionAttributeNames = new Dictionary() + { + { "#namespace", DynamoDB.DataStorePartitionKey }, + { "#key", DynamoDB.DataStoreSortKey } + }; + await DynamoDBHelpers.IterateQuery(_client, req, + item => keys.Add(new Tuple( + item[DynamoDB.DataStorePartitionKey].S, + item[DynamoDB.DataStoreSortKey].S)) + ); + } + return keys; + } + + private Dictionary MarshalItem(DataKind kind, string key, SerializedItemDescriptor item) + { + var ret = MakeKeysMap(NamespaceForKind(kind), key); + ret[VersionAttribute] = new AttributeValue() { N = item.Version.ToString() }; + ret[SerializedItemAttribute] = new AttributeValue(item.Deleted ? DeletedItemPlaceholder : item.SerializedItem); + return ret; + } + + private SerializedItemDescriptor? 
UnmarshalItem(DataKind kind, IDictionary item) + { + if (item is null || item.Count == 0) + { + return null; + } + if (!item.TryGetValue(SerializedItemAttribute, out var serializedItemAttr) || serializedItemAttr.S is null) + { + throw new InvalidOperationException("Invalid data in DynamoDB: missing item attribute"); + } + if (!item.TryGetValue(VersionAttribute, out var versionAttr) || versionAttr.N is null) + { + throw new InvalidOperationException("Invalid data in DynamoDB: missing version attribute"); + } + if (!int.TryParse(versionAttr.N, out var version)) + { + throw new InvalidOperationException("Invalid data in DynamoDB: non-numeric version"); + } + if (serializedItemAttr.S == DeletedItemPlaceholder) + { + return new SerializedItemDescriptor(version, true, null); + } + return new SerializedItemDescriptor(version, false, serializedItemAttr.S); + } + + private bool CheckSizeLimit(Dictionary item) { + // see: https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/CapacityUnitCalculations.html + var size = 100; // fixed overhead for index data + foreach (var kv in item) + { + size += Encoding.UTF8.GetByteCount(kv.Key); + if (kv.Value.S != null) + { + size += Encoding.UTF8.GetByteCount(kv.Value.S); + } else if (kv.Value.N != null) + { + size += Encoding.UTF8.GetByteCount(kv.Value.N);// DynamoDB stores numbers as numeric strings + } + } + if (size <= DynamoDBMaxItemSize) + { + return true; + } + _log.Error(@"The item ""{0}"" in ""{1}"" was too large to store in DynamoDB and was dropped", + item[DynamoDB.DataStorePartitionKey].S, item[DynamoDB.DataStoreSortKey].S); + return false; + } + } +} diff --git a/pkgs/dotnet-server-sdk-dynamodb/src/DynamoDBHelpers.cs b/pkgs/dotnet-server-sdk-dynamodb/src/DynamoDBHelpers.cs new file mode 100644 index 00000000..de1afa06 --- /dev/null +++ b/pkgs/dotnet-server-sdk-dynamodb/src/DynamoDBHelpers.cs @@ -0,0 +1,87 @@ +using System; +using System.Collections.Generic; +using System.Threading.Tasks; +using Amazon.DynamoDBv2; +using Amazon.DynamoDBv2.Model; + +namespace LaunchDarkly.Sdk.Server.Integrations +{ + internal static class DynamoDBHelpers + { + /// + /// Sends a list of write requests in batches as large as the AWS SDK will allow. + /// + /// the client + /// the table name + /// list of requests + /// async Task with no return value + public static async Task BatchWriteRequestsAsync(AmazonDynamoDBClient client, string tableName, + List requests) + { + int batchSize = 25; + for (int i = 0; i < requests.Count; i += batchSize) + { + var batch = requests.GetRange(i, Math.Min(batchSize, requests.Count - i)); + var request = new BatchWriteItemRequest(new Dictionary>() + { + { tableName, batch } + }); + await client.BatchWriteItemAsync(request); + } + } + + /// + /// Executes a Query and continues to query any additional pages of results that are + /// available, calling a (synchronous) Action for each individual result. The original + /// QueryRequest will be modified. 
+ /// + /// the client + /// the initial request + /// will be called for each result item + /// async Task with no return value + public static async Task IterateQuery(AmazonDynamoDBClient client, QueryRequest request, + Action> action) + { + while (true) + { + var resp = await client.QueryAsync(request); + foreach (var item in resp.Items) + { + action(item); + } + if (resp.LastEvaluatedKey == null || resp.LastEvaluatedKey.Count == 0) + { + break; + } + request.ExclusiveStartKey = resp.LastEvaluatedKey; + } + } + + /// + /// Executes a Scan and continues to query any additional pages of results that are + /// available, calling a (synchronous) Action for each individual result. The original + /// ScanRequest will be modified. + /// + /// the client + /// the initial request + /// will be called for each result item + /// async Task with no return value + public static async Task IterateScan(AmazonDynamoDBClient client, ScanRequest request, + Action> action) + { + while (true) + { + var resp = await client.ScanAsync(request); + foreach (var item in resp.Items) + { + action(item); + } + if (resp.LastEvaluatedKey == null || resp.LastEvaluatedKey.Count == 0) + { + break; + } + request.ExclusiveStartKey = resp.LastEvaluatedKey; + } + } + } +} diff --git a/pkgs/dotnet-server-sdk-dynamodb/src/DynamoDBStoreBuilder.cs b/pkgs/dotnet-server-sdk-dynamodb/src/DynamoDBStoreBuilder.cs new file mode 100644 index 00000000..53d4ed30 --- /dev/null +++ b/pkgs/dotnet-server-sdk-dynamodb/src/DynamoDBStoreBuilder.cs @@ -0,0 +1,208 @@ +using Amazon.DynamoDBv2; +using Amazon.Runtime; +using LaunchDarkly.Sdk.Server.Subsystems; + +namespace LaunchDarkly.Sdk.Server.Integrations +{ + /// + /// A builder for configuring the DynamoDB-based persistent data store. + /// + /// + /// + /// This can be used either for the main data store that holds feature flag data, or for the big + /// segment store, or both. If you are using both, they do not have to have the same parameters. For + /// instance, in this example the main data store uses a table called "table1" and the big segment + /// store uses a table called "table2": + /// + /// + /// var config = Configuration.Builder("sdk-key") + /// .DataStore( + /// Components.PersistentDataStore( + /// DynamoDB.DataStore("table1") + /// ) + /// ) + /// .BigSegments( + /// Components.BigSegments( + /// DynamoDB.DataStore("table2") + /// ) + /// ) + /// .Build(); + /// + /// + /// Note that the builder is passed to one of two methods, + /// or + /// , depending on the context in + /// which it is being used. This is because each of those contexts has its own additional + /// configuration options that are unrelated to the DynamoDB options. 
For instance, the + /// builder + /// has options for caching: + /// + /// + /// var config = Configuration.Builder("sdk-key") + /// .DataStore( + /// Components.PersistentDataStore( + /// DynamoDB.DataStore("table1") + /// ).CacheSeconds(15) + /// ) + /// .Build(); + /// + /// + /// Builder calls can be chained, for example: + /// + /// + /// var config = Configuration.Builder("sdk-key") + /// .DataStore( + /// Components.PersistentDataStore( + /// DynamoDB.DataStore("my-table-name") + /// .Credentials(myAWSCredentials) + /// .Prefix("app1") + /// ) + /// .CacheSeconds(15) + /// ) + /// .Build(); + /// + /// + public abstract class DynamoDBStoreBuilder : IComponentConfigurer, IDiagnosticDescription + { + internal AmazonDynamoDBClient _existingClient = null; + internal AWSCredentials _credentials = null; + internal AmazonDynamoDBConfig _config = null; + + internal readonly string _tableName; + internal string _prefix = ""; + + internal DynamoDBStoreBuilder(string tableName) + { + _tableName = tableName; + } + + /// + /// Specifies an existing, already-configured DynamoDB client instance that the data store + /// should use rather than creating one of its own. + /// + /// + /// + /// If you specify an existing client, then the other builder methods for configuring DynamoDB + /// are ignored. + /// + /// + /// Note that the LaunchDarkly code will not take ownership of the lifecycle of this + /// object: in other words, it will not call Dispose() on the AmazonDynamoDBClient when + /// you dispose of the SDK client, as it would if it had created the AmazonDynamoDBClient itself. + /// It is your responsibility to call Dispose() on the AmazonDynamoDBClient when you are + /// done with it. + /// + /// + /// an existing DynamoDB client instance + /// the builder + public DynamoDBStoreBuilder ExistingClient(AmazonDynamoDBClient client) + { + _existingClient = client; + return this; + } + + /// + /// Sets the AWS client credentials. + /// + /// + /// If you do not set them programmatically, the AWS SDK will attempt to find them in + /// environment variables and/or local configuration files. + /// + /// the AWS credentials + /// the builder + public DynamoDBStoreBuilder Credentials(AWSCredentials credentials) + { + _credentials = credentials; + return this; + } + + /// + /// Specifies an entire DynamoDB configuration. + /// + /// + /// If this is not provided explicitly, the AWS SDK will attempt to determine your + /// current region based on environment variables and/or local configuration files. + /// + /// a DynamoDB configuration object + /// the builder + public DynamoDBStoreBuilder Configuration(AmazonDynamoDBConfig config) + { + _config = config; + return this; + } + + /// + /// Sets an optional namespace prefix for all keys stored in DynamoDB. + /// + /// + /// You may use this if you are sharing the same database table between multiple clients that + /// are for different LaunchDarkly environments, to avoid key collisions. However, in DynamoDB + /// it is common to use separate tables rather than share a single table for unrelated + /// applications, so by default there is no prefix. 
+ /// + /// the namespace prefix; null for no prefix + /// the builder + public DynamoDBStoreBuilder Prefix(string prefix) + { + _prefix = prefix; + return this; + } + + /// + public abstract T Build(LdClientContext context); + + /// + public LdValue DescribeConfiguration(LdClientContext context) => + LdValue.Of("DynamoDB"); + + internal AmazonDynamoDBClient MakeClient() + { + if (_existingClient != null) + { + return _existingClient; + } + // Unfortunately, the AWS SDK does not believe in builders + if (_credentials == null) + { + if (_config == null) + { + return new AmazonDynamoDBClient(); + } + return new AmazonDynamoDBClient(_config); + } + if (_config == null) + { + return new AmazonDynamoDBClient(_credentials); + } + return new AmazonDynamoDBClient(_credentials, _config); + } + } + + internal sealed class BuilderForDataStore : DynamoDBStoreBuilder + { + internal BuilderForDataStore(string tableName) : base(tableName) { } + + public override IPersistentDataStoreAsync Build(LdClientContext context) => + new DynamoDBDataStoreImpl( + MakeClient(), + _existingClient != null, + _tableName, + _prefix, + context.Logger.SubLogger("DataStore.DynamoDB") + ); + } + + internal sealed class BuilderForBigSegments : DynamoDBStoreBuilder + { + internal BuilderForBigSegments(string tableName) : base(tableName) { } + + public override IBigSegmentStore Build(LdClientContext context) => + new DynamoDBBigSegmentStoreImpl( + MakeClient(), + _existingClient != null, + _tableName, + _prefix, + context.Logger.SubLogger("DataStore.DynamoDB") + ); + } +} diff --git a/pkgs/dotnet-server-sdk-dynamodb/src/DynamoDBStoreImplBase.cs b/pkgs/dotnet-server-sdk-dynamodb/src/DynamoDBStoreImplBase.cs new file mode 100644 index 00000000..a6f26b7a --- /dev/null +++ b/pkgs/dotnet-server-sdk-dynamodb/src/DynamoDBStoreImplBase.cs @@ -0,0 +1,70 @@ +using System; +using System.Collections.Generic; +using Amazon.DynamoDBv2; +using Amazon.DynamoDBv2.Model; +using LaunchDarkly.Logging; + +namespace LaunchDarkly.Sdk.Server.Integrations +{ + internal abstract class DynamoDBStoreImplBase : IDisposable + { + protected readonly AmazonDynamoDBClient _client; + protected readonly string _tableName; + protected readonly string _prefix; + protected readonly Logger _log; + private readonly bool _wasExistingClient; + + protected DynamoDBStoreImplBase( + AmazonDynamoDBClient client, + bool wasExistingClient, + string tableName, + string prefix, + Logger log + ) + { + _client = client; + _wasExistingClient = wasExistingClient; + _tableName = tableName; + _log = log; + + if (string.IsNullOrEmpty(prefix)) + { + _prefix = null; + _log.Info("Using DynamoDB data store with table name \"{0}\" and no prefix", tableName); + } + else + { + _log.Info("Using DynamoDB data store with table name \"{0}\" and prefix \"{1}\"", + tableName, prefix); + _prefix = prefix; + } + } + + public void Dispose() + { + Dispose(true); + GC.SuppressFinalize(this); + } + + private void Dispose(bool disposing) + { + if (disposing) + { + if (!_wasExistingClient) + { + _client.Dispose(); + } + } + } + + protected string PrefixedNamespace(string baseStr) => + _prefix is null ? 
baseStr : (_prefix + ":" + baseStr); + + protected static Dictionary MakeKeysMap(string ns, string key) => + new Dictionary() + { + { DynamoDB.DataStorePartitionKey, new AttributeValue(ns) }, + { DynamoDB.DataStoreSortKey, new AttributeValue(key) } + }; + } +} diff --git a/pkgs/dotnet-server-sdk-dynamodb/src/LaunchDarkly.ServerSdk.DynamoDB.csproj b/pkgs/dotnet-server-sdk-dynamodb/src/LaunchDarkly.ServerSdk.DynamoDB.csproj new file mode 100644 index 00000000..b8bad02d --- /dev/null +++ b/pkgs/dotnet-server-sdk-dynamodb/src/LaunchDarkly.ServerSdk.DynamoDB.csproj @@ -0,0 +1,36 @@ + + + 4.0.0 + netstandard2.0;net462 + LaunchDarkly.ServerSdk.DynamoDB + LaunchDarkly.ServerSdk.DynamoDB + Library + 7.3 + LaunchDarkly Server-Side .NET SDK DynamoDB Integration + LaunchDarkly + LaunchDarkly + LaunchDarkly + Copyright 2018 Catamorphic, Co. + Apache-2.0 + https://github.com/launchdarkly/dotnet-server-sdk-dynamodb + https://github.com/launchdarkly/dotnet-server-sdk-dynamodb + main + true + snupkg + LaunchDarkly.Sdk.Server.Integrations + + + + + + + + + bin\$(Configuration)\$(TargetFramework)\LaunchDarkly.ServerSdk.DynamoDB.xml + + + + true + ../../../LaunchDarkly.DynamoDB.snk + + diff --git a/pkgs/dotnet-server-sdk-dynamodb/test/DynamoDBBigSegmentStoreTest.cs b/pkgs/dotnet-server-sdk-dynamodb/test/DynamoDBBigSegmentStoreTest.cs new file mode 100644 index 00000000..d322c651 --- /dev/null +++ b/pkgs/dotnet-server-sdk-dynamodb/test/DynamoDBBigSegmentStoreTest.cs @@ -0,0 +1,92 @@ +using System.Collections.Generic; +using System.Threading.Tasks; +using Amazon.DynamoDBv2; +using Amazon.DynamoDBv2.Model; +using LaunchDarkly.Sdk.Server.Subsystems; +using LaunchDarkly.Sdk.Server.SharedTests.BigSegmentStore; +using Xunit; +using Xunit.Abstractions; +using static LaunchDarkly.Sdk.Server.Integrations.DynamoDBTestEnvironment; +using static LaunchDarkly.Sdk.Server.Subsystems.BigSegmentStoreTypes; + +namespace LaunchDarkly.Sdk.Server.Integrations +{ + public class DynamoDBBigSegmentStoreTest : BigSegmentStoreBaseTests, IAsyncLifetime + { + override protected BigSegmentStoreTestConfig Configuration => new BigSegmentStoreTestConfig + { + StoreFactoryFunc = MakeStoreFactory, + ClearDataAction = ClearAllData, + SetMetadataAction = SetMetadata, + SetSegmentsAction = SetSegments + }; + + public DynamoDBBigSegmentStoreTest(ITestOutputHelper testOutput) : base(testOutput) + { + } + + public Task InitializeAsync() => CreateTableIfNecessary(); + + public Task DisposeAsync() => Task.CompletedTask; + + private IComponentConfigurer MakeStoreFactory(string prefix) => + DynamoDB.BigSegmentStore(TableName) + .ExistingClient(DynamoDBTestEnvironment.client) + .Prefix(prefix); + + private async Task SetMetadata(string prefix, StoreMetadata metadata) + { + var client = DynamoDBTestEnvironment.client; + var key = prefix + ":" + DynamoDBBigSegmentStoreImpl.MetadataKey; + var timeValue = metadata.LastUpToDate.HasValue ? 
metadata.LastUpToDate.Value.Value.ToString() : null; + await client.PutItemAsync(new PutItemRequest(TableName, + new Dictionary + { + { DynamoDB.DataStorePartitionKey, new AttributeValue { S = key } }, + { DynamoDB.DataStoreSortKey, new AttributeValue { S = key } }, + { DynamoDBBigSegmentStoreImpl.SyncTimeAttr, new AttributeValue { N = timeValue } } + })); + } + + private async Task SetSegments(string prefix, string userHash, + IEnumerable includedRefs, IEnumerable excludedRefs) + { + var client = DynamoDBTestEnvironment.client; + if (includedRefs != null) + { + foreach (var value in includedRefs) + { + await AddToSetAsync(client, prefix, userHash, DynamoDBBigSegmentStoreImpl.IncludedAttr, value); + } + } + + if (excludedRefs != null) + { + foreach (var value in excludedRefs) + { + await AddToSetAsync(client, prefix, userHash, DynamoDBBigSegmentStoreImpl.ExcludedAttr, value); + } + } + } + + private async Task AddToSetAsync(AmazonDynamoDBClient client, string prefix, + string userHash, string attrName, string value) + { + var namespaceKey = prefix + ":" + DynamoDBBigSegmentStoreImpl.MembershipKey; + await client.UpdateItemAsync(new UpdateItemRequest + { + TableName = TableName, + Key = new Dictionary + { + { DynamoDB.DataStorePartitionKey, new AttributeValue { S = namespaceKey } }, + { DynamoDB.DataStoreSortKey, new AttributeValue { S = userHash } }, + }, + UpdateExpression = string.Format("ADD {0} :value", attrName), + ExpressionAttributeValues = new Dictionary + { + { ":value", new AttributeValue { SS = new List { value } } } + } + }); + } + } +} diff --git a/pkgs/dotnet-server-sdk-dynamodb/test/DynamoDBDataStoreTest.cs b/pkgs/dotnet-server-sdk-dynamodb/test/DynamoDBDataStoreTest.cs new file mode 100644 index 00000000..4e576be8 --- /dev/null +++ b/pkgs/dotnet-server-sdk-dynamodb/test/DynamoDBDataStoreTest.cs @@ -0,0 +1,227 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading.Tasks; +using LaunchDarkly.Logging; +using LaunchDarkly.Sdk.Server.Subsystems; +using LaunchDarkly.Sdk.Server.SharedTests.DataStore; +using Xunit; +using Xunit.Abstractions; + +using static LaunchDarkly.Sdk.Server.Integrations.DynamoDBTestEnvironment; +using static LaunchDarkly.Sdk.Server.Subsystems.DataStoreTypes; + +namespace LaunchDarkly.Sdk.Server.Integrations +{ + public class DynamoDBDataStoreTest : PersistentDataStoreBaseTests, IAsyncLifetime + { + const string BadItemKey = "baditem"; + + public DynamoDBDataStoreTest(ITestOutputHelper testOutput) : base(testOutput) { } + + protected override PersistentDataStoreTestConfig Configuration => + new PersistentDataStoreTestConfig + { + StoreAsyncFactoryFunc = MakeStoreFactory, + ClearDataAction = ClearAllData + }; + + + public Task InitializeAsync() => CreateTableIfNecessary(); + + public Task DisposeAsync() => Task.CompletedTask; + + private IComponentConfigurer MakeStoreFactory(string prefix) => + BaseBuilder().Prefix(prefix); + + private DynamoDBStoreBuilder BaseBuilder() => + DynamoDB.DataStore(TableName) + .Credentials(MakeTestCredentials()) + .Configuration(MakeTestConfiguration()); + + [Fact] + public void LogMessageAtStartup() + { + var logCapture = Logs.Capture(); + var logger = logCapture.Logger("BaseLoggerName"); // in real life, the SDK will provide its own base log name + var context = new LdClientContext("", null, null, null, logger, false, null); + using (((IComponentConfigurer)BaseBuilder().Prefix("my-prefix")).Build(context)) + { + Assert.Collection(logCapture.GetMessages(), + m => + { + 
Assert.Equal(LaunchDarkly.Logging.LogLevel.Info, m.Level); + Assert.Equal("BaseLoggerName.DataStore.DynamoDB", m.LoggerName); + Assert.Equal("Using DynamoDB data store with table name \"" + TableName + + "\" and prefix \"my-prefix\"", m.Text); + }); + } + } + + [Theory] + [InlineData("flag")] + [InlineData("segment")] + public async void DataStoreSkipsAndLogsTooLargeItemOnInit(string flagOrSegment) + { + var dataPlusBadItem = MakeGoodData().Data.ToList(); + GetTooLargeItemParams(flagOrSegment, out var dataKind, out var collIndex, out SerializedItemDescriptor item); + var items = dataPlusBadItem[collIndex].Value.Items.ToList(); + items.Insert(0, new KeyValuePair(BadItemKey, item)); + // put the bad item first to prove that items after that one are still stored + dataPlusBadItem[collIndex] = new KeyValuePair>( + dataKind, new KeyedItems(items)); + + var logCapture = Logs.Capture(); + var context = new LdClientContext("", null, null, null, logCapture.Logger(""), false, null); + + using (var store = ((IComponentConfigurer)BaseBuilder()).Build(context)) + { + await store.InitAsync(new FullDataSet(dataPlusBadItem)); + + Assert.True(logCapture.HasMessageWithRegex(LogLevel.Error, + @"""" + BadItemKey + @""".*was too large to store in DynamoDB and was dropped")); + + AssertDataSetsEqual(MakeGoodData(), await GetAllData(store)); + } + } + + [Theory] + [InlineData("flag")] + [InlineData("segment")] + public async void DataStoreSkipsAndLogsTooLargeItemOnUpsert(string flagOrSegment) + { + var goodData = MakeGoodData(); + GetTooLargeItemParams(flagOrSegment, out var dataKind, out var collIndex, out SerializedItemDescriptor item); + + var logCapture = Logs.Capture(); + var context = new LdClientContext("", null, null, null, logCapture.Logger(""), false, null); + + using (var store = ((IComponentConfigurer)BaseBuilder()).Build(context)) + { + await store.InitAsync(goodData); + + AssertDataSetsEqual(MakeGoodData(), await GetAllData(store)); + + await store.UpsertAsync(dataKind, BadItemKey, item); + + Assert.True(logCapture.HasMessageWithRegex(LogLevel.Error, + @"""" + BadItemKey + @""".*was too large to store in DynamoDB and was dropped")); + + AssertDataSetsEqual(MakeGoodData(), await GetAllData(store)); + } + } + + private static FullDataSet MakeGoodData() + { + // Not using the DataBuilder helper because that currently does not preserve insertion order. 
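+            // The data set below holds two flags and two segments in a fixed insertion order,
+            // so AssertDataSetsEqual can compare the results of GetAllAsync against it positionally.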
+ var list = new List>> + { + new KeyValuePair>( + DataModel.Features, + new KeyedItems( + new List> + { + new KeyValuePair( + "flag1", + new SerializedItemDescriptor(1, false, @"{""key"": ""flag1"", ""version"": 1}") + ), + new KeyValuePair( + "flag2", + new SerializedItemDescriptor(1, false, @"{""key"": ""flag2"", ""version"": 1}") + ) + })), + new KeyValuePair>( + DataModel.Segments, + new KeyedItems( + new List> + { + new KeyValuePair( + "segment1", + new SerializedItemDescriptor(1, false, @"{""key"": ""segment1"", ""version"": 1}") + ), + new KeyValuePair( + "segment2", + new SerializedItemDescriptor(1, false, @"{""key"": ""segment2"", ""version"": 1}") + ) + })) + }; + return new FullDataSet(list); + } + + private static async Task> GetAllData(IPersistentDataStoreAsync store) + { + var colls = new List>>(); + foreach (var kind in new DataKind[] { DataModel.Features, DataModel.Segments }) + { + colls.Add(new KeyValuePair>( + kind, await store.GetAllAsync(kind))); + } + return new FullDataSet(colls); + } + + private static void GetTooLargeItemParams( + string flagOrSegment, + out DataKind dataKind, + out int collIndex, + out SerializedItemDescriptor item + ) + { + string tooBigKeysListJson = "["; + for (var i = 0; i < 40000; i++) + { + if (i != 0) + { + tooBigKeysListJson += ","; + } + tooBigKeysListJson += @"""key" + i + @""""; + } + tooBigKeysListJson += "]"; + Assert.NotInRange(tooBigKeysListJson.Length, 0, 400 * 1024); + + string badItemJson; + switch (flagOrSegment) + { + case "flag": + dataKind = DataModel.Features; + collIndex = 0; + badItemJson = @"{""key"":""" + BadItemKey + @""", ""version"": 1, ""targets"":[{""variation"":0,""values"":" + + tooBigKeysListJson + "}]}"; + break; + + case "segment": + dataKind = DataModel.Segments; + collIndex = 1; + badItemJson = @"{""key"":""" + BadItemKey + @""", ""version"": 1, ""included"":" + tooBigKeysListJson + "]}"; + break; + + default: + throw new ArgumentException("invalid type parameter"); + } + item = new SerializedItemDescriptor(1, false, badItemJson); + } + + private static void AssertDataSetsEqual(FullDataSet expected, + FullDataSet actual) + { + var collMatchers = new List>>>(); + foreach (var expectedColl in expected.Data) + { + collMatchers.Add(actualColl => + { + Assert.Equal(expectedColl.Key, actualColl.Key); + var itemsMatchers = new List>>(); + foreach (var expectedKV in expectedColl.Value.Items) + { + itemsMatchers.Add(actualKV => + { + Assert.Equal(expectedKV.Key, actualKV.Key); + Assert.Equal(expectedKV.Value.Version, actualKV.Value.Version); + Assert.Equal(expectedKV.Value.SerializedItem, actualKV.Value.SerializedItem); + }); + } + }); + } + Assert.Collection(actual.Data, collMatchers.ToArray()); + } + } +} diff --git a/pkgs/dotnet-server-sdk-dynamodb/test/DynamoDBTestEnvironment.cs b/pkgs/dotnet-server-sdk-dynamodb/test/DynamoDBTestEnvironment.cs new file mode 100644 index 00000000..c3b0814d --- /dev/null +++ b/pkgs/dotnet-server-sdk-dynamodb/test/DynamoDBTestEnvironment.cs @@ -0,0 +1,101 @@ +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using Amazon.DynamoDBv2; +using Amazon.DynamoDBv2.Model; +using Amazon.Runtime; + +namespace LaunchDarkly.Sdk.Server.Integrations +{ + public static class DynamoDBTestEnvironment + { + public const string TableName = "test-dynamodb-table"; + + private static bool TableCreated; + private static SemaphoreSlim _tableLock = new SemaphoreSlim(1, 1); + + public static AWSCredentials MakeTestCredentials() => + new BasicAWSCredentials("key", 
"secret"); // not used, but required + + public static AmazonDynamoDBConfig MakeTestConfiguration() => + new AmazonDynamoDBConfig() + { + ServiceURL = "http://localhost:8000" // assumes we're running a local DynamoDB + }; + + public static AmazonDynamoDBClient client = CreateTestClient(); + + public static async Task CreateTableIfNecessary() + { + await _tableLock.WaitAsync(); + try + { + if (TableCreated) + { + return; + } + + try + { + await client.DescribeTableAsync(new DescribeTableRequest(TableName)); + return; // table exists + } + catch (ResourceNotFoundException) + { + // fall through to code below - we'll create the table + } + + var request = new CreateTableRequest() + { + TableName = TableName, + KeySchema = new List() + { + new KeySchemaElement(DynamoDB.DataStorePartitionKey, KeyType.HASH), + new KeySchemaElement(DynamoDB.DataStoreSortKey, KeyType.RANGE) + }, + AttributeDefinitions = new List() + { + new AttributeDefinition(DynamoDB.DataStorePartitionKey, ScalarAttributeType.S), + new AttributeDefinition(DynamoDB.DataStoreSortKey, ScalarAttributeType.S) + }, + ProvisionedThroughput = new ProvisionedThroughput(1, 1) + }; + await client.CreateTableAsync(request); + } + finally + { + TableCreated = true; + _tableLock.Release(); + } + } + + public static async Task ClearAllData(string prefix) + { + var keyPrefix = prefix is null ? "" : (prefix + ":"); + + var deleteReqs = new List(); + ScanRequest request = new ScanRequest(TableName) + { + ConsistentRead = true, + ProjectionExpression = "#namespace, #key", + ExpressionAttributeNames = new Dictionary() + { + { "#namespace", DynamoDB.DataStorePartitionKey }, + { "#key", DynamoDB.DataStoreSortKey } + } + }; + await DynamoDBHelpers.IterateScan(client, request, + item => + { + if (item[DynamoDB.DataStorePartitionKey].S.StartsWith(keyPrefix)) + { + deleteReqs.Add(new WriteRequest(new DeleteRequest(item))); + } + }); + await DynamoDBHelpers.BatchWriteRequestsAsync(client, TableName, deleteReqs); + } + + public static AmazonDynamoDBClient CreateTestClient() => + new AmazonDynamoDBClient(MakeTestCredentials(), MakeTestConfiguration()); + } +} diff --git a/pkgs/dotnet-server-sdk-dynamodb/test/LaunchDarkly.ServerSdk.DynamoDB.Tests.csproj b/pkgs/dotnet-server-sdk-dynamodb/test/LaunchDarkly.ServerSdk.DynamoDB.Tests.csproj new file mode 100644 index 00000000..c9a76306 --- /dev/null +++ b/pkgs/dotnet-server-sdk-dynamodb/test/LaunchDarkly.ServerSdk.DynamoDB.Tests.csproj @@ -0,0 +1,20 @@ + + + net8.0 + $(TargetFrameworks);net462 + false + true + LaunchDarkly.Sdk.Server.Integrations + + + + + + + + + + + + + diff --git a/pkgs/dotnet-server-sdk-redis/CHANGELOG.md b/pkgs/dotnet-server-sdk-redis/CHANGELOG.md new file mode 100644 index 00000000..43343c06 --- /dev/null +++ b/pkgs/dotnet-server-sdk-redis/CHANGELOG.md @@ -0,0 +1,89 @@ +# Change log + +All notable changes to the LaunchDarkly .NET SDK Redis integration will be documented in this file. This project adheres to [Semantic Versioning](http://semver.org). + +## [5.0.0] - 2023-10-16 +### Changed: +- This release requires the `8.0.0` release of the `LaunchDarkly.ServerSdk`. + +## [4.0.0] - 2022-12-07 +This release corresponds to the 7.0.0 release of the LaunchDarkly server-side .NET SDK. Any application code that is being updated to use the 7.0.0 SDK, and was using a 3.x version of `LaunchDarkly.ServerSdk.Redis`, should now use a 4.x version instead. 
+ +There are no functional differences in the behavior of the Redis integration; the differences are only related to changes in the usage of interface types for configuration in the SDK. + +### Added: +- `Redis.BigSegmentStore()`, which creates a configuration builder for use with Big Segments. Previously, the `Redis.DataStore()` builder was used for both regular data stores and Big Segment stores. + +### Changed: +- The type `RedisDataStoreBuilder` has been removed, replaced by a generic type `RedisStoreBuilder`. Application code would not normally need to reference these types by name, but if necessary, use either `RedisStoreBuilder` or `RedisStoreBuilder` depending on whether you are configuring a regular data store or a Big Segment store. + +## [3.1.0] - 2021-07-22 +### Added: +- Added support for Big Segments. An Early Access Program for creating and syncing Big Segments from customer data platforms is available to enterprise customers. + +## [3.0.0] - 2021-06-09 +This release is for use with versions 6.0.0 and higher of [`LaunchDarkly.ServerSdk`](https://github.com/launchdarkly/dotnet-server-sdk). + +For more information about changes in the SDK database integrations, see the [5.x to 6.0 migration guide](https://docs-stg.launchdarkly.com/252/sdk/server-side/dotnet/migration-5-to-6). + +Like the previous major version of this library, it uses version 2.x of `StackExchange.Redis`. + +### Changed: +- The namespace is now `LaunchDarkly.Sdk.Server.Integrations`. +- The entry point is now `LaunchDarkly.Sdk.Server.Integrations.Redis` rather than `LaunchDarkly.Client.Integrations.Redis` (or, in earlier versions, `LaunchDarkly.Client.Redis.RedisComponents`). +- The logger name is now `LaunchDarkly.Sdk.DataStore.Redis` rather than `LaunchDarkly.Client.Redis.RedisFeatureStoreCore`. + +### Removed: +- Removed the deprecated `RedisComponents` entry point and `RedisFeatureStoreBuilder`. +- The package no longer has a dependency on `Common.Logging` but instead integrates with the SDK's logging mechanism. + +## [2.0.1] - 2021-06-01 +### Fixed: +- The library was not fully compliant with the standard usage of Redis keys by other LaunchDarkly SDKs and by the Relay Proxy, as follows: although feature flag data was stored with the correct keys, the wrong key was used for the special value that indicates that the database has been initialized. As a result, if the Relay Proxy had stored data in Redis, the .NET SDK would not detect it, and if the .NET SDK had stored data in Redis, other SDKs might not detect it. + +## [2.0.0] - 2021-02-01 +This release updates the third-party dependency on `StackExchange.Redis` to use the 2.x version of that library. For details about how `StackExchange.Redis` 2.x differs from the 1.x versions, see its [release notes](https://stackexchange.github.io/StackExchange.Redis/ReleaseNotes.html). + +This version of `LaunchDarkly.ServerSdk.Redis` requires version 5.14 or higher of the LaunchDarkly .NET SDK (`LaunchDarkly.ServerSdk`). It supports both the older configuration API used in previous versions, and the newer configuration API that was introduced in version 5.14 of the SDK and in the [1.2.0](https://github.com/launchdarkly/dotnet-server-sdk-redis/releases/tag/1.2.0) release of this package. Using the newer API (see `LaunchDarkly.Client.Integrations.Redis` in this package) is preferable because that is how configuration will work in the 6.0 release of the SDK. 
+ +### Added: +- The `OperationTimeout` configuration property, which corresponds to `SyncTimeout` in the `StackExchange.Redis` API. + +### Changed: +- The minimum version of `StackExchange.Redis` is now 2.0.513. +- The minimum version of `LaunchDarkly.ServerSdk` is now 5.14.0. +- There is no longer a separate `LaunchDarkly.ServerSdk.Redis.StrongName` package that is the strong-named version; instead, there is just `LaunchDarkly.ServerSdk.Redis` which is always strong-named. That distinction was previously necessary because the `StackExchange.Redis` package had both strong-named and non-strong-named versions, which is no longer the case. +- The lowest compatible version of .NET Framework is now 4.6.1 (because that is the lowest version supported by `StackExchange.Redis` 2.x). The package still has a .NET Standard 2.0 target as well. + +### Removed: +- The `ResponseTimeout` configuration property, which is no longer supported by `StackExchange.Redis`. + +## [1.2.1] - 2021-06-01 +### Fixed: +- The library was not fully compliant with the standard usage of Redis keys by other LaunchDarkly SDKs and by the Relay Proxy, as follows: although feature flag data was stored with the correct keys, the wrong key was used for the special value that indicates that the database has been initialized. As a result, if the Relay Proxy had stored data in Redis, the .NET SDK would not detect it, and if the .NET SDK had stored data in Redis, other SDKs might not detect it. + +## [1.2.0] - 2021-01-26 +### Added: +- New classes `LaunchDarkly.Client.Integrations.Redis` and `LaunchDarkly.Client.Integrations.RedisDataStoreBuilder`, which serve the same purpose as the previous classes but are designed to work with the newer persistent data store API introduced in .NET SDK 5.14.0. + +### Deprecated: +- The old API in the `LaunchDarkly.Client.Redis` namespace. + +## [1.1.1] - 2019-05-13 +### Changed: +- Corresponding to the SDK package name change from `LaunchDarkly.Client` to `LaunchDarkly.ServerSdk`, this package is now called `LaunchDarkly.ServerSdk.Redis` (or `LaunchDarkly.ServerSdk.Redis.StrongName`). The functionality of the package, including the namespaces and class names, has not changed. + +## [1.1.0] - 2019-01-14 +### Added +- `RedisFeatureStoreBuilder.WithCaching` is the new way to configure local caching behavior, using the new SDK class `FeatureStoreCacheConfig`. This allows you to specify a limit on the number of cached items, which was not previously possible. Future releases of the SDK may add more caching parameters, which will then be automatically supported by this library. +- The assemblies in this package now have Authenticode signatures. + +### Changed +- The minimum LaunchDarkly.Client version for use with this library is now 5.6.1. + +### Deprecated +- `RedisFeatureStoreBuilder.WithCacheExpiration` + +## [1.0.0] - 2018-09-28 + +Initial release. diff --git a/pkgs/dotnet-server-sdk-redis/CONTRIBUTING.md b/pkgs/dotnet-server-sdk-redis/CONTRIBUTING.md new file mode 100644 index 00000000..654aef59 --- /dev/null +++ b/pkgs/dotnet-server-sdk-redis/CONTRIBUTING.md @@ -0,0 +1,63 @@ +# Contributing to the LaunchDarkly SDK Redis Integration + +The source code for this library is [here](https://github.com/launchdarkly/dotnet-core/tree/main/pkgs/dotnet-server-sdk-redis). We encourage pull-requests and other contributions from the community. 
Since this library is meant to be used in conjunction with the LaunchDarkly .NET Server SDK, you may want to look at the [.NET Server SDK source code](https://github.com/launchdarkly/dotnet-core/tree/main/pkgs/sdk/server) and our [SDK contributor's guide](https://docs.launchdarkly.com/sdk/concepts/contributors-guide). + +## Submitting bug reports and feature requests + +The LaunchDarkly SDK team monitors the [issue tracker](https://github.com/launchdarkly/dotnet-core/issues) in this repository. Bug reports and feature requests specific to this package should be filed in the issue tracker. The SDK team will respond to all newly filed issues within two business days. + +## Submitting pull requests + +We encourage pull requests and other contributions from the community. Before submitting pull requests, ensure that all temporary or unintended code is removed. Don't worry about adding reviewers to the pull request; the LaunchDarkly SDK team will add themselves. The SDK team will acknowledge all pull requests within two business days. + +## Build instructions + +### Prerequisites + +To set up your SDK build time environment, you must [download .NET development tools and follow the instructions](https://dotnet.microsoft.com/download). .NET 8.0 is preferred, since the .NET 8.0 tools are able to build for all supported target platforms. + +The project has a package dependency on `StackExchange.Redis`. The dependency version is intended to be the _minimum_ compatible version; applications are expected to override this with their own dependency on some higher version. + +The unit test project uses code from the `dotnet-server-sdk-shared-tests` project. See the [README.md](../shared/dotnet-server-sdk-shared-tests/README.md) file in that directory for more information. + +### Building + +To install all required packages: + +``` +dotnet restore +``` + +To build all targets of the project without running any tests: + +``` +dotnet build src/LaunchDarkly.ServerSdk.Redis +``` + +Or, to build only one target (in this case .NET Standard 2.0): + +``` +dotnet build src/LaunchDarkly.ServerSdk.Redis -f netstandard2.0 +``` + +Building the code locally in the default Debug configuration does not sign the assembly and does not require a key file. + +### Testing + +To run all unit tests, for all targets (this includes .NET Framework, so you can only do this in Windows): + +``` +dotnet test test/LaunchDarkly.ServerSdk.Redis.Tests +``` + +Or, to run tests only for the .NET Standard 2.0 target (using the .NET 8.0 runtime): + +``` +dotnet test test/LaunchDarkly.ServerSdk.Redis.Tests -f net8.0 +``` + +The tests expect you to have Redis running locally on the default port, 6379. One way to do this is with Docker: + +```bash +docker run -p 6379:6379 redis +``` diff --git a/pkgs/dotnet-server-sdk-redis/LICENSE b/pkgs/dotnet-server-sdk-redis/LICENSE new file mode 100644 index 00000000..fd10303d --- /dev/null +++ b/pkgs/dotnet-server-sdk-redis/LICENSE @@ -0,0 +1,13 @@ +Copyright 2018 Catamorphic, Co. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
\ No newline at end of file diff --git a/pkgs/dotnet-server-sdk-redis/LaunchDarkly.ServerSdk.Redis.sln b/pkgs/dotnet-server-sdk-redis/LaunchDarkly.ServerSdk.Redis.sln new file mode 100644 index 00000000..f39c57f6 --- /dev/null +++ b/pkgs/dotnet-server-sdk-redis/LaunchDarkly.ServerSdk.Redis.sln @@ -0,0 +1,37 @@ + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio 15 +VisualStudioVersion = 15.0.26730.16 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "LaunchDarkly.ServerSdk.Redis", "src\LaunchDarkly.ServerSdk.Redis.csproj", "{C497533B-8F43-4E5A-A6F4-F50880AA549C}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "LaunchDarkly.ServerSdk.Redis.Tests", "test\LaunchDarkly.ServerSdk.Redis.Tests.csproj", "{2F11CC6A-3427-423E-AF63-E3600FE63627}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "LaunchDarkly.ServerSdk.SharedTests", "..\shared\dotnet-server-sdk-shared-tests\src\LaunchDarkly.ServerSdk.SharedTests.csproj", "{1E4AC383-D6C7-4096-9D74-B10D059E84F1}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Release|Any CPU = Release|Any CPU + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {C497533B-8F43-4E5A-A6F4-F50880AA549C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {C497533B-8F43-4E5A-A6F4-F50880AA549C}.Debug|Any CPU.Build.0 = Debug|Any CPU + {C497533B-8F43-4E5A-A6F4-F50880AA549C}.Release|Any CPU.ActiveCfg = Release|Any CPU + {C497533B-8F43-4E5A-A6F4-F50880AA549C}.Release|Any CPU.Build.0 = Release|Any CPU + {2F11CC6A-3427-423E-AF63-E3600FE63627}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {2F11CC6A-3427-423E-AF63-E3600FE63627}.Debug|Any CPU.Build.0 = Debug|Any CPU + {2F11CC6A-3427-423E-AF63-E3600FE63627}.Release|Any CPU.ActiveCfg = Release|Any CPU + {2F11CC6A-3427-423E-AF63-E3600FE63627}.Release|Any CPU.Build.0 = Release|Any CPU + {1E4AC383-D6C7-4096-9D74-B10D059E84F1}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {1E4AC383-D6C7-4096-9D74-B10D059E84F1}.Debug|Any CPU.Build.0 = Debug|Any CPU + {1E4AC383-D6C7-4096-9D74-B10D059E84F1}.Release|Any CPU.ActiveCfg = Release|Any CPU + {1E4AC383-D6C7-4096-9D74-B10D059E84F1}.Release|Any CPU.Build.0 = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection + GlobalSection(ExtensibilityGlobals) = postSolution + SolutionGuid = {C92D0169-FDC9-4B5A-A3FA-70CD9609660D} + EndGlobalSection +EndGlobal diff --git a/pkgs/dotnet-server-sdk-redis/README.md b/pkgs/dotnet-server-sdk-redis/README.md new file mode 100644 index 00000000..676d74c6 --- /dev/null +++ b/pkgs/dotnet-server-sdk-redis/README.md @@ -0,0 +1,48 @@ +# LaunchDarkly Server-Side SDK for .NET - Redis integration + +[![NuGet](https://img.shields.io/nuget/v/LaunchDarkly.ServerSdk.Redis.svg?style=flat-square)](https://www.nuget.org/packages/LaunchDarkly.ServerSdk.Redis/) +[![CircleCI](https://circleci.com/gh/launchdarkly/dotnet-server-sdk-redis.svg?style=shield)](https://circleci.com/gh/launchdarkly/dotnet-server-sdk-redis) +[![Documentation](https://img.shields.io/static/v1?label=GitHub+Pages&message=API+reference&color=00add8)](https://launchdarkly.github.io/dotnet-server-sdk-redis) + +This library provides a Redis-backed persistence mechanism (data store) for the [LaunchDarkly .NET SDK](https://github.com/launchdarkly/dotnet-server-sdk), replacing the default in-memory data store. 
The underlying Redis client implementation is [StackExchange.Redis](https://github.com/StackExchange/StackExchange.Redis). + +For more information, see also: [Using Redis as a persistent feature store](https://docs.launchdarkly.com/sdk/features/storing-data/redis#net-server-side). + +Version 5.0.0 and above of this library works with version 8.0.0 and above of the LaunchDarkly .NET SDK. For earlier versions of the SDK, see the changelog for which version of this library to use. + +It has a dependency on StackExchange.Redis version 2.0.513. If you are using a higher version of StackExchange.Redis, you should install it explicitly as a dependency in your application to override this minimum version. + +For full usage details and examples, see the [API reference](http://launchdarkly.github.io/dotnet-server-sdk-redis). + +## Supported .NET versions + +This version of the library is built for the following targets: + +* .NET Framework 4.6.2: works in .NET Framework of that version or higher. +* .NET Standard 2.0: works in .NET Core 3.x, .NET 6.x, or in a library targeted to .NET Standard 2.x. + +The .NET build tools should automatically load the most appropriate build of the library for whatever platform your application or library is targeted to. + +## Signing + +The published version of this assembly is both digitally signed by LaunchDarkly and [strong-named](https://docs.microsoft.com/en-us/dotnet/framework/app-domains/strong-named-assemblies). + +Building the code locally in the default Debug configuration does not sign the assembly and does not require a key file. + +## Contributing + +We encourage pull requests and other contributions from the community. Check out our [contributing guidelines](CONTRIBUTING.md) for instructions on how to contribute to this project. + +## About LaunchDarkly + +* LaunchDarkly is a continuous delivery platform that provides feature flags as a service and allows developers to iterate quickly and safely. We allow you to easily flag your features and manage them from the LaunchDarkly dashboard. With LaunchDarkly, you can: + * Roll out a new feature to a subset of your users (like a group of users who opt-in to a beta tester group), gathering feedback and bug reports from real-world use cases. + * Gradually roll out a feature to an increasing percentage of users, and track the effect that the feature has on key metrics (for instance, how likely is a user to complete a purchase if they have feature A versus feature B?). + * Turn off a feature that you realize is causing performance problems in production, without needing to re-deploy, or even restart the application with a changed configuration file. + * Grant access to certain features based on user attributes, like payment plan (eg: users on the ‘gold’ plan get access to more features than users in the ‘silver’ plan). Disable parts of your application to facilitate maintenance, without taking everything offline. +* LaunchDarkly provides feature flag SDKs for a wide variety of languages and technologies. Read [our documentation](https://docs.launchdarkly.com/sdk) for a complete list. 
+* Explore LaunchDarkly + * [launchdarkly.com](https://www.launchdarkly.com/ "LaunchDarkly Main Website") for more information + * [docs.launchdarkly.com](https://docs.launchdarkly.com/ "LaunchDarkly Documentation") for our documentation and SDK reference guides + * [apidocs.launchdarkly.com](https://apidocs.launchdarkly.com/ "LaunchDarkly API Documentation") for our API documentation + * [blog.launchdarkly.com](https://blog.launchdarkly.com/ "LaunchDarkly Blog Documentation") for the latest product updates diff --git a/pkgs/dotnet-server-sdk-redis/SECURITY.md b/pkgs/dotnet-server-sdk-redis/SECURITY.md new file mode 100644 index 00000000..10f1d1ac --- /dev/null +++ b/pkgs/dotnet-server-sdk-redis/SECURITY.md @@ -0,0 +1,5 @@ +# Reporting and Fixing Security Issues + +Please report all security issues to the LaunchDarkly security team by submitting a bug bounty report to our [HackerOne program](https://hackerone.com/launchdarkly?type=team). LaunchDarkly will triage and address all valid security issues following the response targets defined in our program policy. Valid security issues may be eligible for a bounty. + +Please do not open issues or pull requests for security issues. This makes the problem immediately visible to everyone, including potentially malicious actors. diff --git a/pkgs/dotnet-server-sdk-redis/github_actions.env b/pkgs/dotnet-server-sdk-redis/github_actions.env new file mode 100644 index 00000000..90a389ae --- /dev/null +++ b/pkgs/dotnet-server-sdk-redis/github_actions.env @@ -0,0 +1,5 @@ +WORKSPACE_PATH=pkgs/dotnet-server-sdk-redis +PROJECT_FILE=pkgs/dotnet-server-sdk-redis/src/LaunchDarkly.ServerSdk.Redis.csproj +BUILD_OUTPUT_PATH=pkgs/dotnet-server-sdk-redis/src/bin/Release/ +BUILD_OUTPUT_DLL_NAME=LaunchDarkly.ServerSdk.Redis.dll +TEST_PROJECT_FILE=pkgs/dotnet-server-sdk-redis/test/LaunchDarkly.ServerSdk.Redis.Tests.csproj \ No newline at end of file diff --git a/pkgs/dotnet-server-sdk-redis/src/AssemblyInfo.cs b/pkgs/dotnet-server-sdk-redis/src/AssemblyInfo.cs new file mode 100644 index 00000000..87f08c27 --- /dev/null +++ b/pkgs/dotnet-server-sdk-redis/src/AssemblyInfo.cs @@ -0,0 +1,8 @@ +using System.Reflection; +using System.Runtime.CompilerServices; + +#if DEBUG +// Allow unit tests to see internal classes (note, the test assembly is not signed; +// tests must be run against the Debug configuration of this assembly) +[assembly: InternalsVisibleTo("LaunchDarkly.ServerSdk.Redis.Tests")] +#endif diff --git a/pkgs/dotnet-server-sdk-redis/src/LaunchDarkly.ServerSdk.Redis.csproj b/pkgs/dotnet-server-sdk-redis/src/LaunchDarkly.ServerSdk.Redis.csproj new file mode 100644 index 00000000..7bca6653 --- /dev/null +++ b/pkgs/dotnet-server-sdk-redis/src/LaunchDarkly.ServerSdk.Redis.csproj @@ -0,0 +1,36 @@ + + + 5.0.0 + netstandard2.0;net462 + LaunchDarkly.ServerSdk.Redis + Library + LaunchDarkly.ServerSdk.Redis + 7.3 + LaunchDarkly + LaunchDarkly + LaunchDarkly + LaunchDarkly Server-Side .NET SDK Redis Integration + Copyright 2018 LaunchDarkly + Apache-2.0 + https://github.com/launchdarkly/dotnet-server-sdk-redis + https://github.com/launchdarkly/dotnet-server-sdk-redis + main + true + snupkg + LaunchDarkly.Sdk.Server.Integrations + + + + + + + + + bin\$(Configuration)\$(TargetFramework)\LaunchDarkly.ServerSdk.Redis.xml + + + + true + ../../../LaunchDarkly.Redis.snk + + diff --git a/pkgs/dotnet-server-sdk-redis/src/Redis.cs b/pkgs/dotnet-server-sdk-redis/src/Redis.cs new file mode 100644 index 00000000..2a910b2d --- /dev/null +++ b/pkgs/dotnet-server-sdk-redis/src/Redis.cs @@ 
-0,0 +1,115 @@ +using System; +using System.Net; +using System.Net.NetworkInformation; +using LaunchDarkly.Sdk.Server.Subsystems; + +namespace LaunchDarkly.Sdk.Server.Integrations +{ + /// + /// Integration between the LaunchDarkly SDK and Redis. + /// + public static class Redis + { + /// + /// The default location of the Redis server: localhost:6379 + /// + public static readonly EndPoint DefaultRedisEndPoint = new DnsEndPoint("localhost", 6379); + + /// + /// The default value for . + /// + public static readonly string DefaultPrefix = "launchdarkly"; + + /// + /// The default value for . + /// + public static readonly TimeSpan DefaultConnectTimeout = TimeSpan.FromSeconds(5); + + /// + /// The default value for . + /// + public static readonly TimeSpan DefaultOperationTimeout = TimeSpan.FromSeconds(3); + + /// + /// Returns a builder object for creating a Redis-backed persistent data store. + /// + /// + /// + /// This is for the main data store that holds feature flag data. To configure a + /// Big Segment store, use instead. + /// + /// + /// You can use methods of the builder to specify any non-default Redis options + /// you may want, before passing the builder to + /// . + /// In this example, the store is configured to use a Redis host called "host1": + /// + /// + /// var config = Configuration.Builder("sdk-key") + /// .DataStore( + /// Components.PersistentDataStore( + /// Redis.DataStore().Uri("redis://host1:6379") + /// ) + /// ) + /// .Build(); + /// + /// + /// Note that the SDK also has its own options related to data storage that are configured + /// at a different level, because they are independent of what database is being used. For + /// instance, the builder returned by + /// has options for caching: + /// + /// + /// var config = Configuration.Builder("sdk-key") + /// .DataStore( + /// Components.PersistentDataStore( + /// Redis.DataStore().Uri("redis://my-redis-host") + /// ).CacheSeconds(15) + /// ) + /// .Build(); + /// + /// + /// a data store configuration object + public static RedisStoreBuilder DataStore() => + new BuilderForDataStore(); + + /// + /// Returns a builder object for creating a Redis-backed Big Segment store. + /// + /// + /// + /// You can use methods of the builder to specify any non-default Redis options + /// you may want, before passing the builder to + /// . + /// In this example, the store is configured to use a Redis host called "host2": + /// + /// + /// var config = Configuration.Builder("sdk-key") + /// .DataStore( + /// Components.BigSegments( + /// Redis.BigSegmentStore().Uri("redis://host2:6379") + /// ) + /// ) + /// .Build(); + /// + /// + /// Note that the SDK also has its own options related to Big Segments that are configured + /// at a different level, because they are independent of what database is being used. 
For + /// instance, the builder returned by + /// has an option for the status polling interval: + /// + /// + /// var config = Configuration.Builder("sdk-key") + /// .DataStore( + /// Components.BigSegments( + /// Redis.BigSegmentStore().Uri("redis://my-redis-host") + /// ).StatusPollInterval(TimeSpan.FromSeconds(30)) + /// ) + /// .Build(); + /// + /// + /// a Big Segment store configuration object + public static RedisStoreBuilder BigSegmentStore() => + new BuilderForBigSegments(); + } +} diff --git a/pkgs/dotnet-server-sdk-redis/src/RedisBigSegmentStoreImpl.cs b/pkgs/dotnet-server-sdk-redis/src/RedisBigSegmentStoreImpl.cs new file mode 100644 index 00000000..000945bc --- /dev/null +++ b/pkgs/dotnet-server-sdk-redis/src/RedisBigSegmentStoreImpl.cs @@ -0,0 +1,61 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading.Tasks; +using LaunchDarkly.Sdk.Server.Subsystems; +using LaunchDarkly.Logging; +using StackExchange.Redis; + +using static LaunchDarkly.Sdk.Server.Subsystems.BigSegmentStoreTypes; + +namespace LaunchDarkly.Sdk.Server.Integrations +{ + internal sealed class RedisBigSegmentStoreImpl : RedisStoreImplBase, IBigSegmentStore + { + private readonly string _syncTimeKey; + private readonly string _includedKeyPrefix; + private readonly string _excludedKeyPrefix; + + internal RedisBigSegmentStoreImpl( + ConfigurationOptions redisConfig, + string prefix, + Logger log + ) : base(redisConfig, prefix, log) + { + _syncTimeKey = prefix + ":big_segments_synchronized_on"; + _includedKeyPrefix = prefix + ":big_segment_include:"; + _excludedKeyPrefix = prefix + ":big_segment_exclude:"; + } + + public async Task GetMembershipAsync(string userHash) + { + var db = _redis.GetDatabase(); + + var includedRefs = await db.SetMembersAsync(_includedKeyPrefix + userHash); + var excludedRefs = await db.SetMembersAsync(_excludedKeyPrefix + userHash); + + return NewMembershipFromSegmentRefs(RedisValuesToStrings(includedRefs), + RedisValuesToStrings(excludedRefs)); + } + + public async Task GetMetadataAsync() + { + var db = _redis.GetDatabase(); + + var value = await db.StringGetAsync(_syncTimeKey); + if (value.IsNull) + { + return null; + } + if (value == "") + { + return new StoreMetadata { LastUpToDate = null }; + } + var millis = long.Parse(value); + return new StoreMetadata { LastUpToDate = UnixMillisecondTime.OfMillis(millis) }; + } + + private static IEnumerable RedisValuesToStrings(RedisValue[] values) => + (values is null) ? null : values.Select(v => v.ToString()); + } +} diff --git a/pkgs/dotnet-server-sdk-redis/src/RedisDataStoreImpl.cs b/pkgs/dotnet-server-sdk-redis/src/RedisDataStoreImpl.cs new file mode 100644 index 00000000..4fbe5e5b --- /dev/null +++ b/pkgs/dotnet-server-sdk-redis/src/RedisDataStoreImpl.cs @@ -0,0 +1,177 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Net; +using LaunchDarkly.Logging; +using LaunchDarkly.Sdk.Server.Subsystems; +using StackExchange.Redis; + +using static LaunchDarkly.Sdk.Server.Subsystems.DataStoreTypes; + +namespace LaunchDarkly.Sdk.Server.Integrations +{ + /// + /// Internal implementation of the Redis data store. + /// + /// + /// + /// Implementation notes: + /// + /// + /// Feature flags, segments, and any other kind of entity the LaunchDarkly client may wish + /// to store, are stored as hash values with the main key "{prefix}:features", "{prefix}:segments", + /// etc. 
+ /// Redis only allows a single string value per hash key, so there is no way to store the + /// item metadata (version number and deletion status) separately from the value. The SDK understands + /// that some data store implementations don't have that capability, so it will always pass us a + /// serialized item string that contains the metadata in it, and we're allowed to return 0 as the + /// version number of a queried item to indicate "you have to deserialize the item to find out the + /// metadata". + /// + /// The special key "{prefix}:$inited" indicates that the store contains a complete data set. + /// + /// + /// + internal sealed class RedisDataStoreImpl : RedisStoreImplBase, IPersistentDataStore + { + // This is used for unit testing only + internal Action _updateHook; + + private readonly string _initedKey; + + internal RedisDataStoreImpl( + ConfigurationOptions redisConfig, + string prefix, + Logger log + ) : base(redisConfig, prefix, log) + { + _initedKey = prefix + ":$inited"; + } + + public bool Initialized() => + _redis.GetDatabase().KeyExists(_initedKey); + + public void Init(FullDataSet allData) + { + IDatabase db = _redis.GetDatabase(); + ITransaction txn = db.CreateTransaction(); + foreach (var collection in allData.Data) + { + string key = ItemsKey(collection.Key); + txn.KeyDeleteAsync(key); + foreach (var item in collection.Value.Items) + { + txn.HashSetAsync(key, item.Key, item.Value.SerializedItem); + // Note, these methods are async because this Redis client treats all actions + // in a transaction as async - they are only sent to Redis when we execute the + // transaction. We don't need to await them. + } + } + txn.StringSetAsync(_initedKey, ""); + txn.Execute(); + } + + public SerializedItemDescriptor? Get(DataKind kind, string key) + { + IDatabase db = _redis.GetDatabase(); + string json = db.HashGet(ItemsKey(kind), key); + if (json == null) + { + _log.Debug("[get] Key: {0} not found in \"{1}\"", key, kind.Name); + return null; + } + return new SerializedItemDescriptor(0, false, json); // see implementation notes + } + + public KeyedItems GetAll(DataKind kind) + { + IDatabase db = _redis.GetDatabase(); + HashEntry[] allEntries = db.HashGetAll(ItemsKey(kind)); + var result = new List>(); + foreach (HashEntry entry in allEntries) + { + result.Add(new KeyValuePair(entry.Name, + new SerializedItemDescriptor(0, false, entry.Value))); // see implementation notes + } + return new KeyedItems(result); + } + + public bool Upsert(DataKind kind, string key, SerializedItemDescriptor newItem) + { + IDatabase db = _redis.GetDatabase(); + string baseKey = ItemsKey(kind); + while (true) + { + string oldData; + try + { + oldData = db.HashGet(baseKey, key); + } + catch (RedisTimeoutException e) + { + _log.Error("Timeout in update when reading {0} from {1}: {2}", key, baseKey, e.ToString()); + throw; + } + // Here, unfortunately, we have to deserialize the old item (if any) just to find + // out its version number (see implementation notes). + var oldVersion = (oldData is null) ? 0 : kind.Deserialize(oldData).Version; + if (oldVersion >= newItem.Version) + { + _log.Debug("Attempted to {0} key: {1} version: {2} with a version that is" + + " the same or older: {3} in \"{4}\"", + newItem.Deleted ? "delete" : "update", + key, oldVersion, newItem.Version, kind.Name); + return false; + } + + // This hook is used only in unit tests + _updateHook?.Invoke(); + + // Note that transactions work a bit differently in StackExchange.Redis than in other + // Redis clients. 
The same Redis connection is shared across all threads, so it can't + // set a WATCH at the moment we start the transaction. Instead, it saves up all of + // the actions we send during the transaction, and replays them all within a MULTI + // when the transaction. AddCondition() is this client's way of doing a WATCH, and it + // can only use an equality match on the whole value (which is unfortunate since a + // serialized flag value could be fairly large). + ITransaction txn = db.CreateTransaction(); + txn.AddCondition(oldData is null ? Condition.HashNotExists(baseKey, key) : + Condition.HashEqual(baseKey, key, oldData)); + + txn.HashSetAsync(baseKey, key, newItem.SerializedItem); + + try + { + bool success = txn.Execute(); + if (!success) + { + // The watch was triggered, we should retry + _log.Debug("Concurrent modification detected, retrying"); + continue; + } + } + catch (RedisTimeoutException e) + { + _log.Error("Timeout on update of {0} in {1}: {2}", key, baseKey, e.ToString()); + throw; + } + return true; + } + } + + public bool IsStoreAvailable() + { + try + { + Initialized(); // don't care about the return value, just that it doesn't throw an exception + return true; + } + catch + { // don't care about exception class, since any exception means the Redis request couldn't be made + return false; + } + } + + private string ItemsKey(DataKind kind) => _prefix + ":" + kind.Name; + } +} diff --git a/pkgs/dotnet-server-sdk-redis/src/RedisStoreBuilder.cs b/pkgs/dotnet-server-sdk-redis/src/RedisStoreBuilder.cs new file mode 100644 index 00000000..9e55b433 --- /dev/null +++ b/pkgs/dotnet-server-sdk-redis/src/RedisStoreBuilder.cs @@ -0,0 +1,246 @@ +using System; +using System.Collections.Generic; +using System.Net; +using LaunchDarkly.Sdk.Server.Subsystems; +using StackExchange.Redis; + +namespace LaunchDarkly.Sdk.Server.Integrations +{ + /// + /// A builder for configuring the + /// Redis-based persistent data store. + /// + /// + /// + /// This can be used either for the main data store that holds feature flag data, or for the big + /// segment store, or both. If you are using both, they do not have to have the same parameters. For + /// instance, in this example the main data store uses a Redis host called "host1" and the big + /// segment store uses a Redis host called "host2": + /// + /// + /// var config = Configuration.Builder("sdk-key") + /// .DataStore( + /// Components.PersistentDataStore( + /// Redis.DataStore().Uri("redis://host1:6379") + /// ) + /// ) + /// .BigSegments( + /// Components.BigSegments( + /// Redis.DataStore().Uri("redis://host2:6379") + /// ) + /// ) + /// .Build(); + /// + /// + /// Note that the builder is passed to one of two methods, + /// or + /// , depending on the context in + /// which it is being used. This is because each of those contexts has its own additional + /// configuration options that are unrelated to the Redis options. 
+ /// + /// + /// Builder calls can be chained, for example: + /// + /// + /// var config = Configuration.Builder("sdk-key") + /// .DataStore( + /// Components.PersistentDataStore( + /// Redis.DataStore() + /// .Uri("redis://my-redis-host") + /// .Database(1) + /// ) + /// .CacheSeconds(15) + /// ) + /// .Build(); + /// + /// + public abstract class RedisStoreBuilder : IComponentConfigurer, IDiagnosticDescription + { + internal ConfigurationOptions _redisConfig = new ConfigurationOptions(); + internal string _prefix = Redis.DefaultPrefix; + + internal RedisStoreBuilder() + { + _redisConfig.EndPoints.Add(Redis.DefaultRedisEndPoint); + _redisConfig.ConnectTimeout = (int)Redis.DefaultConnectTimeout.TotalMilliseconds; + } + + /// + /// Specifies all Redis configuration options at once. + /// + /// a instance + /// the builder + public RedisStoreBuilder RedisConfiguration(ConfigurationOptions config) + { + _redisConfig = config.Clone(); + return this; + } + + /// + /// Specifies a single Redis server by hostname and port. + /// + /// hostname of the Redis server + /// port of the Redis server + /// the builder + public RedisStoreBuilder HostAndPort(string host, int port) => + EndPoint(new DnsEndPoint(host, port)); + + /// + /// Specifies a single Redis server as an EndPoint. + /// + /// location of the Redis server + /// the builder + public RedisStoreBuilder EndPoint(EndPoint endPoint) => + EndPoints(new List { endPoint }); + + /// + /// Shortcut for calling with a string. + /// + /// the Redis server URI as a string + /// the builder + /// + public RedisStoreBuilder Uri(string uri) => Uri(new Uri(uri)); + + /// + /// Specifies a Redis server - and, optionally, other properties including + /// credentials and database number - using a URI. + /// + /// the Redis server URI + /// the builder + /// + public RedisStoreBuilder Uri(Uri uri) + { + if (uri.Scheme.ToLower() != "redis") + { + throw new ArgumentException("URI scheme must be 'redis'"); + } + HostAndPort(uri.Host, uri.Port); + if (!string.IsNullOrEmpty(uri.UserInfo)) + { + var parts = uri.UserInfo.Split(':'); + if (parts.Length == 2) + { + // Redis doesn't use the username + _redisConfig.Password = parts[1]; + } + else + { + throw new ArgumentException("Credentials must be in the format ':password'"); + } + } + if (!string.IsNullOrEmpty(uri.AbsolutePath) && uri.AbsolutePath != "/") + { + var path = uri.AbsolutePath; + if (path.StartsWith("/")) + { + path = path.Substring(1); + } + var dbIndex = Int32.Parse(path); + _redisConfig.DefaultDatabase = dbIndex; + } + return this; + } + + /// + /// Specifies multiple Redis servers as a list of EndPoints. + /// + /// locations of the Redis servers + /// the builder + public RedisStoreBuilder EndPoints(IList endPoints) + { + _redisConfig.EndPoints.Clear(); + foreach (var ep in endPoints) + { + _redisConfig.EndPoints.Add(ep); + } + return this; + } + + /// + /// Specifies which database to use within the Redis server. The default is 0. + /// + /// index of the database to use + /// the builder + public RedisStoreBuilder DatabaseIndex(int database) + { + _redisConfig.DefaultDatabase = database; + return this; + } + + /// + /// Specifies the maximum time to wait for a connection to the Redis server. + /// + /// the timeout interval + /// the builder + public RedisStoreBuilder ConnectTimeout(TimeSpan timeout) + { + _redisConfig.ConnectTimeout = (int)timeout.TotalMilliseconds; + return this; + } + + /// + /// Specifies the maximum time to wait for each synchronous Redis operation to complete. 
+ /// If you are seeing timeout errors - which could result from either an overburdened + /// Redis server, or an unusually large operation such as storing a very large feature + /// flag - you may want to increase this setting. + /// + /// the timeout interval + /// the builder + public RedisStoreBuilder OperationTimeout(TimeSpan timeout) + { + _redisConfig.SyncTimeout = (int)timeout.TotalMilliseconds; + return this; + } + + /// + /// Specifies the namespace prefix for all keys stored in Redis. + /// + /// the namespace prefix, or null to use + /// the builder + public RedisStoreBuilder Prefix(string prefix) + { + _prefix = string.IsNullOrEmpty(prefix) ? Redis.DefaultPrefix : prefix; + return this; + } + + /// + /// Allows you to modify any of the configuration options supported by StackExchange.Redis + /// directly. The current configuration will be passed to your Action, which can modify it + /// in any way. + /// + /// + /// + /// Redis.DataStore() + /// .RedisConfigChanges((config) => { + /// config.Ssl = true; + /// }) + /// + /// + /// + /// + public RedisStoreBuilder RedisConfigChanges(Action modifyConfig) + { + modifyConfig.Invoke(_redisConfig); + return this; + } + + /// + public abstract T Build(LdClientContext context); + + /// + public LdValue DescribeConfiguration(LdClientContext context) => + LdValue.Of("Redis"); + } + + internal sealed class BuilderForDataStore : RedisStoreBuilder + { + public override IPersistentDataStore Build(LdClientContext context) => + new RedisDataStoreImpl(_redisConfig, _prefix, context.Logger.SubLogger("DataStore.Redis")); + } + + internal sealed class BuilderForBigSegments : RedisStoreBuilder + { + public override IBigSegmentStore Build(LdClientContext context) => + new RedisBigSegmentStoreImpl(_redisConfig, _prefix, context.Logger.SubLogger("BigSegments.Redis")); + } +} diff --git a/pkgs/dotnet-server-sdk-redis/src/RedisStoreImplBase.cs b/pkgs/dotnet-server-sdk-redis/src/RedisStoreImplBase.cs new file mode 100644 index 00000000..d7799f61 --- /dev/null +++ b/pkgs/dotnet-server-sdk-redis/src/RedisStoreImplBase.cs @@ -0,0 +1,54 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Net; +using LaunchDarkly.Logging; +using LaunchDarkly.Sdk.Server.Subsystems; +using StackExchange.Redis; + +namespace LaunchDarkly.Sdk.Server.Integrations +{ + internal abstract class RedisStoreImplBase : IDisposable + { + protected readonly ConnectionMultiplexer _redis; + protected readonly string _prefix; + protected readonly Logger _log; + + protected RedisStoreImplBase( + ConfigurationOptions redisConfig, + string prefix, + Logger log + ) + { + _log = log; + var redisConfigCopy = redisConfig.Clone(); + _redis = ConnectionMultiplexer.Connect(redisConfigCopy); + _prefix = prefix; + _log.Info("Using Redis data store at {0} with prefix \"{1}\"", + string.Join(", ", redisConfig.EndPoints.Select(DescribeEndPoint)), prefix); + } + + public void Dispose() + { + Dispose(true); + GC.SuppressFinalize(this); + } + + private void Dispose(bool disposing) + { + if (disposing) + { + _redis.Dispose(); + } + } + + private string DescribeEndPoint(EndPoint e) + { + // The default ToString() method of DnsEndPoint adds a prefix of "Unspecified", which looks + // confusing in our log messages. + return (e is DnsEndPoint de) ? 
+ string.Format("{0}:{1}", de.Host, de.Port) : + e.ToString(); + } + } +} diff --git a/pkgs/dotnet-server-sdk-redis/test/LaunchDarkly.ServerSdk.Redis.Tests.csproj b/pkgs/dotnet-server-sdk-redis/test/LaunchDarkly.ServerSdk.Redis.Tests.csproj new file mode 100644 index 00000000..804a0265 --- /dev/null +++ b/pkgs/dotnet-server-sdk-redis/test/LaunchDarkly.ServerSdk.Redis.Tests.csproj @@ -0,0 +1,19 @@ + + + net8.0;net462 + false + true + LaunchDarkly.Sdk.Server.Integrations + + + + + + + + + + + + + diff --git a/pkgs/dotnet-server-sdk-redis/test/RedisBigSegmentStoreTest.cs b/pkgs/dotnet-server-sdk-redis/test/RedisBigSegmentStoreTest.cs new file mode 100644 index 00000000..51dcf253 --- /dev/null +++ b/pkgs/dotnet-server-sdk-redis/test/RedisBigSegmentStoreTest.cs @@ -0,0 +1,60 @@ +using System.Collections.Generic; +using System.Threading.Tasks; +using LaunchDarkly.Sdk.Server.Subsystems; +using LaunchDarkly.Sdk.Server.SharedTests.BigSegmentStore; +using StackExchange.Redis; +using Xunit.Abstractions; + +using static LaunchDarkly.Sdk.Server.Subsystems.BigSegmentStoreTypes; + +namespace LaunchDarkly.Sdk.Server.Integrations +{ + public class RedisBigSegmentStoreTest : BigSegmentStoreBaseTests + { + private readonly ConnectionMultiplexer _redis; + + override protected BigSegmentStoreTestConfig Configuration => new BigSegmentStoreTestConfig + { + StoreFactoryFunc = MakeStoreFactory, + ClearDataAction = ClearData, + SetMetadataAction = SetMetadata, + SetSegmentsAction = SetSegments + }; + + public RedisBigSegmentStoreTest(ITestOutputHelper testOutput) : base(testOutput) + { + _redis = ConnectionMultiplexer.Connect("localhost:6379,allowAdmin=true"); + } + + private IComponentConfigurer MakeStoreFactory(string prefix) => + Redis.BigSegmentStore().Prefix(prefix); + + private async Task ClearData(string prefix) => + await RedisDataStoreTest.ClearDataWithPrefix(_redis, prefix); + + private async Task SetMetadata(string prefix, StoreMetadata metadata) => + await _redis.GetDatabase().StringSetAsync( + prefix + ":big_segments_synchronized_on", + metadata.LastUpToDate.HasValue ? 
+ metadata.LastUpToDate.Value.Value.ToString() : + "" + ); + + private async Task SetSegments(string prefix, string userHash, + IEnumerable includedRefs, IEnumerable excludedRefs) + { + var db = _redis.GetDatabase(); + + var includeKey = prefix + ":big_segment_include:" + userHash; + var excludeKey = prefix + ":big_segment_exclude:" + userHash; + foreach (var r in includedRefs) + { + await db.SetAddAsync(includeKey, r); + } + foreach (var r in excludedRefs) + { + await db.SetAddAsync(excludeKey, r); + } + } + } +} diff --git a/pkgs/dotnet-server-sdk-redis/test/RedisDataStoreBuilderTest.cs b/pkgs/dotnet-server-sdk-redis/test/RedisDataStoreBuilderTest.cs new file mode 100644 index 00000000..87d5d22e --- /dev/null +++ b/pkgs/dotnet-server-sdk-redis/test/RedisDataStoreBuilderTest.cs @@ -0,0 +1,117 @@ +using System; +using System.Collections.Generic; +using System.Net; +using Xunit; + +namespace LaunchDarkly.Sdk.Server.Integrations +{ + public class RedisDataStoreBuilderTest + { + [Fact] + public void DefaultConfigHasDefaultRedisHostAndPort() + { + var builder = Redis.DataStore(); + Assert.Collection(builder._redisConfig.EndPoints, + e => Assert.Equal(new DnsEndPoint("localhost", 6379), e)); + } + + [Fact] + public void EndPoint() + { + var builder = Redis.DataStore(); + DnsEndPoint ep = new DnsEndPoint("test", 9999); + builder.EndPoint(ep); + Assert.Collection(builder._redisConfig.EndPoints, + e => Assert.Equal(ep, e)); + } + + [Fact] + public void MultipleEndPoints() + { + var builder = Redis.DataStore(); + DnsEndPoint ep1 = new DnsEndPoint("test", 9998); + DnsEndPoint ep2 = new DnsEndPoint("test", 9999); + builder.EndPoints(new List { ep1, ep2 }); + Assert.Collection(builder._redisConfig.EndPoints, + e => Assert.Equal(ep1, e), + e => Assert.Equal(ep2, e)); + } + + [Fact] + public void HostAndPort() + { + var builder = Redis.DataStore(); + builder.HostAndPort("test", 9999); + Assert.Collection(builder._redisConfig.EndPoints, + e => Assert.Equal(new DnsEndPoint("test", 9999), e)); + } + + [Fact] + public void MinimalUri() + { + var builder = Redis.DataStore(); + builder.Uri(new Uri("redis://test:9999")); + Assert.Collection(builder._redisConfig.EndPoints, + e => Assert.Equal(new DnsEndPoint("test", 9999), e)); + Assert.Null(builder._redisConfig.Password); + Assert.Null(builder._redisConfig.DefaultDatabase); + } + + [Fact] + public void UriWithPassword() + { + var builder = Redis.DataStore(); + builder.Uri(new Uri("redis://:secret@test:9999")); + Assert.Collection(builder._redisConfig.EndPoints, + e => Assert.Equal(new DnsEndPoint("test", 9999), e)); + Assert.Equal("secret", builder._redisConfig.Password); + Assert.Null(builder._redisConfig.DefaultDatabase); + } + + [Fact] + public void UriWithDatabase() + { + var builder = Redis.DataStore(); + builder.Uri(new Uri("redis://@test:9999/8")); + Assert.Collection(builder._redisConfig.EndPoints, + e => Assert.Equal(new DnsEndPoint("test", 9999), e)); + Assert.Null(builder._redisConfig.Password); + Assert.Equal(8, builder._redisConfig.DefaultDatabase); + } + + [Fact] + public void Database() + { + var builder = Redis.DataStore(); + builder.DatabaseIndex(8); + Assert.Equal(8, builder._redisConfig.DefaultDatabase); + } + + [Fact] + public void ConnectTimeout() + { + var builder = Redis.DataStore(); + builder.ConnectTimeout(TimeSpan.FromSeconds(8)); + Assert.Equal(8000, builder._redisConfig.ConnectTimeout); + } + + [Fact] + public void OperationTimeout() + { + var builder = Redis.DataStore(); + builder.OperationTimeout(TimeSpan.FromSeconds(8)); + 
Assert.Equal(8000, builder._redisConfig.SyncTimeout); + } + + [Fact] + public void Prefix() + { + var builder = Redis.DataStore(); + Assert.Equal(Redis.DefaultPrefix, builder._prefix); + builder.Prefix("abc"); + Assert.Equal("abc", builder._prefix); + builder.Prefix(null); + Assert.Equal(Redis.DefaultPrefix, builder._prefix); + } + } +} diff --git a/pkgs/dotnet-server-sdk-redis/test/RedisDataStoreTest.cs b/pkgs/dotnet-server-sdk-redis/test/RedisDataStoreTest.cs new file mode 100644 index 00000000..5d2f9306 --- /dev/null +++ b/pkgs/dotnet-server-sdk-redis/test/RedisDataStoreTest.cs @@ -0,0 +1,77 @@ +using System; +using System.Linq; +using System.Threading.Tasks; +using LaunchDarkly.Logging; +using LaunchDarkly.Sdk.Server.Subsystems; +using LaunchDarkly.Sdk.Server.SharedTests.DataStore; +using StackExchange.Redis; +using Xunit; +using Xunit.Abstractions; + +namespace LaunchDarkly.Sdk.Server.Integrations +{ + public class RedisDataStoreTest : PersistentDataStoreBaseTests + { + protected override PersistentDataStoreTestConfig Configuration => + new PersistentDataStoreTestConfig + { + StoreFactoryFunc = MakeStoreFactory, + ClearDataAction = ClearAllData, + SetConcurrentModificationHookAction = SetUpdateHook + }; + + public RedisDataStoreTest(ITestOutputHelper testOutput) : base(testOutput) { } + + private IComponentConfigurer MakeStoreFactory(string prefix) + { + return Redis.DataStore().Prefix(prefix); + } + + private async Task ClearAllData(string prefix) + { + using (var cxn = ConnectionMultiplexer.Connect("localhost:6379,allowAdmin=true")) + { + await ClearDataWithPrefix(cxn, prefix); + } + } + + internal static async Task ClearDataWithPrefix(ConnectionMultiplexer cxn, string prefix) + { + prefix = string.IsNullOrWhiteSpace(prefix) ? Redis.DefaultPrefix : prefix; + var server = cxn.GetServer("localhost:6379"); + var db = cxn.GetDatabase(); + var keys = server.Keys().ToList(); + foreach (var key in keys) + { + if (key.ToString().StartsWith(prefix + ":")) + { + await db.KeyDeleteAsync(key); + } + } + } + + private void SetUpdateHook(object store, Action hook) + { + (store as RedisDataStoreImpl)._updateHook = hook; + } + + [Fact] + public void LogMessageAtStartup() + { + var logCapture = Logs.Capture(); + var logger = logCapture.Logger("BaseLoggerName"); // in real life, the SDK will provide its own base log name + var context = new LdClientContext("", null, null, null, logger, false, null); + using (Redis.DataStore().Prefix("my-prefix").Build(context)) + { + Assert.Collection(logCapture.GetMessages(), + m => + { + Assert.Equal(LogLevel.Info, m.Level); + Assert.Equal("BaseLoggerName.DataStore.Redis", m.LoggerName); + Assert.Equal("Using Redis data store at localhost:6379 with prefix \"my-prefix\"", + m.Text); + }); + } + } + } +} diff --git a/pkgs/shared/dotnet-server-sdk-shared-tests/LICENSE b/pkgs/shared/dotnet-server-sdk-shared-tests/LICENSE new file mode 100644 index 00000000..fd10303d --- /dev/null +++ b/pkgs/shared/dotnet-server-sdk-shared-tests/LICENSE @@ -0,0 +1,13 @@ +Copyright 2018 Catamorphic, Co. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. \ No newline at end of file diff --git a/pkgs/shared/dotnet-server-sdk-shared-tests/LaunchDarkly.ServerSdk.SharedTests.sln b/pkgs/shared/dotnet-server-sdk-shared-tests/LaunchDarkly.ServerSdk.SharedTests.sln new file mode 100644 index 00000000..73e747bf --- /dev/null +++ b/pkgs/shared/dotnet-server-sdk-shared-tests/LaunchDarkly.ServerSdk.SharedTests.sln @@ -0,0 +1,31 @@ + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio 15 +VisualStudioVersion = 15.0.26730.16 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "LaunchDarkly.ServerSdk.SharedTests", "src\LaunchDarkly.ServerSdk.SharedTests.csproj", "{44904ADC-8CD1-414E-9184-9378CEB4B9CE}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "LaunchDarkly.ServerSdk.SharedTests.Tests", "test\LaunchDarkly.ServerSdk.SharedTests.Tests.csproj", "{79BC10FD-FF08-40FD-B87D-E54549C1F71F}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Release|Any CPU = Release|Any CPU + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {44904ADC-8CD1-414E-9184-9378CEB4B9CE}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {44904ADC-8CD1-414E-9184-9378CEB4B9CE}.Debug|Any CPU.Build.0 = Debug|Any CPU + {44904ADC-8CD1-414E-9184-9378CEB4B9CE}.Release|Any CPU.ActiveCfg = Release|Any CPU + {44904ADC-8CD1-414E-9184-9378CEB4B9CE}.Release|Any CPU.Build.0 = Release|Any CPU + {79BC10FD-FF08-40FD-B87D-E54549C1F71F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {79BC10FD-FF08-40FD-B87D-E54549C1F71F}.Debug|Any CPU.Build.0 = Debug|Any CPU + {79BC10FD-FF08-40FD-B87D-E54549C1F71F}.Release|Any CPU.ActiveCfg = Release|Any CPU + {79BC10FD-FF08-40FD-B87D-E54549C1F71F}.Release|Any CPU.Build.0 = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection + GlobalSection(ExtensibilityGlobals) = postSolution + SolutionGuid = {C92D0169-FDC9-4B5A-A3FA-70CD9609660D} + EndGlobalSection +EndGlobal diff --git a/pkgs/shared/dotnet-server-sdk-shared-tests/README.md b/pkgs/shared/dotnet-server-sdk-shared-tests/README.md new file mode 100644 index 00000000..a5c692d6 --- /dev/null +++ b/pkgs/shared/dotnet-server-sdk-shared-tests/README.md @@ -0,0 +1,4 @@ +LaunchDarkly Server-Side .NET SDK Shared Test Code +================================================== + +This project provides support code for testing LaunchDarkly .NET SDK integrations. Feature store implementations, etc., should use this code whenever possible to ensure consistent test coverage and avoid repetition. An example of a project using this code is [dotnet-server-sdk-redis](../../dotnet-server-sdk-redis/README.md). diff --git a/pkgs/shared/dotnet-server-sdk-shared-tests/SECURITY.md b/pkgs/shared/dotnet-server-sdk-shared-tests/SECURITY.md new file mode 100644 index 00000000..10f1d1ac --- /dev/null +++ b/pkgs/shared/dotnet-server-sdk-shared-tests/SECURITY.md @@ -0,0 +1,5 @@ +# Reporting and Fixing Security Issues + +Please report all security issues to the LaunchDarkly security team by submitting a bug bounty report to our [HackerOne program](https://hackerone.com/launchdarkly?type=team). LaunchDarkly will triage and address all valid security issues following the response targets defined in our program policy. Valid security issues may be eligible for a bounty. 
+ +Please do not open issues or pull requests for security issues. This makes the problem immediately visible to everyone, including potentially malicious actors. diff --git a/pkgs/shared/dotnet-server-sdk-shared-tests/src/AsyncUtils.cs b/pkgs/shared/dotnet-server-sdk-shared-tests/src/AsyncUtils.cs new file mode 100644 index 00000000..fadb5906 --- /dev/null +++ b/pkgs/shared/dotnet-server-sdk-shared-tests/src/AsyncUtils.cs @@ -0,0 +1,60 @@ +using System; +using System.Threading; +using System.Threading.Tasks; + +namespace LaunchDarkly.Sdk.Server.SharedTests +{ + internal static class AsyncUtils + { + private static readonly TaskFactory _taskFactory = new TaskFactory(CancellationToken.None, + TaskCreationOptions.None, TaskContinuationOptions.None, TaskScheduler.Default); + + // This procedure for blocking on a Task without using Task.Wait is derived from the MIT-licensed ASP.NET + // code here: https://github.com/aspnet/AspNetIdentity/blob/master/src/Microsoft.AspNet.Identity.Core/AsyncHelper.cs + // In general, mixing sync and async code is not recommended, and if done in other ways can result in + // deadlocks. See: https://stackoverflow.com/questions/9343594/how-to-call-asynchronous-method-from-synchronous-method-in-c + // Task.Wait would only be safe if we could guarantee that every intermediate Task within the async + // code had been modified with ConfigureAwait(false), but that is very error-prone and we can't depend + // on feature store implementors doing so. + + internal static void WaitSafely(Func taskFn) + { + _taskFactory.StartNew(taskFn) + .Unwrap() + .GetAwaiter() + .GetResult(); + // Note, GetResult does not throw AggregateException so we don't need to post-process exceptions + } + + internal static bool WaitSafely(Func taskFn, TimeSpan timeout) + { + try + { + return _taskFactory.StartNew(taskFn) + .Unwrap() + .Wait(timeout); + } + catch (AggregateException e) + { + throw UnwrapAggregateException(e); + } + } + + internal static T WaitSafely(Func> taskFn) + { + return _taskFactory.StartNew(taskFn) + .Unwrap() + .GetAwaiter() + .GetResult(); + } + + private static Exception UnwrapAggregateException(AggregateException e) + { + if (e.InnerExceptions.Count == 1) + { + return e.InnerExceptions[0]; + } + return e; + } + } +} diff --git a/pkgs/shared/dotnet-server-sdk-shared-tests/src/BigSegmentStore/BigSegmentStoreBaseTests.cs b/pkgs/shared/dotnet-server-sdk-shared-tests/src/BigSegmentStore/BigSegmentStoreBaseTests.cs new file mode 100644 index 00000000..6f31770b --- /dev/null +++ b/pkgs/shared/dotnet-server-sdk-shared-tests/src/BigSegmentStore/BigSegmentStoreBaseTests.cs @@ -0,0 +1,149 @@ +using System.Threading.Tasks; +using LaunchDarkly.Logging; +using LaunchDarkly.Sdk.Server.Subsystems; +using Xunit; +using Xunit.Abstractions; + +using static LaunchDarkly.Sdk.Server.Subsystems.BigSegmentStoreTypes; + +namespace LaunchDarkly.Sdk.Server.SharedTests.BigSegmentStore +{ + /// + /// A configurable Xunit test class for all implementations of IBigSegmentStore. + /// + /// + /// + /// Each implementation of those interfaces should define a test class that is a subclass of this + /// class for their implementation type, and run it in the unit tests for their project. + /// + /// + /// You must override the property to provide details specific to + /// your implementation type. + /// + /// + public abstract class BigSegmentStoreBaseTests + { + /// + /// Override this method to create the configuration for the test suite. 
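+        /// As a rough sketch (mirroring the Redis store tests elsewhere in this change), a subclass
+        /// typically supplies its own store factory and data-manipulation delegates, for example:
+        /// <code>
+        ///     protected override BigSegmentStoreTestConfig Configuration =>
+        ///         new BigSegmentStoreTestConfig
+        ///         {
+        ///             StoreFactoryFunc = MakeStoreFactory,
+        ///             ClearDataAction = ClearData,
+        ///             SetMetadataAction = SetMetadata,
+        ///             SetSegmentsAction = SetSegments
+        ///         };
+        /// </code>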
+ /// + protected abstract BigSegmentStoreTestConfig Configuration { get; } + + private const string prefix = "testprefix"; + private const string fakeUserHash = "userhash"; + private const string segmentRef1 = "key1", segmentRef2 = "key2", segmentRef3 = "key3"; + private static readonly string[] allSegmentRefs = new string[] { segmentRef1, segmentRef2, segmentRef3 }; + + private readonly ILogAdapter _testLogging; + + protected BigSegmentStoreBaseTests() + { + _testLogging = Logs.None; + } + + protected BigSegmentStoreBaseTests(ITestOutputHelper testOutput) + { + _testLogging = TestLogging.TestOutputAdapter(testOutput); + } + + private IBigSegmentStore MakeStore() + { + var context = new LdClientContext("sdk-key", null, null, + Components.HttpConfiguration().Build(new LdClientContext("sdk-key")), _testLogging.Logger(""), false, null); + return Configuration.StoreFactoryFunc(prefix).Build(context); + } + + private async Task MakeEmptyStore() + { + var store = MakeStore(); + try + { + await Configuration.ClearDataAction(prefix); + } + catch + { + store.Dispose(); + throw; + } + return store; + } + + [Fact] + public async void MissingMetadata() + { + using (var store = await MakeEmptyStore()) + { + Assert.Null(await store.GetMetadataAsync()); + } + } + + [Fact] + public async void ValidMetadata() + { + using (var store = await MakeEmptyStore()) + { + var metadata = new StoreMetadata { LastUpToDate = UnixMillisecondTime.Now }; + await Configuration.SetMetadataAction(prefix, metadata); + + var result = await store.GetMetadataAsync(); + Assert.NotNull(result); + Assert.Equal(metadata.LastUpToDate, result.Value.LastUpToDate); + } + } + + [Fact] + public async void MembershipNotFound() + { + using (var store = await MakeEmptyStore()) + { + var membership = await store.GetMembershipAsync(fakeUserHash); + + // Either null or an empty membership is allowed in this case + if (membership != null) + { + AssertEqualMembership(NewMembershipFromSegmentRefs(null, null), membership); + } + } + } + + [Theory] + [InlineData(new string[] { segmentRef1 }, new string[] { })] + [InlineData(new string[] { segmentRef1, segmentRef2 }, new string[] { })] + [InlineData(new string[] { }, new string[] { segmentRef1 })] + [InlineData(new string[] { }, new string[] { segmentRef1, segmentRef2 })] + [InlineData(new string[] { segmentRef1, segmentRef2 }, new string[] { segmentRef2, segmentRef3 })] + public async void MembershipFound(string[] includes, string[] excludes) + { + using (var store = await MakeEmptyStore()) + { + await Configuration.SetSegmentsAction(prefix, fakeUserHash, includes, excludes); + + var membership = await store.GetMembershipAsync(fakeUserHash); + + AssertEqualMembership(NewMembershipFromSegmentRefs(includes, excludes), membership); + } + } + + private static void AssertEqualMembership(IMembership expected, IMembership actual) + { + if (actual.GetType().FullName.StartsWith("LaunchDarkly.Sdk.Server.Internal.BigSegments.MembershipBuilder")) + { + // The store implementation is using our standard membership types, so we can rely on the + // standard equality test for those + Assert.Equal(expected, actual); + } + else + { + // The store implementation has implemented IMembership in some other way, so we have to + // check for the inclusion or exclusion of specific keys + foreach (var segmentRef in allSegmentRefs) + { + if (actual.CheckMembership(segmentRef) != expected.CheckMembership(segmentRef)) + { + Assert.True(false, string.Format("expected membership for {0} to be {1} but was {2}", + segmentRef, 
expected.CheckMembership(segmentRef), actual.CheckMembership(segmentRef))); + } + } + } + } + } +} diff --git a/pkgs/shared/dotnet-server-sdk-shared-tests/src/BigSegmentStore/BigSegmentStoreTestConfig.cs b/pkgs/shared/dotnet-server-sdk-shared-tests/src/BigSegmentStore/BigSegmentStoreTestConfig.cs new file mode 100644 index 00000000..9e764b67 --- /dev/null +++ b/pkgs/shared/dotnet-server-sdk-shared-tests/src/BigSegmentStore/BigSegmentStoreTestConfig.cs @@ -0,0 +1,67 @@ +using System.Collections.Generic; +using System.Threading.Tasks; +using LaunchDarkly.Sdk.Server.Subsystems; + +using static LaunchDarkly.Sdk.Server.Subsystems.BigSegmentStoreTypes; + +namespace LaunchDarkly.Sdk.Server.SharedTests.BigSegmentStore +{ + /// + /// A function that takes a prefix string and returns a configured factory for + /// your implementation of IBigSegmentStore. + /// + /// + /// If the prefix string is null or "", it should use the default prefix defined by the + /// data store implementation. The factory must include any necessary configuration that + /// may be appropriate for the test environment (for instance, pointing it to a database + /// instance that has been set up for the tests). + /// + /// the database prefix + /// a configured factory + public delegate IComponentConfigurer StoreFactoryFunc(string prefix); + + /// + /// An asynchronous function that removes all data from the underlying + /// data store for the specified prefix string. + /// + /// the database prefix + /// an asynchronous task + public delegate Task ClearDataAction(string prefix); + + /// + /// An asynchronous function that updates the store metadata to the specified values. + /// This must be provided separately by the test code because the store interface used by + /// the SDK has no update methods. + /// + /// the database prefix + /// the data to write to the store + /// an asynchronous task + public delegate Task SetMetadataAction(string prefix, StoreMetadata metadata); + + /// + /// An asynchronous function that updates the membership state for a user in the store. + /// This must be provided separately by the test code because the store interface used by + /// the SDK has no update methods. + /// + /// the database prefix + /// the hashed user key + /// segment references to be included + /// segment references to be excluded + /// an asynchronous task + public delegate Task SetSegmentsAction(string prefix, string userHashKey, + IEnumerable includedSegmentRefs, IEnumerable excludedSegmentRefs); + + /// + /// Configuration for . + /// + public sealed class BigSegmentStoreTestConfig + { + public StoreFactoryFunc StoreFactoryFunc { get; set; } + + public ClearDataAction ClearDataAction { get; set; } + + public SetMetadataAction SetMetadataAction { get; set; } + + public SetSegmentsAction SetSegmentsAction { get; set; } + } +} diff --git a/pkgs/shared/dotnet-server-sdk-shared-tests/src/DataStore/DataBuilder.cs b/pkgs/shared/dotnet-server-sdk-shared-tests/src/DataStore/DataBuilder.cs new file mode 100644 index 00000000..26d1fcd0 --- /dev/null +++ b/pkgs/shared/dotnet-server-sdk-shared-tests/src/DataStore/DataBuilder.cs @@ -0,0 +1,44 @@ +using System; +using System.Collections.Generic; +using System.Linq; + +using static LaunchDarkly.Sdk.Server.Subsystems.DataStoreTypes; + +namespace LaunchDarkly.Sdk.Server.SharedTests.DataStore +{ + /// + /// Simplifies building the input parameter for a data store's Init method. 
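+    /// A typical use, taken from the tests later in this package, chains Add calls per data kind
+    /// and then serializes the result:
+    /// <code>
+    /// var allData = new DataBuilder()
+    ///     .Add(TestEntity.Kind, item1, item2)
+    ///     .Add(TestEntity.OtherKind, other1)
+    ///     .BuildSerialized();
+    /// </code>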
+ /// + public class DataBuilder + { + private readonly IDictionary> _data = + new Dictionary>(); + + public DataBuilder Add(DataKind kind, params TestEntity[] items) + { + IDictionary itemsDict; + if (!_data.TryGetValue(kind, out itemsDict)) + { + itemsDict = new Dictionary(); + _data[kind] = itemsDict; + } + foreach (var item in items) + { + itemsDict[item.Key] = item; + } + return this; + } + + public FullDataSet BuildSerialized() + { + return new FullDataSet( + _data.ToDictionary(kv => kv.Key, + kv => new KeyedItems( + kv.Value.ToDictionary(kv1 => kv1.Key, + kv1 => kv1.Value.SerializedItemDescriptor + ) + ) + )); + } + } +} diff --git a/pkgs/shared/dotnet-server-sdk-shared-tests/src/DataStore/FlagTestData.cs b/pkgs/shared/dotnet-server-sdk-shared-tests/src/DataStore/FlagTestData.cs new file mode 100644 index 00000000..60894bfd --- /dev/null +++ b/pkgs/shared/dotnet-server-sdk-shared-tests/src/DataStore/FlagTestData.cs @@ -0,0 +1,77 @@ +using System.Collections.Generic; + +using static LaunchDarkly.Sdk.Server.Subsystems.DataStoreTypes; + +namespace LaunchDarkly.Sdk.Server.SharedTests.DataStore +{ + public static class FlagTestData + { + public const string FlagKey = "flagkey", SegmentKey = "segmentkey", + UserKey = "userkey", OtherUserKey = "otheruser"; + + public static readonly LdValue GoodValue1 = LdValue.Of("good"), + GoodValue2 = LdValue.Of("better"), BadValue = LdValue.Of("bad"); + + public const int GoodVariation1 = 0, GoodVariation2 = 1, BadVariation = 2; + + public static readonly Context MainUser = Context.New(UserKey), + OtherUser = Context.New(OtherUserKey); + + public static ItemDescriptor MakeFlagThatReturnsVariationForSegmentMatch(int version, int variation) + { + var flagJson = LdValue.BuildObject() + .Add("key", FlagKey) + .Add("version", version) + .Add("on", true) + .Add("variations", LdValue.ArrayOf(GoodValue1, GoodValue2, BadValue)) + .Add("fallthrough", LdValue.BuildObject().Add("variation", BadVariation).Build()) + .Add("rules", LdValue.BuildArray() + .Add(LdValue.BuildObject() + .Add("variation", variation) + .Add("clauses", LdValue.BuildArray() + .Add(LdValue.BuildObject() + .Add("attribute", "") + .Add("op", "segmentMatch") + .Add("values", LdValue.ArrayOf(LdValue.Of(SegmentKey))) + .Build()) + .Build()) + .Build()) + .Build()) + .Build().ToJsonString(); + return DataModel.Features.Deserialize(flagJson); + } + + public static ItemDescriptor MakeSegmentThatMatchesUserKeys(int version, params string[] keys) + { + var segmentJson = LdValue.BuildObject() + .Add("key", SegmentKey) + .Add("version", version) + .Add("included", LdValue.Convert.String.ArrayFrom(keys)) + .Build().ToJsonString(); + return DataModel.Segments.Deserialize(segmentJson); + } + + public static FullDataSet MakeFullDataSet(ItemDescriptor flag, ItemDescriptor segment) + { + return new FullDataSet( + new Dictionary> + { + { + DataModel.Features, + new KeyedItems( + new Dictionary + { { FlagKey, flag } } + ) + }, + { + DataModel.Segments, + new KeyedItems( + new Dictionary + { { SegmentKey, segment } } + ) + }, + } + ); + } + } +} diff --git a/pkgs/shared/dotnet-server-sdk-shared-tests/src/DataStore/PersistentDataStoreBaseTests.cs b/pkgs/shared/dotnet-server-sdk-shared-tests/src/DataStore/PersistentDataStoreBaseTests.cs new file mode 100644 index 00000000..0d5b61fd --- /dev/null +++ b/pkgs/shared/dotnet-server-sdk-shared-tests/src/DataStore/PersistentDataStoreBaseTests.cs @@ -0,0 +1,607 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading.Tasks; +using 
LaunchDarkly.Logging; +using LaunchDarkly.Sdk.Server.Subsystems; +using Xunit; +using Xunit.Abstractions; + +using static LaunchDarkly.Sdk.Server.Subsystems.DataStoreTypes; + +namespace LaunchDarkly.Sdk.Server.SharedTests.DataStore +{ + /// + /// A configurable Xunit test class for all implementations of IPersistentDataStore. + /// + /// + /// + /// Each implementation of those interfaces should define a test class that is a subclass of this + /// class for their implementation type, and run it in the unit tests for their project. + /// + /// + /// In order to be testable with this class, a data store implementation must have the following + /// characteristics: + /// + /// + /// It has some notion of a "prefix" string that can be used to distinguish between different + /// SDK instances using the same underlying database. + /// Two instances of the same data store type with the same configuration, and the same prefix, + /// should be able to see each other's data. + /// + /// + /// You must override the property to provide details specific to + /// your implementation type. + /// + /// + public abstract class PersistentDataStoreBaseTests + { + // Note that we don't reference the actual type of the store object within this test code; + // it can't be provided as a generic type parameter because it is likely to be internal or + // private, and you can't derive a public test class from a class that has a non-public + // type parameter. And, it could either be an IPersistentDataStore or an IPersistentDataStoreAsync. + // So we refer to it as the only lowest common denominator we know: IDisposable. + + /// + /// Override this method to create the configuration for the test suite. + /// + protected abstract PersistentDataStoreTestConfig Configuration { get; } + + private readonly TestEntity item1 = new TestEntity("first", 5, "value1"); + private readonly TestEntity item2 = new TestEntity("second", 5, "value2"); + private readonly TestEntity other1 = new TestEntity("third", 5, "othervalue1"); + private readonly string unusedKey = "whatever"; + private readonly ILogAdapter _testLogging; + + protected PersistentDataStoreBaseTests() + { + _testLogging = Logs.None; + } + + protected PersistentDataStoreBaseTests(ITestOutputHelper testOutput) + { + _testLogging = TestLogging.TestOutputAdapter(testOutput); + } + + [Fact] + public async void StoreNotInitializedBeforeInit() + { + await ClearAllData(); + using (var store = CreateStoreImpl()) + { + Assert.False(await Initialized(store)); + } + } + + [Fact] + public async void OneInstanceCanDetectIfAnotherInstanceHasInitializedStore() + { + await ClearAllData(); + using (var store1 = CreateStoreImpl()) + { + await Init(store1, new DataBuilder().Add(TestEntity.Kind, item1).BuildSerialized()); + + using (var store2 = CreateStoreImpl()) + { + Assert.True(await Initialized(store2)); + } + } + } + + [Fact] + public async void StoreInitializedAfterInit() + { + await ClearAllData(); + using (var store = CreateStoreImpl()) + { + await Init(store, new DataBuilder().BuildSerialized()); + Assert.True(await Initialized(store)); + } + } + + [Fact] + public async void InitCompletelyReplacesExistingData() + { + await ClearAllData(); + using (var store = CreateStoreImpl()) + { + var allData = new DataBuilder() + .Add(TestEntity.Kind, item1, item2) + .Add(TestEntity.OtherKind, other1) + .BuildSerialized(); + await Init(store, allData); + + var item2v2 = item2.NextVersion(); + var data2 = new DataBuilder() + .Add(TestEntity.Kind, item2v2) + .Add(TestEntity.OtherKind) + 
.BuildSerialized(); + await Init(store, data2); + + Assert.Null(await Get(store, TestEntity.Kind, item1.Key)); + AssertEqualsSerializedItem(item2v2, await Get(store, TestEntity.Kind, item2.Key)); + Assert.Null(await Get(store, TestEntity.OtherKind, other1.Key)); + } + } + + [Fact] + public async void GetExistingItem() + { + await ClearAllData(); + using (var store = CreateStoreImpl()) + { + await Init(store, new DataBuilder().Add(TestEntity.Kind, item1, item2).BuildSerialized()); + AssertEqualsSerializedItem(item1, await Get(store, TestEntity.Kind, item1.Key)); + } + } + + [Fact] + public async void GetNonexistingItem() + { + await ClearAllData(); + using (var store = CreateStoreImpl()) + { + await Init(store, new DataBuilder().Add(TestEntity.Kind, item1, item2).BuildSerialized()); + Assert.Null(await Get(store, TestEntity.Kind, unusedKey)); + } + } + + [Fact] + public async void GetAllItems() + { + await ClearAllData(); + using (var store = CreateStoreImpl()) + { + await Init(store, new DataBuilder().Add(TestEntity.Kind, item1, item2) + .Add(TestEntity.OtherKind, other1).BuildSerialized()); + var result = await GetAll(store, TestEntity.Kind); + AssertSerializedItemsCollection(result, item1, item2); + } + } + + [Fact] + public async void GetAllWithDeletedItem() + { + await ClearAllData(); + using (var store = CreateStoreImpl()) + { + var deletedItem = new TestEntity(unusedKey, 1, null); + await Init(store, new DataBuilder().Add(TestEntity.Kind, item1, item2, deletedItem) + .Add(TestEntity.OtherKind, other1).BuildSerialized()); + var result = await GetAll(store, TestEntity.Kind); + AssertSerializedItemsCollection(result, item1, item2, deletedItem); + } + } + + [Fact] + public async void UpsertWithNewerVersion() + { + await ClearAllData(); + using (var store = CreateStoreImpl()) + { + await Init(store, new DataBuilder().Add(TestEntity.Kind, item1, item2).BuildSerialized()); + var newer = item1.NextVersion(); + await Upsert(store, TestEntity.Kind, item1.Key, newer.SerializedItemDescriptor); + AssertEqualsSerializedItem(newer, await Get(store, TestEntity.Kind, item1.Key)); + } + } + + [Fact] + public async void UpsertWithSameVersion() + { + await ClearAllData(); + using (var store = CreateStoreImpl()) + { + await Init(store, new DataBuilder().Add(TestEntity.Kind, item1, item2).BuildSerialized()); + var sameVersionDifferentValue = item1.WithValue("modified"); + await Upsert(store, TestEntity.Kind, item1.Key, sameVersionDifferentValue.SerializedItemDescriptor); + AssertEqualsSerializedItem(item1, await Get(store, TestEntity.Kind, item1.Key)); + } + } + + [Fact] + public async void UpsertWithOlderVersion() + { + await ClearAllData(); + using (var store = CreateStoreImpl()) + { + await Init(store, new DataBuilder().Add(TestEntity.Kind, item1, item2).BuildSerialized()); + var older = item1.WithVersion(item1.Version - 1); + await Upsert(store, TestEntity.Kind, item1.Key, older.SerializedItemDescriptor); + AssertEqualsSerializedItem(item1, await Get(store, TestEntity.Kind, item1.Key)); + } + } + + [Fact] + public async void UpsertNewItem() + { + await ClearAllData(); + using (var store = CreateStoreImpl()) + { + await Init(store, new DataBuilder().Add(TestEntity.Kind, item1, item2).BuildSerialized()); + var newItem = new TestEntity(unusedKey, 1, "newvalue"); + await Upsert(store, TestEntity.Kind, unusedKey, newItem.SerializedItemDescriptor); + AssertEqualsSerializedItem(newItem, await Get(store, TestEntity.Kind, newItem.Key)); + } + } + + [Fact] + public async void DeleteWithNewerVersion() + { + 
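+            // Note that the persistent store interface has no separate Delete operation: a deletion
+            // is written through Upsert as a placeholder item whose value is null (see TestEntity.Deleted
+            // and the "$DELETED" serialization in TestEntity), and it is subject to the same
+            // version comparison as any other update.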
await ClearAllData(); + using (var store = CreateStoreImpl()) + { + await Init(store, new DataBuilder().Add(TestEntity.Kind, item1, item2).BuildSerialized()); + var deletedItem = new TestEntity(item1.Key, item1.Version + 1, null); + await Upsert(store, TestEntity.Kind, item1.Key, deletedItem.SerializedItemDescriptor); + AssertEqualsSerializedItem(deletedItem, await Get(store, TestEntity.Kind, item1.Key)); + } + } + + [Fact] + public async void DeleteWithSameVersion() + { + await ClearAllData(); + using (var store = CreateStoreImpl()) + { + await Init(store, new DataBuilder().Add(TestEntity.Kind, item1, item2).BuildSerialized()); + var deletedItem = new TestEntity(item1.Key, item1.Version, null); + await Upsert(store, TestEntity.Kind, item1.Key, deletedItem.SerializedItemDescriptor); + AssertEqualsSerializedItem(item1, await Get(store, TestEntity.Kind, item1.Key)); + } + } + + [Fact] + public async void DeleteWithOlderVersion() + { + await ClearAllData(); + using (var store = CreateStoreImpl()) + { + await Init(store, new DataBuilder().Add(TestEntity.Kind, item1, item2).BuildSerialized()); + var deletedItem = new TestEntity(item1.Key, item1.Version - 1, null); + await Upsert(store, TestEntity.Kind, item1.Key, deletedItem.SerializedItemDescriptor); + AssertEqualsSerializedItem(item1, await Get(store, TestEntity.Kind, item1.Key)); + } + } + + [Fact] + public async void DeleteUnknownItem() + { + await ClearAllData(); + using (var store = CreateStoreImpl()) + { + await Init(store, new DataBuilder().Add(TestEntity.Kind, item1).BuildSerialized()); + var deletedItem = new TestEntity(unusedKey, 99, null); + await Upsert(store, TestEntity.Kind, unusedKey, deletedItem.SerializedItemDescriptor); + AssertEqualsSerializedItem(deletedItem, await Get(store, TestEntity.Kind, unusedKey)); + } + } + + [Fact] + public async void StoresWithDifferentPrefixAreIndependent() + { + // The prefix parameter, if supported, is a namespace for all of a store's data, + // so that it won't interfere with data from some other instance with a different + // prefix. This test verifies that Init, Get, All, and Upsert are all respecting + // the prefix. 
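+            // How the prefix is applied is up to the implementation -- for example, a key-value
+            // store would typically embed it in every key it writes, roughly "<prefix>:<kind>:<key>".
+            // These tests only verify the observable isolation between the two prefixes.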
+ await ClearAllData("aaa"); + await ClearAllData("bbb"); + + using (var store1 = CreateStoreImpl("aaa")) + { + using (var store2 = CreateStoreImpl("bbb")) + { + Assert.False(await Initialized(store1)); + Assert.False(await Initialized(store2)); + + var store1Item1 = new TestEntity("a", 1, "1a"); + var store1Item2 = new TestEntity("b", 1, "1b"); + var store1Item3 = new TestEntity("c", 1, "1c"); + var store2Item1 = new TestEntity("a", 99, "2a"); + var store2Item2 = new TestEntity("bb", 1, "2b"); // skipping key "b" validates that store2.Init doesn't delete store1's "b" key + var store2Item3 = new TestEntity("c", 2, "2c"); + await Init(store1, new DataBuilder().Add(TestEntity.Kind, store1Item1, store1Item2).BuildSerialized()); + await Init(store2, new DataBuilder().Add(TestEntity.Kind, store2Item1, store2Item2).BuildSerialized()); + await Upsert(store1, TestEntity.Kind, store1Item3.Key, store1Item3.SerializedItemDescriptor); + await Upsert(store2, TestEntity.Kind, store2Item3.Key, store2Item3.SerializedItemDescriptor); + + var items1 = await GetAll(store1, TestEntity.Kind); + AssertSerializedItemsCollection(items1, store1Item1, store1Item2, store1Item3); + var items2 = await GetAll(store2, TestEntity.Kind); + AssertSerializedItemsCollection(items2, store2Item1, store2Item2, store2Item3); + } + } + } + + [Fact] + public async void UpsertRaceConditionAgainstOtherClientWithLowerVersion() + { + if (Configuration.SetConcurrentModificationHookAction is null) + { + return; + } + + var key = "key"; + int startVersion = 1, store2VersionStart = 2, store2VersionEnd = 4, store1VersionEnd = 10; + var startItem = new TestEntity(key, startVersion, "value1"); + + using (var store2 = CreateStoreImpl()) + { + int versionCounter = store2VersionStart; + Action concurrentModifier = () => + { + if (versionCounter <= store2VersionEnd) + { + AsyncUtils.WaitSafely(() => Upsert(store2, TestEntity.Kind, key, + startItem.WithVersion(versionCounter).WithValue("value" + versionCounter).SerializedItemDescriptor)); + versionCounter++; + } + }; + + var store1 = CreateStoreImplWithUpdateHook(concurrentModifier); + await Init(store1, new DataBuilder().Add(TestEntity.Kind, startItem).BuildSerialized()); + + var endItem = startItem.WithVersion(store1VersionEnd).WithValue("value" + store1VersionEnd); + await Upsert(store1, TestEntity.Kind, key, endItem.SerializedItemDescriptor); + + AssertEqualsSerializedItem(endItem, await Get(store1, TestEntity.Kind, key)); + } + } + + [Fact] + public async void UpsertRaceConditionAgainstOtherClientWithHigherVersion() + { + if (Configuration.SetConcurrentModificationHookAction is null) + { + return; + } + + var key = "key"; + int startVersion = 1, higherVersion = 3, attemptedVersion = 2; + var startItem = new TestEntity(key, startVersion, "value1"); + var higherItem = startItem.WithVersion(higherVersion).WithValue("value" + higherVersion); + + using (var store2 = CreateStoreImpl()) + { + Action concurrentModifier = () => + { + AsyncUtils.WaitSafely(() => Upsert(store2, TestEntity.Kind, key, + higherItem.SerializedItemDescriptor)); + }; + + var store1 = CreateStoreImplWithUpdateHook(concurrentModifier); + await Init(store1, new DataBuilder().Add(TestEntity.Kind, startItem).BuildSerialized()); + + var attemptedItem = startItem.WithVersion(attemptedVersion); + await Upsert(store1, TestEntity.Kind, key, attemptedItem.SerializedItemDescriptor); + + AssertEqualsSerializedItem(higherItem, await Get(store1, TestEntity.Kind, key)); + } + } + + [Fact] + public void LdClientEndToEndTests() + { + // This 
is a basic smoke test to verify that the data store component behaves correctly within an + // SDK client instance. + + var flag = FlagTestData.MakeFlagThatReturnsVariationForSegmentMatch(1, FlagTestData.GoodVariation1); + var segment = FlagTestData.MakeSegmentThatMatchesUserKeys(1, FlagTestData.UserKey); + var data = FlagTestData.MakeFullDataSet(flag, segment); + var dataSourceFactory = new TestDataSourceFactory(data); + + var clientConfig = LaunchDarkly.Sdk.Server.Configuration.Builder("sdk-key") + .DataSource(dataSourceFactory) + .Events(Components.NoEvents) + .Logging(Components.Logging(_testLogging)); + + if (Configuration.StoreFactoryFunc != null) + { + clientConfig.DataStore(Components.PersistentDataStore(Configuration.StoreFactoryFunc(null))); + } + else if (Configuration.StoreAsyncFactoryFunc != null) + { + clientConfig.DataStore(Components.PersistentDataStore(Configuration.StoreAsyncFactoryFunc(null))); + } + else + { + throw new InvalidOperationException("neither StoreFactoryFunc nor StoreAsyncFactoryFunc was set"); + } + + using (var client = new LdClient(clientConfig.Build())) + { + var dataSourceUpdates = dataSourceFactory._updates; + + Action flagShouldHaveValueForUser = (user, value) => + Assert.Equal(value, client.JsonVariation(FlagTestData.FlagKey, user, LdValue.Null)); + + // evaluate each flag from the data store + flagShouldHaveValueForUser(FlagTestData.MainUser, FlagTestData.GoodValue1); + flagShouldHaveValueForUser(FlagTestData.OtherUser, FlagTestData.BadValue); + + // evaluate all flags + var state = client.AllFlagsState(FlagTestData.MainUser); + Assert.Equal(FlagTestData.GoodValue1, state.GetFlagValueJson(FlagTestData.FlagKey)); + + // update the flag + var flagV2 = FlagTestData.MakeFlagThatReturnsVariationForSegmentMatch(2, FlagTestData.GoodVariation2); + dataSourceUpdates.Upsert(DataModel.Features, FlagTestData.FlagKey, flagV2); + + // flag should now return new value + flagShouldHaveValueForUser(FlagTestData.MainUser, FlagTestData.GoodValue2); + flagShouldHaveValueForUser(FlagTestData.OtherUser, FlagTestData.BadValue); + + // update the segment so it now matches both users + var segmentV2 = FlagTestData.MakeSegmentThatMatchesUserKeys(2, + FlagTestData.UserKey, FlagTestData.OtherUserKey); + dataSourceUpdates.Upsert(DataModel.Segments, FlagTestData.SegmentKey, segmentV2); + + flagShouldHaveValueForUser(FlagTestData.MainUser, FlagTestData.GoodValue2); + flagShouldHaveValueForUser(FlagTestData.OtherUser, FlagTestData.GoodValue2); + + // delete the segment - should cause the flag that uses it to stop matching + dataSourceUpdates.Upsert(DataModel.Segments, FlagTestData.SegmentKey, ItemDescriptor.Deleted(3)); + flagShouldHaveValueForUser(FlagTestData.MainUser, FlagTestData.BadValue); + flagShouldHaveValueForUser(FlagTestData.OtherUser, FlagTestData.BadValue); + + // delete the flag so it becomes unknown + dataSourceUpdates.Upsert(DataModel.Features, FlagTestData.FlagKey, ItemDescriptor.Deleted(3)); + var detail = client.JsonVariationDetail(FlagTestData.FlagKey, FlagTestData.MainUser, LdValue.Null); + Assert.Equal(EvaluationReason.ErrorReason(EvaluationErrorKind.FlagNotFound), detail.Reason); + } + } + + private IDisposable CreateStoreImpl(string prefix = null) + { + var context = new LdClientContext("sdk-key"); + if (Configuration.StoreFactoryFunc != null) + { + return Configuration.StoreFactoryFunc(prefix).Build(context); + } + if (Configuration.StoreAsyncFactoryFunc != null) + { + return Configuration.StoreAsyncFactoryFunc(prefix).Build(context); + } + throw new 
InvalidOperationException("neither StoreFactoryFunc nor StoreAsyncFactoryFunc was set"); + } + + private IDisposable CreateStoreImplWithUpdateHook(Action hook) + { + var store = CreateStoreImpl(); + Configuration.SetConcurrentModificationHookAction(store, hook); + return store; + } + + private Task ClearAllData(string prefix = null) + { + if (Configuration.ClearDataAction is null) + { + throw new InvalidOperationException("configuration did not specify ClearDataAction"); + } + return Configuration.ClearDataAction(prefix); + } + + private static async Task Initialized(IDisposable store) + { + if (store is IPersistentDataStore syncStore) + { + return syncStore.Initialized(); + } + return await (store as IPersistentDataStoreAsync).InitializedAsync(); + } + + private static async Task Init(IDisposable store, FullDataSet allData) + { + if (store is IPersistentDataStore syncStore) + { + syncStore.Init(allData); + } + else + { + await (store as IPersistentDataStoreAsync).InitAsync(allData); + } + } + + private static async Task Get(IDisposable store, DataKind kind, string key) + { + if (store is IPersistentDataStore syncStore) + { + return syncStore.Get(kind, key); + } + return await (store as IPersistentDataStoreAsync).GetAsync(kind, key); + } + + private static async Task> GetAll(IDisposable store, DataKind kind) + { + if (store is IPersistentDataStore syncStore) + { + return syncStore.GetAll(kind); + } + return await (store as IPersistentDataStoreAsync).GetAllAsync(kind); + } + + private static async Task Upsert(IDisposable store, DataKind kind, string key, SerializedItemDescriptor item) + { + if (store is IPersistentDataStore syncStore) + { + return syncStore.Upsert(kind, key, item); + } + return await (store as IPersistentDataStoreAsync).UpsertAsync(kind, key, item); + } + + private static void AssertEqualsSerializedItem(TestEntity item, SerializedItemDescriptor? serializedItemDesc) + { + // This allows for the fact that a PersistentDataStore may not be able to get the item version without + // deserializing it, so we allow the version to be zero. Also, there are two ways a store can return a + // deleted item, depending on its ability to persist metadata: either Deleted is true, in which case + // it doesn't matter what SerializedItem is, or else SerializedItem contains whatever placeholder + // string the DataKind uses to denote deleted items. 
+ Assert.NotNull(serializedItemDesc); + if (serializedItemDesc.Value.Version != 0) + { + Assert.Equal(item.Version, serializedItemDesc.Value.Version); + } + if (serializedItemDesc.Value.Deleted) + { + Assert.True(item.Deleted); + } + else + { + Assert.Equal(item.SerializedItemDescriptor.SerializedItem, serializedItemDesc.Value.SerializedItem); + } + } + + private static void AssertSerializedItemsCollection(KeyedItems serializedItems, params TestEntity[] expectedItems) + { + var sortedItems = serializedItems.Items.OrderBy(kv => kv.Key); + Assert.Collection(sortedItems, + expectedItems.Select>>(item => + kv => + { + Assert.Equal(item.Key, kv.Key); + AssertEqualsSerializedItem(item, kv.Value); + } + ).ToArray() + ); + } + + private class TestDataSourceFactory : IComponentConfigurer + { + private readonly FullDataSet _data; + internal IDataSourceUpdates _updates; + + internal TestDataSourceFactory(FullDataSet data) + { + _data = data; + } + + public IDataSource Build(LdClientContext context) + { + _updates = context.DataSourceUpdates; + return new TestDataSource(_data, context.DataSourceUpdates); + } + } + + private class TestDataSource : IDataSource + { + private readonly FullDataSet _data; + private readonly IDataSourceUpdates _updates; + + internal TestDataSource(FullDataSet data, IDataSourceUpdates updates) + { + _data = data; + _updates = updates; + } + + public void Dispose() { } + + public bool Initialized => true; + + public Task Start() + { + _updates.Init(_data); + return Task.FromResult(true); + } + } + } +} diff --git a/pkgs/shared/dotnet-server-sdk-shared-tests/src/DataStore/PersistentDataStoreTestConfig.cs b/pkgs/shared/dotnet-server-sdk-shared-tests/src/DataStore/PersistentDataStoreTestConfig.cs new file mode 100644 index 00000000..5113c200 --- /dev/null +++ b/pkgs/shared/dotnet-server-sdk-shared-tests/src/DataStore/PersistentDataStoreTestConfig.cs @@ -0,0 +1,59 @@ +using System; +using System.Threading.Tasks; +using LaunchDarkly.Sdk.Server.Subsystems; + +namespace LaunchDarkly.Sdk.Server.SharedTests.DataStore +{ + /// + /// Configuration for . + /// + /// either IPersistentDataStore or IPersistentDataStoreAsync + public sealed class PersistentDataStoreTestConfig + { + /// + /// Set this to a function that takes a prefix string and returns a configured factory for + /// your implementation of IPersistentDataStore. If you implemented + /// IPersistentDataStoreAsync instead, then use StoreAsyncFactoryFunc. + /// + /// + /// If the prefix string is null or "", it should use the default prefix defined by the + /// data store implementation. The factory must include any necessary configuration that + /// may be appropriate for the test environment (for instance, pointing it to a database + /// instance that has been set up for the tests). + /// + public Func> StoreFactoryFunc { get; set; } + + /// + /// Set this to a function that takes a prefix string and returns a configured factory for + /// your implementation of IPersistentDataStoreAsync. If you implemented + /// IPersistentDataStore instead, then use StoreFactoryFunc. + /// + /// + /// If the prefix string is null or "", it should use the default prefix defined by the + /// data store implementation. The factory must include any necessary configuration that + /// may be appropriate for the test environment (for instance, pointing it to a database + /// instance that has been set up for the tests). 
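+        /// As a purely illustrative sketch (the builder name is hypothetical, not part of this
+        /// package), an asynchronous implementation might set:
+        /// <code>
+        /// StoreAsyncFactoryFunc = prefix => MyDatabase.DataStoreAsync().Prefix(prefix)
+        /// </code>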
+        /// </remarks>
+        public Func<string, IComponentConfigurer<IPersistentDataStoreAsync>> StoreAsyncFactoryFunc { get; set; }
+
+        /// <summary>
+        /// Set this to an asynchronous function that removes all data from the underlying
+        /// data store for the specified prefix string.
+        /// </summary>
+        public Func<string, Task> ClearDataAction { get; set; }
+
+        /// <summary>
+        /// Set this to enable tests of concurrent modification behavior, for store implementations
+        /// that support testing this; otherwise leave it null.
+        /// </summary>
+        /// <remarks>
+        /// The function should take two parameters: an instance of your store type (typed here as
+        /// object because the actual implementation type is unknown to the tests), and a hook
+        /// which is a synchronous Action. Your function should modify the store instance
+        /// so that it will call the hook synchronously during each Upsert operation --
+        /// after the old value has been read, but before the new one has been written (if those
+        /// operations are not done atomically).
+        /// </remarks>
+        public Action<object, Action> SetConcurrentModificationHookAction { get; set; }
+    }
+}
diff --git a/pkgs/shared/dotnet-server-sdk-shared-tests/src/DataStore/TestEntity.cs b/pkgs/shared/dotnet-server-sdk-shared-tests/src/DataStore/TestEntity.cs
new file mode 100644
index 00000000..d58ebed0
--- /dev/null
+++ b/pkgs/shared/dotnet-server-sdk-shared-tests/src/DataStore/TestEntity.cs
@@ -0,0 +1,84 @@
+using static LaunchDarkly.Sdk.Server.Subsystems.DataStoreTypes;
+
+namespace LaunchDarkly.Sdk.Server.SharedTests.DataStore
+{
+    /// <summary>
+    /// A simple class that stands in for the SDK's data model types such as FeatureFlag.
+    /// </summary>
+    /// <remarks>
+    /// Data stores are tested against this type, instead of the real data model types, to make sure
+    /// they are using the generic mechanisms provided by the SDK and not anything specific to the
+    /// currently defined data model types, since the SDK may add to or change those in the future.
+    /// </remarks>
+    public class TestEntity
+    {
+        public static readonly DataKind Kind = new DataKind(
+            "test",
+            Serialize,
+            Deserialize
+            );
+        public static readonly DataKind OtherKind = new DataKind(
+            "other",
+            Serialize,
+            Deserialize
+            );
+
+        public string Key { get; }
+        public int Version { get; }
+        public string Value { get; }
+
+        public bool Deleted => Value is null;
+
+        public TestEntity() { }
+
+        public TestEntity(string key, int version, string value)
+        {
+            Key = key;
+            Version = version;
+            Value = value;
+        }
+
+        public TestEntity WithVersion(int newVersion) =>
+            new TestEntity(Key, newVersion, Value);
+
+        public TestEntity WithValue(string newValue) =>
+            new TestEntity(Key, Version, newValue);
+
+        public TestEntity NextVersion() =>
+            WithVersion(Version + 1);
+
+        internal SerializedItemDescriptor SerializedItemDescriptor =>
+            Deleted ?
+ new SerializedItemDescriptor(Version, true, Serialize(ItemDescriptor.Deleted(Version))) : + new SerializedItemDescriptor(Version, false, Serialize(new ItemDescriptor(Version, this))); + + public override bool Equals(object obj) => + (obj is TestEntity o) && + Key == o.Key && Version == o.Version && Value == o.Value; + + public override int GetHashCode() => + new { Key, Version, Value }.GetHashCode(); + + public static string Serialize(ItemDescriptor item) + { + if (item.Item is null) + { + return "$DELETED:" + item.Version; + } + var e = item.Item as TestEntity; + return e.Key + ":" + e.Version + ":" + e.Value; + } + + public static ItemDescriptor Deserialize(string serialized) + { + var parts = serialized.Split(':'); + var key = parts[0]; + var version = int.Parse(parts[1]); + if (key == "$DELETED") + { + return ItemDescriptor.Deleted(version); + } + return new ItemDescriptor(version, new TestEntity(key, version, parts[2])); + } + } +} diff --git a/pkgs/shared/dotnet-server-sdk-shared-tests/src/LaunchDarkly.ServerSdk.SharedTests.csproj b/pkgs/shared/dotnet-server-sdk-shared-tests/src/LaunchDarkly.ServerSdk.SharedTests.csproj new file mode 100644 index 00000000..dd78456f --- /dev/null +++ b/pkgs/shared/dotnet-server-sdk-shared-tests/src/LaunchDarkly.ServerSdk.SharedTests.csproj @@ -0,0 +1,20 @@ + + + 2.0.0-alpha.2 + LaunchDarkly.ServerSdk.SharedTests + LaunchDarkly.ServerSdk.SharedTests + netstandard2.0;net462 + Library + LaunchDarkly .NET Shared Tests + LaunchDarkly + Copyright 2020 Catamorphic, Co. + Apache-2.0 + LaunchDarkly.Sdk.Server.SharedTests + + + + + + + + diff --git a/pkgs/shared/dotnet-server-sdk-shared-tests/src/TestLogging.cs b/pkgs/shared/dotnet-server-sdk-shared-tests/src/TestLogging.cs new file mode 100644 index 00000000..09f43d98 --- /dev/null +++ b/pkgs/shared/dotnet-server-sdk-shared-tests/src/TestLogging.cs @@ -0,0 +1,33 @@ +using LaunchDarkly.Logging; +using Xunit.Abstractions; + +namespace LaunchDarkly.Sdk.Server.SharedTests +{ + /// + /// Provides integration between LaunchDarkly logging and Xunit test output. + /// + public class TestLogging + { + /// + /// Allows LaunchDarkly log output to be captured in the Xunit test output buffer. + /// + /// + /// Xunit suppresses console output from tests, because tests can run in parallel and + /// so their output could be interleaved and unreadable. Instead, it provides the + /// ITestOutputHelper interface; any test class constructor that declares a + /// parameter of this type will receive an instance of it, and output sent to the + /// interface will be printed along with the test results if the test fails. Calling + /// TestLogging.TestOutputAdapter converts the ITestOutputHelper into + /// the type that is used for LaunchDarkly logging configuration. + /// + /// + /// + /// an ITestOutputHelper provided by Xunit + /// optional text that will be prepended to each log line, to + /// distinguish it from any other kind of output from the test + /// an ILogAdapter for use in an LaunchDarkly SDK + public static ILogAdapter TestOutputAdapter(ITestOutputHelper testOutputHelper, + string prefix = null) => + Logs.ToMethod(line => testOutputHelper.WriteLine((prefix ?? 
"") + line)); + } +} diff --git a/pkgs/shared/dotnet-server-sdk-shared-tests/test/BigSegmentStore/BigSegmentStoreBaseTestsTest.cs b/pkgs/shared/dotnet-server-sdk-shared-tests/test/BigSegmentStore/BigSegmentStoreBaseTestsTest.cs new file mode 100644 index 00000000..12646216 --- /dev/null +++ b/pkgs/shared/dotnet-server-sdk-shared-tests/test/BigSegmentStore/BigSegmentStoreBaseTestsTest.cs @@ -0,0 +1,108 @@ +using System.Collections.Generic; +using System.Threading.Tasks; +using LaunchDarkly.Sdk.Server.Subsystems; +using Xunit.Abstractions; + +using static LaunchDarkly.Sdk.Server.Subsystems.BigSegmentStoreTypes; + +namespace LaunchDarkly.Sdk.Server.SharedTests.BigSegmentStore +{ + public class BigSegmentStoreBaseTestsTest : BigSegmentStoreBaseTests + { + // This runs BigSegmentStoreBaseTests against a mock store implementation that is known to + // behave as expected, to verify that the test suite logic has the correct expectations. + + protected override BigSegmentStoreTestConfig Configuration => + new BigSegmentStoreTestConfig + { + StoreFactoryFunc = CreateStoreFactory, + ClearDataAction = ClearData, + SetMetadataAction = SetMetadata, + SetSegmentsAction = SetSegments + }; + + private class DataSet { + public StoreMetadata? Metadata = null; + public Dictionary Memberships = new Dictionary(); + } + + private readonly Dictionary _allData = new Dictionary(); + + public BigSegmentStoreBaseTestsTest(ITestOutputHelper testOutput) : base(testOutput) + { + } + + private IComponentConfigurer CreateStoreFactory(string prefix) => + new MockStoreFactory(GetOrCreateDataSet(prefix)); + + private Task ClearData(string prefix) + { + var data = GetOrCreateDataSet(prefix); + data.Metadata = null; + data.Memberships.Clear(); + return Task.CompletedTask; + } + + private Task SetMetadata(string prefix, StoreMetadata metadata) + { + GetOrCreateDataSet(prefix).Metadata = metadata; + return Task.CompletedTask; + } + + private Task SetSegments(string prefix, string userHashKey, + IEnumerable includedSegmentRefs, IEnumerable excludedSegmentRefs) + { + GetOrCreateDataSet(prefix).Memberships[userHashKey] = + NewMembershipFromSegmentRefs(includedSegmentRefs, excludedSegmentRefs); + return Task.CompletedTask; + } + + private DataSet GetOrCreateDataSet(string prefix) + { + if (!_allData.ContainsKey(prefix)) + { + _allData[prefix] = new DataSet(); + } + return _allData[prefix]; + } + + private class MockStoreFactory : IComponentConfigurer + { + private readonly DataSet _data; + + public MockStoreFactory(DataSet data) + { + _data = data; + } + + public IBigSegmentStore Build(LdClientContext context) => + new MockStore(_data); + } + + private class MockStore : IBigSegmentStore + { + private readonly DataSet _data; + + public MockStore(DataSet data) + { + _data = data; + } + + public void Dispose() { } + + public Task GetMembershipAsync(string userHash) + { + if (_data.Memberships.TryGetValue(userHash, out var result)) + { + return Task.FromResult(result); + } + return Task.FromResult((IMembership)null); + } + + public Task GetMetadataAsync() + { + return Task.FromResult(_data.Metadata); + } + } + } +} diff --git a/pkgs/shared/dotnet-server-sdk-shared-tests/test/DataStore/MockAsyncStore.cs b/pkgs/shared/dotnet-server-sdk-shared-tests/test/DataStore/MockAsyncStore.cs new file mode 100644 index 00000000..5b612bb8 --- /dev/null +++ b/pkgs/shared/dotnet-server-sdk-shared-tests/test/DataStore/MockAsyncStore.cs @@ -0,0 +1,43 @@ +using System.Threading.Tasks; +using LaunchDarkly.Sdk.Server.Subsystems; + +using static 
LaunchDarkly.Sdk.Server.Subsystems.DataStoreTypes; + +namespace LaunchDarkly.Sdk.Server.SharedTests.DataStore +{ + // MockAsyncStore is defined as a simple wrapper around MockSyncStore because we're not trying to + // test any real asynchronous functionality in the data store itself; we're just testing that the + // SDK makes the appropriate calls to the IPersistentDataStoreAsync API. + + public class MockAsyncStore : IPersistentDataStoreAsync + { + private readonly MockSyncStore _syncStore; + + public MockAsyncStore(MockDatabase db, string prefix) + { + _syncStore = new MockSyncStore(db, prefix); + } + + public void Dispose() { } + +#pragma warning disable CS1998 // Async method lacks 'await' operators and will run synchronously + + public async Task GetAsync(DataKind kind, string key) => + _syncStore.Get(kind, key); + + public async Task> GetAllAsync(DataKind kind) => + _syncStore.GetAll(kind); + + public async Task InitAsync(FullDataSet allData) => + _syncStore.Init(allData); + + public async Task InitializedAsync() => _syncStore.Initialized(); + + public async Task IsStoreAvailableAsync() => true; + + public async Task UpsertAsync(DataKind kind, string key, SerializedItemDescriptor item) => + _syncStore.Upsert(kind, key, item); + +#pragma warning restore CS1998 + } +} diff --git a/pkgs/shared/dotnet-server-sdk-shared-tests/test/DataStore/MockDatabase.cs b/pkgs/shared/dotnet-server-sdk-shared-tests/test/DataStore/MockDatabase.cs new file mode 100644 index 00000000..0eb25fb7 --- /dev/null +++ b/pkgs/shared/dotnet-server-sdk-shared-tests/test/DataStore/MockDatabase.cs @@ -0,0 +1,51 @@ +using System.Collections.Generic; + +using static LaunchDarkly.Sdk.Server.Subsystems.DataStoreTypes; + +namespace LaunchDarkly.Sdk.Server.SharedTests.DataStore +{ + public class MockDatabase + { + public static readonly MockDatabase Instance = new MockDatabase(); + + public readonly IDictionary>> _data = + new Dictionary>>(); + + public readonly ISet _inited = new HashSet(); + + private MockDatabase() { } + + public Dictionary> DataForPrefix(string prefix) + { + if (_data.TryGetValue(prefix ?? "", out var ret)) + { + return ret; + } + var d = new Dictionary>(); + _data[prefix ?? ""] = d; + return d; + } + + public Dictionary DataForPrefixAndKind(string prefix, DataKind kind) + { + var dfp = DataForPrefix(prefix); + if (dfp.TryGetValue(kind, out var ret)) + { + return ret; + } + var d = new Dictionary(); + dfp[kind] = d; + return d; + } + + public void Clear(string prefix) + { + _data.Remove(prefix ?? ""); + _inited.Remove(prefix ?? ""); + } + + public bool Inited(string prefix) => _inited.Contains(prefix ?? ""); + + public void SetInited(string prefix) => _inited.Add(prefix ?? ""); + } +} diff --git a/pkgs/shared/dotnet-server-sdk-shared-tests/test/DataStore/MockSyncStore.cs b/pkgs/shared/dotnet-server-sdk-shared-tests/test/DataStore/MockSyncStore.cs new file mode 100644 index 00000000..1dd3a26f --- /dev/null +++ b/pkgs/shared/dotnet-server-sdk-shared-tests/test/DataStore/MockSyncStore.cs @@ -0,0 +1,53 @@ +using System.Collections.Generic; +using LaunchDarkly.Sdk.Server.Subsystems; + +using static LaunchDarkly.Sdk.Server.Subsystems.DataStoreTypes; + +namespace LaunchDarkly.Sdk.Server.SharedTests.DataStore +{ + public class MockSyncStore : IPersistentDataStore + { + private readonly MockDatabase _db; + private readonly string _prefix; + + public MockSyncStore(MockDatabase db, string prefix) + { + _db = db; + _prefix = prefix ?? ""; + } + + public void Dispose() { } + + public SerializedItemDescriptor? 
Get(DataKind kind, string key) => + _db.DataForPrefixAndKind(_prefix, kind).TryGetValue(key, out var ret) ? + ret : (SerializedItemDescriptor?)null; + + public KeyedItems GetAll(DataKind kind) => + new KeyedItems(_db.DataForPrefixAndKind(_prefix, kind)); + + public void Init(FullDataSet allData) + { + _db.DataForPrefix(_prefix).Clear(); + foreach (var coll in allData.Data) + { + _db.DataForPrefix(_prefix)[coll.Key] = new Dictionary(coll.Value.Items); + } + _db.SetInited(_prefix); + } + + public bool Initialized() => _db.Inited(_prefix); + + public bool IsStoreAvailable() => true; + + public bool Upsert(DataKind kind, string key, SerializedItemDescriptor item) + { + var dict = _db.DataForPrefixAndKind(_prefix, kind); + if (dict.TryGetValue(key, out var oldItem) && oldItem.Version >= item.Version) + { + return false; + } + dict[key] = item; + return true; + } + } +} diff --git a/pkgs/shared/dotnet-server-sdk-shared-tests/test/DataStore/PersistentDataStoreBaseTestsAsyncTest.cs b/pkgs/shared/dotnet-server-sdk-shared-tests/test/DataStore/PersistentDataStoreBaseTestsAsyncTest.cs new file mode 100644 index 00000000..65a95ebd --- /dev/null +++ b/pkgs/shared/dotnet-server-sdk-shared-tests/test/DataStore/PersistentDataStoreBaseTestsAsyncTest.cs @@ -0,0 +1,40 @@ +using System.Threading.Tasks; +using LaunchDarkly.Sdk.Server.Subsystems; +using Xunit; +using Xunit.Abstractions; + +namespace LaunchDarkly.Sdk.Server.SharedTests.DataStore +{ + // This runs PersistentDataStoreBaseTests against a mock store implementation that is known to + // behave as expected, to verify that the test suite logic has the correct expectations. + + [Collection("Sequential")] // don't want this and the other test class to run simultaneously + public class PersistentDataStoreBaseTestsAsyncTest : PersistentDataStoreBaseTests + { + protected override PersistentDataStoreTestConfig Configuration => + new PersistentDataStoreTestConfig + { + StoreAsyncFactoryFunc = CreateStoreFactory, + ClearDataAction = ClearAllData, + }; + + public PersistentDataStoreBaseTestsAsyncTest(ITestOutputHelper testOutput) : base(testOutput) { } + + private IComponentConfigurer CreateStoreFactory(string prefix) => + new MockAsyncStoreFactory { Database = MockDatabase.Instance, Prefix = prefix }; + +#pragma warning disable CS1998 // Async method lacks 'await' operators and will run synchronously + private async Task ClearAllData(string prefix) => + MockDatabase.Instance.Clear(prefix); +#pragma warning restore CS1998 + + private class MockAsyncStoreFactory : IComponentConfigurer + { + internal MockDatabase Database { get; set; } + internal string Prefix { get; set; } + + public IPersistentDataStoreAsync Build(LdClientContext context) => + new MockAsyncStore(Database, Prefix); + } + } +} diff --git a/pkgs/shared/dotnet-server-sdk-shared-tests/test/DataStore/PersistentDataStoreBaseTestsSyncTest.cs b/pkgs/shared/dotnet-server-sdk-shared-tests/test/DataStore/PersistentDataStoreBaseTestsSyncTest.cs new file mode 100644 index 00000000..0d429b96 --- /dev/null +++ b/pkgs/shared/dotnet-server-sdk-shared-tests/test/DataStore/PersistentDataStoreBaseTestsSyncTest.cs @@ -0,0 +1,40 @@ +using System.Threading.Tasks; +using LaunchDarkly.Sdk.Server.Subsystems; +using Xunit; +using Xunit.Abstractions; + +namespace LaunchDarkly.Sdk.Server.SharedTests.DataStore +{ + // This runs PersistentDataStoreBaseTests against a mock store implementation that is known to + // behave as expected, to verify that the test suite logic has the correct expectations. 
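+    // Both this class and the async variant write to the shared MockDatabase.Instance singleton,
+    // which is why they are placed in the same xunit collection below and therefore run
+    // sequentially instead of in parallel.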
+ + [Collection("Sequential")] // don't want this and the other test class to run simultaneously + public class PersistentDataStoreBaseTestsSyncTest : PersistentDataStoreBaseTests + { + protected override PersistentDataStoreTestConfig Configuration => + new PersistentDataStoreTestConfig + { + StoreFactoryFunc = CreateStoreFactory, + ClearDataAction = ClearAllData, + }; + + public PersistentDataStoreBaseTestsSyncTest(ITestOutputHelper testOutput) : base(testOutput) { } + + private IComponentConfigurer CreateStoreFactory(string prefix) => + new MockSyncStoreFactory { Database = MockDatabase.Instance, Prefix = prefix }; + +#pragma warning disable CS1998 // Async method lacks 'await' operators and will run synchronously + private async Task ClearAllData(string prefix) => + MockDatabase.Instance.Clear(prefix); +#pragma warning restore CS1998 + + private class MockSyncStoreFactory : IComponentConfigurer + { + internal MockDatabase Database { get; set; } + internal string Prefix { get; set; } + + public IPersistentDataStore Build(LdClientContext context) => + new MockSyncStore(Database, Prefix); + } + } +} diff --git a/pkgs/shared/dotnet-server-sdk-shared-tests/test/LaunchDarkly.ServerSdk.SharedTests.Tests.csproj b/pkgs/shared/dotnet-server-sdk-shared-tests/test/LaunchDarkly.ServerSdk.SharedTests.Tests.csproj new file mode 100644 index 00000000..2c062013 --- /dev/null +++ b/pkgs/shared/dotnet-server-sdk-shared-tests/test/LaunchDarkly.ServerSdk.SharedTests.Tests.csproj @@ -0,0 +1,16 @@ + + + net8.0 + false + LaunchDarkly.Sdk.Server.SharedTests + + + + + + + + + + + diff --git a/release-please-config.json b/release-please-config.json index 9ab33f20..9318b0fe 100644 --- a/release-please-config.json +++ b/release-please-config.json @@ -3,6 +3,27 @@ "separate-pull-requests": true, "include-component-in-tag": true, "packages": { + "pkgs/dotnet-server-sdk-consul": { + "release-type": "simple", + "package-name": "LaunchDarkly.ServerSdk.Consul", + "extra-files": [ + "src/LaunchDarkly.ServerSdk.Consul.csproj" + ] + }, + "pkgs/dotnet-server-sdk-dynamodb": { + "release-type": "simple", + "package-name": "LaunchDarkly.ServerSdk.DynamoDb", + "extra-files": [ + "src/LaunchDarkly.ServerSdk.DynamoDb.csproj" + ] + }, + "pkgs/dotnet-server-sdk-redis": { + "release-type": "simple", + "package-name": "LaunchDarkly.ServerSdk.Redis", + "extra-files": [ + "src/LaunchDarkly.ServerSdk.Redis.csproj" + ] + }, "pkgs/sdk/server": { "package-name": "LaunchDarkly.ServerSdk", "extra-files": [