diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index 39058db..1b6cbe6 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -2,17 +2,18 @@
 # See LICENSE file for licensing details.
 name: Tests
 
-concurrency:
-  group: ${{ github.workflow }}-${{ github.ref }}
-  cancel-in-progress: true
-
 on:
   workflow_dispatch:
-  workflow_call:
   pull_request:
+  workflow_call:
   schedule:
     - cron: "53 0 * * *" # Daily at 00:53 UTC
 
+env:
+  JUJU_CHANNEL: "3.6/stable"
+  MICROK8S_CHANNEL: "1.28-strict/stable"
+  JUJU_BOOTSTRAP_OPTIONS: "--agent-version 3.6.12"
+
 jobs:
   lint:
     name: Lint
@@ -24,54 +25,70 @@ jobs:
       - name: Install tox
        run: pipx install tox
       - name: Run linters
-        run: make lint
+        run: tox run -e lint
 
-  integration-test:
+  integration-test-terraform:
     strategy:
       fail-fast: false
-      max-parallel: 5
+      max-parallel: 2
       matrix:
-        tests:
-          - tox-environment: integration-bundle
-            tls: "true"
-          - tox-environment: integration-e2e
-            tls: "false"
-          - tox-environment: integration-e2e
-            tls: "true"
-          - tox-environment: integration-e2e-backup
-            tls: "false"
+        tox-environment:
+          - integration-terraform
+        kraft-mode:
+          - single
+          - multi
         juju:
-          - snap_channel: "3.4/stable"
-            agent: "3.4.2"
           - snap_channel: "3.6/stable"
-            agent: "3.6.0"
-    name: ${{ matrix.tests.tox-environment }}_${{ matrix.tests.tls }}_${{ matrix.juju.agent || matrix.juju.snap_channel }}
+            agent: "3.6.10"
+    runs-on: [self-hosted, linux, AMD64, X64, xlarge, noble]
+    name: ${{ matrix.tox-environment }}_${{ matrix.kraft-mode }}_${{ matrix.juju.agent || matrix.juju.snap_channel }}
     needs:
       - lint
-    runs-on: ubuntu-22.04
     timeout-minutes: 120
     steps:
-      - name: (GitHub hosted) Free up disk space
-        timeout-minutes: 5
-        run: |
-          printf '\nDisk usage before cleanup\n'
-          df --human-readable
-          # Based on https://github.com/actions/runner-images/issues/2840#issuecomment-790492173
-          rm -r /usr/share/dotnet
-          rm -r /opt/hostedtoolcache/
-          printf '\nDisk usage after cleanup\n'
-          df --human-readable
       - name: Checkout
         uses: actions/checkout@v4
       - name: Setup operator environment
-        uses: charmed-kubernetes/actions-operator@main
-        with:
-          juju-channel: ${{ matrix.juju.snap_channel }}
-          bootstrap-options: "--agent-version ${{ matrix.juju.agent }}"
-          provider: microk8s
-          channel: 1.28-strict/stable
-          microk8s-group: snap_microk8s
-          microk8s-addons: "hostpath-storage dns"
+        run: |
+          sudo snap install concierge --classic
+          # calico has issues with strict confinements, so remove -strict
+          sudo snap install microk8s --channel ${MICROK8S_CHANNEL//"-strict"/} --classic
+          sudo snap install juju --channel $JUJU_CHANNEL
+
+          sudo usermod -a -G microk8s $USER
+          mkdir -p ~/.kube
+          chmod 0700 ~/.kube
+
+          sudo tee /var/snap/microk8s/current/args/certs.d/docker.io/hosts.toml << EOF
+          server = "$DOCKERHUB_MIRROR"
+          [host."${DOCKERHUB_MIRROR#'https://'}"]
+          capabilities = ["pull", "resolve"]
+          EOF
+
+          sudo microk8s stop
+          sudo microk8s start
+
+          # make juju work with microk8s --classic
+          sudo mkdir -p /var/snap/juju/current/microk8s/credentials
+          sudo microk8s config | sudo tee /var/snap/juju/current/microk8s/credentials/client.config
+          sudo chown -R $USER:$USER /var/snap/juju/current/microk8s/credentials
+
+          sudo concierge prepare \
+            -p microk8s \
+            --microk8s-channel ${MICROK8S_CHANNEL//"-strict"/} \
+            --juju-channel $JUJU_CHANNEL \
+            --extra-debs pipx
+
+          IP_ADDR=$(ip -4 -j route get 2.2.2.2 | jq -r '.[] | .prefsrc')
+          sudo microk8s enable dns
+          sudo microk8s enable hostpath-storage
+          sudo microk8s enable metallb:$IP_ADDR-$IP_ADDR
+
+          pipx ensurepath
+          pipx install tox poetry
+      - name: Install terraform snap
+        run: |
+          sudo snap install terraform --channel=latest/stable --classic
       - name: Select tests
         id: select-tests
         run: |
@@ -84,17 +101,5 @@ jobs:
             echo "mark_expression=not unstable" >> $GITHUB_OUTPUT
           fi
       - name: Run integration tests
-        run: |
-          if [[ "${{ matrix.tests.tls }}" == "true" ]]; then
-            FLAGS="--tls"
-          else
-            FLAGS=""
-          fi
-
-          make build TLS=${{ matrix.tests.tls }}
-          echo "======================"
-          echo "Bundle being deployed:"
-          echo "======================"
-          cat build/bundle.yaml
-          echo "======================"
-          tox run -e ${{ matrix.tests.tox-environment }} -- -m '${{ steps.select-tests.outputs.mark_expression }}' $FLAGS
+        run: |
+          tox run -e ${{ matrix.tox-environment }} -- -m '${{ steps.select-tests.outputs.mark_expression }}' --keep-models --kraft-mode=${{ matrix.kraft-mode }}
diff --git a/.github/workflows/ci_e2e.yaml b/.github/workflows/ci_e2e.yaml
deleted file mode 100644
index b208151..0000000
--- a/.github/workflows/ci_e2e.yaml
+++ /dev/null
@@ -1,97 +0,0 @@
-# Copyright 2023 Canonical Ltd.
-# See LICENSE file for licensing details.
-name: Tests (End-to-End)
-
-on:
-  pull_request:
-  workflow_dispatch:
-  workflow_call:
-  schedule:
-    - cron: "53 0 * * SAT" # Every Saturday at 00:53 UTC
-
-jobs:
-  lint:
-    name: Lint
-    runs-on: ubuntu-22.04
-    timeout-minutes: 5
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v4
-      - name: Install tox
-        run: pipx install tox
-      - name: Run linters
-        run: make lint
-
-  e2e-tests:
-    strategy:
-      fail-fast: false
-      max-parallel: 10
-      matrix:
-        tox-environments:
-          - integration-e2e-basic-flow
-          - integration-e2e-password-rotation
-        tls:
-          - false
-          - true
-        options:
-          -
-          - --integrator
-        juju:
-          - snap_channel: "3.4/stable"
-            agent: "3.4.2"
-          - snap_channel: "3.6/stable"
-            agent: "3.6.0"
-    name: ${{ matrix.tox-environments }}_${{matrix.tls}}_${{ matrix.juju.agent || matrix.juju.snap_channel }}
-    needs:
-      - lint
-    # runs-on: ["self-hosted", "linux", "X64", "jammy", "large"]
-    runs-on: ubuntu-22.04
-    timeout-minutes: 120
-    steps:
-      - name: (GitHub hosted) Free up disk space
-        timeout-minutes: 5
-        run: |
-          printf '\nDisk usage before cleanup\n'
-          df --human-readable
-          # Based on https://github.com/actions/runner-images/issues/2840#issuecomment-790492173
-          rm -r /usr/share/dotnet
-          rm -r /opt/hostedtoolcache/
-          printf '\nDisk usage after cleanup\n'
-          df --human-readable
-      - name: Checkout
-        uses: actions/checkout@v4
-      - name: Setup operator environment
-        uses: charmed-kubernetes/actions-operator@main
-        with:
-          juju-channel: ${{ matrix.juju.snap_channel }}
-          bootstrap-options: "--agent-version ${{ matrix.juju.agent }}"
-          provider: microk8s
-          channel: 1.28-strict/stable
-          microk8s-group: snap_microk8s
-          microk8s-addons: "hostpath-storage dns"
-      - name: Select tests
-        id: select-tests
-        run: |
-          if [ "${{ github.event_name }}" == "schedule" ]
-          then
-            echo Running unstable and stable tests
-            echo "mark_expression=" >> $GITHUB_OUTPUT
-          else
-            echo Skipping unstable tests
-            echo "mark_expression=not unstable" >> $GITHUB_OUTPUT
-          fi
-      - name: Run integration e2e tests
-        run: |
-          if [[ "${{ matrix.tls }}" == "true" ]]; then
-            FLAGS="${{matrix.options}} --tls"
-          else
-            FLAGS="${{matrix.options}}"
-          fi
-
-          make build TLS=${{ matrix.tls }}
-          echo "======================"
-          echo "Bundle being deployed:"
-          echo "======================"
-          cat build/bundle.yaml
-          echo "======================"
-          tox run -e ${{ matrix.tox-environments }} -- -m '${{ steps.select-tests.outputs.mark_expression }}' $FLAGS
\ No newline at end of file
diff --git a/.github/workflows/on_bundle_update_available.yaml b/.github/workflows/on_bundle_update_available.yaml
deleted file mode 100644
index 7a24019..0000000
--- a/.github/workflows/on_bundle_update_available.yaml
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright 2023 Canonical Ltd.
-# See LICENSE file for licensing details.
-name: Update bundle
-on:
-  schedule:
-    - cron: '53 0 * * *' # Daily at 00:53 UTC
-  workflow_dispatch:
-
-jobs:
-  update-bundle:
-    name: Update bundle
-    uses: canonical/data-platform-workflows/.github/workflows/update_bundle.yaml@v5
-    with:
-      path-to-bundle-file: releases/3/kafka-k8s/bundle.yaml
-      reviewers: marcoppenheimer,deusebio
-    secrets:
-      token: ${{ secrets.PAT }}
-
-
diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml
deleted file mode 100644
index 3d1e84d..0000000
--- a/.github/workflows/release.yaml
+++ /dev/null
@@ -1,30 +0,0 @@
-name: Release to 3/edge
-
-on:
-  push:
-    branches:
-      - main
-
-jobs:
-  integration-test:
-    uses: ./.github/workflows/ci.yaml
-  publish-k8s-bundle:
-    name: Publish K8s bundle
-    runs-on: ubuntu-22.04
-    timeout-minutes: 5
-    needs:
-      - integration-test
-    env:
-      CHARMCRAFT_AUTH: ${{ secrets.CHARMHUB_TOKEN }}
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-      - name: Install tox
-        run: pipx install tox
-      - name: Install dependencies
-        run: sudo snap install charmcraft --classic
-      - name: Pack and publish bundle
-        run: |
-          make release
diff --git a/.gitignore b/.gitignore
index b44de0e..5ced4fd 100644
--- a/.gitignore
+++ b/.gitignore
@@ -7,3 +7,6 @@ build/
 parts/
 prime/
 stage/
+*.tfstate*
+.terraform*
+
diff --git a/poetry.lock b/poetry.lock
index bbe3880..31dd50a 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,68 +1,4 @@
-# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand.
-
-[[package]]
-name = "asttokens"
-version = "3.0.0"
-description = "Annotate AST trees with source code positions"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "asttokens-3.0.0-py3-none-any.whl", hash = "sha256:e3078351a059199dd5138cb1c706e6430c05eff2ff136af5eb4790f9d28932e2"},
-    {file = "asttokens-3.0.0.tar.gz", hash = "sha256:0dcd8baa8d62b0c1d118b399b2ddba3c4aff271d0d7a9e0d4c1681c79035bbc7"},
-]
-
-[package.extras]
-astroid = ["astroid (>=2,<4)"]
-test = ["astroid (>=2,<4)", "pytest", "pytest-cov", "pytest-xdist"]
-
-[[package]]
-name = "backports-strenum"
-version = "1.3.1"
-description = "Base class for creating enumerated constants that are also subclasses of str"
-optional = false
-python-versions = ">=3.8.6,<3.11"
-files = [
-    {file = "backports_strenum-1.3.1-py3-none-any.whl", hash = "sha256:cdcfe36dc897e2615dc793b7d3097f54d359918fc448754a517e6f23044ccf83"},
-    {file = "backports_strenum-1.3.1.tar.gz", hash = "sha256:77c52407342898497714f0596e86188bb7084f89063226f4ba66863482f42414"},
-]
-
-[[package]]
-name = "bcrypt"
-version = "4.2.1"
-description = "Modern password hashing for your software and your servers"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "bcrypt-4.2.1-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:1340411a0894b7d3ef562fb233e4b6ed58add185228650942bdc885362f32c17"},
-    {file = "bcrypt-4.2.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1ee315739bc8387aa36ff127afc99120ee452924e0df517a8f3e4c0187a0f5f"},
-    {file = "bcrypt-4.2.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8dbd0747208912b1e4ce730c6725cb56c07ac734b3629b60d4398f082ea718ad"},
-    {file = "bcrypt-4.2.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:aaa2e285be097050dba798d537b6efd9b698aa88eef52ec98d23dcd6d7cf6fea"},
-    {file = "bcrypt-4.2.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:76d3e352b32f4eeb34703370e370997065d28a561e4a18afe4fef07249cb4396"},
-    {file = "bcrypt-4.2.1-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:b7703ede632dc945ed1172d6f24e9f30f27b1b1a067f32f68bf169c5f08d0425"},
-    {file = "bcrypt-4.2.1-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:89df2aea2c43be1e1fa066df5f86c8ce822ab70a30e4c210968669565c0f4685"},
-    {file = "bcrypt-4.2.1-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:04e56e3fe8308a88b77e0afd20bec516f74aecf391cdd6e374f15cbed32783d6"},
-    {file = "bcrypt-4.2.1-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:cfdf3d7530c790432046c40cda41dfee8c83e29482e6a604f8930b9930e94139"},
-    {file = "bcrypt-4.2.1-cp37-abi3-win32.whl", hash = "sha256:adadd36274510a01f33e6dc08f5824b97c9580583bd4487c564fc4617b328005"},
-    {file = "bcrypt-4.2.1-cp37-abi3-win_amd64.whl", hash = "sha256:8c458cd103e6c5d1d85cf600e546a639f234964d0228909d8f8dbeebff82d526"},
-    {file = "bcrypt-4.2.1-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:8ad2f4528cbf0febe80e5a3a57d7a74e6635e41af1ea5675282a33d769fba413"},
-    {file = "bcrypt-4.2.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:909faa1027900f2252a9ca5dfebd25fc0ef1417943824783d1c8418dd7d6df4a"},
-    {file = "bcrypt-4.2.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cde78d385d5e93ece5479a0a87f73cd6fa26b171c786a884f955e165032b262c"},
-    {file = "bcrypt-4.2.1-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:533e7f3bcf2f07caee7ad98124fab7499cb3333ba2274f7a36cf1daee7409d99"},
-    {file = "bcrypt-4.2.1-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:687cf30e6681eeda39548a93ce9bfbb300e48b4d445a43db4298d2474d2a1e54"},
-    {file = "bcrypt-4.2.1-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:041fa0155c9004eb98a232d54da05c0b41d4b8e66b6fc3cb71b4b3f6144ba837"},
-    {file = "bcrypt-4.2.1-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f85b1ffa09240c89aa2e1ae9f3b1c687104f7b2b9d2098da4e923f1b7082d331"},
-    {file = "bcrypt-4.2.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:c6f5fa3775966cca251848d4d5393ab016b3afed251163c1436fefdec3b02c84"},
-    {file = "bcrypt-4.2.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:807261df60a8b1ccd13e6599c779014a362ae4e795f5c59747f60208daddd96d"},
-    {file = "bcrypt-4.2.1-cp39-abi3-win32.whl", hash = "sha256:b588af02b89d9fad33e5f98f7838bf590d6d692df7153647724a7f20c186f6bf"},
-    {file = "bcrypt-4.2.1-cp39-abi3-win_amd64.whl", hash = "sha256:e84e0e6f8e40a242b11bce56c313edc2be121cec3e0ec2d76fce01f6af33c07c"},
-    {file = "bcrypt-4.2.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:76132c176a6d9953cdc83c296aeaed65e1a708485fd55abf163e0d9f8f16ce0e"},
-    {file = "bcrypt-4.2.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e158009a54c4c8bc91d5e0da80920d048f918c61a581f0a63e4e93bb556d362f"},
-    {file = "bcrypt-4.2.1.tar.gz", hash = "sha256:6765386e3ab87f569b276988742039baab087b2cdb01e809d74e74503c2faafe"},
-]
-
-[package.extras]
-tests = ["pytest (>=3.2.1,!=3.3.0)"]
-typecheck = ["mypy"]
+# This file is automatically @generated by Poetry 2.2.1 and should not be changed by hand.
 
 [[package]]
 name = "black"
@@ -70,6 +6,7 @@ version = "22.12.0"
 description = "The uncompromising code formatter."
 optional = false
 python-versions = ">=3.7"
+groups = ["fmt", "lint"]
 files = [
     {file = "black-22.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eedd20838bd5d75b80c9f5487dbcb06836a43833a37846cf1d8c1cc01cef59d"},
     {file = "black-22.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:159a46a4947f73387b4d83e87ea006dbb2337eab6c879620a3ba52699b1f4351"},
@@ -104,6 +41,7 @@ version = "1.36.20"
 description = "The AWS SDK for Python"
 optional = false
 python-versions = ">=3.8"
+groups = ["integration"]
 files = [
     {file = "boto3-1.36.20-py3-none-any.whl", hash = "sha256:e132e31232ee107f1c187f566d96863a907433e5bdd8d8928effddd30a96242f"},
     {file = "boto3-1.36.20.tar.gz", hash = "sha256:4a27ffc0543c2a429600542047f00c6a1e95270139d36d8cc636e9cc9a78b835"},
@@ -123,6 +61,7 @@ version = "1.36.19"
 description = "Type annotations for boto3 1.36.19 generated with mypy-boto3-builder 8.9.0"
 optional = false
 python-versions = ">=3.8"
+groups = ["integration"]
 files = [
     {file = "boto3_stubs-1.36.19-py3-none-any.whl", hash = "sha256:1bd23cd3a9f7bd313123a3f90ce5da452d9b3782f4a72b40eace6201671b6094"},
     {file = "boto3_stubs-1.36.19.tar.gz", hash = "sha256:644e661c5dad882b31a0691d8d5274c99db8d79221d32e9d7c8d077a7fa38834"},
@@ -549,6 +488,7 @@ version = "1.36.20"
 description = "Low-level, data-driven core of boto 3."
 optional = false
 python-versions = ">=3.8"
+groups = ["integration"]
 files = [
     {file = "botocore-1.36.20-py3-none-any.whl", hash = "sha256:0110bf2208e4569659d0ccfca94baa4999501334397987b02712a94493cbf48b"},
     {file = "botocore-1.36.20.tar.gz", hash = "sha256:3815a05518ff03a8dbc8d5a3c29b95889409a25ac87a282067f6e26fefb7c40a"},
@@ -568,6 +508,7 @@ version = "1.36.18"
 description = "Type annotations and code completion for botocore"
 optional = false
 python-versions = ">=3.8"
+groups = ["integration"]
 files = [
     {file = "botocore_stubs-1.36.18-py3-none-any.whl", hash = "sha256:1dac8d9527a57a6f322e4db7533cb9247798791cbcc3d3ee847f354d68eca870"},
     {file = "botocore_stubs-1.36.18.tar.gz", hash = "sha256:6d21b2fdf375fab25c403fee63ee31b02fa72253709ce06460933a869160fd32"},
@@ -579,23 +520,13 @@ types-awscrt = "*"
 
 [package.extras]
 botocore = ["botocore"]
 
-[[package]]
-name = "cachetools"
-version = "5.5.1"
-description = "Extensible memoizing collections and decorators"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "cachetools-5.5.1-py3-none-any.whl", hash = "sha256:b76651fdc3b24ead3c648bbdeeb940c1b04d365b38b4af66788f9ec4a81d42bb"},
-    {file = "cachetools-5.5.1.tar.gz", hash = "sha256:70f238fbba50383ef62e55c6aff6d9673175fe59f7c6782c7a0b9e38f4a9df95"},
-]
-
 [[package]]
 name = "certifi"
 version = "2025.1.31"
 description = "Python package for providing Mozilla's CA Bundle."
 optional = false
 python-versions = ">=3.6"
+groups = ["integration"]
 files = [
     {file = "certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe"},
     {file = "certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651"},
@@ -607,6 +538,8 @@ version = "1.17.1"
 description = "Foreign Function Interface for Python calling C code."
 optional = false
 python-versions = ">=3.8"
+groups = ["integration"]
+markers = "platform_python_implementation != \"PyPy\""
 files = [
     {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"},
     {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"},
@@ -686,6 +619,7 @@ version = "3.4.1"
 description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
 optional = false
 python-versions = ">=3.7"
+groups = ["integration"]
 files = [
     {file = "charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de"},
     {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176"},
@@ -787,6 +721,7 @@ version = "8.1.8"
 description = "Composable command line interface toolkit"
 optional = false
 python-versions = ">=3.7"
+groups = ["fmt", "lint"]
 files = [
     {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"},
     {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"},
@@ -801,6 +736,7 @@ version = "2.4.1"
 description = "Fix common misspellings in text files"
 optional = false
 python-versions = ">=3.8"
+groups = ["lint"]
 files = [
     {file = "codespell-2.4.1-py3-none-any.whl", hash = "sha256:3dadafa67df7e4a3dbf51e0d7315061b80d265f9552ebd699b3dd6834b47e425"},
     {file = "codespell-2.4.1.tar.gz", hash = "sha256:299fcdcb09d23e81e35a671bbe746d5ad7e8385972e65dbb833a2eaac33c01e5"},
@@ -809,7 +745,7 @@ files = [
 [package.extras]
 dev = ["Pygments", "build", "chardet", "pre-commit", "pytest", "pytest-cov", "pytest-dependency", "ruff", "tomli", "twine"]
 hard-encoding-detection = ["chardet"]
-toml = ["tomli"]
+toml = ["tomli ; python_version < \"3.11\""]
 types = ["chardet (>=5.1.0)", "mypy", "pytest", "pytest-cov", "pytest-dependency"]
 
 [[package]]
@@ -818,10 +754,12 @@ version = "0.4.6"
 description = "Cross-platform colored terminal text."
 optional = false
 python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
+groups = ["fmt", "integration", "lint", "unit"]
 files = [
     {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
     {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
 ]
+markers = {fmt = "platform_system == \"Windows\"", integration = "sys_platform == \"win32\"", lint = "platform_system == \"Windows\"", unit = "sys_platform == \"win32\""}
 
 [[package]]
 name = "coverage"
@@ -829,6 +767,7 @@ version = "7.6.12"
 description = "Code coverage measurement for Python"
 optional = false
 python-versions = ">=3.9"
+groups = ["integration", "unit"]
 files = [
     {file = "coverage-7.6.12-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:704c8c8c6ce6569286ae9622e534b4f5b9759b6f2cd643f1c1a61f666d534fe8"},
     {file = "coverage-7.6.12-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ad7525bf0241e5502168ae9c643a2f6c219fa0a283001cee4cf23a9b7da75879"},
@@ -899,7 +838,7 @@ files = [
 tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""}
 
 [package.extras]
-toml = ["tomli"]
+toml = ["tomli ; python_full_version <= \"3.11.0a6\""]
 
 [[package]]
 name = "cryptography"
@@ -907,6 +846,7 @@ version = "44.0.1"
 description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
 optional = false
 python-versions = "!=3.9.0,!=3.9.1,>=3.7"
+groups = ["integration"]
 files = [
     {file = "cryptography-44.0.1-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:bf688f615c29bfe9dfc44312ca470989279f0e94bb9f631f85e3459af8efc009"},
     {file = "cryptography-44.0.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd7c7e2d71d908dc0f8d2027e1604102140d84b155e658c20e8ad1304317691f"},
@@ -945,32 +885,22 @@ files = [
 cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""}
 
 [package.extras]
-docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=3.0.0)"]
+docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=3.0.0) ; python_version >= \"3.8\""]
 docstest = ["pyenchant (>=3)", "readme-renderer (>=30.0)", "sphinxcontrib-spelling (>=7.3.1)"]
-nox = ["nox (>=2024.4.15)", "nox[uv] (>=2024.3.2)"]
-pep8test = ["check-sdist", "click (>=8.0.1)", "mypy (>=1.4)", "ruff (>=0.3.6)"]
+nox = ["nox (>=2024.4.15)", "nox[uv] (>=2024.3.2) ; python_version >= \"3.8\""]
+pep8test = ["check-sdist ; python_version >= \"3.8\"", "click (>=8.0.1)", "mypy (>=1.4)", "ruff (>=0.3.6)"]
 sdist = ["build (>=1.0.0)"]
 ssh = ["bcrypt (>=3.1.5)"]
 test = ["certifi (>=2024)", "cryptography-vectors (==44.0.1)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"]
 test-randomorder = ["pytest-randomly"]
 
-[[package]]
-name = "decorator"
-version = "5.1.1"
-description = "Decorators for Humans"
-optional = false
-python-versions = ">=3.5"
-files = [
-    {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"},
-    {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"},
-]
-
 [[package]]
 name = "dnspython"
 version = "2.7.0"
 description = "DNS toolkit"
 optional = false
 python-versions = ">=3.9"
+groups = ["integration"]
 files = [
     {file = "dnspython-2.7.0-py3-none-any.whl", hash = "sha256:b4c34b7d10b51bcc3a5071e7b8dee77939f1e878477eeecc965e9835f63c6c86"},
     {file = "dnspython-2.7.0.tar.gz", hash = "sha256:ce9c432eda0dc91cf618a5cedf1a4e142651196bbcd2c80e89ed5a907e5cfaf1"},
@@ -991,6 +921,8 @@ version = "1.2.2"
 description = "Backport of PEP 654 (exception groups)"
 optional = false
 python-versions = ">=3.7"
+groups = ["integration", "unit"]
+markers = "python_version == \"3.10\""
 files = [
     {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"},
     {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"},
@@ -999,67 +931,13 @@ files = [
 [package.extras]
 test = ["pytest (>=6)"]
 
-[[package]]
-name = "executing"
-version = "2.2.0"
-description = "Get the currently executing AST node of a frame, and other information"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "executing-2.2.0-py2.py3-none-any.whl", hash = "sha256:11387150cad388d62750327a53d3339fad4888b39a6fe233c3afbb54ecffd3aa"},
-    {file = "executing-2.2.0.tar.gz", hash = "sha256:5d108c028108fe2551d1a7b2e8b713341e2cb4fc0aa7dcf966fa4327a5226755"},
-]
-
-[package.extras]
-tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich"]
-
-[[package]]
-name = "google-auth"
-version = "2.38.0"
-description = "Google Authentication Library"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "google_auth-2.38.0-py2.py3-none-any.whl", hash = "sha256:e7dae6694313f434a2727bf2906f27ad259bae090d7aa896590d86feec3d9d4a"},
-    {file = "google_auth-2.38.0.tar.gz", hash = "sha256:8285113607d3b80a3f1543b75962447ba8a09fe85783432a784fdeef6ac094c4"},
-]
-
-[package.dependencies]
-cachetools = ">=2.0.0,<6.0"
-pyasn1-modules = ">=0.2.1"
-rsa = ">=3.1.4,<5"
-
-[package.extras]
-aiohttp = ["aiohttp (>=3.6.2,<4.0.0.dev0)", "requests (>=2.20.0,<3.0.0.dev0)"]
-enterprise-cert = ["cryptography", "pyopenssl"]
-pyjwt = ["cryptography (>=38.0.3)", "pyjwt (>=2.0)"]
-pyopenssl = ["cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"]
-reauth = ["pyu2f (>=0.1.5)"]
-requests = ["requests (>=2.20.0,<3.0.0.dev0)"]
-
-[[package]]
-name = "hvac"
-version = "2.3.0"
-description = "HashiCorp Vault API client"
-optional = false
-python-versions = "<4.0,>=3.8"
-files = [
-    {file = "hvac-2.3.0-py3-none-any.whl", hash = "sha256:a3afc5710760b6ee9b3571769df87a0333da45da05a5f9f963e1d3925a84be7d"},
-    {file = "hvac-2.3.0.tar.gz", hash = "sha256:1b85e3320e8642dd82f234db63253cda169a817589e823713dc5fca83119b1e2"},
-]
-
-[package.dependencies]
-requests = ">=2.27.1,<3.0.0"
-
-[package.extras]
-parser = ["pyhcl (>=0.4.4,<0.5.0)"]
-
 [[package]]
 name = "idna"
 version = "3.10"
 description = "Internationalized Domain Names in Applications (IDNA)"
 optional = false
 python-versions = ">=3.6"
+groups = ["integration"]
 files = [
     {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"},
     {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"},
@@ -1074,90 +952,19 @@ version = "2.0.0"
 description = "brain-dead simple config-ini parsing"
 optional = false
 python-versions = ">=3.7"
+groups = ["integration", "unit"]
 files = [
     {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"},
     {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"},
 ]
 
-[[package]]
-name = "ipdb"
-version = "0.13.13"
-description = "IPython-enabled pdb"
-optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
-files = [
-    {file = "ipdb-0.13.13-py3-none-any.whl", hash = "sha256:45529994741c4ab6d2388bfa5d7b725c2cf7fe9deffabdb8a6113aa5ed449ed4"},
-    {file = "ipdb-0.13.13.tar.gz", hash = "sha256:e3ac6018ef05126d442af680aad863006ec19d02290561ac88b8b1c0b0cfc726"},
-]
-
-[package.dependencies]
-decorator = {version = "*", markers = "python_version > \"3.6\""}
-ipython = {version = ">=7.31.1", markers = "python_version > \"3.6\""}
-tomli = {version = "*", markers = "python_version > \"3.6\" and python_version < \"3.11\""}
-
-[[package]]
-name = "ipython"
-version = "8.32.0"
-description = "IPython: Productive Interactive Computing"
-optional = false
-python-versions = ">=3.10"
-files = [
-    {file = "ipython-8.32.0-py3-none-any.whl", hash = "sha256:cae85b0c61eff1fc48b0a8002de5958b6528fa9c8defb1894da63f42613708aa"},
-    {file = "ipython-8.32.0.tar.gz", hash = "sha256:be2c91895b0b9ea7ba49d33b23e2040c352b33eb6a519cca7ce6e0c743444251"},
-]
-
-[package.dependencies]
-colorama = {version = "*", markers = "sys_platform == \"win32\""}
-decorator = "*"
-exceptiongroup = {version = "*", markers = "python_version < \"3.11\""}
-jedi = ">=0.16"
-matplotlib-inline = "*"
-pexpect = {version = ">4.3", markers = "sys_platform != \"win32\" and sys_platform != \"emscripten\""}
-prompt_toolkit = ">=3.0.41,<3.1.0"
-pygments = ">=2.4.0"
-stack_data = "*"
-traitlets = ">=5.13.0"
-typing_extensions = {version = ">=4.6", markers = "python_version < \"3.12\""}
-
-[package.extras]
-all = ["ipython[black,doc,kernel,matplotlib,nbconvert,nbformat,notebook,parallel,qtconsole]", "ipython[test,test-extra]"]
-black = ["black"]
-doc = ["docrepr", "exceptiongroup", "intersphinx_registry", "ipykernel", "ipython[test]", "matplotlib", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "sphinxcontrib-jquery", "tomli", "typing_extensions"]
-kernel = ["ipykernel"]
-matplotlib = ["matplotlib"]
-nbconvert = ["nbconvert"]
-nbformat = ["nbformat"]
-notebook = ["ipywidgets", "notebook"]
-parallel = ["ipyparallel"]
-qtconsole = ["qtconsole"]
-test = ["packaging", "pickleshare", "pytest", "pytest-asyncio (<0.22)", "testpath"]
-test-extra = ["curio", "ipython[test]", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.23)", "pandas", "trio"]
-
-[[package]]
-name = "jedi"
-version = "0.19.2"
-description = "An autocompletion tool for Python that can be used for text editors."
-optional = false
-python-versions = ">=3.6"
-files = [
-    {file = "jedi-0.19.2-py2.py3-none-any.whl", hash = "sha256:a8ef22bde8490f57fe5c7681a3c83cb58874daf72b4784de3cce5b6ef6edb5b9"},
-    {file = "jedi-0.19.2.tar.gz", hash = "sha256:4770dc3de41bde3966b02eb84fbcf557fb33cce26ad23da12c742fb50ecb11f0"},
-]
-
-[package.dependencies]
-parso = ">=0.8.4,<0.9.0"
-
-[package.extras]
-docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx (==1.8.5)", "sphinx-rtd-theme (==0.4.3)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"]
-qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"]
-testing = ["Django", "attrs", "colorama", "docopt", "pytest (<9.0.0)"]
-
 [[package]]
 name = "jinja2"
 version = "3.1.5"
 description = "A very fast and expressive template engine."
 optional = false
 python-versions = ">=3.7"
+groups = ["render"]
 files = [
     {file = "jinja2-3.1.5-py3-none-any.whl", hash = "sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb"},
     {file = "jinja2-3.1.5.tar.gz", hash = "sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb"},
@@ -1175,6 +982,7 @@ version = "0.8.2"
 description = "A CLI interface to Jinja2"
 optional = false
 python-versions = "*"
+groups = ["render"]
 files = [
     {file = "jinja2-cli-0.8.2.tar.gz", hash = "sha256:a16bb1454111128e206f568c95938cdef5b5a139929378f72bb8cf6179e18e50"},
     {file = "jinja2_cli-0.8.2-py2.py3-none-any.whl", hash = "sha256:b91715c79496beaddad790171e7258a87db21c1a0b6d2b15bca3ba44b74aac5d"},
@@ -1195,40 +1003,26 @@ version = "1.0.1"
 description = "JSON Matching Expressions"
 optional = false
 python-versions = ">=3.7"
+groups = ["integration"]
 files = [
     {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"},
     {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"},
 ]
 
 [[package]]
-name = "juju"
-version = "3.6.1.0"
-description = "Python library for Juju"
+name = "jubilant"
+version = "1.6.2"
+description = "Juju CLI wrapper, primarily for charm integration testing"
 optional = false
-python-versions = ">=3.8.6"
+python-versions = ">=3.8"
+groups = ["integration"]
 files = [
-    {file = "juju-3.6.1.0-py3-none-any.whl", hash = "sha256:28b6a10093f2e0243ad0ddd5ef25a3f59d710e9da5a188456ba704142819fbb3"},
-    {file = "juju-3.6.1.0.tar.gz", hash = "sha256:59cfde55185bb53877a2bddc2855f3c48471537e130653d77984681676a448bc"},
+    {file = "jubilant-1.6.2-py3-none-any.whl", hash = "sha256:fa81995b64e0519fa59407beabbbc2aae097437895addcb3df54f33ab207fbfb"},
+    {file = "jubilant-1.6.2.tar.gz", hash = "sha256:d841610fd86f7d77419da8be08e6c936412e581652f55faa23c6c19536b87a8f"},
 ]
 
 [package.dependencies]
-"backports.strenum" = {version = ">=1.3.1", markers = "python_version < \"3.11\""}
-hvac = "*"
-kubernetes = ">=12.0.1,<31.0.0"
-macaroonbakery = ">=1.1,<2.0"
-packaging = "*"
-paramiko = ">=2.4.0"
-pyasn1 = ">=0.4.4"
-pyRFC3339 = ">=1.0,<2.0"
-pyyaml = ">=5.1.2"
-toposort = ">=1.5,<2"
-typing-extensions = ">=4.5.0"
-typing_inspect = ">=0.6.0"
-websockets = ">=13.0.1"
-
-[package.extras]
-dev = ["Twine", "freezegun", "pytest", "pytest-asyncio", "typing-inspect"]
-docs = ["sphinx (==5.3.0)", "sphinx_rtd_theme", "sphinxcontrib-asyncio"]
+PyYAML = "==6.*"
 
 [[package]]
 name = "kafka-python"
@@ -1236,6 +1030,7 @@ version = "2.0.3"
 description = "Pure Python client for Apache Kafka"
 optional = false
 python-versions = "*"
+groups = ["integration"]
 files = [
     {file = "kafka-python-2.0.3.tar.gz", hash = "sha256:9ef48bc21f4ac78d80b9f424e5f0f0cd63a29652f5e5f4369960434806f1bbdd"},
     {file = "kafka_python-2.0.3-py2.py3-none-any.whl", hash = "sha256:ff28bee2430359256d947cffaf951b95e9c66697b59af28014d5b28e160a6084"},
@@ -1253,6 +1048,7 @@ version = "2.10.0"
 description = "\"Higher Level Zookeeper Client\""
 optional = false
 python-versions = "*"
+groups = ["integration"]
 files = [
     {file = "kazoo-2.10.0-py2.py3-none-any.whl", hash = "sha256:de2d69168de432ff66b457a26c727a5bf7ff53af5806653fd1df7f04b6a5483c"},
     {file = "kazoo-2.10.0.tar.gz", hash = "sha256:905796ae4f4c12bd4e4ae92e6e5d018439e6b56c8cfbb24825362e79b230dab1"},
@@ -1265,60 +1061,16 @@ docs = ["Sphinx (>=1.2.2)", "sphinx-autodoc-typehints (>=1)"]
 eventlet = ["eventlet (>=0.17.1)"]
 gevent = ["gevent (>=1.2)"]
 sasl = ["pure-sasl (>=0.5.1)"]
-test = ["eventlet (>=0.17.1)", "gevent (>=1.2)", "objgraph", "pyjks", "pyopenssl", "pytest", "pytest-cov"]
+test = ["eventlet (>=0.17.1) ; implementation_name != \"pypy\"", "gevent (>=1.2) ; implementation_name != \"pypy\"", "objgraph", "pyjks", "pyopenssl", "pytest", "pytest-cov"]
 typing = ["mypy (>=0.991)"]
 
-[[package]]
-name = "kubernetes"
-version = "30.1.0"
-description = "Kubernetes python client"
-optional = false
-python-versions = ">=3.6"
-files = [
-    {file = "kubernetes-30.1.0-py2.py3-none-any.whl", hash = "sha256:e212e8b7579031dd2e512168b617373bc1e03888d41ac4e04039240a292d478d"},
-    {file = "kubernetes-30.1.0.tar.gz", hash = "sha256:41e4c77af9f28e7a6c314e3bd06a8c6229ddd787cad684e0ab9f69b498e98ebc"},
-]
-
-[package.dependencies]
-certifi = ">=14.05.14"
-google-auth = ">=1.0.1"
-oauthlib = ">=3.2.2"
-python-dateutil = ">=2.5.3"
-pyyaml = ">=5.4.1"
-requests = "*"
-requests-oauthlib = "*"
-six = ">=1.9.0"
-urllib3 = ">=1.24.2"
-websocket-client = ">=0.32.0,<0.40.0 || >0.40.0,<0.41.dev0 || >=0.43.dev0"
-
-[package.extras]
-adal = ["adal (>=1.0.2)"]
-
-[[package]]
-name = "macaroonbakery"
-version = "1.3.4"
-description = "A Python library port for bakery, higher level operation to work with macaroons"
-optional = false
-python-versions = "*"
-files = [
-    {file = "macaroonbakery-1.3.4-py2.py3-none-any.whl", hash = "sha256:1e952a189f5c1e96ef82b081b2852c770d7daa20987e2088e762dd5689fb253b"},
-    {file = "macaroonbakery-1.3.4.tar.gz", hash = "sha256:41ca993a23e4f8ef2fe7723b5cd4a30c759735f1d5021e990770c8a0e0f33970"},
-]
-
-[package.dependencies]
-protobuf = ">=3.20.0"
-pymacaroons = ">=0.12.0,<1.0"
-PyNaCl = ">=1.1.2,<2.0"
-pyRFC3339 = ">=1.0,<2.0"
-requests = ">=2.18.1,<3.0"
-six = ">=1.11.0,<2.0"
-
 [[package]]
 name = "markupsafe"
 version = "3.0.2"
 description = "Safely add untrusted strings to HTML/XML markup."
 optional = false
 python-versions = ">=3.9"
+groups = ["render"]
 files = [
     {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"},
     {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"},
@@ -1383,26 +1135,13 @@ files = [
     {file = "markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0"},
 ]
 
-[[package]]
-name = "matplotlib-inline"
-version = "0.1.7"
-description = "Inline Matplotlib backend for Jupyter"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "matplotlib_inline-0.1.7-py3-none-any.whl", hash = "sha256:df192d39a4ff8f21b1895d72e6a13f5fcc5099f00fa84384e0ea28c2cc0653ca"},
-    {file = "matplotlib_inline-0.1.7.tar.gz", hash = "sha256:8423b23ec666be3d16e16b60bdd8ac4e86e840ebd1dd11a30b9f117f2fa0ab90"},
-]
-
-[package.dependencies]
-traitlets = "*"
-
 [[package]]
 name = "mypy-boto3-s3"
 version = "1.36.15"
 description = "Type annotations for boto3 S3 1.36.15 service generated with mypy-boto3-builder 8.9.0"
 optional = false
 python-versions = ">=3.8"
+groups = ["integration"]
 files = [
     {file = "mypy_boto3_s3-1.36.15-py3-none-any.whl", hash = "sha256:a48031b4c84898756f03787baf711cf14cb219b5cb2e433a369c018544bfbffa"},
     {file = "mypy_boto3_s3-1.36.15.tar.gz", hash = "sha256:e5400f6a008f746c42bbbe767eb492cec96a211ceb34b6032673aedcd87b3d62"},
@@ -1417,33 +1156,19 @@ version = "1.0.0"
 description = "Type system extensions for programs checked with the mypy type checker."
 optional = false
 python-versions = ">=3.5"
+groups = ["fmt", "lint"]
 files = [
     {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"},
     {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"},
 ]
 
-[[package]]
-name = "oauthlib"
-version = "3.2.2"
-description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic"
-optional = false
-python-versions = ">=3.6"
-files = [
-    {file = "oauthlib-3.2.2-py3-none-any.whl", hash = "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca"},
-    {file = "oauthlib-3.2.2.tar.gz", hash = "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918"},
-]
-
-[package.extras]
-rsa = ["cryptography (>=3.0.0)"]
-signals = ["blinker (>=1.4.0)"]
-signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"]
-
 [[package]]
 name = "ops"
 version = "2.18.1"
 description = "The Python library behind great charms"
 optional = false
 python-versions = ">=3.8"
+groups = ["integration"]
 files = [
     {file = "ops-2.18.1-py3-none-any.whl", hash = "sha256:ba0312366e25b3ae90cf4b8d0af6ea6b612d4951500f856bce609cdb25c9bdeb"},
     {file = "ops-2.18.1.tar.gz", hash = "sha256:5619deb370c00ea851f9579b780a09b88b1a1d020e58e1ed81d31c8fb7b28c8a"},
@@ -1463,78 +1188,31 @@ version = "24.2"
 description = "Core utilities for Python packages"
 optional = false
 python-versions = ">=3.8"
+groups = ["integration", "unit"]
 files = [
     {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"},
     {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"},
 ]
 
-[[package]]
-name = "paramiko"
-version = "3.5.1"
-description = "SSH2 protocol library"
-optional = false
-python-versions = ">=3.6"
-files = [
-    {file = "paramiko-3.5.1-py3-none-any.whl", hash = "sha256:43b9a0501fc2b5e70680388d9346cf252cfb7d00b0667c39e80eb43a408b8f61"},
-    {file = "paramiko-3.5.1.tar.gz", hash = "sha256:b2c665bc45b2b215bd7d7f039901b14b067da00f3a11e6640995fd58f2664822"},
-]
-
-[package.dependencies]
-bcrypt = ">=3.2"
-cryptography = ">=3.3"
-pynacl = ">=1.5"
-
-[package.extras]
-all = ["gssapi (>=1.4.1)", "invoke (>=2.0)", "pyasn1 (>=0.1.7)", "pywin32 (>=2.1.8)"]
-gssapi = ["gssapi (>=1.4.1)", "pyasn1 (>=0.1.7)", "pywin32 (>=2.1.8)"]
-invoke = ["invoke (>=2.0)"]
-
-[[package]]
-name = "parso"
-version = "0.8.4"
-description = "A Python Parser"
-optional = false
-python-versions = ">=3.6"
-files = [
-    {file = "parso-0.8.4-py2.py3-none-any.whl", hash = "sha256:a418670a20291dacd2dddc80c377c5c3791378ee1e8d12bffc35420643d43f18"},
-    {file = "parso-0.8.4.tar.gz", hash = "sha256:eb3a7b58240fb99099a345571deecc0f9540ea5f4dd2fe14c2a99d6b281ab92d"},
-]
-
-[package.extras]
-qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"]
-testing = ["docopt", "pytest"]
-
 [[package]]
 name = "pathspec"
 version = "0.12.1"
 description = "Utility library for gitignore style pattern matching of file paths."
 optional = false
 python-versions = ">=3.8"
+groups = ["fmt", "lint"]
 files = [
     {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"},
     {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"},
 ]
 
-[[package]]
-name = "pexpect"
-version = "4.9.0"
-description = "Pexpect allows easy control of interactive console applications."
-optional = false
-python-versions = "*"
-files = [
-    {file = "pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523"},
-    {file = "pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f"},
-]
-
-[package.dependencies]
-ptyprocess = ">=0.5"
-
 [[package]]
 name = "platformdirs"
 version = "4.3.6"
 description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`."
 optional = false
 python-versions = ">=3.8"
+groups = ["fmt", "lint"]
 files = [
     {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"},
     {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"},
@@ -1551,6 +1229,7 @@ version = "1.5.0"
 description = "plugin and hook calling mechanisms for python"
 optional = false
 python-versions = ">=3.8"
+groups = ["integration", "unit"]
 files = [
     {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"},
     {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"},
@@ -1560,71 +1239,13 @@ files = [
 dev = ["pre-commit", "tox"]
 testing = ["pytest", "pytest-benchmark"]
 
-[[package]]
-name = "prompt-toolkit"
-version = "3.0.50"
-description = "Library for building powerful interactive command lines in Python"
-optional = false
-python-versions = ">=3.8.0"
-files = [
-    {file = "prompt_toolkit-3.0.50-py3-none-any.whl", hash = "sha256:9b6427eb19e479d98acff65196a307c555eb567989e6d88ebbb1b509d9779198"},
-    {file = "prompt_toolkit-3.0.50.tar.gz", hash = "sha256:544748f3860a2623ca5cd6d2795e7a14f3d0e1c3c9728359013f79877fc89bab"},
-]
-
-[package.dependencies]
-wcwidth = "*"
-
-[[package]]
-name = "protobuf"
-version = "5.29.3"
-description = ""
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "protobuf-5.29.3-cp310-abi3-win32.whl", hash = "sha256:3ea51771449e1035f26069c4c7fd51fba990d07bc55ba80701c78f886bf9c888"},
-    {file = "protobuf-5.29.3-cp310-abi3-win_amd64.whl", hash = "sha256:a4fa6f80816a9a0678429e84973f2f98cbc218cca434abe8db2ad0bffc98503a"},
-    {file = "protobuf-5.29.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:a8434404bbf139aa9e1300dbf989667a83d42ddda9153d8ab76e0d5dcaca484e"},
-    {file = "protobuf-5.29.3-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:daaf63f70f25e8689c072cfad4334ca0ac1d1e05a92fc15c54eb9cf23c3efd84"},
-    {file = "protobuf-5.29.3-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:c027e08a08be10b67c06bf2370b99c811c466398c357e615ca88c91c07f0910f"},
-    {file = "protobuf-5.29.3-cp38-cp38-win32.whl", hash = "sha256:84a57163a0ccef3f96e4b6a20516cedcf5bb3a95a657131c5c3ac62200d23252"},
-    {file = "protobuf-5.29.3-cp38-cp38-win_amd64.whl", hash = "sha256:b89c115d877892a512f79a8114564fb435943b59067615894c3b13cd3e1fa107"},
-    {file = "protobuf-5.29.3-cp39-cp39-win32.whl", hash = "sha256:0eb32bfa5219fc8d4111803e9a690658aa2e6366384fd0851064b963b6d1f2a7"},
-    {file = "protobuf-5.29.3-cp39-cp39-win_amd64.whl", hash = "sha256:6ce8cc3389a20693bfde6c6562e03474c40851b44975c9b2bf6df7d8c4f864da"},
-    {file = "protobuf-5.29.3-py3-none-any.whl", hash = "sha256:0a18ed4a24198528f2333802eb075e59dea9d679ab7a6c5efb017a59004d849f"},
-    {file = "protobuf-5.29.3.tar.gz", hash = "sha256:5da0f41edaf117bde316404bad1a486cb4ededf8e4a54891296f648e8e076620"},
-]
-
-[[package]]
-name = "ptyprocess"
-version = "0.7.0"
-description = "Run a subprocess in a pseudo terminal"
-optional = false
-python-versions = "*"
-files = [
-    {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"},
-    {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"},
-]
-
-[[package]]
-name = "pure-eval"
-version = "0.2.3"
-description = "Safely evaluate AST nodes without side effects"
-optional = false
-python-versions = "*"
-files = [
-    {file = "pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0"},
-    {file = "pure_eval-0.2.3.tar.gz", hash = "sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42"},
-]
-
-[package.extras]
-tests = ["pytest"]
-
 [[package]]
 name = "pure-sasl"
 version = "0.6.2"
 description = "Pure Python client SASL implementation"
 optional = false
 python-versions = "*"
+groups = ["integration"]
 files = [
     {file = "pure-sasl-0.6.2.tar.gz", hash = "sha256:53c1355f5da95e2b85b2cc9a6af435518edc20c81193faa0eea65fdc835138f4"},
     {file = "pure_sasl-0.6.2-py2-none-any.whl", hash = "sha256:edb33b1a46eb3c602c0166de0442c0fb41f5ac2bfccbde4775183b105ad89ab2"},
@@ -1633,77 +1254,26 @@ files = [
 [package.extras]
 gssapi = ["kerberos (>=1.3.0)"]
 
-[[package]]
-name = "pyasn1"
-version = "0.6.1"
-description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629"},
-    {file = "pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034"},
-]
-
-[[package]]
-name = "pyasn1-modules"
-version = "0.4.1"
-description = "A collection of ASN.1-based protocols modules"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "pyasn1_modules-0.4.1-py3-none-any.whl", hash = "sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd"},
-    {file = "pyasn1_modules-0.4.1.tar.gz", hash = "sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c"},
-]
-
-[package.dependencies]
-pyasn1 = ">=0.4.6,<0.7.0"
-
 [[package]]
 name = "pycparser"
 version = "2.22"
 description = "C parser in Python"
 optional = false
 python-versions = ">=3.8"
+groups = ["integration"]
+markers = "platform_python_implementation != \"PyPy\""
 files = [
     {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"},
     {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"},
]
 
-[[package]]
-name = "pygments"
-version = "2.19.1"
-description = "Pygments is a syntax highlighting package written in Python."
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c"},
-    {file = "pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f"},
-]
-
-[package.extras]
-windows-terminal = ["colorama (>=0.4.6)"]
-
-[[package]]
-name = "pymacaroons"
-version = "0.13.0"
-description = "Macaroon library for Python"
-optional = false
-python-versions = "*"
-files = [
-    {file = "pymacaroons-0.13.0-py2.py3-none-any.whl", hash = "sha256:3e14dff6a262fdbf1a15e769ce635a8aea72e6f8f91e408f9a97166c53b91907"},
-    {file = "pymacaroons-0.13.0.tar.gz", hash = "sha256:1e6bba42a5f66c245adf38a5a4006a99dcc06a0703786ea636098667d42903b8"},
-]
-
-[package.dependencies]
-PyNaCl = ">=1.1.2,<2.0"
-six = ">=1.8.0"
-
 [[package]]
 name = "pymongo"
 version = "4.11.1"
 description = "Python driver for MongoDB "
 optional = false
 python-versions = ">=3.9"
+groups = ["integration"]
 files = [
     {file = "pymongo-4.11.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e596caec72db62a3f438559dfa46d22faefea1967279f553f936ddcb873903df"},
     {file = "pymongo-4.11.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:15a88b25efcd61c5e539e9204932849b20f393efa330771676e860c4466fe8ad"},
@@ -1770,45 +1340,20 @@ dnspython = ">=1.16.0,<3.0.0"
 
 [package.extras]
 aws = ["pymongo-auth-aws (>=1.1.0,<2.0.0)"]
 docs = ["furo (==2024.8.6)", "readthedocs-sphinx-search (>=0.3,<1.0)", "sphinx (>=5.3,<9)", "sphinx-autobuild (>=2020.9.1)", "sphinx-rtd-theme (>=2,<4)", "sphinxcontrib-shellcheck (>=1,<2)"]
-encryption = ["certifi", "pymongo-auth-aws (>=1.1.0,<2.0.0)", "pymongocrypt (>=1.12.0,<2.0.0)"]
-gssapi = ["pykerberos", "winkerberos (>=0.5.0)"]
-ocsp = ["certifi", "cryptography (>=2.5)", "pyopenssl (>=17.2.0)", "requests (<3.0.0)", "service-identity (>=18.1.0)"]
+encryption = ["certifi ; os_name == \"nt\" or sys_platform == \"darwin\"", "pymongo-auth-aws (>=1.1.0,<2.0.0)", "pymongocrypt (>=1.12.0,<2.0.0)"]
+gssapi = ["pykerberos ; os_name != \"nt\"", "winkerberos (>=0.5.0) ; os_name == \"nt\""]
+ocsp = ["certifi ; os_name == \"nt\" or sys_platform == \"darwin\"", "cryptography (>=2.5)", "pyopenssl (>=17.2.0)", "requests (<3.0.0)", "service-identity (>=18.1.0)"]
 snappy = ["python-snappy"]
 test = ["pytest (>=8.2)", "pytest-asyncio (>=0.24.0)"]
 zstd = ["zstandard"]
 
-[[package]]
-name = "pynacl"
-version = "1.5.0"
-description = "Python binding to the Networking and Cryptography (NaCl) library"
-optional = false
-python-versions = ">=3.6"
-files = [
-    {file = "PyNaCl-1.5.0-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:401002a4aaa07c9414132aaed7f6836ff98f59277a234704ff66878c2ee4a0d1"},
-    {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:52cb72a79269189d4e0dc537556f4740f7f0a9ec41c1322598799b0bdad4ef92"},
-    {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a36d4a9dda1f19ce6e03c9a784a2921a4b726b02e1c736600ca9c22029474394"},
-    {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:0c84947a22519e013607c9be43706dd42513f9e6ae5d39d3613ca1e142fba44d"},
-    {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06b8f6fa7f5de8d5d2f7573fe8c863c051225a27b61e6860fd047b1775807858"},
-    {file = "PyNaCl-1.5.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a422368fc821589c228f4c49438a368831cb5bbc0eab5ebe1d7fac9dded6567b"},
-    {file = "PyNaCl-1.5.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:61f642bf2378713e2c2e1de73444a3778e5f0a38be6fee0fe532fe30060282ff"},
-    {file = "PyNaCl-1.5.0-cp36-abi3-win32.whl", hash = "sha256:e46dae94e34b085175f8abb3b0aaa7da40767865ac82c928eeb9e57e1ea8a543"},
-    {file = "PyNaCl-1.5.0-cp36-abi3-win_amd64.whl", hash = "sha256:20f42270d27e1b6a29f54032090b972d97f0a1b0948cc52392041ef7831fee93"},
-    {file = "PyNaCl-1.5.0.tar.gz", hash = "sha256:8ac7448f09ab85811607bdd21ec2464495ac8b7c66d146bf545b0f08fb9220ba"},
-]
-
-[package.dependencies]
-cffi = ">=1.4.1"
-
-[package.extras]
-docs = ["sphinx (>=1.6.5)", "sphinx-rtd-theme"]
-tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"]
-
 [[package]]
 name = "pyopenssl"
 version = "24.3.0"
 description = "Python wrapper module around the OpenSSL library"
 optional = false
 python-versions = ">=3.7"
+groups = ["integration"]
 files = [
     {file = "pyOpenSSL-24.3.0-py3-none-any.whl", hash = "sha256:e474f5a473cd7f92221cc04976e48f4d11502804657a08a989fb3be5514c904a"},
     {file = "pyopenssl-24.3.0.tar.gz", hash = "sha256:49f7a019577d834746bc55c5fce6ecbcec0f2b4ec5ce1cf43a9a173b8138bb36"},
@@ -1821,26 +1366,13 @@ cryptography = ">=41.0.5,<45"
 
 [package.extras]
 docs = ["sphinx (!=5.2.0,!=5.2.0.post0,!=7.2.5)", "sphinx_rtd_theme"]
 test = ["pretend", "pytest (>=3.0.1)", "pytest-rerunfailures"]
 
-[[package]]
-name = "pyrfc3339"
-version = "1.1"
-description = "Generate and parse RFC 3339 timestamps"
-optional = false
-python-versions = "*"
-files = [
-    {file = "pyRFC3339-1.1-py2.py3-none-any.whl", hash = "sha256:67196cb83b470709c580bb4738b83165e67c6cc60e1f2e4f286cfcb402a926f4"},
-    {file = "pyRFC3339-1.1.tar.gz", hash = "sha256:81b8cbe1519cdb79bed04910dd6fa4e181faf8c88dff1e1b987b5f7ab23a5b1a"},
-]
-
-[package.dependencies]
-pytz = "*"
-
 [[package]]
 name = "pytest"
 version = "8.3.4"
 description = "pytest: simple powerful testing with Python"
 optional = false
 python-versions = ">=3.8"
+groups = ["integration", "unit"]
 files = [
     {file = "pytest-8.3.4-py3-none-any.whl", hash = "sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6"},
     {file = "pytest-8.3.4.tar.gz", hash = "sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761"},
@@ -1857,30 +1389,13 @@ tomli = {version = ">=1", markers = "python_version < \"3.11\""}
 
 [package.extras]
 dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]
 
-[[package]]
-name = "pytest-asyncio"
-version = "0.21.2"
-description = "Pytest support for asyncio"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "pytest_asyncio-0.21.2-py3-none-any.whl", hash = "sha256:ab664c88bb7998f711d8039cacd4884da6430886ae8bbd4eded552ed2004f16b"},
-    {file = "pytest_asyncio-0.21.2.tar.gz", hash = "sha256:d67738fc232b94b326b9d060750beb16e0074210b98dd8b58a5239fa2a154f45"},
-]
-
-[package.dependencies]
-pytest = ">=7.0.0"
-
-[package.extras]
-docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"]
-testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy (>=0.931)", "pytest-trio (>=0.7.0)"]
-
 [[package]]
 name = "pytest-microceph"
 version = "0.1.0"
 description = ""
 optional = false
 python-versions = "^3.8"
+groups = ["integration"]
 files = []
 develop = false
 
@@ -1895,50 +1410,13 @@ reference = "v22.0.0"
 resolved_reference = "da2da4b1e4469b5ed8f9187981fe2d747f8ee129"
 subdirectory = "python/pytest_plugins/microceph"
 
-[[package]]
-name = "pytest-operator"
-version = "0.40.0" -description = "Fixtures for Operators" -optional = false -python-versions = "*" -files = [ - {file = "pytest_operator-0.40.0-py3-none-any.whl", hash = "sha256:1cfa93ab61b11e8d7bf58dbb1a39e75fcbfcc084781bb571fde08fda7e236713"}, - {file = "pytest_operator-0.40.0.tar.gz", hash = "sha256:45394ade32b7765b6ba89871b676d1fb8aa7578589f74df26ff0fca4692d1c7b"}, -] - -[package.dependencies] -ipdb = "*" -jinja2 = "*" -juju = "*" -pytest = "*" -pytest-asyncio = "<0.23" -pyyaml = "*" - -[[package]] -name = "pytest-operator-cache" -version = "0.1.0" -description = "" -optional = false -python-versions = "^3.8" -files = [] -develop = false - -[package.dependencies] -pyyaml = "*" - -[package.source] -type = "git" -url = "https://github.com/canonical/data-platform-workflows" -reference = "v22.0.0" -resolved_reference = "da2da4b1e4469b5ed8f9187981fe2d747f8ee129" -subdirectory = "python/pytest_plugins/pytest_operator_cache" - [[package]] name = "python-dateutil" version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["integration"] files = [ {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, @@ -1947,23 +1425,13 @@ files = [ [package.dependencies] six = ">=1.5" -[[package]] -name = "pytz" -version = "2025.1" -description = "World timezone definitions, modern and historical" -optional = false -python-versions = "*" -files = [ - {file = "pytz-2025.1-py2.py3-none-any.whl", hash = "sha256:89dd22dca55b46eac6eda23b2d72721bf1bdfef212645d81513ef5d03038de57"}, - {file = "pytz-2025.1.tar.gz", hash = "sha256:c2db42be2a2518b28e65f9207c4d05e6ff547d1efa4086469ef855e4ab70178e"}, -] - [[package]] name = "pyyaml" version = "6.0.2" description = "YAML parser and emitter for Python" optional = false python-versions = ">=3.8" +groups = ["integration"] files = [ {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, @@ -2026,6 +1494,7 @@ version = "2.32.3" description = "Python HTTP for Humans." optional = false python-versions = ">=3.8" +groups = ["integration"] files = [ {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, @@ -2041,44 +1510,13 @@ urllib3 = ">=1.21.1,<3" socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] -[[package]] -name = "requests-oauthlib" -version = "2.0.0" -description = "OAuthlib authentication support for Requests." 
-optional = false -python-versions = ">=3.4" -files = [ - {file = "requests-oauthlib-2.0.0.tar.gz", hash = "sha256:b3dffaebd884d8cd778494369603a9e7b58d29111bf6b41bdc2dcd87203af4e9"}, - {file = "requests_oauthlib-2.0.0-py2.py3-none-any.whl", hash = "sha256:7dd8a5c40426b779b0868c404bdef9768deccf22749cde15852df527e6269b36"}, -] - -[package.dependencies] -oauthlib = ">=3.0.0" -requests = ">=2.0.0" - -[package.extras] -rsa = ["oauthlib[signedtoken] (>=3.0.0)"] - -[[package]] -name = "rsa" -version = "4.9" -description = "Pure-Python RSA implementation" -optional = false -python-versions = ">=3.6,<4" -files = [ - {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, - {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, -] - -[package.dependencies] -pyasn1 = ">=0.1.3" - [[package]] name = "ruff" version = "0.9.6" description = "An extremely fast Python linter and code formatter, written in Rust." optional = false python-versions = ">=3.7" +groups = ["fmt", "lint"] files = [ {file = "ruff-0.9.6-py3-none-linux_armv6l.whl", hash = "sha256:2f218f356dd2d995839f1941322ff021c72a492c470f0b26a34f844c29cdf5ba"}, {file = "ruff-0.9.6-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b908ff4df65dad7b251c9968a2e4560836d8f5487c2f0cc238321ed951ea0504"}, @@ -2106,6 +1544,7 @@ version = "0.11.2" description = "An Amazon S3 Transfer Manager" optional = false python-versions = ">=3.8" +groups = ["integration"] files = [ {file = "s3transfer-0.11.2-py3-none-any.whl", hash = "sha256:be6ecb39fadd986ef1701097771f87e4d2f821f27f6071c872143884d2950fbc"}, {file = "s3transfer-0.11.2.tar.gz", hash = "sha256:3b39185cb72f5acc77db1a58b6e25b977f28d20496b6e58d6813d75f464d632f"}, @@ -2123,36 +1562,19 @@ version = "1.17.0" description = "Python 2 and 3 compatibility utilities" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["integration"] files = [ {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, ] -[[package]] -name = "stack-data" -version = "0.6.3" -description = "Extract data from python stack frames and tracebacks for informative displays" -optional = false -python-versions = "*" -files = [ - {file = "stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695"}, - {file = "stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9"}, -] - -[package.dependencies] -asttokens = ">=2.1.0" -executing = ">=1.2.0" -pure-eval = "*" - -[package.extras] -tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] - [[package]] name = "tenacity" version = "9.0.0" description = "Retry code until it succeeds" optional = false python-versions = ">=3.8" +groups = ["integration"] files = [ {file = "tenacity-9.0.0-py3-none-any.whl", hash = "sha256:93de0c98785b27fcf659856aa9f54bfbd399e29969b0621bc7f762bd441b4539"}, {file = "tenacity-9.0.0.tar.gz", hash = "sha256:807f37ca97d62aa361264d497b0e31e92b8027044942bfa756160d908320d73b"}, @@ -2168,6 +1590,7 @@ version = "2.2.1" description = "A lil' TOML parser" optional = false python-versions = ">=3.8" +groups = ["fmt", "integration", "lint", "unit"] files = [ {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, @@ -2202,32 +1625,7 @@ files = [ {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, ] - -[[package]] -name = "toposort" -version = "1.10" -description = "Implements a topological sort algorithm." -optional = false -python-versions = "*" -files = [ - {file = "toposort-1.10-py3-none-any.whl", hash = "sha256:cbdbc0d0bee4d2695ab2ceec97fe0679e9c10eab4b2a87a9372b929e70563a87"}, - {file = "toposort-1.10.tar.gz", hash = "sha256:bfbb479c53d0a696ea7402601f4e693c97b0367837c8898bc6471adfca37a6bd"}, -] - -[[package]] -name = "traitlets" -version = "5.14.3" -description = "Traitlets Python configuration system" -optional = false -python-versions = ">=3.8" -files = [ - {file = "traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f"}, - {file = "traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7"}, -] - -[package.extras] -docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] -test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0,<8.2)", "pytest-mock", "pytest-mypy-testing"] +markers = {fmt = "python_full_version < \"3.11.0a7\"", integration = "python_full_version <= \"3.11.0a6\"", lint = "python_full_version < \"3.11.0a7\"", unit = "python_full_version <= \"3.11.0a6\""} [[package]] name = "types-awscrt" @@ -2235,6 +1633,7 @@ version = "0.23.10" description = "Type annotations and code completion for awscrt" optional = false python-versions = ">=3.8" +groups = ["integration"] files = [ {file = "types_awscrt-0.23.10-py3-none-any.whl", hash = "sha256:7391bf502f6093221e68da8fb6a2af7ec67a98d376c58d5b76cc3938f449d121"}, {file = "types_awscrt-0.23.10.tar.gz", hash = "sha256:965659260599b421564204b895467684104a2c0311bbacfd3c2423b8b0d3f3e9"}, @@ -2246,6 +1645,7 @@ version = "0.11.2" description = "Type annotations and code completion for s3transfer" optional = false python-versions = ">=3.8" +groups = ["integration"] files = [ {file = "types_s3transfer-0.11.2-py3-none-any.whl", hash = "sha256:09c31cff8c79a433fcf703b840b66d1f694a6c70c410ef52015dd4fe07ee0ae2"}, {file = "types_s3transfer-0.11.2.tar.gz", hash = "sha256:3ccb8b90b14434af2fb0d6c08500596d93f3a83fb804a2bb843d9bf4f7c2ca60"}, @@ -2257,60 +1657,38 @@ version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" +groups = ["integration"] +markers = "python_version < \"3.12\"" files = [ {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, ] -[[package]] -name = "typing-inspect" -version = "0.9.0" -description = "Runtime inspection utilities for typing module." 
-optional = false -python-versions = "*" -files = [ - {file = "typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f"}, - {file = "typing_inspect-0.9.0.tar.gz", hash = "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78"}, -] - -[package.dependencies] -mypy-extensions = ">=0.3.0" -typing-extensions = ">=3.7.4" - [[package]] name = "urllib3" version = "2.3.0" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.9" +groups = ["integration"] files = [ {file = "urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df"}, {file = "urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d"}, ] [package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""] h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] -[[package]] -name = "wcwidth" -version = "0.2.13" -description = "Measures the displayed width of unicode strings in a terminal" -optional = false -python-versions = "*" -files = [ - {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, - {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, -] - [[package]] name = "websocket-client" version = "1.8.0" description = "WebSocket client for Python with low level API options" optional = false python-versions = ">=3.8" +groups = ["integration"] files = [ {file = "websocket_client-1.8.0-py3-none-any.whl", hash = "sha256:17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526"}, {file = "websocket_client-1.8.0.tar.gz", hash = "sha256:3239df9f44da632f96012472805d40a23281a991027ce11d2f45a6f24ac4c3da"}, @@ -2321,85 +1699,7 @@ docs = ["Sphinx (>=6.0)", "myst-parser (>=2.0.0)", "sphinx-rtd-theme (>=1.1.0)"] optional = ["python-socks", "wsaccel"] test = ["websockets"] -[[package]] -name = "websockets" -version = "14.2" -description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" -optional = false -python-versions = ">=3.9" -files = [ - {file = "websockets-14.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e8179f95323b9ab1c11723e5d91a89403903f7b001828161b480a7810b334885"}, - {file = "websockets-14.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0d8c3e2cdb38f31d8bd7d9d28908005f6fa9def3324edb9bf336d7e4266fd397"}, - {file = "websockets-14.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:714a9b682deb4339d39ffa674f7b674230227d981a37d5d174a4a83e3978a610"}, - {file = "websockets-14.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f2e53c72052f2596fb792a7acd9704cbc549bf70fcde8a99e899311455974ca3"}, - {file = "websockets-14.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e3fbd68850c837e57373d95c8fe352203a512b6e49eaae4c2f4088ef8cf21980"}, - {file = "websockets-14.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b27ece32f63150c268593d5fdb82819584831a83a3f5809b7521df0685cd5d8"}, - {file = "websockets-14.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:4daa0faea5424d8713142b33825fff03c736f781690d90652d2c8b053345b0e7"}, - {file = "websockets-14.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:bc63cee8596a6ec84d9753fd0fcfa0452ee12f317afe4beae6b157f0070c6c7f"}, - {file = "websockets-14.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7a570862c325af2111343cc9b0257b7119b904823c675b22d4ac547163088d0d"}, - {file = "websockets-14.2-cp310-cp310-win32.whl", hash = "sha256:75862126b3d2d505e895893e3deac0a9339ce750bd27b4ba515f008b5acf832d"}, - {file = "websockets-14.2-cp310-cp310-win_amd64.whl", hash = "sha256:cc45afb9c9b2dc0852d5c8b5321759cf825f82a31bfaf506b65bf4668c96f8b2"}, - {file = "websockets-14.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3bdc8c692c866ce5fefcaf07d2b55c91d6922ac397e031ef9b774e5b9ea42166"}, - {file = "websockets-14.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c93215fac5dadc63e51bcc6dceca72e72267c11def401d6668622b47675b097f"}, - {file = "websockets-14.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1c9b6535c0e2cf8a6bf938064fb754aaceb1e6a4a51a80d884cd5db569886910"}, - {file = "websockets-14.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a52a6d7cf6938e04e9dceb949d35fbdf58ac14deea26e685ab6368e73744e4c"}, - {file = "websockets-14.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9f05702e93203a6ff5226e21d9b40c037761b2cfb637187c9802c10f58e40473"}, - {file = "websockets-14.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22441c81a6748a53bfcb98951d58d1af0661ab47a536af08920d129b4d1c3473"}, - {file = "websockets-14.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:efd9b868d78b194790e6236d9cbc46d68aba4b75b22497eb4ab64fa640c3af56"}, - {file = "websockets-14.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1a5a20d5843886d34ff8c57424cc65a1deda4375729cbca4cb6b3353f3ce4142"}, - {file = "websockets-14.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:34277a29f5303d54ec6468fb525d99c99938607bc96b8d72d675dee2b9f5bf1d"}, - {file = "websockets-14.2-cp311-cp311-win32.whl", hash = "sha256:02687db35dbc7d25fd541a602b5f8e451a238ffa033030b172ff86a93cb5dc2a"}, - {file = "websockets-14.2-cp311-cp311-win_amd64.whl", hash = "sha256:862e9967b46c07d4dcd2532e9e8e3c2825e004ffbf91a5ef9dde519ee2effb0b"}, - {file = "websockets-14.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:1f20522e624d7ffbdbe259c6b6a65d73c895045f76a93719aa10cd93b3de100c"}, - {file = "websockets-14.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:647b573f7d3ada919fd60e64d533409a79dcf1ea21daeb4542d1d996519ca967"}, - {file = "websockets-14.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6af99a38e49f66be5a64b1e890208ad026cda49355661549c507152113049990"}, - {file = "websockets-14.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:091ab63dfc8cea748cc22c1db2814eadb77ccbf82829bac6b2fbe3401d548eda"}, - {file = "websockets-14.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b374e8953ad477d17e4851cdc66d83fdc2db88d9e73abf755c94510ebddceb95"}, - {file = "websockets-14.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a39d7eceeea35db85b85e1169011bb4321c32e673920ae9c1b6e0978590012a3"}, - {file = "websockets-14.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:0a6f3efd47ffd0d12080594f434faf1cd2549b31e54870b8470b28cc1d3817d9"}, - {file = "websockets-14.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:065ce275e7c4ffb42cb738dd6b20726ac26ac9ad0a2a48e33ca632351a737267"}, - {file = "websockets-14.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e9d0e53530ba7b8b5e389c02282f9d2aa47581514bd6049d3a7cffe1385cf5fe"}, - {file = "websockets-14.2-cp312-cp312-win32.whl", hash = "sha256:20e6dd0984d7ca3037afcb4494e48c74ffb51e8013cac71cf607fffe11df7205"}, - {file = "websockets-14.2-cp312-cp312-win_amd64.whl", hash = "sha256:44bba1a956c2c9d268bdcdf234d5e5ff4c9b6dc3e300545cbe99af59dda9dcce"}, - {file = "websockets-14.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:6f1372e511c7409a542291bce92d6c83320e02c9cf392223272287ce55bc224e"}, - {file = "websockets-14.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4da98b72009836179bb596a92297b1a61bb5a830c0e483a7d0766d45070a08ad"}, - {file = "websockets-14.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8a86a269759026d2bde227652b87be79f8a734e582debf64c9d302faa1e9f03"}, - {file = "websockets-14.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:86cf1aaeca909bf6815ea714d5c5736c8d6dd3a13770e885aafe062ecbd04f1f"}, - {file = "websockets-14.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9b0f6c3ba3b1240f602ebb3971d45b02cc12bd1845466dd783496b3b05783a5"}, - {file = "websockets-14.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:669c3e101c246aa85bc8534e495952e2ca208bd87994650b90a23d745902db9a"}, - {file = "websockets-14.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:eabdb28b972f3729348e632ab08f2a7b616c7e53d5414c12108c29972e655b20"}, - {file = "websockets-14.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2066dc4cbcc19f32c12a5a0e8cc1b7ac734e5b64ac0a325ff8353451c4b15ef2"}, - {file = "websockets-14.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ab95d357cd471df61873dadf66dd05dd4709cae001dd6342edafc8dc6382f307"}, - {file = "websockets-14.2-cp313-cp313-win32.whl", hash = "sha256:a9e72fb63e5f3feacdcf5b4ff53199ec8c18d66e325c34ee4c551ca748623bbc"}, - {file = "websockets-14.2-cp313-cp313-win_amd64.whl", hash = "sha256:b439ea828c4ba99bb3176dc8d9b933392a2413c0f6b149fdcba48393f573377f"}, - {file = "websockets-14.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7cd5706caec1686c5d233bc76243ff64b1c0dc445339bd538f30547e787c11fe"}, - {file = "websockets-14.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ec607328ce95a2f12b595f7ae4c5d71bf502212bddcea528290b35c286932b12"}, - {file = "websockets-14.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:da85651270c6bfb630136423037dd4975199e5d4114cae6d3066641adcc9d1c7"}, - {file = "websockets-14.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3ecadc7ce90accf39903815697917643f5b7cfb73c96702318a096c00aa71f5"}, - {file = "websockets-14.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1979bee04af6a78608024bad6dfcc0cc930ce819f9e10342a29a05b5320355d0"}, - {file = "websockets-14.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2dddacad58e2614a24938a50b85969d56f88e620e3f897b7d80ac0d8a5800258"}, - {file = "websockets-14.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = 
"sha256:89a71173caaf75fa71a09a5f614f450ba3ec84ad9fca47cb2422a860676716f0"}, - {file = "websockets-14.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:6af6a4b26eea4fc06c6818a6b962a952441e0e39548b44773502761ded8cc1d4"}, - {file = "websockets-14.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:80c8efa38957f20bba0117b48737993643204645e9ec45512579132508477cfc"}, - {file = "websockets-14.2-cp39-cp39-win32.whl", hash = "sha256:2e20c5f517e2163d76e2729104abc42639c41cf91f7b1839295be43302713661"}, - {file = "websockets-14.2-cp39-cp39-win_amd64.whl", hash = "sha256:b4c8cef610e8d7c70dea92e62b6814a8cd24fbd01d7103cc89308d2bfe1659ef"}, - {file = "websockets-14.2-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:d7d9cafbccba46e768be8a8ad4635fa3eae1ffac4c6e7cb4eb276ba41297ed29"}, - {file = "websockets-14.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:c76193c1c044bd1e9b3316dcc34b174bbf9664598791e6fb606d8d29000e070c"}, - {file = "websockets-14.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd475a974d5352390baf865309fe37dec6831aafc3014ffac1eea99e84e83fc2"}, - {file = "websockets-14.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2c6c0097a41968b2e2b54ed3424739aab0b762ca92af2379f152c1aef0187e1c"}, - {file = "websockets-14.2-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d7ff794c8b36bc402f2e07c0b2ceb4a2424147ed4785ff03e2a7af03711d60a"}, - {file = "websockets-14.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:dec254fcabc7bd488dab64846f588fc5b6fe0d78f641180030f8ea27b76d72c3"}, - {file = "websockets-14.2-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:bbe03eb853e17fd5b15448328b4ec7fb2407d45fb0245036d06a3af251f8e48f"}, - {file = "websockets-14.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a3c4aa3428b904d5404a0ed85f3644d37e2cb25996b7f096d77caeb0e96a3b42"}, - {file = "websockets-14.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:577a4cebf1ceaf0b65ffc42c54856214165fb8ceeba3935852fc33f6b0c55e7f"}, - {file = "websockets-14.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ad1c1d02357b7665e700eca43a31d52814ad9ad9b89b58118bdabc365454b574"}, - {file = "websockets-14.2-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f390024a47d904613577df83ba700bd189eedc09c57af0a904e5c39624621270"}, - {file = "websockets-14.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:3c1426c021c38cf92b453cdf371228d3430acd775edee6bac5a4d577efc72365"}, - {file = "websockets-14.2-py3-none-any.whl", hash = "sha256:7a6ceec4ea84469f15cf15807a747e9efe57e369c384fa86e022b3bea679b79b"}, - {file = "websockets-14.2.tar.gz", hash = "sha256:5059ed9c54945efb321f097084b4c7e52c246f2c869815876a69d1efc4ad6eb5"}, -] - [metadata] -lock-version = "2.0" +lock-version = "2.1" python-versions = "^3.10" -content-hash = "39af6fac5437ac56c3fb32ba4dc7eac6f893b86d01f8f339d30ccfaf1dd6817f" +content-hash = "c06dee0ffeb71c14701abf2031c46e12e6638ead2ebde54550ad4c353b6feaff" diff --git a/pyproject.toml b/pyproject.toml index 2efef8e..67fa1c1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -68,9 +68,8 @@ optional = true [tool.poetry.group.integration.dependencies] ops = ">2.0.0" pytest = ">=7.2" -juju = "^3.2.0" +jubilant = ">=1.6" coverage = { extras = ["toml"], version = 
">7.0" } -pytest-operator = ">0.20" kazoo = ">=2.8" tenacity = ">=7.0" pure-sasl = ">=0.5" @@ -80,7 +79,6 @@ boto3 = "^1.34.159" pyopenssl = "^24.2.1" pymongo = "^4.3.3" boto3-stubs = { extras = ["s3"], version = "^1.35.8" } -pytest-operator-cache = { git = "https://github.com/canonical/data-platform-workflows", tag = "v22.0.0", subdirectory = "python/pytest_plugins/pytest_operator_cache" } pytest-microceph = { git = "https://github.com/canonical/data-platform-workflows", tag = "v22.0.0", subdirectory = "python/pytest_plugins/microceph" } [tool.ruff] diff --git a/terraform/applications.tf b/terraform/applications.tf new file mode 100644 index 0000000..ca7d3a7 --- /dev/null +++ b/terraform/applications.tf @@ -0,0 +1,14 @@ +resource "juju_application" "integrator" { + model_uuid = var.model_uuid + name = var.integrator.app_name + units = var.integrator.units + + charm { + name = "data-integrator" + channel = var.integrator.channel + revision = var.integrator.revision + base = var.integrator.base + } + + config = var.integrator.config +} diff --git a/terraform/integrations.tf b/terraform/integrations.tf new file mode 100644 index 0000000..b45c50d --- /dev/null +++ b/terraform/integrations.tf @@ -0,0 +1,294 @@ +# Integrations between Kafka products + +resource "juju_integration" "kafka_kraft" { + count = local.deployment_mode == "split" ? 1 : 0 + model_uuid = var.model_uuid + + application { + name = module.broker.app_name + endpoint = "peer-cluster-orchestrator" + } + + application { + name = module.controller[0].app_name + endpoint = "peer-cluster" + } +} + +resource "juju_integration" "kafka_connect" { + count = var.connect.units > 0 ? 1 : 0 + model_uuid = var.model_uuid + + application { + name = module.broker.app_name + endpoint = "kafka-client" + } + + application { + name = module.connect[0].app_name + } +} + +resource "juju_integration" "kafka_karapace" { + count = var.karapace.units > 0 ? 1 : 0 + model_uuid = var.model_uuid + + application { + name = module.broker.app_name + endpoint = "kafka-client" + } + + application { + name = module.karapace[0].app_name + } +} + +resource "juju_integration" "kafka_ui" { + count = var.ui.units > 0 ? 1 : 0 + model_uuid = var.model_uuid + + application { + name = module.broker.app_name + endpoint = "kafka-client" + } + + application { + name = module.ui[0].app_name + } +} + + +resource "juju_integration" "karapace_ui" { + count = var.karapace.units > 0 && var.ui.units > 0 ? 1 : 0 + model_uuid = var.model_uuid + + application { + name = module.karapace[0].app_name + endpoint = "karapace" + } + + application { + name = module.ui[0].app_name + } +} + +resource "juju_integration" "kafka_connect_ui" { + count = var.connect.units > 0 && var.ui.units > 0 ? 1 : 0 + model_uuid = var.model_uuid + + application { + name = module.connect[0].app_name + endpoint = "connect-client" + } + + application { + name = module.ui[0].app_name + } +} + +resource "juju_integration" "integrator_kafka" { + model_uuid = var.model_uuid + + application { + name = juju_application.integrator.name + } + + application { + name = module.broker.app_name + } +} + +# TLS Integrations + +resource "juju_integration" "kafka_tls" { + count = local.tls_enabled ? 1 : 0 + model_uuid = var.model_uuid + + application { + name = module.broker.app_name + endpoint = "certificates" + } + + application { + offer_url = var.tls_offer + } +} + +resource "juju_integration" "kafka_connect_tls" { + count = local.tls_enabled && var.connect.units > 0 ? 
1 : 0 + model_uuid = var.model_uuid + + application { + name = module.connect[0].app_name + endpoint = "certificates" + } + + application { + offer_url = var.tls_offer + } +} + +resource "juju_integration" "karapace_tls" { + count = local.tls_enabled && var.karapace.units > 0 ? 1 : 0 + model_uuid = var.model_uuid + + application { + name = module.karapace[0].app_name + endpoint = "certificates" + } + + application { + offer_url = var.tls_offer + } +} + +resource "juju_integration" "kafka_ui_ingress" { + count = var.ingress_offer != null && var.ui.units > 0 ? 1 : 0 + model_uuid = var.model_uuid + + application { + name = module.ui[0].app_name + endpoint = "ingress" + } + + application { + offer_url = var.ingress_offer + } +} + +# COS Integrations + +resource "juju_integration" "kafka_cos_metrics" { + count = local.cos_enabled ? 1 : 0 + model_uuid = var.model_uuid + + application { + name = module.broker.app_name + endpoint = "metrics-endpoint" + } + + application { + offer_url = var.cos_offers.metrics + } + +} + +resource "juju_integration" "kafka_cos_dashboard" { + count = local.cos_enabled ? 1 : 0 + model_uuid = var.model_uuid + + application { + name = module.broker.app_name + endpoint = "grafana-dashboard" + } + + application { + offer_url = var.cos_offers.dashboard + } + +} + +resource "juju_integration" "kafka_cos_logging" { + count = local.cos_enabled ? 1 : 0 + model_uuid = var.model_uuid + + application { + name = module.broker.app_name + endpoint = "logging" + } + + application { + offer_url = var.cos_offers.logging + } + +} + +resource "juju_integration" "kraft_cos_metrics" { + count = local.cos_enabled && local.deployment_mode == "split" ? 1 : 0 + model_uuid = var.model_uuid + + application { + name = module.controller[0].app_name + endpoint = "metrics-endpoint" + } + + application { + offer_url = var.cos_offers.metrics + } + +} + +resource "juju_integration" "kraft_cos_dashboard" { + count = local.cos_enabled && local.deployment_mode == "split" ? 1 : 0 + model_uuid = var.model_uuid + + application { + name = module.controller[0].app_name + endpoint = "grafana-dashboard" + } + + application { + offer_url = var.cos_offers.dashboard + } + +} + +resource "juju_integration" "kraft_cos_logging" { + count = local.cos_enabled && local.deployment_mode == "split" ? 1 : 0 + model_uuid = var.model_uuid + + application { + name = module.controller[0].app_name + endpoint = "logging" + } + + application { + offer_url = var.cos_offers.logging + } + +} + +resource "juju_integration" "connect_cos_metrics" { + count = local.cos_enabled && var.connect.units > 0 ? 1 : 0 + model_uuid = var.model_uuid + + application { + name = module.connect[0].app_name + endpoint = "metrics-endpoint" + } + + application { + offer_url = var.cos_offers.metrics + } + +} + +resource "juju_integration" "connect_cos_dashboard" { + count = local.cos_enabled && var.connect.units > 0 ? 1 : 0 + model_uuid = var.model_uuid + + application { + name = module.connect[0].app_name + endpoint = "grafana-dashboard" + } + + application { + offer_url = var.cos_offers.dashboard + } + +} + +resource "juju_integration" "connect_cos_logging" { + count = local.cos_enabled && var.connect.units > 0 ? 
+resource "juju_integration" "connect_cos_logging" {
+  count      = local.cos_enabled && var.connect.units > 0 ? 1 : 0
+  model_uuid = var.model_uuid
+
+  application {
+    name     = module.connect[0].app_name
+    endpoint = "logging"
+  }
+
+  application {
+    offer_url = var.cos_offers.logging
+  }
+
+}
\ No newline at end of file
diff --git a/terraform/main.tf b/terraform/main.tf
new file mode 100644
index 0000000..be87267
--- /dev/null
+++ b/terraform/main.tf
@@ -0,0 +1,84 @@
+locals {
+  deployment_mode     = var.controller.units > 0 ? "split" : "single"
+  controller_app_name = var.controller.units > 0 ? var.controller.app_name : var.broker.app_name
+  connect_app_name    = var.connect.units > 0 ? module.connect[0].app_name : null
+  karapace_app_name   = var.karapace.units > 0 ? module.karapace[0].app_name : null
+  ui_app_name         = var.ui.units > 0 ? module.ui[0].app_name : null
+  cos_enabled         = var.cos_offers.dashboard != null
+  tls_enabled         = var.tls_offer != null
+  cos_agent_charm     = "grafana-agent"
+  cos_agent_channel   = "1/stable"
+}
+
+module "broker" {
+  source      = "git::https://github.com/canonical/kafka-k8s-operator//terraform?ref=main"
+  model_uuid  = var.model_uuid
+  app_name    = var.broker.app_name
+  channel     = var.broker.channel
+  revision    = var.broker.revision
+  constraints = var.broker.constraints
+  base        = var.broker.base
+  units       = var.broker.units
+  storage     = var.broker.storage
+  config = merge(var.broker.config, {
+    profile = var.profile
+    roles   = local.deployment_mode == "single" ? "broker,controller" : "broker"
+  })
+}
+
+module "controller" {
+  count       = local.deployment_mode == "split" ? 1 : 0
+  source      = "git::https://github.com/canonical/kafka-k8s-operator//terraform?ref=main"
+  model_uuid  = var.model_uuid
+  app_name    = var.controller.app_name
+  channel     = var.controller.channel
+  revision    = var.controller.revision
+  constraints = var.controller.constraints
+  base        = var.controller.base
+  units       = var.controller.units
+  storage     = var.controller.storage
+  config = merge(var.controller.config, {
+    profile = var.profile
+    roles   = "controller"
+  })
+}
+
+
+module "connect" {
+  count       = var.connect.units > 0 ? 1 : 0
+  source      = "git::https://github.com/canonical/kafka-connect-k8s-operator//terraform?ref=main"
+  model_uuid  = var.model_uuid
+  app_name    = var.connect.app_name
+  channel     = var.connect.channel
+  revision    = var.connect.revision
+  constraints = var.connect.constraints
+  base        = var.connect.base
+  units       = var.connect.units
+  config      = var.connect.config
+}
+
+module "karapace" {
+  count       = var.karapace.units > 0 ? 1 : 0
+  source      = "git::https://github.com/canonical/karapace-k8s-operator//terraform?ref=main"
+  model_uuid  = var.model_uuid
+  app_name    = var.karapace.app_name
+  channel     = var.karapace.channel
+  revision    = var.karapace.revision
+  constraints = var.karapace.constraints
+  base        = var.karapace.base
+  units       = var.karapace.units
+  config      = var.karapace.config
+}
+
+module "ui" {
+  count       = var.ui.units > 0 ? 1 : 0
+  source      = "git::https://github.com/canonical/kafka-ui-k8s-operator//terraform?ref=main"
+  model_uuid  = var.model_uuid
+  app_name    = var.ui.app_name
+  channel     = var.ui.channel
+  revision    = var.ui.revision
+  constraints = var.ui.constraints
+  base        = var.ui.base
+  units       = var.ui.units
+  config      = var.ui.config
+}
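
All five module sources above track the mutable `?ref=main` query string, so the module code they pull can change between plan runs. A pinned variant of the same source string keeps plans reproducible; a sketch with a hypothetical tag name:

module "broker" {
  # Pinning to an immutable tag or commit instead of the main branch;
  # "rev187" is a hypothetical ref name.
  source = "git::https://github.com/canonical/kafka-k8s-operator//terraform?ref=rev187"
  # ...remaining arguments as in main.tf above
}
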
diff --git a/terraform/outputs.tf b/terraform/outputs.tf
new file mode 100644
index 0000000..293894b
--- /dev/null
+++ b/terraform/outputs.tf
@@ -0,0 +1,25 @@
+output "offers" {
+  description = "Map of offer URLs for the deployed applications."
+  value = merge(
+    {
+      kafka-client = module.broker.offers.kafka-client
+    },
+    {
+      connect-client = var.connect.units > 0 ? module.connect[0].offers.connect-client : null
+    },
+    {
+      karapace-client = var.karapace.units > 0 ? module.karapace[0].offers.karapace-client : null
+    }
+  )
+}
+
+output "app_names" {
+  description = "Map of all deployed application names."
+  value = {
+    broker     = module.broker.app_name
+    controller = local.controller_app_name
+    connect    = local.connect_app_name
+    karapace   = local.karapace_app_name
+    ui         = local.ui_app_name
+  }
+}
\ No newline at end of file
diff --git a/terraform/providers.tf b/terraform/providers.tf
new file mode 100644
index 0000000..4b61781
--- /dev/null
+++ b/terraform/providers.tf
@@ -0,0 +1,10 @@
+terraform {
+  required_version = ">=1.7.3"
+
+  required_providers {
+    juju = {
+      version = ">=1.0.0"
+      source  = "juju/juju"
+    }
+  }
+}
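
providers.tf only declares the version requirement; the caller supplies the provider configuration and inputs. A minimal sketch of a root module consuming this one, assuming an already-bootstrapped Juju controller whose credentials the juju provider can read from the local Juju CLI; the module path, model UUID variable, and COS offer URLs are placeholders:

variable "model_uuid" {
  type        = string
  description = "UUID of a pre-created Juju model (placeholder)."
}

# With no explicit arguments, the juju provider is assumed to fall back
# to the local Juju CLI's current controller and stored credentials.
provider "juju" {}

module "kafka" {
  source     = "./terraform" # hypothetical path to this module
  model_uuid = var.model_uuid

  # units > 0 selects "split" KRaft mode (a dedicated controller app);
  # units = 0 collapses to "single" mode with roles = "broker,controller".
  controller = { units = 3 }

  # Optional components are skipped entirely at 0 units.
  karapace = { units = 0 }
  ui       = { units = 0 }

  # All three must be set together (or all omitted), per the
  # cos_offers validation below; the offer URLs are placeholders.
  cos_offers = {
    dashboard = "admin/cos.grafana-dashboards"
    metrics   = "admin/cos.prometheus-receive-remote-write"
    logging   = "admin/cos.loki-logging"
  }
}
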
diff --git a/terraform/variables.tf b/terraform/variables.tf
new file mode 100644
index 0000000..6c0cb5f
--- /dev/null
+++ b/terraform/variables.tf
@@ -0,0 +1,148 @@
+variable "model_uuid" {
+  description = "The Juju model UUID to deploy to."
+  type        = string
+}
+
+variable "profile" {
+  description = "The deployment profile to use, either 'production' or 'testing'."
+  type        = string
+  default     = "testing"
+}
+
+variable "tls_offer" {
+  description = "TLS provider offer URL to be used on client relations."
+  type        = string
+  default     = null
+}
+
+variable "ingress_offer" {
+  description = "Ingress provider offer URL to be used for the Kafka UI."
+  type        = string
+  default     = null
+}
+
+variable "cos_offers" {
+  description = "COS offers for observability."
+  type = object({
+    dashboard = optional(string, null),
+    metrics   = optional(string, null),
+    logging   = optional(string, null),
+    tracing   = optional(string, null)
+  })
+
+  default = {}
+
+  validation {
+    condition = ((
+      var.cos_offers.dashboard != null &&
+      var.cos_offers.metrics != null &&
+      var.cos_offers.logging != null
+      ) || (
+      var.cos_offers.dashboard == null &&
+      var.cos_offers.metrics == null &&
+      var.cos_offers.logging == null
+    ))
+    error_message = "Either all or none of the COS offers should be provided: 'dashboard', 'metrics', 'logging'."
+  }
+}
+
+variable "broker" {
+  description = "Defines the Apache Kafka broker application configuration."
+  type = object({
+    app_name    = optional(string, "kafka-broker")
+    channel     = optional(string, "4/edge")
+    config      = optional(map(string), {})
+    constraints = optional(string, "arch=amd64")
+    resources   = optional(map(string), {})
+    revision    = optional(number, null)
+    base        = optional(string, "ubuntu@24.04")
+    units       = optional(number, 3)
+    storage     = optional(map(string), {})
+  })
+  default = {}
+}
+
+variable "controller" {
+  description = "Defines the Apache Kafka KRaft controller application configuration."
+  type = object({
+    app_name    = optional(string, "kafka-controller")
+    channel     = optional(string, "4/edge")
+    config      = optional(map(string), {})
+    constraints = optional(string, "arch=amd64")
+    resources   = optional(map(string), {})
+    revision    = optional(number, null)
+    base        = optional(string, "ubuntu@24.04")
+    units       = optional(number, 3)
+    storage     = optional(map(string), {})
+  })
+  default = {}
+
+  validation {
+    condition     = var.controller.units == 0 || var.controller.units % 2 != 0
+    error_message = "The number of Apache Kafka KRaft controllers must be odd (e.g., 1, 3, 5, ...)."
+  }
+}
+
+
+variable "connect" {
+  description = "Defines the Kafka Connect application configuration."
+  type = object({
+    app_name    = optional(string, "kafka-connect")
+    channel     = optional(string, "latest/edge")
+    config      = optional(map(string), {})
+    constraints = optional(string, "arch=amd64")
+    resources   = optional(map(string), {})
+    revision    = optional(number, null)
+    base        = optional(string, "ubuntu@22.04")
+    units       = optional(number, 1)
+  })
+  default = {}
+}
+
+variable "karapace" {
+  description = "Defines the Karapace application configuration."
+  type = object({
+    app_name    = optional(string, "karapace")
+    channel     = optional(string, "latest/edge")
+    config      = optional(map(string), {})
+    constraints = optional(string, "arch=amd64")
+    resources   = optional(map(string), {})
+    revision    = optional(number, null)
+    base        = optional(string, "ubuntu@24.04")
+    units       = optional(number, 1)
+  })
+  default = {}
+}
+
+variable "ui" {
+  description = "Defines the Kafbat Kafka UI application configuration."
+  type = object({
+    app_name    = optional(string, "kafka-ui")
+    channel     = optional(string, "latest/edge")
+    config      = optional(map(string), {})
+    constraints = optional(string, "arch=amd64")
+    resources   = optional(map(string), {})
+    revision    = optional(number, null)
+    base        = optional(string, "ubuntu@24.04")
+    units       = optional(number, 1)
+  })
+  default = {}
+}
+
+variable "integrator" {
+  description = "Defines the Integrator application configuration."
+  type = object({
+    app_name = optional(string, "data-integrator")
+    channel  = optional(string, "latest/edge")
+    config = optional(map(string), {
+      topic-name       = "__admin-user"
+      extra-user-roles = "admin"
+    })
+    constraints = optional(string, "arch=amd64")
+    resources   = optional(map(string), {})
+    revision    = optional(number, null)
+    base        = optional(string, "ubuntu@24.04")
+    units       = optional(number, 1)
+  })
+  default = {}
+}
diff --git a/tests/integration/bundle/auth.py b/tests/integration/bundle/auth.py
deleted file mode 100644
index 025f7ce..0000000
--- a/tests/integration/bundle/auth.py
+++ /dev/null
@@ -1,69 +0,0 @@
-#!/usr/bin/env python3
-# Copyright 2022 Canonical Ltd.
-# See LICENSE file for licensing details.
- -"""Supporting objects for Kafka user and ACL management.""" - -import logging -import re -from dataclasses import dataclass -from typing import List, Set - -logger = logging.getLogger(__name__) - - -@dataclass(unsafe_hash=True) -class Acl: - """Convenience object for representing a Kafka ACL.""" - - resource_name: str - resource_type: str - operation: str - username: str - - -class KafkaAuth: - """Object for updating Kafka users and ACLs.""" - - def __init__(self, charm, opts: List[str], zookeeper: str): - self.charm = charm - self.opts = opts - self.zookeeper = zookeeper - self.current_acls: Set[Acl] = set() - self.new_user_acls: Set[Acl] = set() - - @staticmethod - def _parse_acls(acls: str) -> Set[Acl]: - """Parses output from raw ACLs provided by the cluster.""" - current_acls = set() - resource_type, name, user, operation = None, None, None, None - for line in acls.splitlines(): - resource_search = re.search(r"resourceType=([^\,]+),", line) - if resource_search: - resource_type = resource_search[1] - - name_search = re.search(r"name=([^\,]+),", line) - if name_search: - name = name_search[1] - - user_search = re.search(r"principal=User\:([^\,]+),", line) - if user_search: - user = user_search[1] - - operation_search = re.search(r"operation=([^\,]+),", line) - if operation_search: - operation = operation_search[1] - else: - continue - - if resource_type and name and user and operation: - current_acls.add( - Acl( - resource_type=resource_type, - resource_name=name, - username=user, - operation=operation, - ) - ) - - return current_acls diff --git a/tests/integration/bundle/conftest.py b/tests/integration/bundle/conftest.py deleted file mode 100644 index 6bdc591..0000000 --- a/tests/integration/bundle/conftest.py +++ /dev/null @@ -1,28 +0,0 @@ -#!/usr/bin/env python3 -# Copyright 2023 Canonical Ltd. -# See LICENSE file for licensing details. - -from literals import BUNDLE_BUILD - - -def pytest_addoption(parser): - """Defines pytest parsers.""" - parser.addoption("--tls", action="store_true", help="set tls for e2e tests") - - parser.addoption( - "--bundle-file", - action="store", - help="name of the bundle zip when provided.", - default=BUNDLE_BUILD, - ) - - -def pytest_generate_tests(metafunc): - """Processes pytest parsers.""" - tls = metafunc.config.option.tls - if "tls" in metafunc.fixturenames: - metafunc.parametrize("tls", [bool(tls)], scope="module") - - bundle_file = metafunc.config.option.bundle_file - if "bundle_file" in metafunc.fixturenames: - metafunc.parametrize("bundle_file", [bundle_file], scope="module") diff --git a/tests/integration/bundle/helpers.py b/tests/integration/bundle/helpers.py deleted file mode 100644 index 4e579e5..0000000 --- a/tests/integration/bundle/helpers.py +++ /dev/null @@ -1,279 +0,0 @@ -#!/usr/bin/env python3 -# Copyright 2023 Canonical Ltd. -# See LICENSE file for licensing details. 
-import json -import logging -import re -from subprocess import PIPE, CalledProcessError, check_output -from typing import Any, Dict, List, Set, Tuple - -import yaml -from pytest_operator.plugin import OpsTest -from tests.integration.bundle.literals import BINARIES_PATH, CONF_PATH - -from .auth import Acl, KafkaAuth - -logger = logging.getLogger(__name__) - - -class NoSecretFoundError(Exception): - def __init__(self, owner: str, label: str): - self.owner = owner - self.label = label - - -def load_acls(model_full_name: str, bootstrap_server: str, unit_name: str) -> Set[Acl]: - container_command = f"KAFKA_OPTS=-Djava.security.auth.login.config={CONF_PATH}/kafka-jaas.cfg {BINARIES_PATH}/bin/kafka-acls.sh --bootstrap-server {bootstrap_server} --command-config {CONF_PATH}/client.properties --list" - try: - result = check_output( - f"JUJU_MODEL={model_full_name} juju ssh --container kafka {unit_name} '{container_command}'", - stderr=PIPE, - shell=True, - universal_newlines=True, - ) - return KafkaAuth._parse_acls(acls=result) - except CalledProcessError as e: - logger.error(f"{str(e.stdout)=}") - raise e - - -def load_super_users(model_full_name: str, unit_name: str) -> List[str]: - - command = ( - f"JUJU_MODEL={model_full_name} juju ssh --container kafka {unit_name} 'cat /data/kafka/config/server.properties'", - ) - result = check_output( - command, - stderr=PIPE, - shell=True, - universal_newlines=True, - ) - properties = result.splitlines() - - for prop in properties: - if "super.users" in prop: - return prop.split("=")[1].split(";") - - return [] - - -def check_produced_messages(model_full_name: str, unit_name: str) -> bool: - - command = (f"JUJU_MODEL={model_full_name} juju ssh {unit_name} 'cat /tmp/*.log'",) - result = check_output( - command, - stderr=PIPE, - shell=True, - universal_newlines=True, - ) - logs = result.splitlines() - for line in logs: - print(line) - if "Message #" in line: - return True - - return False - - -def check_user(model_full_name: str, username: str, bootstrap_server: str, unit_name: str) -> None: - container_command = f"KAFKA_OPTS=-Djava.security.auth.login.config={CONF_PATH}/kafka-jaas.cfg {BINARIES_PATH}/bin/kafka-configs.sh --bootstrap-server {bootstrap_server} --command-config {CONF_PATH}/client.properties --describe --entity-type users --entity-name {username}" - - try: - result = check_output( - f"JUJU_MODEL={model_full_name} juju ssh --container kafka {unit_name} '{container_command}'", - stderr=PIPE, - shell=True, - universal_newlines=True, - ) - assert "SCRAM-SHA-512" in result - except CalledProcessError as e: - logger.error(f"{str(e.stdout)=}") - raise e - - -def show_unit(unit_name: str, model_full_name: str) -> Any: - result = check_output( - f"JUJU_MODEL={model_full_name} juju show-unit {unit_name}", - stderr=PIPE, - shell=True, - universal_newlines=True, - ) - - return yaml.safe_load(result) - - -def get_secret_by_label(model_full_name: str, label: str, owner: str) -> dict[str, str]: - secrets_meta_raw = check_output( - f"JUJU_MODEL={model_full_name} juju list-secrets --format json", - stderr=PIPE, - shell=True, - universal_newlines=True, - ).strip() - secrets_meta = json.loads(secrets_meta_raw) - - secret_ids = [ - secret_id - for secret_id in secrets_meta - if owner and secrets_meta[secret_id]["owner"] == owner - if secrets_meta[secret_id]["label"] == label - ] - - if len(secret_ids) > 1: - raise ValueError( - f"Multiple secrets carry the same (label, owner) combination: ({label}, {owner})" - ) - - if len(secret_ids) == 0: - raise 
NoSecretFoundError(owner=owner, label=label) - - secret_id = secret_ids[0] - - secrets_data_raw = check_output( - f"JUJU_MODEL={model_full_name} juju show-secret --format json --reveal {secret_id}", - stderr=PIPE, - shell=True, - universal_newlines=True, - ) - - secret_data = json.loads(secrets_data_raw) - return secret_data[secret_id]["content"]["Data"] - - -def get_kafka_zk_relation_data(model_full_name: str, owner: str, unit_name: str) -> dict[str, str]: - unit_data = show_unit(unit_name, model_full_name) - - relation_name = "zookeeper" - - kafka_zk_relation_data = {} - for info in unit_data[unit_name]["relation-info"]: - if info["endpoint"] == relation_name: - kafka_zk_relation_data["relation-id"] = info["relation-id"] - - # initially collects all non-secret keys - kafka_zk_relation_data.update(dict(info["application-data"])) - - try: - user_secret = get_secret_by_label( - model_full_name, - label=f"{relation_name}.{kafka_zk_relation_data['relation-id']}.user.secret", - owner=owner, - ) - except NoSecretFoundError: - logger.warning("ZooKeeper relation data is not using secrets for users.") - user_secret = {} - - try: - tls_secret = get_secret_by_label( - model_full_name, - label=f"{relation_name}.{kafka_zk_relation_data['relation-id']}.tls.secret", - owner=owner, - ) - except NoSecretFoundError: - logger.warning("ZooKeeper relation data is not using secrets for tls.") - tls_secret = {} - - # overrides to secret keys if found - return kafka_zk_relation_data | user_secret | tls_secret - - -def get_peer_relation_data(model_full_name: str, unit_name: str) -> dict[str, str]: - - owner, *_ = unit_name.split("/") - unit_data = show_unit(unit_name, model_full_name) - - relation_name = "cluster" - - relation_data = {} - for info in unit_data[unit_name]["relation-info"]: - if info["endpoint"] == relation_name: - relation_data["relation-id"] = info["relation-id"] - - # initially collects all non-secret keys - relation_data.update(dict(info["application-data"])) - - try: - user_secret = get_secret_by_label( - model_full_name, - label=f"{relation_name}.{owner}.app", - owner=owner, - ) - except NoSecretFoundError: - logger.warning("Peer relation data is not using secrets for users.") - user_secret = {} - - try: - tls_secret = get_secret_by_label( - model_full_name, - label=f"{relation_name}.{owner}.unit", - owner=unit_name, - ) - except NoSecretFoundError: - logger.warning("Peer relation data is not using secrets for tls.") - tls_secret = {} - - # overrides to secret keys if found - return relation_data | user_secret | tls_secret - - -def get_zookeeper_connection( - unit_name: str, owner: str, model_full_name: str -) -> Tuple[List[str], str]: - - data = get_kafka_zk_relation_data(model_full_name, owner, unit_name) - - return [data["username"]], data["uris"] - - -def get_kafka_users(unit_name: str, model_full_name: str): - data = get_peer_relation_data(model_full_name, unit_name) - - return [key for key in data if re.match(r"(relation\-[\d]+)", key)] - - -def check_properties(model_full_name: str, unit: str): - properties = check_output( - f"JUJU_MODEL={model_full_name} juju ssh --container zookeeper {unit} 'cat /etc/zookeeper/zoo.cfg'", - stderr=PIPE, - shell=True, - universal_newlines=True, - ) - return properties.splitlines() - - -def srvr(host: str) -> Dict: - """Retrieves attributes returned from the 'srvr' 4lw command. - - Specifically for this test, we are interested in the "Mode" of the ZK server, - which allows checking quorum leadership and follower active status. 
- """ - response = check_output( - f"echo srvr | nc {host} 2181", stderr=PIPE, shell=True, universal_newlines=True - ) - - result = {} - for item in response.splitlines(): - k = re.split(": ", item)[0] - v = re.split(": ", item)[1] - result[k] = v - - return result - - -async def get_address(ops_test: OpsTest, app_name: str, unit_num: str) -> str: - """Get the address for a unit.""" - status = await ops_test.model.get_status() # noqa: F821 - address = status["applications"][app_name]["units"][f"{app_name}/{unit_num}"]["address"] - return address - - -async def ping_servers(ops_test: OpsTest, zookeeper_app_name: str) -> bool: - for unit in ops_test.model.applications[zookeeper_app_name].units: - host = await get_address( - ops_test, app_name=zookeeper_app_name, unit_num=unit.name.split("/")[-1] - ) - assert host - mode = srvr(host)["Mode"] - if mode not in ["leader", "follower"]: - return False - - return True diff --git a/tests/integration/bundle/literals.py b/tests/integration/bundle/literals.py deleted file mode 100644 index 2d67ea6..0000000 --- a/tests/integration/bundle/literals.py +++ /dev/null @@ -1,19 +0,0 @@ -#!/usr/bin/env python3 -# Copyright 2023 Canonical Ltd. -# See LICENSE file for licensing details. - -"""Collection of globals common to the Kafka bundle.""" - -BUNDLE_BUILD = "build/kafka-k8s-bundle.zip" -APP_CHARM_PATH = "tests/integration/bundle/app-charm" -ZOOKEEPER = "zookeeper-k8s" -KAFKA = "kafka-k8s" -CLIENT_CHARM_NAME = "kafka-test-app" - -TLS_CHARM_NAME = "self-signed-certificates" - -CONF_PATH = "/etc/kafka" -DATA_PATH = "/var/lib/kafka" -LOGS_PATH = "/var/log/kafka" -BINARIES_PATH = "/opt/kafka" -TLS_PORT = 9093 diff --git a/tests/integration/bundle/test_bundle.py b/tests/integration/bundle/test_bundle.py deleted file mode 100644 index 4e0002b..0000000 --- a/tests/integration/bundle/test_bundle.py +++ /dev/null @@ -1,166 +0,0 @@ -#!/usr/bin/env python3 -# Copyright 2023 Canonical Ltd. -# See LICENSE file for licensing details. 
- -import asyncio -import logging -from zipfile import ZipFile - -import pytest -import yaml -from pytest_operator.plugin import OpsTest -from tests.integration.bundle.helpers import ( - check_produced_messages, - check_properties, - check_user, - get_address, - get_kafka_users, - get_zookeeper_connection, - load_acls, - ping_servers, -) -from tests.integration.bundle.literals import ( - CLIENT_CHARM_NAME, - KAFKA, - TLS_CHARM_NAME, - TLS_PORT, - ZOOKEEPER, -) - -logger = logging.getLogger(__name__) - -PRODUCER = "producer" -CONSUMER = "consumer" -TOPIC = "test-topic" - - -@pytest.fixture(scope="module") -def usernames(): - return set() - - -@pytest.mark.abort_on_fail -async def test_verify_tls_flags_consistency(ops_test: OpsTest, bundle_file, tls): - """Deploy the bundle.""" - with ZipFile(bundle_file) as fp: - bundle_data = yaml.safe_load(fp.read("bundle.yaml")) - - applications = [] - - bundle_tls = False - for app in bundle_data["applications"]: - applications.append(app) - if TLS_CHARM_NAME in app: - bundle_tls = True - - assert tls == bundle_tls - - -@pytest.mark.abort_on_fail -async def test_deploy_bundle_active(ops_test: OpsTest, bundle_file, tls): - """Deploy the bundle.""" - logger.info(f"Deploying Bundle with file {bundle_file}") - retcode, stdout, stderr = await ops_test.run( - *["juju", "deploy", "--trust", "-m", ops_test.model_full_name, f"./{bundle_file}"] - ) - assert retcode == 0, f"Deploy failed: {(stderr or stdout).strip()}" - logger.info(stdout) - - with ZipFile(bundle_file) as fp: - bundle = yaml.safe_load(fp.read("bundle.yaml")) - - async with ops_test.fast_forward(fast_interval="30s"): - await ops_test.model.wait_for_idle( - apps=list(bundle["applications"].keys()), - idle_period=10, - status="active", - timeout=1800, - ) - - -@pytest.mark.abort_on_fail -async def test_active_zookeeper(ops_test: OpsTest): - """Test the status the correct status of Zookeeper.""" - assert await ping_servers(ops_test, ZOOKEEPER) - - -@pytest.mark.abort_on_fail -async def test_deploy_app_charm_relate(ops_test: OpsTest, bundle_file, tls): - """Deploy dummy app and relate with Kafka and TLS operator.""" - with ZipFile(bundle_file) as fp: - bundle_data = yaml.safe_load(fp.read("bundle.yaml")) - - applications = list(bundle_data["applications"].keys()) - - config = {"role": "producer", "topic_name": TOPIC, "num_messages": 50} - await ops_test.model.deploy( - CLIENT_CHARM_NAME, - application_name=PRODUCER, - num_units=1, - series="jammy", - channel="edge", - config=config, - ) - await ops_test.model.wait_for_idle(apps=[PRODUCER]) - - if tls: - await ops_test.model.add_relation(PRODUCER, TLS_CHARM_NAME) - - await ops_test.model.wait_for_idle( - apps=applications, timeout=1200, idle_period=30, status="active" - ) - await ops_test.model.add_relation(KAFKA, PRODUCER) - - await ops_test.model.wait_for_idle( - apps=applications + [PRODUCER], status="active", timeout=1000, idle_period=30 - ) - - for app in applications + [PRODUCER]: - assert ops_test.model.applications[app].status == "active" - - await asyncio.sleep(10) - - check_produced_messages(ops_test.model_full_name, f"{PRODUCER}/0") - - -@pytest.mark.abort_on_fail -async def test_apps_up_and_running(ops_test: OpsTest, usernames): - """Test that all apps are up and running.""" - assert await ping_servers(ops_test, ZOOKEEPER) - - for unit in ops_test.model.applications[ZOOKEEPER].units: - assert "sslQuorum=true" in check_properties( - model_full_name=ops_test.model_full_name, unit=unit.name - ) - - # implicitly tests setting of kafka app data - 
zookeeper_usernames, zookeeper_uri = get_zookeeper_connection( - unit_name=f"{KAFKA}/0", owner=ZOOKEEPER, model_full_name=ops_test.model_full_name - ) - assert zookeeper_uri - assert len(zookeeper_usernames) > 0 - - usernames.update(get_kafka_users(f"{KAFKA}/0", ops_test.model_full_name)) - - ip_address = await get_address(ops_test, app_name=KAFKA, unit_num="0") - bootstrap_server = f"{ip_address}:{TLS_PORT}" - - for username in usernames: - check_user( - username=username, - bootstrap_server=bootstrap_server, - model_full_name=ops_test.model_full_name, - unit_name=f"{KAFKA}/0", - ) - - for acl in load_acls( - model_full_name=ops_test.model_full_name, - bootstrap_server=bootstrap_server, - unit_name=f"{KAFKA}/0", - ): - assert acl.username in usernames - assert acl.operation in ["CREATE", "READ", "WRITE", "DESCRIBE"] - assert acl.resource_type in ["GROUP", "TOPIC"] - if acl.resource_type == "TOPIC": - assert acl.resource_name == TOPIC - assert await ping_servers(ops_test, ZOOKEEPER) diff --git a/tests/integration/e2e/conftest.py b/tests/integration/e2e/conftest.py deleted file mode 100644 index 11ee0c1..0000000 --- a/tests/integration/e2e/conftest.py +++ /dev/null @@ -1,248 +0,0 @@ -#!/usr/bin/env python3 -# Copyright 2023 Canonical Ltd. -# See LICENSE file for licensing details. - -"""The pytest fixtures to support cmd options for local running and CI/CD.""" - -import logging -import random -import string -from typing import Dict, Literal, Optional -from zipfile import ZipFile - -import pytest -import yaml -from literals import ( - BUNDLE_BUILD, - DATABASE_CHARM_NAME, - INTEGRATOR_CHARM_NAME, - KAFKA_CHARM_NAME, - KAFKA_TEST_APP_CHARM_NAME, - TLS_CHARM_NAME, - ZOOKEEPER_CHARM_NAME, -) -from pytest_operator.plugin import OpsTest - -logger = logging.getLogger(__name__) - - -def pytest_addoption(parser): - """Defines pytest parsers.""" - parser.addoption("--tls", action="store_true", help="set tls for e2e tests") - parser.addoption( - "--kafka", action="store", help="name of pre-deployed kafka app", default=KAFKA_CHARM_NAME - ) - parser.addoption( - "--zookeeper", - action="store", - help="name of pre-deployed zookeeper app", - default=ZOOKEEPER_CHARM_NAME, - ) - parser.addoption( - "--certificates", - action="store", - help="name of pre-deployed tls-certificates app", - default=TLS_CHARM_NAME, - ) - parser.addoption( - "--integrator", - action="store_true", - help="set usage of credentials provided by the data-integrator", - ) - parser.addoption( - "--database", - action="store", - help="name of pre-deployed mongoDB instance.", - default=DATABASE_CHARM_NAME, - ) - parser.addoption( - "--bundle-file", - action="store", - help="name of the bundle zip when provided.", - default=BUNDLE_BUILD, - ) - - -def pytest_generate_tests(metafunc): - """Processes pytest parsers.""" - tls = metafunc.config.option.tls - if "tls" in metafunc.fixturenames: - metafunc.parametrize("tls", [bool(tls)], scope="module") - - kafka = metafunc.config.option.kafka - if "kafka" in metafunc.fixturenames: - metafunc.parametrize("kafka", [kafka], scope="module") - - zookeeper = metafunc.config.option.zookeeper - if "zookeeper" in metafunc.fixturenames: - metafunc.parametrize("zookeeper", [zookeeper], scope="module") - - certificates = metafunc.config.option.certificates - if "certificates" in metafunc.fixturenames: - metafunc.parametrize("certificates", [certificates], scope="module") - - integrator = metafunc.config.option.integrator - if "integrator" in metafunc.fixturenames: - metafunc.parametrize("integrator", 
[bool(integrator)], scope="module") - - database = metafunc.config.option.database - if "database" in metafunc.fixturenames: - metafunc.parametrize("database", [database], scope="module") - - bundle_file = metafunc.config.option.bundle_file - if "bundle_file" in metafunc.fixturenames: - metafunc.parametrize("bundle_file", [bundle_file], scope="module") - - -### - FIXTURES - ### - - -@pytest.fixture(scope="module") -async def deploy_cluster(ops_test: OpsTest, bundle_file): - """Fixture for deploying Kafka+ZK clusters.""" - if not ops_test.model: # avoids a multitude of linting errors - raise RuntimeError("model not set") - - logger.info(f"Deploying Bundle with file {bundle_file}") - retcode, stdout, stderr = await ops_test.run( - *["juju", "deploy", "--trust", "-m", ops_test.model_full_name, f"./{bundle_file}"] - ) - assert retcode == 0, f"Deploy failed: {(stderr or stdout).strip()}" - logger.info(stdout) - - with ZipFile(bundle_file) as fp: - bundle = yaml.safe_load(fp.read("bundle.yaml")) - - apps = list(bundle["applications"].keys()) - logger.info(f"Applications: {','.join(apps)}") - - async with ops_test.fast_forward(fast_interval="60s"): - await ops_test.model.wait_for_idle( - apps=apps, idle_period=30, status="active", timeout=1800, raise_on_error=False - ) - - logger.info("Bundle deployed!") - - -@pytest.fixture(scope="function") -async def deploy_data_integrator(ops_test: OpsTest, kafka): - """Factory fixture for deploying + tearing down client applications.""" - # tracks deployed app names for teardown later - apps = [] - - async def _deploy_data_integrator(config: Dict[str, str]): - """Deploys client with specified role and uuid.""" - if not ops_test.model: # avoids a multitude of linting errors - raise RuntimeError("model not set") - - # uuid to avoid name clashes for same applications - key = "".join(random.choices(string.ascii_lowercase, k=4)) - generated_app_name = f"data-integrator-{key}" - apps.append(generated_app_name) - - logger.info(f"{generated_app_name=} - {apps=}") - await ops_test.model.deploy( - INTEGRATOR_CHARM_NAME, - application_name=generated_app_name, - num_units=1, - series="jammy", - channel="edge", - config=config, - ) - await ops_test.model.wait_for_idle(apps=[generated_app_name]) - - return generated_app_name - - logger.info(f"setting up data_integrator - current apps {apps}") - yield _deploy_data_integrator - - logger.info(f"tearing down {apps}") - for app in apps: - logger.info(f"tearing down {app}") - await ops_test.model.applications[app].remove() - - await ops_test.model.wait_for_idle(apps=[kafka], idle_period=30, status="active", timeout=1800) - - -@pytest.fixture(scope="function") -async def deploy_test_app(ops_test: OpsTest, kafka, certificates, tls): - """Factory fixture for deploying + tearing down client applications.""" - # tracks deployed app names for teardown later - apps = [] - - async def _deploy_test_app( - role: Literal["producer", "consumer"], - topic_name: str = "test-topic", - consumer_group_prefix: Optional[str] = None, - num_messages: int = 1500, - ): - """Deploys client with specified role and uuid.""" - if not ops_test.model: # avoids a multitude of linting errors - raise RuntimeError("model not set") - - # uuid to avoid name clashes for same applications - key = "".join(random.choices(string.ascii_lowercase, k=4)) - generated_app_name = f"{role}-{key}" - apps.append(generated_app_name) - - logger.info(f"{generated_app_name=} - {apps=}") - - config = {"role": role, "topic_name": topic_name, "num_messages": num_messages} - - if 
consumer_group_prefix: - config["consumer_group_prefix"] = consumer_group_prefix - - await ops_test.model.deploy( - KAFKA_TEST_APP_CHARM_NAME, - application_name=generated_app_name, - num_units=1, - series="jammy", - channel="edge", - config=config, - ) - await ops_test.model.wait_for_idle( - apps=[generated_app_name], idle_period=20, status="active" - ) - - # Relate with TLS operator - if tls: - await ops_test.model.add_relation(generated_app_name, certificates) - await ops_test.model.wait_for_idle( - apps=[generated_app_name, certificates], - idle_period=30, - status="active", - timeout=1800, - ) - - # Relate with MongoDB - await ops_test.model.add_relation(generated_app_name, DATABASE_CHARM_NAME) - await ops_test.model.wait_for_idle( - apps=[generated_app_name, DATABASE_CHARM_NAME], - idle_period=30, - status="active", - timeout=1800, - ) - - return generated_app_name - - logger.info(f"setting up test_app - current apps {apps}") - yield _deploy_test_app - - logger.info(f"tearing down {apps}") - # stop producers before consumers - for app in sorted(apps, reverse=True): - logger.info(f"tearing down {app}") - # check if application is in the - if app in ops_test.model.applications: - await ops_test.model.applications[app].remove_relation( - f"{app}:database", f"{DATABASE_CHARM_NAME}" - ) - await ops_test.model.wait_for_idle(apps=[DATABASE_CHARM_NAME, app], idle_period=10) - await ops_test.model.applications[app].remove() - await ops_test.model.wait_for_idle( - apps=[kafka], idle_period=10, status="active", timeout=1800 - ) - else: - logger.info(f"App: {app} already removed!") - - await ops_test.model.wait_for_idle(apps=[kafka], idle_period=30, status="active", timeout=1800) diff --git a/tests/integration/e2e/helpers.py b/tests/integration/e2e/helpers.py deleted file mode 100644 index 85338aa..0000000 --- a/tests/integration/e2e/helpers.py +++ /dev/null @@ -1,273 +0,0 @@ -#!/usr/bin/env python3 -# Copyright 2023 Canonical Ltd. -# See LICENSE file for licensing details. 
- -import logging -import random -import string -from subprocess import PIPE, STDOUT, CalledProcessError, check_output -from typing import NamedTuple - -import ops -from juju.unit import Unit -from pymongo import MongoClient -from pytest_operator.plugin import OpsTest -from tests.integration.e2e.literals import KAFKA_INTERNAL_PORT, SUBSTRATE - -logger = logging.getLogger() - -ExecArgs = NamedTuple("ExecArgs", container_arg=str, sudo_arg=str, bin_cmd=str, config_file=str) - - -def check_produced_and_consumed_messages(uris: str, collection_name: str): - """Check that messages produced and consumed are consistent.""" - logger.debug(f"MongoDB uris: {uris}") - logger.debug(f"Topic: {collection_name}") - produced_messages = [] - consumed_messages = [] - logger.info(f"URI: {uris}") - try: - client = MongoClient( - uris, - directConnection=True, - connect=False, - serverSelectionTimeoutMS=1000, - connectTimeoutMS=2000, - ) - db = client[collection_name] - consumer_collection = db["consumer"] - producer_collection = db["producer"] - - logger.info(f"Number of messages from consumer: {consumer_collection.count_documents({})}") - logger.info(f"Number of messages from producer: {producer_collection.count_documents({})}") - assert consumer_collection.count_documents({}) > 0 - assert producer_collection.count_documents({}) > 0 - cursor = consumer_collection.find({}) - for document in cursor: - consumed_messages.append((document["origin"], document["content"])) - - cursor = producer_collection.find({}) - for document in cursor: - produced_messages.append((document["origin"], document["content"])) - - logger.info(f"Number of produced messages: {len(produced_messages)}") - logger.info(f"Number of unique produced messages: {len(set(produced_messages))}") - logger.info(f"Number of consumed messages: {len(consumed_messages)}") - logger.info(f"Number of unique consumed messages: {len(set(consumed_messages))}") - - assert len(consumed_messages) >= len(produced_messages) - assert abs(len(consumed_messages) - len(produced_messages)) < 3 - if len(consumed_messages) < len(produced_messages): - missing_elem = list(set(produced_messages) - set(consumed_messages)) - logger.error(missing_elem) - - client.close() - except Exception as e: - logger.error("Cannot connect to MongoDB collection.") - raise e - - -async def fetch_action_get_credentials(unit: Unit) -> dict: - """Helper to run an action to fetch connection info. - - Args: - unit: The juju unit on which to run the get_credentials action for credentials - Returns: - A dictionary with the username, password and access info for the service. - """ - action = await unit.run_action(action_name="get-credentials") - result = await action.wait() - return result.results - - -async def kubectl_delete(ops_test: OpsTest, unit: ops.model.Unit, wait: bool = True) -> None: - """Delete the underlying pod for a unit.""" - kubectl_cmd = ( - "microk8s", - "kubectl", - "delete", - "pod", - f"--wait={wait}", - f"-n{ops_test.model_name}", - unit.name.replace("/", "-"), - ) - logger.info(f"Command: {kubectl_cmd}") - ret_code, _, _ = await ops_test.run(*kubectl_cmd) - assert ret_code == 0, "Unit failed to delete" - - -async def scale_application( - ops_test: OpsTest, application_name: str, desired_count: int, wait: bool = True -) -> None: - """Scale a given application to the desired unit count. 
- - Args: - ops_test: The ops test framework - application_name: The name of the application - desired_count: The number of units to scale to - wait: Boolean indicating whether to wait until units - reach desired count. - """ - if len(ops_test.model.applications[application_name].units) == desired_count: - return - await ops_test.model.applications[application_name].scale(desired_count) - - if desired_count > 0 and wait: - async with ops_test.fast_forward(): - await ops_test.model.wait_for_idle( - apps=[application_name], - status="active", - timeout=15 * 60, - wait_for_exact_units=desired_count, - raise_on_blocked=True, - ) - - assert len(ops_test.model.applications[application_name].units) == desired_count - - -async def get_address(ops_test: OpsTest, app_name, unit_num=0) -> str: - """Get the address for a unit.""" - status = await ops_test.model.get_status() # noqa: F821 - address = status["applications"][app_name]["units"][f"{app_name}/{unit_num}"]["address"] - return address - - -def get_action_parameters(credentials: dict, topic_name: str): - """Construct parameter dictionary needed to stark consumer/producer with the action.""" - logger.info(f"Credentials: {credentials}") - assert "kafka" in credentials - action_data = { - "servers": credentials["kafka"]["endpoints"], - "username": credentials["kafka"]["username"], - "password": credentials["kafka"]["password"], - "topic-name": topic_name, - } - if "consumer-group-prefix" in credentials["kafka"]: - action_data["consumer-group-prefix"] = credentials["kafka"]["consumer-group-prefix"] - return action_data - - -async def fetch_action_start_process(unit: Unit, action_params: dict[str, str]) -> dict: - """Helper to run an action to start consumer/producer. - - Args: - unit: the target unit. - action_params: A dictionary that contains all commands parameters. - - Returns: - A dictionary with the result of the action. - """ - action = await unit.run_action(action_name="start-process", **action_params) - result = await action.wait() - return result.results - - -async def fetch_action_stop_process(unit: Unit) -> dict: - """Helper to run an action to stop consumer/producer. - - Args: - unit: the target unit. - - Returns: - A dictionary with the result of the action. - """ - action = await unit.run_action(action_name="stop-process") - result = await action.wait() - return result.results - - -def get_random_topic() -> str: - """Return a random topic name.""" - return f"topic-{''.join(random.choices(string.ascii_lowercase, k=4))}" - - -def _get_exec_args_params() -> ExecArgs: - if SUBSTRATE == "k8s": - container_arg = "--container kafka" - sudo_arg = "" - bin_cmd = "/opt/kafka/bin/kafka-{sub}.sh" - config_file = "/etc/kafka/client.properties" - else: - container_arg = "" - sudo_arg = "sudo -i" - bin_cmd = "charmed-kafka.{sub}" - config_file = "/var/snap/charmed-kafka/current/etc/kafka/client.properties" - - return ExecArgs(container_arg, sudo_arg, bin_cmd, config_file) - - -def create_topic(model_full_name: str, app_name: str, topic: str) -> None: - """Helper to create a topic. 
- - Args: - model_full_name: Juju model - app_name: Kafka app name in the Juju model - topic: the desired topic to configure - """ - args = _get_exec_args_params() - try: - check_output( - f"JUJU_MODEL={model_full_name} juju ssh {args.container_arg} {app_name}/leader {args.sudo_arg} " - f"'{args.bin_cmd.format(sub='topics')} --create --topic {topic} --bootstrap-server localhost:{KAFKA_INTERNAL_PORT} " - f"--command-config {args.config_file}'", - stderr=STDOUT, - shell=True, - universal_newlines=True, - ) - - except CalledProcessError as e: - logger.error(f"command '{e.cmd}' return with error (code {e.returncode}): {e.output}") - raise - - -def write_topic_message_size_config( - model_full_name: str, app_name: str, topic: str, size: int -) -> None: - """Helper to configure a topic's message max size. - - Args: - model_full_name: Juju model - app_name: Kafka app name in the Juju model - topic: the desired topic to configure - size: the maximal message size in bytes - """ - args = _get_exec_args_params() - try: - result = check_output( - f"JUJU_MODEL={model_full_name} juju ssh {args.container_arg} {app_name}/leader {args.sudo_arg} " - f"'{args.bin_cmd.format(sub='configs')} --bootstrap-server localhost:{KAFKA_INTERNAL_PORT} " - f"--entity-type topics --entity-name {topic} --alter --add-config max.message.bytes={size} --command-config {args.config_file}'", - stderr=STDOUT, - shell=True, - universal_newlines=True, - ) - - except CalledProcessError as e: - logger.error(f"command '{e.cmd}' return with error (code {e.returncode}): {e.output}") - raise - assert f"Completed updating config for topic {topic}." in result - - -def read_topic_config(model_full_name: str, app_name: str, topic: str) -> str: - """Helper to get a topic's configuration. - - Args: - model_full_name: Juju model - app_name: Kafka app name in the Juju model - topic: the desired topic to read the configuration from - """ - args = _get_exec_args_params() - try: - result = check_output( - f"JUJU_MODEL={model_full_name} juju ssh {args.container_arg} {app_name}/leader {args.sudo_arg} " - f"'{args.bin_cmd.format(sub='configs')} --bootstrap-server localhost:{KAFKA_INTERNAL_PORT} " - f"--entity-type topics --entity-name {topic} --describe --command-config {args.config_file}'", - stderr=PIPE, - shell=True, - universal_newlines=True, - ) - - except CalledProcessError as e: - logger.error(f"command '{e.cmd}' return with error (code {e.returncode}): {e.output}") - raise - return result diff --git a/tests/integration/e2e/literals.py b/tests/integration/e2e/literals.py deleted file mode 100644 index 982430b..0000000 --- a/tests/integration/e2e/literals.py +++ /dev/null @@ -1,19 +0,0 @@ -#!/usr/bin/env python3 -# Copyright 2023 Canonical Ltd. -# See LICENSE file for licensing details. - -"""Collection of globals common to the Kafka bundle.""" - -INTEGRATOR_CHARM_NAME = "data-integrator" -BUNDLE_BUILD = "build/kafka-k8s-bundle.zip" -TLS_CHARM_NAME = "self-signed-certificates" -KAFKA_CHARM_NAME = "kafka-k8s" -ZOOKEEPER_CHARM_NAME = "zookeeper-k8s" - -TLS_REL_NAME = "certificates" - -DATABASE_CHARM_NAME = "mongodb-k8s" -KAFKA_TEST_APP_CHARM_NAME = "kafka-test-app" -SUBSTRATE = "k8s" - -KAFKA_INTERNAL_PORT = 19092 diff --git a/tests/integration/e2e/test_backup.py b/tests/integration/e2e/test_backup.py deleted file mode 100644 index c320901..0000000 --- a/tests/integration/e2e/test_backup.py +++ /dev/null @@ -1,232 +0,0 @@ -#!/usr/bin/env python3 -# Copyright 2023 Canonical Ltd. -# See LICENSE file for licensing details. 
- -import asyncio -import json -import logging -import socket - -import boto3 -import pytest -import pytest_microceph -from mypy_boto3_s3.service_resource import Bucket -from pytest_operator.plugin import OpsTest -from tests.integration.e2e.helpers import ( - create_topic, - get_random_topic, - read_topic_config, - write_topic_message_size_config, -) -from tests.integration.e2e.literals import ZOOKEEPER_CHARM_NAME - -logger = logging.getLogger(__name__) - -TOPIC = get_random_topic() -S3_INTEGRATOR = "s3-integrator" -S3_CHANNEL = "latest/stable" - -NON_DEFAULT_TOPIC_SIZE = 123_123 -UPDATED_TOPIC_SIZE = 456_456 - - -@pytest.fixture(scope="session") -def cloud_credentials(microceph: pytest_microceph.ConnectionInformation) -> dict[str, str]: - """Read cloud credentials.""" - return { - "access-key": microceph.access_key_id, - "secret-key": microceph.secret_access_key, - } - - -@pytest.fixture(scope="session") -def cloud_configs(microceph: pytest_microceph.ConnectionInformation): - host_ip = socket.gethostbyname(socket.gethostname()) - return { - "endpoint": f"http://{host_ip}", - "bucket": microceph.bucket, - "path": "mysql", - "region": "", - } - - -@pytest.fixture(scope="function") -def s3_bucket(cloud_credentials, cloud_configs): - - session = boto3.Session( - aws_access_key_id=cloud_credentials["access-key"], - aws_secret_access_key=cloud_credentials["secret-key"], - region_name=cloud_configs["region"] if cloud_configs["region"] else None, - ) - s3 = session.resource("s3", endpoint_url=cloud_configs["endpoint"]) - bucket = s3.Bucket(cloud_configs["bucket"]) - yield bucket - - -@pytest.mark.skip_if_deployed -@pytest.mark.abort_on_fail -async def test_deploy(ops_test: OpsTest, deploy_cluster): - await asyncio.sleep(0) # do nothing, await deploy_cluster - - -@pytest.mark.abort_on_fail -async def test_set_up_deployment( - ops_test: OpsTest, - kafka, - zookeeper, - cloud_configs, - cloud_credentials, - s3_bucket, -): - assert ops_test.model.applications[kafka].status == "active" - assert ops_test.model.applications[zookeeper].status == "active" - await ops_test.model.deploy(S3_INTEGRATOR, channel=S3_CHANNEL) - await ops_test.model.wait_for_idle(apps=[S3_INTEGRATOR], status="blocked", timeout=1000) - - logger.info("Syncing credentials") - - await ops_test.model.applications[S3_INTEGRATOR].set_config(cloud_configs) - leader_unit = ops_test.model.applications[S3_INTEGRATOR].units[0] - - sync_action = await leader_unit.run_action( - "sync-s3-credentials", - **cloud_credentials, - ) - await sync_action.wait() - await ops_test.model.add_relation(zookeeper, S3_INTEGRATOR) - await ops_test.model.wait_for_idle( - apps=[zookeeper, S3_INTEGRATOR], - status="active", - timeout=1000, - ) - - # bucket exists - assert s3_bucket.meta.client.head_bucket(Bucket=s3_bucket.name) - - -@pytest.mark.abort_on_fail -async def test_point_in_time_recovery(ops_test: OpsTest, s3_bucket: Bucket, kafka, zookeeper): - - logger.info("Creating topic") - - create_topic(model_full_name=ops_test.model_full_name, app_name=kafka, topic=TOPIC) - write_topic_message_size_config( - model_full_name=ops_test.model_full_name, - app_name=kafka, - topic=TOPIC, - size=NON_DEFAULT_TOPIC_SIZE, - ) - assert f"max.message.bytes={NON_DEFAULT_TOPIC_SIZE}" in read_topic_config( - model_full_name=ops_test.model_full_name, app_name=kafka, topic=TOPIC - ) - - logger.info("Creating initial backup") - - for unit in ops_test.model.applications[zookeeper].units: - if await unit.is_leader_from_status(): - leader_unit = unit - - create_action = await 
leader_unit.run_action("create-backup") - await create_action.wait() - - list_action = await leader_unit.run_action("list-backups") - response = await list_action.wait() - - backups = json.loads(response.results.get("backups", "[]")) - assert len(backups) == 1 - - logger.info("Restoring backup") - - write_topic_message_size_config( - model_full_name=ops_test.model_full_name, - app_name=kafka, - topic=TOPIC, - size=UPDATED_TOPIC_SIZE, - ) - - assert f"max.message.bytes={UPDATED_TOPIC_SIZE}" in read_topic_config( - model_full_name=ops_test.model_full_name, app_name=kafka, topic=TOPIC - ) - - backup_to_restore = backups[0]["id"] - list_action = await leader_unit.run_action("restore", **{"backup-id": backup_to_restore}) - await ops_test.model.wait_for_idle( - apps=[zookeeper, kafka], status="active", timeout=1000, idle_period=30 - ) - assert f"max.message.bytes={NON_DEFAULT_TOPIC_SIZE}" in read_topic_config( - model_full_name=ops_test.model_full_name, app_name=kafka, topic=TOPIC - ) - - assert ops_test.model.applications[kafka].status == "active" - assert ops_test.model.applications[zookeeper].status == "active" - - -@pytest.mark.abort_on_fail -async def test_new_cluster_migration(ops_test: OpsTest, s3_bucket: Bucket, kafka, zookeeper): - - status = await ops_test.model.get_status() - - zookeeper_status = status.applications[zookeeper] - - logger.info(f"status: {zookeeper_status}") - - logger.info(f"charm url: {zookeeper_status.charm}") - - revision = int(zookeeper_status.charm.split("-")[-1]) - - data = { - "channel": zookeeper_status.charm_channel, - "revision": revision, - } - - logging.info(f"Fetched current deployment revision: {data}") - - logging.info("Removing and redeploying apps") - - await ops_test.model.applications[zookeeper].remove() - - await ops_test.model.deploy( - ZOOKEEPER_CHARM_NAME, - application_name="new-zk", - num_units=3, - series="jammy", - **data, - ) - await ops_test.model.wait_for_idle(apps=[kafka, "new-zk"], timeout=3600) - - await ops_test.model.add_relation("new-zk", S3_INTEGRATOR) - await ops_test.model.wait_for_idle( - apps=["new-zk", S3_INTEGRATOR], - status="active", - timeout=1000, - ) - - logging.info("Restoring backup") - - for unit in ops_test.model.applications["new-zk"].units: - if await unit.is_leader_from_status(): - leader_unit = unit - - list_action = await leader_unit.run_action("list-backups") - response = await list_action.wait() - - backups = json.loads(response.results.get("backups", "[]")) - assert len(backups) == 1 - - backup_to_restore = backups[0]["id"] - list_action = await leader_unit.run_action("restore", **{"backup-id": backup_to_restore}) - await ops_test.model.wait_for_idle( - apps=["new-zk"], status="active", timeout=1000, idle_period=30 - ) - - await ops_test.model.add_relation(kafka, "new-zk") - await ops_test.model.wait_for_idle( - apps=[kafka, "new-zk"], - idle_period=30, - status="active", - timeout=1200, - ) - - assert f"max.message.bytes={NON_DEFAULT_TOPIC_SIZE}" in read_topic_config( - model_full_name=ops_test.model_full_name, app_name=kafka, topic=TOPIC - ) diff --git a/tests/integration/e2e/test_basic_flow.py b/tests/integration/e2e/test_basic_flow.py deleted file mode 100644 index 66a5498..0000000 --- a/tests/integration/e2e/test_basic_flow.py +++ /dev/null @@ -1,251 +0,0 @@ -#!/usr/bin/env python3 -# Copyright 2023 Canonical Ltd. -# See LICENSE file for licensing details. 
- -import asyncio -import logging - -import pytest -from literals import DATABASE_CHARM_NAME, KAFKA_CHARM_NAME, ZOOKEEPER_CHARM_NAME -from pytest_operator.plugin import OpsTest -from tests.integration.e2e.helpers import ( - check_produced_and_consumed_messages, - fetch_action_get_credentials, - fetch_action_start_process, - fetch_action_stop_process, - get_action_parameters, - get_address, - get_random_topic, - scale_application, -) - -logger = logging.getLogger(__name__) - -TOPIC = get_random_topic() - - -@pytest.mark.skip_if_deployed -@pytest.mark.abort_on_fail -async def test_deploy(ops_test: OpsTest, deploy_cluster): - await asyncio.sleep(0) # do nothing, await deploy_cluster - - -@pytest.mark.abort_on_fail -async def test_cluster_is_deployed_successfully( - ops_test: OpsTest, kafka, zookeeper, tls, certificates -): - assert ops_test.model.applications[kafka].status == "active" - assert ops_test.model.applications[zookeeper].status == "active" - - if tls: - assert ops_test.model.applications[certificates].status == "active" - - # deploy MongoDB - - await asyncio.gather( - ops_test.model.deploy( - DATABASE_CHARM_NAME, - application_name=DATABASE_CHARM_NAME, - num_units=1, - series="jammy", - channel="5/edge", - ), - ) - await ops_test.model.wait_for_idle( - apps=[KAFKA_CHARM_NAME, ZOOKEEPER_CHARM_NAME, DATABASE_CHARM_NAME], - status="active", - timeout=1200, - idle_period=30, - ) - - -@pytest.mark.abort_on_fail -async def test_test_app_actually_set_up( - ops_test: OpsTest, deploy_test_app, deploy_data_integrator, kafka, integrator -): - producer_parameters = None - consumer_parameters = None - if integrator: - # deploy integrators and get credentials - data_integrator_producer = await deploy_data_integrator( - {"topic-name": TOPIC, "extra-user-roles": "producer"} - ) - - await ops_test.model.add_relation(data_integrator_producer, kafka) - await ops_test.model.wait_for_idle( - apps=[data_integrator_producer, kafka], idle_period=30, status="active", timeout=1800 - ) - - producer_credentials = await fetch_action_get_credentials( - ops_test.model.applications[data_integrator_producer].units[0] - ) - producer_parameters = get_action_parameters(producer_credentials, TOPIC) - - data_integrator_consumer = await deploy_data_integrator( - {"topic-name": TOPIC, "extra-user-roles": "consumer"} - ) - await ops_test.model.add_relation(data_integrator_consumer, kafka) - await ops_test.model.wait_for_idle( - apps=[data_integrator_consumer, kafka], idle_period=30, status="active", timeout=1800 - ) - consumer_credentials = await fetch_action_get_credentials( - ops_test.model.applications[data_integrator_consumer].units[0] - ) - consumer_parameters = get_action_parameters(consumer_credentials, TOPIC) - assert producer_parameters != consumer_parameters - - # deploy producer and consumer - - producer = await deploy_test_app(role="producer", topic_name=TOPIC) - assert ops_test.model.applications[producer].status == "active" - - if integrator: - # start producer with action - assert producer_parameters - pid = await fetch_action_start_process( - ops_test.model.applications[producer].units[0], producer_parameters - ) - logger.info(f"Producer process started with pid: {pid}") - else: - # Relate with Kafka and automatically start producer - await ops_test.model.add_relation(producer, kafka) - await ops_test.model.wait_for_idle( - apps=[producer, kafka], idle_period=30, status="active", timeout=1800 - ) - logger.info(f"Producer {producer} related to Kafka") - - consumer = await deploy_test_app(role="consumer", 
topic_name=TOPIC) - assert ops_test.model.applications[consumer].status == "active" - - if integrator: - # start consumer with action - assert consumer_parameters - pid = await fetch_action_start_process( - ops_test.model.applications[consumer].units[0], consumer_parameters - ) - logger.info(f"Consumer process started with pid: {pid}") - else: - # Relate with Kafka and automatically start consumer - await ops_test.model.add_relation(consumer, kafka) - await ops_test.model.wait_for_idle( - apps=[consumer, kafka], idle_period=30, status="active", timeout=1800 - ) - logger.info(f"Consumer {consumer} related to Kafka") - - await asyncio.sleep(100) - - # scale up producer - logger.info("Scale up producer") - await ops_test.model.applications[producer].add_units(count=2) - await ops_test.model.block_until(lambda: len(ops_test.model.applications[producer].units) == 3) - await ops_test.model.wait_for_idle( - apps=[producer], status="active", timeout=1000, idle_period=40 - ) - if integrator: - # start producer process on new units - assert producer_parameters - pid_1 = await fetch_action_start_process( - ops_test.model.applications[producer].units[1], producer_parameters - ) - logger.info(f"Producer process started with pid: {pid_1}") - pid_2 = await fetch_action_start_process( - ops_test.model.applications[producer].units[2], producer_parameters - ) - logger.info(f"Producer process started with pid: {pid_2}") - - await asyncio.sleep(100) - - # scale up consumer - logger.info("Scale up consumer") - await ops_test.model.applications[consumer].add_units(count=2) - await ops_test.model.block_until(lambda: len(ops_test.model.applications[consumer].units) == 3) - await ops_test.model.wait_for_idle( - apps=[consumer], status="active", timeout=1000, idle_period=40 - ) - - if integrator: - # start consumer process on new units - assert consumer_parameters - pid_1 = await fetch_action_start_process( - ops_test.model.applications[consumer].units[1], consumer_parameters - ) - logger.info(f"Consumer process started with pid: {pid_1}") - pid_2 = await fetch_action_start_process( - ops_test.model.applications[consumer].units[2], consumer_parameters - ) - logger.info(f"Consumer process started with pid: {pid_2}") - - await asyncio.sleep(100) - - # skip scale down for the moment due the scale down bug in juju: https://bugs.launchpad.net/juju/+bug/1977582 - - logger.info("Scale down") - await scale_application(ops_test, application_name=producer, desired_count=1) - await scale_application(ops_test, application_name=consumer, desired_count=1) - - await ops_test.model.block_until( - lambda: len(ops_test.model.applications[consumer].units) == 1, timeout=1000 - ) - await ops_test.model.block_until( - lambda: len(ops_test.model.applications[producer].units) == 1, timeout=1000 - ) - - await ops_test.model.wait_for_idle(apps=[consumer, producer], status="active", timeout=1000) - - logger.info("End scale down") - - # Stop producers first - if integrator: - await fetch_action_stop_process(ops_test.model.applications[producer].units[0]) - else: - await ops_test.model.applications[producer].remove_relation( - f"{producer}:kafka-cluster", f"{kafka}" - ) - - await asyncio.sleep(10) - - # Then stop consumers - if integrator: - await fetch_action_stop_process(ops_test.model.applications[consumer].units[0]) - else: - await ops_test.model.applications[consumer].remove_relation( - f"{consumer}:kafka-cluster", f"{kafka}" - ) - - await asyncio.sleep(30) - - # destroy producer and consumer during teardown. 
- logger.info("End of the test!") - - -@pytest.mark.abort_on_fail -async def test_consumed_messages(ops_test: OpsTest, deploy_data_integrator): - - # get mongodb credentials - mongo_integrator = await deploy_data_integrator({"database-name": TOPIC}) - - await ops_test.model.add_relation(mongo_integrator, DATABASE_CHARM_NAME) - await ops_test.model.wait_for_idle( - apps=[mongo_integrator, DATABASE_CHARM_NAME], idle_period=30, status="active", timeout=1800 - ) - - credentials = await fetch_action_get_credentials( - ops_test.model.applications[mongo_integrator].units[0] - ) - - logger.info(f"Credentials: {credentials}") - - uris = credentials["mongodb"]["uris"] - - address = await get_address(ops_test=ops_test, app_name=DATABASE_CHARM_NAME) - - hostname = "mongodb-k8s-0.mongodb-k8s-endpoints" - - uri = str(uris).replace(hostname, address) - - check_produced_and_consumed_messages(uri, TOPIC) - - await ops_test.model.applications[DATABASE_CHARM_NAME].remove() - await ops_test.model.wait_for_idle( - apps=[mongo_integrator], idle_period=10, status="blocked", timeout=1800 - ) diff --git a/tests/integration/e2e/test_e2e.py b/tests/integration/e2e/test_e2e.py deleted file mode 100644 index c43f307..0000000 --- a/tests/integration/e2e/test_e2e.py +++ /dev/null @@ -1,64 +0,0 @@ -#!/usr/bin/env python3 -# Copyright 2023 Canonical Ltd. -# See LICENSE file for licensing details. - -import asyncio -import logging - -import pytest -from pytest_operator.plugin import OpsTest -from tests.integration.e2e.literals import KAFKA_CHARM_NAME - -logger = logging.getLogger(__name__) - - -@pytest.mark.skip_if_deployed -@pytest.mark.abort_on_fail -async def test_deploy(ops_test: OpsTest, deploy_cluster): - await asyncio.sleep(0) # do nothing, await deploy_cluster - - -@pytest.mark.abort_on_fail -async def test_cluster_is_deployed_successfully( - ops_test: OpsTest, kafka, zookeeper, tls, certificates -): - assert ops_test.model.applications[kafka].status == "active" - assert ops_test.model.applications[zookeeper].status == "active" - - if tls: - assert ops_test.model.applications[certificates].status == "active" - - -@pytest.mark.abort_on_fail -async def test_clients_actually_set_up(ops_test: OpsTest, deploy_data_integrator, kafka): - producer = await deploy_data_integrator( - {"extra-user-roles": "producer", "topic-name": "test-topic"} - ) - consumer = await deploy_data_integrator( - {"extra-user-roles": "producer", "topic-name": "test-topic"} - ) - - await ops_test.model.add_relation(producer, KAFKA_CHARM_NAME) - await ops_test.model.wait_for_idle( - apps=[producer, KAFKA_CHARM_NAME], idle_period=30, status="active", timeout=1800 - ) - - await ops_test.model.add_relation(consumer, KAFKA_CHARM_NAME) - await ops_test.model.wait_for_idle( - apps=[consumer, KAFKA_CHARM_NAME], idle_period=30, status="active", timeout=1800 - ) - - assert ops_test.model.applications[consumer].status == "active" - assert ops_test.model.applications[producer].status == "active" - - await ops_test.model.applications[consumer].remove_relation(f"{consumer}:kafka", f"{kafka}") - await ops_test.model.wait_for_idle(apps=[consumer, kafka], idle_period=10) - - await ops_test.model.applications[producer].remove_relation(f"{producer}:kafka", f"{kafka}") - await ops_test.model.wait_for_idle(apps=[producer, kafka], idle_period=10) - - -@pytest.mark.abort_on_fail -async def test_clients_actually_tear_down_after_test_exit(ops_test: OpsTest): - assert "consumer" not in "".join(ops_test.model.applications.keys()) - assert "producer" not in 
"".join(ops_test.model.applications.keys()) diff --git a/tests/integration/e2e/test_password_rotation.py b/tests/integration/e2e/test_password_rotation.py deleted file mode 100644 index b1fd6f5..0000000 --- a/tests/integration/e2e/test_password_rotation.py +++ /dev/null @@ -1,276 +0,0 @@ -#!/usr/bin/env python3 -# Copyright 2023 Canonical Ltd. -# See LICENSE file for licensing details. - -import asyncio -import logging - -import pytest -from literals import DATABASE_CHARM_NAME, KAFKA_CHARM_NAME, ZOOKEEPER_CHARM_NAME -from pytest_operator.plugin import OpsTest -from tests.integration.e2e.helpers import ( - check_produced_and_consumed_messages, - fetch_action_get_credentials, - fetch_action_start_process, - fetch_action_stop_process, - get_action_parameters, - get_address, - get_random_topic, -) - -logger = logging.getLogger(__name__) - -TOPIC = get_random_topic() - - -@pytest.mark.skip_if_deployed -@pytest.mark.abort_on_fail -async def test_deploy(ops_test: OpsTest, deploy_cluster): - await asyncio.sleep(0) # do nothing, await deploy_cluster - - -@pytest.mark.abort_on_fail -async def test_cluster_is_deployed_successfully( - ops_test: OpsTest, kafka, zookeeper, tls, certificates -): - assert ops_test.model.applications[kafka].status == "active" - assert ops_test.model.applications[zookeeper].status == "active" - - if tls: - assert ops_test.model.applications[certificates].status == "active" - - # deploy MongoDB - - await asyncio.gather( - ops_test.model.deploy( - DATABASE_CHARM_NAME, - application_name=DATABASE_CHARM_NAME, - num_units=1, - series="jammy", - channel="5/edge", - ), - ) - await ops_test.model.wait_for_idle( - apps=[KAFKA_CHARM_NAME, ZOOKEEPER_CHARM_NAME, DATABASE_CHARM_NAME], status="active" - ) - - -@pytest.mark.abort_on_fail -async def test_test_app_actually_set_up( - ops_test: OpsTest, deploy_test_app, deploy_data_integrator, kafka, integrator -): - # producer credentials - producer_parameters_1 = None - producer_parameters_2 = None - # consumer credentials - consumer_parameters_1 = None - consumer_parameters_2 = None - - if integrator: - # get credentials for producers and consumers - data_integrator_producer_1 = await deploy_data_integrator( - {"topic-name": TOPIC, "extra-user-roles": "producer"} - ) - await ops_test.model.add_relation(data_integrator_producer_1, kafka) - await ops_test.model.wait_for_idle( - apps=[data_integrator_producer_1, kafka], idle_period=30, status="active", timeout=1800 - ) - producer_credentials_1 = await fetch_action_get_credentials( - ops_test.model.applications[data_integrator_producer_1].units[0] - ) - producer_parameters_1 = get_action_parameters(producer_credentials_1, TOPIC) - data_integrator_producer_2 = await deploy_data_integrator( - {"topic-name": TOPIC, "extra-user-roles": "producer"} - ) - await ops_test.model.add_relation(data_integrator_producer_2, kafka) - await ops_test.model.wait_for_idle( - apps=[data_integrator_producer_2, kafka], idle_period=30, status="active", timeout=1800 - ) - producer_credentials_2 = await fetch_action_get_credentials( - ops_test.model.applications[data_integrator_producer_2].units[0] - ) - producer_parameters_2 = get_action_parameters(producer_credentials_2, TOPIC) - - assert producer_parameters_2 != producer_parameters_1 - - data_integrator_consumer_1 = await deploy_data_integrator( - {"topic-name": TOPIC, "extra-user-roles": "consumer", "consumer-group-prefix": "cg"} - ) - await ops_test.model.add_relation(data_integrator_consumer_1, kafka) - await ops_test.model.wait_for_idle( - 
apps=[data_integrator_consumer_1, kafka], idle_period=30, status="active", timeout=1800 - ) - consumer_credentials_1 = await fetch_action_get_credentials( - ops_test.model.applications[data_integrator_consumer_1].units[0] - ) - consumer_parameters_1 = get_action_parameters(consumer_credentials_1, TOPIC) - data_integrator_consumer_2 = await deploy_data_integrator( - {"topic-name": TOPIC, "extra-user-roles": "consumer", "consumer-group-prefix": "cg"} - ) - await ops_test.model.add_relation(data_integrator_consumer_2, kafka) - await ops_test.model.wait_for_idle( - apps=[data_integrator_consumer_2, kafka], idle_period=30, status="active", timeout=1800 - ) - consumer_credentials_2 = await fetch_action_get_credentials( - ops_test.model.applications[data_integrator_consumer_2].units[0] - ) - consumer_parameters_2 = get_action_parameters(consumer_credentials_2, TOPIC) - - assert consumer_parameters_2 != consumer_parameters_1 - - producer_1 = await deploy_test_app(role="producer", topic_name=TOPIC, num_messages=2500) - assert ops_test.model.applications[producer_1].status == "active" - - if integrator: - # start producer - assert producer_parameters_1 - pid = await fetch_action_start_process( - ops_test.model.applications[producer_1].units[0], producer_parameters_1 - ) - logger.info(f"Producer process started with pid: {pid}") - else: - # Relate with Kafka and automatically start first producer - await ops_test.model.add_relation(producer_1, kafka) - await ops_test.model.wait_for_idle( - apps=[producer_1, kafka], idle_period=30, status="active", timeout=1800 - ) - logger.info(f"Producer {producer_1} related to Kafka") - - consumer_1 = await deploy_test_app( - role="consumer", topic_name=TOPIC, consumer_group_prefix="cg" - ) - assert ops_test.model.applications[consumer_1].status == "active" - - if integrator: - # start consumer - assert consumer_parameters_1 - pid = await fetch_action_start_process( - ops_test.model.applications[consumer_1].units[0], consumer_parameters_1 - ) - logger.info(f"Consumer process started with pid: {pid}") - else: - # Relate with Kafka and automatically start first consumer - await ops_test.model.add_relation(consumer_1, kafka) - await ops_test.model.wait_for_idle( - apps=[consumer_1, kafka], idle_period=30, status="active", timeout=1800 - ) - logger.info(f"Consumer {consumer_1} related to Kafka") - - await asyncio.sleep(100) - - # deploy second consumer - - consumer_2 = await deploy_test_app( - role="consumer", topic_name=TOPIC, consumer_group_prefix="cg" - ) - assert ops_test.model.applications[consumer_2].status == "active" - - if integrator: - assert consumer_parameters_2 - # start second consumer - pid = await fetch_action_start_process( - ops_test.model.applications[consumer_2].units[0], consumer_parameters_2 - ) - logger.info(f"Consumer process started with pid: {pid}") - else: - # Relate with Kafka and automatically start second consumer - await ops_test.model.add_relation(consumer_2, kafka) - await ops_test.model.wait_for_idle( - apps=[consumer_2, kafka], idle_period=30, status="active", timeout=1800 - ) - logger.info(f"Consumer {consumer_2} related to Kafka") - - await asyncio.sleep(100) - - # remove first consumer - if integrator: - pid = await fetch_action_stop_process(ops_test.model.applications[consumer_1].units[0]) - logger.info(f"Consumer 1 process stopped with pid: {pid}") - else: - await ops_test.model.applications[consumer_1].remove_relation( - f"{consumer_1}:kafka-cluster", f"{kafka}" - ) - await ops_test.model.wait_for_idle(apps=[consumer_1, kafka], 
idle_period=10) - logger.info(f"Consumer {consumer_1} unrelate from Kafka") - - await ops_test.model.wait_for_idle( - apps=[KAFKA_CHARM_NAME], idle_period=10, status="active", timeout=1800 - ) - await asyncio.sleep(100) - - # deploy new producer - - producer_2 = await deploy_test_app(role="producer", topic_name=TOPIC, num_messages=2000) - assert ops_test.model.applications[producer_2].status == "active" - if integrator: - assert producer_parameters_2 - # start second producer - pid = await fetch_action_start_process( - ops_test.model.applications[producer_2].units[0], producer_parameters_2 - ) - logger.info(f"Producer process started with pid: {pid}") - else: - # Relate with Kafka and automatically start first producer - await ops_test.model.add_relation(producer_2, kafka) - await ops_test.model.wait_for_idle( - apps=[producer_2, kafka], idle_period=30, status="active", timeout=1800 - ) - logger.info(f"Producer {producer_2} related to Kafka") - - await asyncio.sleep(100) - - # destroy producer and consumer during teardown. - - if integrator: - # stop process - pid = await fetch_action_stop_process(ops_test.model.applications[producer_2].units[0]) - logger.info(f"Producer process stopped with pid: {pid}") - pid = await fetch_action_stop_process(ops_test.model.applications[producer_1].units[0]) - logger.info(f"Producer process stopped with pid: {pid}") - - await asyncio.sleep(60) - else: - # stop producers - await ops_test.model.applications[producer_1].remove_relation( - f"{producer_1}:kafka-cluster", f"{kafka}" - ) - await ops_test.model.wait_for_idle(apps=[producer_1, kafka], idle_period=10) - logger.info(f"Producer {producer_1} unrelate from Kafka") - await ops_test.model.applications[producer_2].remove_relation( - f"{producer_2}:kafka-cluster", f"{kafka}" - ) - await ops_test.model.wait_for_idle(apps=[producer_2, kafka], idle_period=10) - logger.info(f"Producer {producer_2} unrelate from Kafka") - - # destroy producer and consumer during teardown. - - -@pytest.mark.abort_on_fail -async def test_consumed_messages(ops_test: OpsTest, deploy_data_integrator): - - # get mongodb credentials - mongo_integrator = await deploy_data_integrator({"database-name": TOPIC}) - - await ops_test.model.add_relation(mongo_integrator, DATABASE_CHARM_NAME) - await ops_test.model.wait_for_idle( - apps=[mongo_integrator, DATABASE_CHARM_NAME], idle_period=30, status="active", timeout=1800 - ) - - credentials = await fetch_action_get_credentials( - ops_test.model.applications[mongo_integrator].units[0] - ) - - logger.info(f"Credentials: {credentials}") - - uris = credentials["mongodb"]["uris"] - address = await get_address(ops_test=ops_test, app_name=DATABASE_CHARM_NAME) - - hostname = "mongodb-k8s-0.mongodb-k8s-endpoints" - - uri = str(uris).replace(hostname, address) - check_produced_and_consumed_messages(uri, TOPIC) - - await ops_test.model.applications[DATABASE_CHARM_NAME].remove() - await ops_test.model.wait_for_idle( - apps=[mongo_integrator], idle_period=10, status="blocked", timeout=1800 - ) diff --git a/tests/integration/terraform/__init__.py b/tests/integration/terraform/__init__.py new file mode 100644 index 0000000..7136cf6 --- /dev/null +++ b/tests/integration/terraform/__init__.py @@ -0,0 +1,3 @@ +#!/usr/bin/env python3 +# Copyright 2026 Canonical Ltd. +# See LICENSE file for licensing details. 
diff --git a/tests/integration/terraform/component_validation.py b/tests/integration/terraform/component_validation.py
new file mode 100644
index 0000000..b6d7964
--- /dev/null
+++ b/tests/integration/terraform/component_validation.py
@@ -0,0 +1,336 @@
+#!/usr/bin/env python3
+# Copyright 2026 Canonical Ltd.
+# See LICENSE file for licensing details.
+
+"""Tests specific functionality of each component."""
+
+import json
+import logging
+import re
+from functools import cached_property
+from subprocess import PIPE, CalledProcessError, check_output
+from uuid import uuid4
+
+import jubilant
+import requests
+from tests.integration.terraform.helpers import (
+    CA_FILE,
+    CONNECT_API_PORT,
+    CONNECT_APP_NAME,
+    KAFKA_BROKER_APP_NAME,
+    KAFKA_INTERNAL_PORT,
+    KAFKA_UI_APP_NAME,
+    KAFKA_UI_SECRET_KEY,
+    KARAPACE_APP_NAME,
+    KARAPACE_PORT,
+    check_socket,
+    get_secret_by_label,
+)
+
+logger = logging.getLogger(__name__)
+
+
+class ComponentValidation:
+    """Test the functionality of all Kafka ecosystem components."""
+
+    def __init__(self, juju: jubilant.Juju, tls: bool = False):
+        self.juju = juju
+        self.model = juju.model
+        self.tls = tls
+
+    @cached_property
+    def kafka_unit_name(self) -> str:
+        """Get an available Kafka unit."""
+        return next(iter(self.juju.status().get_units(app=KAFKA_BROKER_APP_NAME).keys()))
+
+    @cached_property
+    def connect_unit_name(self) -> str:
+        """Get an available Connect unit."""
+        return next(iter(self.juju.status().get_units(app=CONNECT_APP_NAME).keys()))
+
+    @cached_property
+    def karapace_unit_name(self) -> str:
+        """Get an available Karapace unit."""
+        return next(iter(self.juju.status().get_units(app=KARAPACE_APP_NAME).keys()))
+
+    @cached_property
+    def ui_unit_name(self) -> str:
+        """Get an available Kafka UI unit."""
+        return next(iter(self.juju.status().get_units(app=KAFKA_UI_APP_NAME).keys()))
+
+    def test_kafka_admin_operations(self):
+        """Test basic Kafka admin operations.
+
+        Creates the `test` topic, adds ACLs for principal `User:*`, then deletes the topic.
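+
+        The commands run the upstream Kafka CLI scripts inside the broker's
+        `kafka` container via `juju ssh`, authenticating with the unit's own
+        client.properties.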
+ """ + bootstrap_server = self.get_kafka_bootstrap_server() + _ = check_output( + f"JUJU_MODEL={self.model} juju ssh --container kafka {self.kafka_unit_name} '/opt/kafka/bin/kafka-topics.sh --bootstrap-server {bootstrap_server} --command-config /etc/kafka/client.properties -create -topic test'", + stderr=PIPE, + shell=True, + universal_newlines=True, + ) + + _ = check_output( + f"JUJU_MODEL={self.model} juju ssh --container kafka {self.kafka_unit_name} '/opt/kafka/bin/kafka-acls.sh --bootstrap-server {bootstrap_server} --add --allow-principal=User:* --operation READ --operation WRITE --operation CREATE --topic test --command-config /etc/kafka/client.properties'", + stderr=PIPE, + shell=True, + universal_newlines=True, + ) + + _ = check_output( + f"JUJU_MODEL={self.model} juju ssh --container kafka {self.kafka_unit_name} '/opt/kafka/bin/kafka-topics.sh --bootstrap-server {bootstrap_server} --command-config /etc/kafka/client.properties -delete -topic test'", + stderr=PIPE, + shell=True, + universal_newlines=True, + ) + + def test_kafka_producer_consumer(self): + """Test Kafka producer and consumer operations using charmed-kafka CLI tools.""" + test_topic = f"test-topic-{uuid4().hex[:8]}" + test_message = f"test-message-{uuid4().hex}" + bootstrap_server = self.get_kafka_bootstrap_server() + + try: + # Create topic using charmed-kafka.topics + _ = check_output( + f"JUJU_MODEL={self.model} juju ssh --container kafka {self.kafka_unit_name} '/opt/kafka/bin/kafka-topics.sh --bootstrap-server {bootstrap_server} --command-config /etc/kafka/client.properties --create --topic {test_topic} --partitions 1 --replication-factor 1'", + stderr=PIPE, + shell=True, + universal_newlines=True, + ) + + # Produce message using charmed-kafka.console-producer + check_output( + f"JUJU_MODEL={self.model} juju ssh --container kafka {self.kafka_unit_name} 'echo \"{test_message}\" | /opt/kafka/bin/kafka-console-producer.sh --bootstrap-server {bootstrap_server} --producer.config /etc/kafka/client.properties --topic {test_topic}'", + stderr=PIPE, + shell=True, + universal_newlines=True, + ) + + output = check_output( + f"JUJU_MODEL={self.model} juju ssh --container kafka {self.kafka_unit_name} 'timeout 10 /opt/kafka/bin/kafka-console-consumer.sh --bootstrap-server {bootstrap_server} --consumer.config /etc/kafka/client.properties --topic {test_topic} --from-beginning --max-messages 1'", + stderr=PIPE, + shell=True, + universal_newlines=True, + ) + + # Verify message was consumed + assert test_message in output.strip() + + finally: + # Clean up topic + try: + check_output( + f"JUJU_MODEL={self.model} juju ssh --container kafka {self.kafka_unit_name} '/opt/kafka/bin/kafka-topics.sh --bootstrap-server {bootstrap_server} --command-config /etc/kafka/client.properties --delete --topic {test_topic}'", + stderr=PIPE, + shell=True, + universal_newlines=True, + ) + except CalledProcessError: + # Ignore cleanup errors + pass + + def test_karapace(self): + """Test creating a schema subject in Karapace, listing it and then deletes it.""" + schema_name = "test-key" + result = self.juju.run(unit=self.karapace_unit_name, action="get-password") + password = result.results.get("password") + karapace_endpoint = self.get_karapace_endpoint() + base_url = f"http://{karapace_endpoint}" + auth = ("operator", password) + + # Create the schema + schema_data = { + "schema": '{"type": "record", "name": "Obj", "fields":[{"name": "age", "type": "int"}]}' + } + + response = requests.post( + f"{base_url}/subjects/{schema_name}/versions", + 
json=schema_data, + headers={"Content-Type": "application/vnd.schemaregistry.v1+json"}, + auth=auth, + ) + response.raise_for_status() + result = response.text + assert '{"id":1}' in result + + # Listing it + expected_schema = f'["{schema_name}"]' + + logger.info("Requesting schemas") + response = requests.get( + f"{base_url}/subjects", + auth=auth, + ) + response.raise_for_status() + result = response.text + assert expected_schema in result + + # Deleting the schema + logger.info("Deleting schema") + response = requests.delete( + f"{base_url}/subjects/{schema_name}", + auth=auth, + ) + response.raise_for_status() + + def test_connect_endpoints(self): + """Test Kafka Connect health.""" + connect_address = self.get_unit_ipv4_address(self.connect_unit_name) + status = check_socket(connect_address, CONNECT_API_PORT) + + # assert all endpoints are up + assert status + + def test_create_mm2_connector(self): + """Test creating a basic MM2 (MirrorMaker 2) connector.""" + connector_name = "mm2-test" + connect_endpoint = self.get_connect_endpoint() + connect_password = self._get_connect_admin_password() + + # TLS setup + protocol = "https" if self.tls else "http" + base_url = f"{protocol}://{connect_endpoint}" + verify = CA_FILE if self.tls else False + + # Basic MM2 connector configuration + mm2_config = { + "name": connector_name, + "config": { + "connector.class": "org.apache.kafka.connect.mirror.MirrorSourceConnector", + "source.cluster.alias": "source", + "target.cluster.alias": "target", + "source.cluster.bootstrap.servers": "localhost:9092", + "target.cluster.bootstrap.servers": "localhost:9092", + "topics": "test.*", + "groups": "test-group", + "replication.factor": 1, + "checkpoints.topic.replication.factor": 1, + "heartbeats.topic.replication.factor": 1, + "offset-syncs.topic.replication.factor": 1, + "sync.topic.acls.enabled": "false", + }, + } + + # Create connector + response = requests.post( + f"{base_url}/connectors", + json=mm2_config, + headers={"Content-Type": "application/json"}, + auth=("admin", connect_password), + verify=verify, + ) + + assert response.status_code in [200, 201, 400, 409] + + if response.status_code in [200, 201]: + requests.delete( + f"{base_url}/connectors/{connector_name}", + auth=("admin", connect_password), + verify=verify, + ) + + def test_ui_accessibility(self): + """Test that Kafka UI is accessible.""" + # Get LoadBalancer IP address + raw = check_output( + "microk8s.kubectl get services -A -o json", + shell=True, + universal_newlines=True, + stderr=PIPE, + ) + lb_json = json.loads(raw) + lb_ip = None + + for item in lb_json["items"]: + if item.get("spec", {}).get("type") == "LoadBalancer": + lb_ip = item["status"]["loadBalancer"]["ingress"][0]["ip"] + break + + if not lb_ip: + raise Exception("Can't find LoadBalancer external IP") + + # Generate the URL for Kafka UI based on LB IP address + url = f"https://{lb_ip}/{self.juju.model}-{KAFKA_UI_APP_NAME}" + + secret_data = get_secret_by_label( + self.model, f"cluster.{KAFKA_UI_APP_NAME}.app", owner=KAFKA_UI_APP_NAME + ) + password = secret_data.get(KAFKA_UI_SECRET_KEY) + + if not password: + raise Exception("Can't fetch the admin user's password.") + + login_resp = requests.post( + f"{url}/login", + headers={"Content-Type": "application/x-www-form-urlencoded"}, + data={"username": "admin", "password": password}, + verify=False, + ) + assert login_resp.status_code == 200 + # Successful login would lead to a redirect + assert len(login_resp.history) > 0 + + cookies = login_resp.history[0].cookies + 
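+        # The session cookie is set on the first (redirect) response of the
+        # login flow, not on the final page, hence login_resp.history[0]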
clusters_resp = requests.get(
+            f"{url}/api/clusters",
+            headers={"Content-Type": "application/json"},
+            cookies=cookies,
+            verify=False,
+        )
+
+        clusters_json = clusters_resp.json()
+        logger.info(f"{clusters_json=}")
+        assert len(clusters_json) > 0
+        assert clusters_json[0].get("status") == "online"
+
+    def get_kafka_bootstrap_server(self) -> str | None:
+        """Get the Kafka bootstrap server address."""
+        return f"{self.get_unit_ipv4_address(self.kafka_unit_name)}:{KAFKA_INTERNAL_PORT}"
+
+    def get_karapace_endpoint(self) -> str | None:
+        """Get the Karapace endpoint address."""
+        return f"{self.get_unit_ipv4_address(self.karapace_unit_name)}:{KARAPACE_PORT}"
+
+    def get_connect_endpoint(self) -> str | None:
+        """Get the Connect endpoint address."""
+        return f"{self.get_unit_ipv4_address(self.connect_unit_name)}:{CONNECT_API_PORT}"
+
+    def get_unit_ipv4_address(self, unit_name: str) -> str | None:
+        """A safer alternative to `juju.unit.get_public_address()`, robust to network changes."""
+        try:
+            stdout = check_output(
+                f"JUJU_MODEL={self.model} juju ssh {unit_name} hostname -i",
+                stderr=PIPE,
+                shell=True,
+                universal_newlines=True,
+            )
+        except CalledProcessError:
+            return None
+
+        ipv4_matches = re.findall(r"[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}", stdout)
+
+        if ipv4_matches:
+            return ipv4_matches[0]
+
+        return None
+
+    def _get_connect_admin_password(self) -> str:
+        """Get the admin user's password by reading the unit's credentials file."""
+        password_path = "/etc/connect/connect.password"
+        res = check_output(
+            f"JUJU_MODEL={self.model} juju ssh --container kafka-connect {self.connect_unit_name} 'cat {password_path}'",
+            shell=True,
+            universal_newlines=True,
+        )
+        raw = res.strip()
+
+        # str.split() never returns an empty list, so emptiness must be checked
+        # on the raw output before splitting
+        if not raw:
+            raise Exception("Unable to read the Connect credentials file.")
+
+        for line in raw.split("\n"):
+            if line.startswith("admin"):
+                return line.split(":")[-1].strip()
+
+        raise Exception("Admin user not defined in the Connect credentials file.")
diff --git a/tests/integration/terraform/conftest.py b/tests/integration/terraform/conftest.py
new file mode 100644
index 0000000..1ebd0a2
--- /dev/null
+++ b/tests/integration/terraform/conftest.py
@@ -0,0 +1,173 @@
+#!/usr/bin/env python3
+# Copyright 2026 Canonical Ltd.
+# See LICENSE file for licensing details.
+
+import json
+import typing
+
+import jubilant
+import pytest
+from tests.integration.terraform.helpers import (
+    CA_FILE,
+    CERTIFICATES_APP_NAME,
+    CORE_MODEL_NAME,
+    TRAEFIK_APP_NAME,
+    TerraformDeployer,
+    all_active_idle,
+    get_app_list,
+    get_terraform_config,
+)
+
+KRaftMode = typing.Literal["single", "multi"]
+
+
+def pytest_addoption(parser):
+    """Define custom pytest command-line options."""
+    parser.addoption(
+        "--model",
+        action="store",
+        help="Juju model to use; if not provided, a new model "
+        "will be created for each test which requires one",
+    )
+    parser.addoption(
+        "--keep-models",
+        action="store_true",
+        help="Keep models created by the test session instead of destroying them on teardown",
+    )
+    parser.addoption(
+        "--kraft-mode",
+        action="store",
+        help="KRaft mode to run the tests, 'single' or 'multi'",
+        default="single",
+    )
+    parser.addoption(
+        "--ingress-offer",
+        action="store",
+        help="The ingress offer URL to use for deployment. If not provided,"
+        " Traefik K8s operator is deployed in the core model.",
+        default="",
+    )
+
+
+@pytest.fixture(scope="module")
+def kraft_mode(request: pytest.FixtureRequest) -> KRaftMode:
+    """Return the KRaft mode used to run the tests, either `single` or `multi`."""
+    mode = str(request.config.getoption("--kraft-mode") or "single")
+    if mode not in ("single", "multi"):
+        raise Exception("Unknown --kraft-mode, valid options are 'single' and 'multi'")
+
+    return mode
+
+
+@pytest.fixture(scope="module")
+def ingress_offer(
+    request: pytest.FixtureRequest,
+) -> str:
+    """Return the ingress offer URL, defaulting to the Traefik offer in the core model."""
+    offer = (
+        f'{request.config.getoption("--ingress-offer")}'
+        or f"admin/{CORE_MODEL_NAME}.{TRAEFIK_APP_NAME}"
+    )
+    return offer
+
+
+# -- Terraform --
+
+
+@pytest.fixture()
+def deploy_cluster(
+    juju: jubilant.Juju, model_uuid: str, kraft_mode: KRaftMode, ingress_offer: str
+):
+    """Deploy the cluster via Terraform in the requested KRaft mode."""
+    terraform_deployer = TerraformDeployer(model_uuid)
+
+    # Ensure cleanup of any previous state
+    terraform_deployer.cleanup()
+
+    config = get_terraform_config(split_mode=(kraft_mode == "multi"))
+    config["ingress_offer"] = ingress_offer.split(":")[-1]  # Remove the controller: prefix
+    tfvars_file = terraform_deployer.create_tfvars(config)
+
+    terraform_deployer.terraform_init()
+    terraform_deployer.terraform_apply(tfvars_file)
+
+
+@pytest.fixture()
+def enable_terraform_tls(model_uuid: str, kraft_mode: KRaftMode, ingress_offer: str):
+    """Point the deployment at the TLS offer and re-apply the Terraform plan."""
+    core_juju = jubilant.Juju(model=CORE_MODEL_NAME)
+
+    # Store the CA cert for requests
+    result = core_juju.run(f"{CERTIFICATES_APP_NAME}/0", "get-ca-certificate")
+    ca = result.results.get("ca-certificate")
+    with open(CA_FILE, "w") as ca_file:
+        ca_file.write(ca)
+
+    terraform_deployer = TerraformDeployer(model_uuid)
+    config = get_terraform_config(enable_tls=True, split_mode=(kraft_mode == "multi"))
+    config["ingress_offer"] = ingress_offer.split(":")[-1]  # Remove the controller: prefix
+    tfvars_file = terraform_deployer.create_tfvars(config)
+
+    terraform_deployer.terraform_apply(tfvars_file)
+
+
+@pytest.fixture()
+def disable_terraform_tls(juju: jubilant.Juju, model_uuid: str, kraft_mode):
+    """Remove the TLS endpoint and re-apply the Terraform plan."""
+    terraform_deployer = TerraformDeployer(model_uuid)
+    config = get_terraform_config(enable_tls=False, split_mode=(kraft_mode == "multi"))
+    tfvars_file = terraform_deployer.create_tfvars(config)
+
+    terraform_deployer.terraform_apply(tfvars_file)
+
+    juju.wait(
+        lambda status: all_active_idle(status, *get_app_list(kraft_mode)),
+        delay=5,
+        successes=6,
+        timeout=1800,
+    )
+
+    juju.destroy_model(model=CORE_MODEL_NAME, force=True)
+
+
+# -- Jubilant --
+
+
+@pytest.fixture(scope="module")
+def juju(request: pytest.FixtureRequest):
+    """Provide a jubilant.Juju instance on a temporary, or user-supplied, model."""
+    model = request.config.getoption("--model")
+    keep_models = typing.cast(bool, request.config.getoption("--keep-models"))
+
+    if model is None:
+        with jubilant.temp_model(keep=keep_models) as juju:
+            juju.wait_timeout = 10 * 60
+            juju.model_config({"update-status-hook-interval": "180s"})
+            yield juju
+
+            log = juju.debug_log(limit=1000)
+    else:
+        juju = jubilant.Juju(model=model)
+        yield juju
+        log = juju.debug_log(limit=1000)
+
+    if request.session.testsfailed:
+        print(log, end="")
+
+
+@pytest.fixture(scope="module")
+def model_uuid(juju: jubilant.Juju) -> str:
+    """Return the UUID of the active model."""
+    return next(
+        iter(
+            mdl["model-uuid"]
+            for mdl in json.loads(juju.cli("models", "--format", "json", include_model=False))[
+                "models"
+            ]
+            if mdl["short-name"] == juju.model
) + ) + + +@pytest.fixture(scope="module") +def models(juju: jubilant.Juju) -> set[str]: + return { + m["short-name"] + for m in json.loads(juju.cli("models", "--format", "json", include_model=False))["models"] + } diff --git a/tests/integration/terraform/helpers.py b/tests/integration/terraform/helpers.py new file mode 100644 index 0000000..46f2884 --- /dev/null +++ b/tests/integration/terraform/helpers.py @@ -0,0 +1,302 @@ +#!/usr/bin/env python3 +# Copyright 2026 Canonical Ltd. +# See LICENSE file for licensing details. + +"""Terraform deployment helpers for integration tests.""" + +import json +import logging +import shutil +import socket +import subprocess +import tempfile +from contextlib import closing +from pathlib import Path +from typing import Any, Dict, Optional + +import jubilant +import yaml + +logger = logging.getLogger(__name__) + + +KAFKA_INTERNAL_PORT = 19093 +KARAPACE_PORT = 8081 +KAFKA_UI_PORT = 8080 +KAFKA_UI_PROTO = "https" +CONNECT_API_PORT = 8083 + +CONNECT_APP_NAME = "kafka-connect" +KARAPACE_APP_NAME = "karapace" +KAFKA_UI_APP_NAME = "kafka-ui" +KAFKA_BROKER_APP_NAME = "kafka-broker" +KAFKA_CONTROLLER_APP_NAME = "kafka-controller" + +CERTIFICATES_APP_NAME = "self-signed-certificates" +CORE_MODEL_NAME = "test-core" +TLS_RELATION_OFFER = f"admin/{CORE_MODEL_NAME}.{CERTIFICATES_APP_NAME}" +TRAEFIK_APP_NAME = "traefik-k8s" +INGRESS_OFFER_NAME = "traefik" +CA_FILE = "/tmp/ca.pem" + +KAFKA_UI_SECRET_KEY = "admin-password" + +# Base Terraform configs +SINGLE_MODE_DEFAULT_CONFIG = { + "profile": "testing", + "broker": {"units": 1}, + "connect": {"units": 1}, + "karapace": {"units": 1}, + "ui": {"units": 1}, + "integrator": {"units": 1}, +} +SPLIT_MODE_DEFAULT_CONFIG = { + "profile": "testing", + "broker": {"units": 3}, + "controller": {"units": 3}, + "connect": {"units": 1}, + "karapace": {"units": 1}, + "ui": {"units": 1}, + "integrator": {"units": 1}, +} + + +class TerraformDeployer: + """Helper class to manage Terraform deployments for testing.""" + + def __init__(self, model_uuid: str, terraform_dir: str = "terraform"): + self.model_uuid = model_uuid + self.terraform_dir = Path(terraform_dir).resolve() + self.tfvars_file = None + + def create_tfvars(self, config: Dict[str, Any]) -> str: + """Create a .tfvars.json file with the given configuration.""" + self.tfvars_file = tempfile.NamedTemporaryFile( + mode="w", suffix=".tfvars.json", delete=False + ) + + # Always include model + config["model_uuid"] = self.model_uuid + + # Write JSON content + json.dump(config, self.tfvars_file, indent=2) + + self.tfvars_file.close() + return self.tfvars_file.name + + def get_controller_credentials(self) -> Dict[str, str]: + """Get Juju controller credentials for Terraform.""" + controller_credentials = yaml.safe_load( + subprocess.check_output( + "juju show-controller --show-password", + stderr=subprocess.PIPE, + shell=True, + universal_newlines=True, + ) + ) + + def get_value(obj: dict, key: str): + """Recursively gets value for given key in nested dict.""" + if key in obj: + return obj.get(key, "") + for _, v in obj.items(): + if isinstance(v, dict): + item = get_value(v, key) + if item is not None: + return item + + username = get_value(obj=controller_credentials, key="user") + password = get_value(obj=controller_credentials, key="password") + controller_addresses = ",".join(get_value(obj=controller_credentials, key="api-endpoints")) + ca_cert = get_value(obj=controller_credentials, key="ca-cert") + + return { + "JUJU_USERNAME": username, + "JUJU_PASSWORD": password, + 
"JUJU_CONTROLLER_ADDRESSES": controller_addresses, + "JUJU_CA_CERT": ca_cert, + } + + def terraform_init(self): + """Initialize Terraform in the terraform directory.""" + result = subprocess.run( + ["terraform", "init"], cwd=self.terraform_dir, capture_output=True, text=True + ) + if result.returncode != 0: + raise RuntimeError(f"Terraform init failed: {result.stderr}") + + logger.info(f"\n\nTerraform initialized:\n\n{result.stdout}") + + def terraform_apply(self, tfvars_file: str): + """Apply Terraform configuration.""" + env = self.get_controller_credentials() + result = subprocess.run( + ["terraform", "apply", "-auto-approve", f"-var-file={tfvars_file}"], + cwd=self.terraform_dir, + text=True, + env={**env, **dict(subprocess.os.environ)}, + ) + if result.returncode != 0: + raise RuntimeError(f"Terraform apply failed: {result.stderr}") + + logger.info(f"\n\nTerraform applied:\n\n{result.stdout}") + + def terraform_destroy(self, tfvars_file: Optional[str] = None): + """Destroy Terraform-managed resources.""" + env = self.get_controller_credentials() + cmd = ["terraform", "destroy", "-auto-approve"] + if tfvars_file: + cmd.append(f"-var-file={tfvars_file}") + + result = subprocess.run( + cmd, + cwd=self.terraform_dir, + text=True, + env={**env, **dict(subprocess.os.environ)}, + ) + if result.returncode != 0: + raise RuntimeError(f"Terraform destroy failed: {result.stderr}") + + def cleanup(self): + """Clean up temporary files.""" + if self.tfvars_file and Path(self.tfvars_file.name).exists(): + Path(self.tfvars_file.name).unlink() + + # Clean up terraform artifacts + shutil.rmtree(self.terraform_dir / ".terraform", ignore_errors=True) + for pattern in [".terraform.lock.hcl", "terraform.tfstate*", "*.tfplan"]: + for file_path in self.terraform_dir.glob(pattern): + file_path.unlink(missing_ok=True) + + +def deploy_core_apps(ingress: str | None = None) -> None: + jubilant.Juju().add_model(model=CORE_MODEL_NAME) + core_juju = jubilant.Juju(model=CORE_MODEL_NAME) + core_juju.deploy( + CERTIFICATES_APP_NAME, config={"ca-common-name": "test-ca"}, channel="1/stable" + ) + apps = {CERTIFICATES_APP_NAME} + + if not ingress: + core_juju.deploy(TRAEFIK_APP_NAME, trust=True) + apps.add(TRAEFIK_APP_NAME) + core_juju.integrate(CERTIFICATES_APP_NAME, f"{TRAEFIK_APP_NAME}:certificates") + + core_juju.wait( + lambda status: all_active_idle(status, *apps), + delay=5, + successes=5, + timeout=600, + ) + core_juju.offer(f"{CORE_MODEL_NAME}.{CERTIFICATES_APP_NAME}", endpoint="certificates") + + if ingress: + core_juju.offer(ingress) + else: + core_juju.offer(f"{CORE_MODEL_NAME}.{TRAEFIK_APP_NAME}", endpoint="ingress") + + +def get_terraform_config( + enable_cruise_control: bool = False, + enable_tls: bool = False, + split_mode: bool = False, +) -> Dict[str, Any]: + """Get Terraform configuration based on deployment mode.""" + if split_mode: + return get_multi_app_config( + enable_cruise_control=enable_cruise_control, + enable_tls=enable_tls, + ) + else: + return get_single_mode_config( + enable_cruise_control=enable_cruise_control, + enable_tls=enable_tls, + ) + + +def get_single_mode_config( + enable_cruise_control: bool = False, enable_tls: bool = False +) -> Dict[str, Any]: + """Get Terraform configuration for single-mode deployment.""" + config = SINGLE_MODE_DEFAULT_CONFIG.copy() + if enable_tls: + config = enable_tls_config(config) + + if enable_cruise_control: + # Add balancer role while preserving existing roles + config["broker"]["config"] = {"roles": "broker,balancer"} + + return config + + +def 
def get_multi_app_config(
+    enable_cruise_control: bool = False, enable_tls: bool = False
+) -> Dict[str, Any]:
+    """Get the Terraform configuration for a multi-app (split) mode deployment."""
+    # Deep-copy so per-test mutations never leak into the shared defaults
+    config = copy.deepcopy(SPLIT_MODE_DEFAULT_CONFIG)
+
+    if enable_tls:
+        config = enable_tls_config(config)
+
+    if enable_cruise_control:
+        # Add the balancer role alongside the broker role
+        config["broker"]["config"] = {"roles": "broker,balancer"}
+
+    return config
+
+
+def enable_tls_config(base_config: Dict[str, Any]) -> Dict[str, Any]:
+    """Modify the configuration to enable TLS across all components."""
+    tls_config = base_config.copy()
+
+    # TLS testing needs a TLS offer, which typically comes from a
+    # TLS certificates operator deployed in the core model.
+    tls_config["tls_offer"] = TLS_RELATION_OFFER
+
+    return tls_config
+
+
+def get_secret_by_label(model: str, label: str, owner: str) -> dict[str, str]:
+    """Return the content of the Juju secret with the given label (and optionally owner)."""
+    secrets_meta_raw = subprocess.check_output(
+        f"JUJU_MODEL={model} juju list-secrets --format json",
+        stderr=subprocess.PIPE,
+        shell=True,
+        universal_newlines=True,
+    ).strip()
+    secrets_meta = json.loads(secrets_meta_raw)
+
+    for secret_id in secrets_meta:
+        if owner and secrets_meta[secret_id]["owner"] != owner:
+            continue
+        if secrets_meta[secret_id]["label"] == label:
+            break
+    else:
+        raise ValueError(f"No secret found with label={label} and owner={owner}")
+
+    secrets_data_raw = subprocess.check_output(
+        f"JUJU_MODEL={model} juju show-secret --format json --reveal {secret_id}",
+        stderr=subprocess.PIPE,
+        shell=True,
+        universal_newlines=True,
+    )
+
+    secret_data = json.loads(secrets_data_raw)
+    return secret_data[secret_id]["content"]["Data"]
+
+
+def check_socket(host: str | None, port: int) -> bool:
+    """Check whether an IPv4 socket on the given host and port is open."""
+    if host is None:
+        return False
+
+    with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as sock:
+        return sock.connect_ex((host, port)) == 0
+
+
+def all_active_idle(status: jubilant.Status, *apps: str) -> bool:
+    """Check that all units of the given apps are active and their agents idle."""
+    return jubilant.all_agents_idle(status, *apps) and jubilant.all_active(status, *apps)
+
+
+def get_app_list(kraft_mode: str) -> list[str]:
+    """Get the list of expected applications based on kraft_mode."""
+    base_apps = [KAFKA_UI_APP_NAME, KARAPACE_APP_NAME, CONNECT_APP_NAME, KAFKA_BROKER_APP_NAME]
+    return base_apps + ([KAFKA_CONTROLLER_APP_NAME] if kraft_mode == "multi" else [])
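
A quick sketch of how the secret helper is meant to be used, for example to fetch the Kafka UI admin password (the model name and secret label below are hypothetical; the real label depends on how the charm names its secrets):

    from tests.integration.terraform.helpers import (
        KAFKA_UI_APP_NAME,
        KAFKA_UI_SECRET_KEY,
        get_secret_by_label,
    )

    content = get_secret_by_label(
        model="my-test-model",  # hypothetical model name
        label="kafka-ui.app",   # hypothetical label
        owner=KAFKA_UI_APP_NAME,
    )
    admin_password = content[KAFKA_UI_SECRET_KEY]
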
diff --git a/tests/integration/terraform/test_terraform.py b/tests/integration/terraform/test_terraform.py
new file mode 100644
index 0000000..b601f60
--- /dev/null
+++ b/tests/integration/terraform/test_terraform.py
@@ -0,0 +1,75 @@
+#!/usr/bin/env python3
+# Copyright 2026 Canonical Ltd.
+# See LICENSE file for licensing details.
+
+"""Tests both single-mode and multi-app mode deployments with all components."""
+
+import logging
+
+import pytest
+from jubilant import Juju
+from tests.integration.terraform.component_validation import ComponentValidation
+from tests.integration.terraform.helpers import all_active_idle, deploy_core_apps, get_app_list
+
+logger = logging.getLogger(__name__)
+
+
+def test_deploy_core_model(request: pytest.FixtureRequest):
+    """Deploy the ingress and TLS provider charms."""
+    provided_ingress = request.config.getoption("--ingress-offer") or None
+    deploy_core_apps(ingress=provided_ingress)
+
+
+def test_deployment_active(juju: Juju, kraft_mode, deploy_cluster):
+    """Test that Kafka is deployed and active."""
+    # The `deploy_cluster` fixture deploys the cluster using Terraform;
+    # wait for all applications to become active.
+    app_list = get_app_list(kraft_mode)
+    juju.wait(
+        lambda status: all_active_idle(status, *app_list),
+        delay=3,
+        successes=20,
+        timeout=3600,
+    )
+    status = juju.status()
+    for app in app_list:
+        assert status.apps[app].app_status.current == "active"
+
+
+def test_components(juju: Juju):
+    """Test that all components are deployed and functional."""
+    validator = ComponentValidation(juju=juju)
+
+    validator.test_kafka_admin_operations()
+    validator.test_kafka_producer_consumer()
+    validator.test_karapace()
+    validator.test_ui_accessibility()
+    validator.test_connect_endpoints()
+    validator.test_create_mm2_connector()
+
+
+def test_tls_toggle(juju: Juju, kraft_mode, enable_terraform_tls):
+    """Test enabling and disabling TLS across the cluster."""
+    app_list = get_app_list(kraft_mode)
+
+    juju.wait(
+        lambda status: all_active_idle(status, *app_list),
+        delay=3,
+        successes=20,
+        timeout=3600,
+    )
+    status = juju.status()
+    for app in app_list:
+        assert status.apps[app].app_status.current == "active"
+
+
+def test_tls_components(juju: Juju):
+    """Test that all components work with TLS enabled."""
+    validator = ComponentValidation(juju=juju, tls=True)
+
+    validator.test_kafka_admin_operations()
+    validator.test_kafka_producer_consumer()
+    # FIXME: enable Karapace tests when the TLS toggle issue is fixed
+    # validator.test_karapace()
+    validator.test_ui_accessibility()
+    validator.test_connect_endpoints()
+    validator.test_create_mm2_connector()
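
Since the `juju` fixture honours the `--model` option, the suite can also be pointed at an existing deployment while iterating locally, e.g. `tox run -e integration-terraform -- --model=<existing-model> --kraft-mode=multi` (model name supplied by the user); combined with `--keep-models`, the model from a previous run can be reused.
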
diff --git a/tox.ini b/tox.ini
index 804d2c6..3964443 100644
--- a/tox.ini
+++ b/tox.ini
@@ -76,8 +76,8 @@ commands =
     cp {env:FOLDER}/metadata.yaml {env:BUILD_DIRECTORY}
    cp {env:FOLDER}/README.md {env:BUILD_DIRECTORY}

-[testenv:integration-bundle]
-description = Run vm bundle integration tests
+[testenv:integration-terraform]
+description = Run super cluster integration tests using Terraform
 set_env =
    {[testenv]set_env}
    # Workaround for https://github.com/python-poetry/poetry/issues/6958
@@ -87,54 +87,4 @@ pass_env =
    CI
 commands =
    poetry install --with integration
-    poetry run pytest -vv tests/integration/bundle/ --tb native --log-cli-level=INFO -s {posargs}
-
-[testenv:integration-e2e]
-description = Run vm e2e integration tests. Add `--tls` to specify tls tests. Add `--model= --no-deploy` to run on existing deployments.
-set_env =
-    {[testenv]set_env}
-    # Workaround for https://github.com/python-poetry/poetry/issues/6958
-    POETRY_INSTALLER_PARALLEL = false
-pass_env =
-    {[testenv]pass_env}
-    CI
-commands =
-    poetry install --with integration
-    poetry run pytest -vv tests/integration/e2e/test_e2e.py --tb native --ignore={[vars]tests_path} --log-cli-level=INFO -s {posargs}
-
-[testenv:integration-e2e-basic-flow]
-description = Run vm e2e integration tests. Add `--tls` to specify tls tests. Add `--model= --no-deploy` to run on existing deployments.
-set_env =
-    {[testenv]set_env}
-    # Workaround for https://github.com/python-poetry/poetry/issues/6958
-    POETRY_INSTALLER_PARALLEL = false
-pass_env =
-    {[testenv]pass_env}
-commands =
-    poetry install --with integration
-    poetry run pytest -vv tests/integration/e2e/test_basic_flow.py --tb native --ignore={[vars]tests_path} --log-cli-level=INFO -s {posargs}
-
-[testenv:integration-e2e-password-rotation]
-description = Run vm e2e integration tests. Add `--tls` to specify tls tests. Add `--model= --no-deploy` to run on existing deployments.
-set_env =
-    {[testenv]set_env}
-    # Workaround for https://github.com/python-poetry/poetry/issues/6958
-    POETRY_INSTALLER_PARALLEL = false
-pass_env =
-    {[testenv]pass_env}
-commands =
-    poetry install --with integration
-    poetry run pytest -vv tests/integration/e2e/test_password_rotation.py --tb native --ignore={[vars]tests_path} --log-cli-level=INFO -s {posargs}
-
-[testenv:integration-e2e-backup]
-description = Run vm e2e integration tests. Add `--tls` to specify tls tests. Add `--model= --no-deploy` to run on existing deployments.
-set_env =
-    {[testenv]set_env}
-    # Workaround for https://github.com/python-poetry/poetry/issues/6958
-    POETRY_INSTALLER_PARALLEL = false
-pass_env =
-    {[testenv]pass_env}
-    CI
-commands =
-    poetry install --with integration
-    poetry run pytest -vv tests/integration/e2e/test_backup.py --tb native --ignore={[vars]tests_path} --log-cli-level=INFO -s {posargs}
+    poetry run pytest -x -vv tests/integration/terraform/test_terraform.py --tb native --log-cli-level=INFO -s {posargs}
\ No newline at end of file
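
A typical invocation of the new environment is `tox run -e integration-terraform -- -m 'not unstable' --kraft-mode=single`. Note the `-x` flag in the pytest command above: it stops the suite at the first failure, which keeps long Terraform applies from piling up after an early breakage.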