diff --git a/.github/workflows/e2e-py-check.yml b/.github/workflows/e2e-py-check.yml
new file mode 100644
index 0000000000..f4b5ae9aa5
--- /dev/null
+++ b/.github/workflows/e2e-py-check.yml
@@ -0,0 +1,31 @@
+name: e2e-tests Python Quality Check
+
+on:
+ pull_request:
+ paths:
+ - 'e2e-tests/**/*.py'
+
+jobs:
+ quality-check:
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+
+ - name: Install uv
+ uses: astral-sh/setup-uv@v5
+
+ - name: Set up Python
+ uses: actions/setup-python@v5
+ with:
+ python-version-file: "pyproject.toml"
+
+ - name: Install dependencies
+ run: uv sync --locked
+
+ - name: Run ruff check
+ run: uv run ruff check e2e-tests/
+
+ - name: Run mypy
+ run: uv run mypy e2e-tests/
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
index 2c8d6a1d78..69c56fdfa3 100644
--- a/.gitignore
+++ b/.gitignore
@@ -189,3 +189,7 @@ bin/
projects/
installers/olm/operator_*.yaml
installers/olm/bundles
+
+# Test Reports
+e2e-tests/reports/
+e2e-tests/**/__pycache__/
\ No newline at end of file
diff --git a/Jenkinsfile b/Jenkinsfile
index 71191c067e..cfdfc5bba1 100644
--- a/Jenkinsfile
+++ b/Jenkinsfile
@@ -101,6 +101,16 @@ void pushLogFile(String FILE_NAME) {
}
}
+void pushReportFile() {
+ echo "Push final_report.html to S3!"
+ withCredentials([aws(credentialsId: 'AMI/OVF', accessKeyVariable: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY')]) {
+ sh """
+ S3_PATH=s3://percona-jenkins-artifactory-public/\$JOB_NAME/\$(git rev-parse --short HEAD)
+ aws s3 cp --content-type text/html --quiet final_report.html \$S3_PATH/final_report.html || :
+ """
+ }
+}
+
void pushArtifactFile(String FILE_NAME) {
echo "Push $FILE_NAME file to S3!"
@@ -146,24 +156,6 @@ void markPassedTests() {
}
}
-void printKubernetesStatus(String LOCATION, String CLUSTER_SUFFIX) {
- sh """
- export KUBECONFIG=/tmp/${CLUSTER_NAME}-${CLUSTER_SUFFIX}
- echo "========== KUBERNETES STATUS $LOCATION TEST =========="
- gcloud container clusters list|grep -E "NAME|${CLUSTER_NAME}-${CLUSTER_SUFFIX} "
- echo
- kubectl get nodes
- echo
- kubectl top nodes
- echo
- kubectl get pods --all-namespaces
- echo
- kubectl top pod --all-namespaces
- echo
- kubectl get events --field-selector type!=Normal --all-namespaces --sort-by=".lastTimestamp"
- echo "======================================================"
- """
-}
String formatTime(def time) {
if (!time || time == "N/A") return "N/A"
@@ -217,6 +209,57 @@ void makeReport() {
"""
}
+void generateMissingReports() {
+ sh "mkdir -p e2e-tests/reports"
+
+ for (int i = 0; i < tests.size(); i++) {
+ def testName = tests[i]["name"]
+ def testResult = tests[i]["result"]
+ def testTime = tests[i]["time"] ?: 0
+
+ if (testResult == "skipped") {
+ continue
+ }
+
+ def xmlFile = "e2e-tests/reports/${testName}.xml"
+ def htmlFile = "e2e-tests/reports/${testName}.html"
+
+ if (!fileExists(xmlFile)) {
+ def failures = testResult == "failure" ? 1 : 0
+ def failureElement = testResult == "failure" ?
+ 'Test did not complete - possible causes: node abort, timeout, cluster creation failure' : ''
+
+ writeFile file: xmlFile, text: """
+
+
+
+${failureElement}
+
+
+"""
+ }
+
+ if (!fileExists(htmlFile)) {
+ def resultCapitalized = testResult == "failure" ? "Failed" : "Passed"
+ def formattedTime = formatTime(testTime)
+ def logMessage = testResult == "failure" ?
+ "Test did not complete - possible causes: node abort, timeout, cluster creation failure" :
+ "Test marked as passed (from previous run)"
+
+ writeFile file: htmlFile, text: """
+
+
+
+${testName}.html
+
+
+
+
+"""
+ }
+ }
+}
+
void clusterRunner(String cluster) {
withCredentials([[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AMI/OVF', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]){
def clusterCreated=0
@@ -258,7 +301,17 @@ void runTest(Integer TEST_ID) {
export DEBUG_TESTS=1
fi
export KUBECONFIG=/tmp/${CLUSTER_NAME}-${clusterSuffix}
- time ./e2e-tests/$testName/run
+ export PATH="\$HOME/.local/bin:\$PATH"
+ mkdir -p e2e-tests/reports
+
+ REPORT_OPTS="--html=e2e-tests/reports/${testName}.html --junitxml=e2e-tests/reports/${testName}.xml"
+
+ # Run native pytest if test_*.py exists, otherwise run bash via wrapper
+ if ls e2e-tests/$testName/test_*.py 1>/dev/null 2>&1; then
+ uv run pytest e2e-tests/$testName/ \$REPORT_OPTS
+ else
+ uv run pytest e2e-tests/test_pytest_wrapper.py --test-name=$testName \$REPORT_OPTS
+ fi
"""
}
pushArtifactFile("${env.GIT_BRANCH}-${env.GIT_SHORT_COMMIT}-$testName")
@@ -266,7 +319,6 @@ void runTest(Integer TEST_ID) {
return true
}
catch (exc) {
- printKubernetesStatus("AFTER","$clusterSuffix")
echo "Test $testName has failed!"
if (retryCount >= 1 || currentBuild.nextBuild != null) {
currentBuild.result = 'FAILURE'
@@ -290,7 +342,7 @@ void prepareNode() {
sudo curl -sLo /usr/local/bin/kubectl https://dl.k8s.io/release/\$(curl -Ls https://dl.k8s.io/release/stable.txt)/bin/linux/amd64/kubectl && sudo chmod +x /usr/local/bin/kubectl
kubectl version --client --output=yaml
- curl -fsSL https://get.helm.sh/helm-v3.19.0-linux-amd64.tar.gz | sudo tar -C /usr/local/bin --strip-components 1 -xzf - linux-amd64/helm
+ curl -fsSL https://get.helm.sh/helm-v3.20.0-linux-amd64.tar.gz | sudo tar -C /usr/local/bin --strip-components 1 -xzf - linux-amd64/helm
sudo curl -fsSL https://github.com/mikefarah/yq/releases/download/v4.48.1/yq_linux_amd64 -o /usr/local/bin/yq && sudo chmod +x /usr/local/bin/yq
sudo curl -fsSL https://github.com/jqlang/jq/releases/download/jq-1.7.1/jq-linux64 -o /usr/local/bin/jq && sudo chmod +x /usr/local/bin/jq
@@ -307,6 +359,10 @@ EOF
sudo yum install -y google-cloud-cli google-cloud-cli-gke-gcloud-auth-plugin
curl -sL https://github.com/mitchellh/golicense/releases/latest/download/golicense_0.2.0_linux_x86_64.tar.gz | sudo tar -C /usr/local/bin -xzf - golicense
+
+ curl -LsSf https://astral.sh/uv/install.sh | sh
+ export PATH="\$HOME/.local/bin:\$PATH"
+ uv sync --locked
"""
installAzureCLI()
azureAuth()
@@ -423,10 +479,10 @@ pipeline {
CLOUDSDK_CORE_DISABLE_PROMPTS = 1
CLEAN_NAMESPACE = 1
OPERATOR_NS = 'psmdb-operator'
- GIT_SHORT_COMMIT = sh(script: 'git rev-parse --short HEAD', , returnStdout: true).trim()
+ GIT_SHORT_COMMIT = sh(script: 'git rev-parse --short HEAD', returnStdout: true).trim()
VERSION = "${env.GIT_BRANCH}-${env.GIT_SHORT_COMMIT}"
- CLUSTER_NAME = sh(script: "echo jen-psmdb-${env.CHANGE_ID}-${GIT_SHORT_COMMIT}-${env.BUILD_NUMBER} | tr '[:upper:]' '[:lower:]'", , returnStdout: true).trim()
- AUTHOR_NAME = sh(script: "echo ${CHANGE_AUTHOR_EMAIL} | awk -F'@' '{print \$1}'", , returnStdout: true).trim()
+ CLUSTER_NAME = sh(script: "echo jen-psmdb-${env.CHANGE_ID}-${GIT_SHORT_COMMIT}-${env.BUILD_NUMBER} | tr '[:upper:]' '[:lower:]'", returnStdout: true).trim()
+ AUTHOR_NAME = sh(script: "echo ${CHANGE_AUTHOR_EMAIL} | awk -F'@' '{print \$1}'", returnStdout: true).trim()
ENABLE_LOGGING = "true"
}
agent {
@@ -458,7 +514,7 @@ pipeline {
prepareNode()
script {
if (AUTHOR_NAME == 'null') {
- AUTHOR_NAME = sh(script: "git show -s --pretty=%ae | awk -F'@' '{print \$1}'", , returnStdout: true).trim()
+ AUTHOR_NAME = sh(script: "git show -s --pretty=%ae | awk -F'@' '{print \$1}'", returnStdout: true).trim()
}
for (comment in pullRequest.comments) {
println("Author: ${comment.user}, Comment: ${comment.body}")
@@ -675,12 +731,20 @@ pipeline {
}
}
makeReport()
- junit testResults: '*.xml', healthScaleFactor: 1.0
- archiveArtifacts '*.xml'
+ generateMissingReports()
+
+ sh """
+ export PATH="\$HOME/.local/bin:\$PATH"
+ uv run pytest_html_merger -i e2e-tests/reports -o final_report.html
+ uv run junitparser merge --glob 'e2e-tests/reports/*.xml' final_report.xml
+ """
+ junit testResults: 'final_report.xml', healthScaleFactor: 1.0
+ archiveArtifacts 'final_report.xml, final_report.html'
+ pushReportFile()
unstash 'IMAGE'
def IMAGE = sh(returnStdout: true, script: "cat results/docker/TAG").trim()
- TestsReport = TestsReport + "\r\n\r\ncommit: ${env.CHANGE_URL}/commits/${env.GIT_COMMIT}\r\nimage: `${IMAGE}`\r\n"
+ TestsReport = TestsReport + "\r\n\r\nCommit: ${env.CHANGE_URL}/commits/${env.GIT_COMMIT}\r\nImage: `${IMAGE}`\r\nTest report: [report](${testUrlPrefix}/${env.GIT_BRANCH}/${env.GIT_SHORT_COMMIT}/final_report.html)\r\n"
pullRequest.comment(TestsReport)
}
deleteOldClusters("$CLUSTER_NAME")
diff --git a/Makefile b/Makefile
index c01ba1cb11..6d51cf7fd8 100644
--- a/Makefile
+++ b/Makefile
@@ -70,6 +70,20 @@ undeploy: ## Undeploy operator
test: envtest generate ## Run tests.
DISABLE_TELEMETRY=true KUBEBUILDER_ASSETS="$(shell $(ENVTEST) --arch=amd64 use $(ENVTEST_K8S_VERSION) -p path)" go test ./... -coverprofile cover.out
+py-deps: uv ## Install e2e-tests Python dependencies
+ $(UV) sync --locked
+
+py-update-deps: uv ## Update e2e-tests Python dependencies
+ $(UV) lock --upgrade
+
+py-fmt: uv ## Format and organize imports in e2e-tests
+ $(UV) run ruff check --select I --fix e2e-tests/
+ $(UV) run ruff format e2e-tests/
+
+py-check: uv ## Run ruff and mypy checks on e2e-tests
+ $(UV) run ruff check e2e-tests/
+ $(UV) run mypy e2e-tests/
+
# go-get-tool will 'go get' any package $2 and install it to $1.
PROJECT_DIR := $(shell dirname $(abspath $(lastword $(MAKEFILE_LIST))))
define go-get-tool
@@ -104,6 +118,13 @@ MOCKGEN = $(shell pwd)/bin/mockgen
mockgen: ## Download mockgen locally if necessary.
$(call go-get-tool,$(MOCKGEN), github.com/golang/mock/mockgen@latest)
+UV = $(shell pwd)/bin/uv
+uv: ## Download uv locally if necessary.
+ @[ -f $(UV) ] || { \
+ set -e ;\
+ curl -LsSf https://astral.sh/uv/install.sh | UV_INSTALL_DIR=$(PROJECT_DIR)/bin sh ;\
+ }
+
# Prepare release
include e2e-tests/release_versions
CERT_MANAGER_VER := $(shell grep -Eo "cert-manager v.*" go.mod|grep -Eo "[0-9]+\.[0-9]+\.[0-9]+")
diff --git a/e2e-tests/README.md b/e2e-tests/README.md
index f227aab5c2..64e6cd986d 100644
--- a/e2e-tests/README.md
+++ b/e2e-tests/README.md
@@ -121,6 +121,65 @@ Test execution produces excessive output. It is recommended to redirect the outp
./e2e-tests/run >> /tmp/tests-run.out 2>&1
```
+## Python development setup
+
+The e2e tests are being migrated to pytest. This section covers setting up the Python environment.
+
+### Installing uv
+
+[uv](https://github.com/astral-sh/uv) is used for Python dependency management. Install it via make:
+
+```
+make uv
+```
+
+Or manually:
+
+```
+curl -LsSf https://astral.sh/uv/install.sh | sh
+```
+
+### Python make targets
+
+```
+make py-deps # Install Python dependencies (locked versions)
+make py-update-deps # Update Python dependencies
+make py-fmt # Format code and organize imports with ruff
+make py-check # Run ruff linter and mypy type checks
+```
+
+### Running tests with pytest
+
+First, install dependencies:
+
+```
+make py-deps
+```
+
+Run all pytest-based tests:
+
+```
+uv run pytest e2e-tests/
+```
+
+Run a specific test file:
+
+```
+uv run pytest e2e-tests/init-deploy/test_init_deploy.py
+```
+
+Run a specific test:
+
+```
+uv run pytest e2e-tests/init-deploy/test_init_deploy.py::TestInitDeploy::test_cluster_creation
+```
+
+Run tests matching a pattern:
+
+```
+uv run pytest e2e-tests/ -k "init"
+```
+
## Using environment variables to customize the testing process
### Re-declaring default image names
@@ -129,7 +188,7 @@ You can use environment variables to re-declare all default images used for test
full list of variables is the following one:
* `IMAGE` - Percona Server for MongoDB Operator, `perconalab/percona-server-mongodb-operator:main` by default,
-* `IMAGE_MONGOD` - mongod, `perconalab/percona-server-mongodb-operator:main-mongod4.4` by default,
+* `IMAGE_MONGOD` - mongod, `perconalab/percona-server-mongodb-operator:main-mongod8.0` by default,
* `IMAGE_PMM_CLIENT` - Percona Monitoring and Management (PMM) client, `perconalab/pmm-client:dev-latest` by default,
* `IMAGE_BACKUP` - backup, `perconalab/percona-server-mongodb-operator:main-backup` by default,
diff --git a/e2e-tests/conftest.py b/e2e-tests/conftest.py
new file mode 100644
index 0000000000..4cb349b12c
--- /dev/null
+++ b/e2e-tests/conftest.py
@@ -0,0 +1,548 @@
+import logging
+import os
+import random
+import subprocess
+import time
+from concurrent.futures import ThreadPoolExecutor
+from pathlib import Path
+from typing import Any, Callable, Dict, Generator
+
+import pytest
+import yaml
+from lib.kubectl import (
+ clean_all_namespaces,
+ get_k8s_versions,
+ is_minikube,
+ is_openshift,
+ kubectl_bin,
+ wait_pod,
+)
+from lib.mongo import MongoManager
+from lib.operator import check_crd_for_deletion, delete_crd_rbac, deploy_operator
+from lib.secrets import get_cloud_secret_default
+from lib.utils import (
+ K8sHighlighter,
+ get_cr_version,
+ get_git_branch,
+ get_git_commit,
+ k8s_theme,
+ retry,
+)
+from rich.console import Console
+from rich.logging import RichHandler
+
+pytest_plugins = ["lib.report_generator"]
+
+logging.basicConfig(
+ level=os.environ.get("LOG_LEVEL", "INFO").upper(),
+ format="%(message)s",
+ handlers=[
+ RichHandler(
+ console=Console(theme=k8s_theme),
+ highlighter=K8sHighlighter(),
+ show_time=True,
+ show_path=False,
+ markup=False,
+ rich_tracebacks=True,
+ log_time_format="[%X.%f]",
+ )
+ ],
+)
+logging.getLogger("pytest_dependency").setLevel(logging.WARNING)
+logger = logging.getLogger(__name__)
+
+_current_namespace: str | None = None
+
+
+def pytest_addoption(parser: pytest.Parser) -> None:
+ parser.addoption("--test-name", action="store", default=None, help="Bash test name to run")
+
+
+def pytest_collection_modifyitems(
+ session: pytest.Session, config: pytest.Config, items: list[pytest.Item]
+) -> None:
+ """Rename bash wrapper tests to show actual test name."""
+ test_name = config.getoption("--test-name")
+ if not test_name:
+ return
+
+ for item in items:
+ if item.name == "test_bash_wrapper":
+ item._nodeid = item._nodeid.replace(
+ "test_bash_wrapper", f"test_bash_wrapper[{test_name}]"
+ )
+
+
+@pytest.hookimpl(tryfirst=True)
+def pytest_runtest_setup(item: pytest.Item) -> None:
+ """Print newline after pytest's verbose test name output."""
+ print()
+
+
+def _get_current_namespace() -> str | None:
+ """Get namespace from global or temp file (for bash wrapper tests)."""
+ if _current_namespace:
+ return _current_namespace
+ try:
+ with open("/tmp/pytest_current_namespace") as f:
+ return f.read().strip() or None
+ except Exception:
+ return None
+
+
+@pytest.hookimpl(tryfirst=True, hookwrapper=True)
+def pytest_runtest_makereport(
+ item: pytest.Item, call: pytest.CallInfo[None]
+) -> Generator[None, None, None]:
+ """Collect K8s resources when a test fails and add to HTML report."""
+ outcome: Any = yield
+ report = outcome.get_result()
+
+ if report.when == "call" and report.failed:
+ namespace = _get_current_namespace()
+ if not namespace:
+ return
+
+ try:
+ from lib import report_generator
+
+ report.extras = report_generator.generate_report(namespace)
+ except Exception as e:
+ logger.warning(f"Failed to generate HTML report extras: {e}")
+
+
+@pytest.fixture(scope="session", autouse=True)
+def setup_env_vars() -> None:
+ """Setup environment variables for the test session."""
+ git_branch = get_git_branch()
+ git_version, kube_version = get_k8s_versions()
+
+ defaults = {
+ "KUBE_VERSION": kube_version,
+ "EKS": "1" if "eks" in git_version else "0",
+ "GKE": "1" if "gke" in git_version else "0",
+ "OPENSHIFT": is_openshift(),
+ "MINIKUBE": is_minikube(),
+ "API": "psmdb.percona.com/v1",
+ "GIT_COMMIT": get_git_commit(),
+ "GIT_BRANCH": git_branch,
+ "OPERATOR_VERSION": get_cr_version(),
+ "IMAGE": f"perconalab/percona-server-mongodb-operator:{git_branch}",
+ "IMAGE_MONGOD": "perconalab/percona-server-mongodb-operator:main-mongod8.0",
+ "IMAGE_MONGOD_CHAIN": (
+ "perconalab/percona-server-mongodb-operator:main-mongod6.0\n"
+ "perconalab/percona-server-mongodb-operator:main-mongod7.0\n"
+ "perconalab/percona-server-mongodb-operator:main-mongod8.0"
+ ),
+ "IMAGE_BACKUP": "perconalab/percona-server-mongodb-operator:main-backup",
+ "IMAGE_PMM_CLIENT": "percona/pmm-client:2.44.1-1",
+ "IMAGE_PMM_SERVER": "perconalab/pmm-server:dev-latest",
+ "IMAGE_PMM3_CLIENT": "perconalab/pmm-client:3-dev-latest",
+ "IMAGE_PMM3_SERVER": "perconalab/pmm-server:3-dev-latest",
+ "CERT_MANAGER_VER": "1.19.1",
+ "CHAOS_MESH_VER": "2.7.1",
+ "MINIO_VER": "5.4.0",
+ "PMM_SERVER_VER": "9.9.9",
+ "CLEAN_NAMESPACE": "0",
+ "DELETE_CRD_ON_START": "0",
+ "SKIP_DELETE": "1",
+ "SKIP_BACKUPS_TO_AWS_GCP_AZURE": get_cloud_secret_default(),
+ "UPDATE_COMPARE_FILES": "0",
+ }
+
+ for key, value in defaults.items():
+ os.environ.setdefault(key, value)
+
+ env_lines = [f"{key}={os.environ.get(key)}" for key in defaults]
+ logger.info("Environment variables:\n" + "\n".join(env_lines))
+
+
+@pytest.fixture(scope="class")
+def test_paths(request: pytest.FixtureRequest) -> Dict[str, str]:
+ """Fixture to provide paths relative to the test file."""
+ test_file = request.path
+ test_dir = test_file.parent
+ conf_dir = test_dir.parent / "conf"
+ src_dir = test_dir.parent.parent
+
+ return {
+ "test_file": str(test_file),
+ "test_dir": str(test_dir),
+ "conf_dir": str(conf_dir),
+ "src_dir": str(src_dir),
+ }
+
+
+def _wait_for_project_delete(project: str, timeout: int = 180) -> None:
+ """Wait for OpenShift project to be fully deleted."""
+ start = time.time()
+ while time.time() - start < timeout:
+ result = subprocess.run(
+ ["oc", "get", "project", project],
+ capture_output=True,
+ check=False,
+ )
+ if result.returncode != 0:
+ return
+ time.sleep(5)
+ logger.warning(f"Project {project} not deleted within {timeout}s, continuing anyway")
+
+
+@pytest.fixture(scope="class")
+def create_namespace() -> Callable[[str], str]:
+ def _create_namespace(namespace: str) -> str:
+ """Create kubernetes namespace and clean up if exists."""
+
+ if int(os.environ.get("CLEAN_NAMESPACE") or "0"):
+ clean_all_namespaces()
+
+ if int(os.environ.get("OPENSHIFT") or "0"):
+ logger.info("Cleaning up existing OpenShift project if exists")
+ subprocess.run(
+ ["oc", "delete", "project", namespace, "--ignore-not-found"],
+ check=False,
+ )
+ _wait_for_project_delete(namespace)
+
+ logger.info(f"Create namespace {namespace}")
+ subprocess.run(["oc", "new-project", namespace], check=True)
+ subprocess.run(["oc", "project", namespace], check=True)
+ subprocess.run(
+ ["oc", "adm", "policy", "add-scc-to-user", "hostaccess", "-z", "default"],
+ check=False,
+ )
+ else:
+ logger.info("Cleaning up existing namespace")
+
+ # Delete namespace if exists
+ try:
+ kubectl_bin("delete", "namespace", namespace, "--ignore-not-found")
+ kubectl_bin("wait", "--for=delete", f"namespace/{namespace}")
+ except subprocess.CalledProcessError:
+ pass
+
+ logger.info(f"Create namespace {namespace}")
+ kubectl_bin("create", "namespace", namespace)
+ kubectl_bin("config", "set-context", "--current", f"--namespace={namespace}")
+ return namespace
+
+ return _create_namespace
+
+
+@pytest.fixture(scope="class")
+def create_infra(
+ test_paths: Dict[str, str], create_namespace: Callable[[str], str]
+) -> Generator[Callable[[str], str], None, None]:
+ global _current_namespace
+ created_namespaces: list[str] = []
+
+ def _create_infra(test_name: str) -> str:
+ """Create the necessary infrastructure for the tests."""
+ global _current_namespace
+ logger.info("Creating test environment")
+ if os.environ.get("DELETE_CRD_ON_START") == "1":
+ delete_crd_rbac(Path(test_paths["src_dir"]))
+ check_crd_for_deletion(f"{test_paths['src_dir']}/deploy/crd.yaml")
+
+ operator_ns = os.environ.get("OPERATOR_NS")
+ if operator_ns:
+ create_namespace(operator_ns)
+ deploy_operator(test_paths["test_dir"], test_paths["src_dir"])
+ namespace = create_namespace(f"{test_name}-{random.randint(0, 32767)}")
+ else:
+ namespace = create_namespace(f"{test_name}-{random.randint(0, 32767)}")
+ deploy_operator(test_paths["test_dir"], test_paths["src_dir"])
+
+ # Track created namespace for cleanup and failure collection
+ created_namespaces.append(namespace)
+ _current_namespace = namespace
+ return namespace
+
+ yield _create_infra
+
+ # Teardown code
+ _current_namespace = None
+
+ if os.environ.get("SKIP_DELETE") == "1":
+ logger.info("SKIP_DELETE=1. Skipping test environment cleanup")
+ return
+
+ def run_cmd(cmd: list[str]) -> None:
+ try:
+ kubectl_bin(*cmd)
+ except (subprocess.CalledProcessError, FileNotFoundError, OSError) as e:
+ logger.debug(f"Command failed (continuing cleanup): {' '.join(cmd)}, error: {e}")
+
+ def cleanup_crd() -> None:
+ crd_file = f"{test_paths['src_dir']}/deploy/crd.yaml"
+ run_cmd(["delete", "-f", crd_file, "--ignore-not-found", "--wait=false"])
+
+ try:
+ with open(crd_file, "r") as f:
+ for doc in f.read().split("---"):
+ if not doc.strip():
+ continue
+ crd_name = yaml.safe_load(doc)["metadata"]["name"]
+ run_cmd(
+ [
+ "patch",
+ "crd",
+ crd_name,
+ "--type=merge",
+ "-p",
+ '{"metadata":{"finalizers":[]}}',
+ ]
+ )
+ run_cmd(["wait", "--for=delete", "crd", crd_name, "--timeout=60s"])
+ except (FileNotFoundError, yaml.YAMLError, KeyError, TypeError) as e:
+ logger.debug(f"CRD cleanup failed (continuing): {e}")
+
+ logger.info("Cleaning up test environment")
+
+ commands = [
+ ["delete", "psmdb-backup", "--all", "--ignore-not-found"],
+ [
+ "delete",
+ "-f",
+ f"{test_paths['test_dir']}/../conf/container-rc.yaml",
+ "--ignore-not-found",
+ ],
+ [
+ "delete",
+ "-f",
+ f"{test_paths['src_dir']}/deploy/{'cw-' if os.environ.get('OPERATOR_NS') else ''}rbac.yaml",
+ "--ignore-not-found",
+ ],
+ ]
+
+ with ThreadPoolExecutor(max_workers=3) as executor:
+ futures = [executor.submit(run_cmd, cmd) for cmd in commands]
+ futures.append(executor.submit(cleanup_crd))
+
+ # Clean up all created namespaces
+ namespaces_to_delete = created_namespaces.copy()
+ operator_ns = os.environ.get("OPERATOR_NS")
+ if operator_ns:
+ namespaces_to_delete.append(operator_ns)
+
+ for ns in namespaces_to_delete:
+ run_cmd(["delete", "--grace-period=0", "--force", "namespace", ns, "--ignore-not-found"])
+
+
+@pytest.fixture(scope="class")
+def deploy_chaos_mesh() -> Generator[Callable[[str], None], None, None]:
+ """Deploy Chaos Mesh and clean up after tests."""
+ deployed_namespaces = []
+
+ def _deploy(namespace: str) -> None:
+ subprocess.run(
+ ["helm", "repo", "add", "chaos-mesh", "https://charts.chaos-mesh.org"], check=True
+ )
+ subprocess.run(["helm", "repo", "update"], check=True)
+ subprocess.run(
+ [
+ "helm",
+ "install",
+ "chaos-mesh",
+ "chaos-mesh/chaos-mesh",
+ "--namespace",
+ namespace,
+ "--version",
+ os.environ["CHAOS_MESH_VER"],
+ "--set",
+ "dashboard.create=false",
+ "--set",
+ "chaosDaemon.runtime=containerd",
+ "--set",
+ "chaosDaemon.socketPath=/run/containerd/containerd.sock",
+ "--wait",
+ ],
+ check=True,
+ )
+ deployed_namespaces.append(namespace)
+
+ yield _deploy
+
+ for ns in deployed_namespaces:
+ try:
+ subprocess.run(
+ [
+ "helm",
+ "uninstall",
+ "chaos-mesh",
+ "--namespace",
+ ns,
+ "--wait",
+ "--timeout",
+ "60s",
+ ],
+ check=True,
+ )
+ except subprocess.CalledProcessError as e:
+ logger.error(f"Failed to cleanup chaos-mesh in {ns}: {e}")
+
+
+@pytest.fixture(scope="class")
+def deploy_cert_manager() -> Generator[None, None, None]:
+ """Deploy Cert Manager and clean up after tests."""
+ logger.info("Deploying cert-manager")
+ cert_manager_url = f"https://github.com/cert-manager/cert-manager/releases/download/v{os.environ.get('CERT_MANAGER_VER')}/cert-manager.yaml"
+ try:
+ kubectl_bin("create", "namespace", "cert-manager")
+ kubectl_bin(
+ "label", "namespace", "cert-manager", "certmanager.k8s.io/disable-validation=true"
+ )
+ kubectl_bin("apply", "-f", cert_manager_url, "--validate=false")
+ kubectl_bin(
+ "wait",
+ "pod",
+ "-l",
+ "app.kubernetes.io/instance=cert-manager",
+ "--for=condition=ready",
+ "-n",
+ "cert-manager",
+ )
+ except Exception as e:
+ try:
+ kubectl_bin("delete", "-f", cert_manager_url, "--ignore-not-found")
+ except (subprocess.CalledProcessError, FileNotFoundError, OSError) as cleanup_error:
+ logger.warning(
+ f"Failed to cleanup cert-manager during error handling: {cleanup_error}"
+ )
+ raise e
+
+ yield
+
+ try:
+ kubectl_bin("delete", "-f", cert_manager_url, "--ignore-not-found")
+ except Exception as e:
+ logger.error(f"Failed to cleanup cert-manager: {e}")
+
+
+@pytest.fixture(scope="class")
+def deploy_minio() -> Generator[None, None, None]:
+ """Deploy MinIO and clean up after tests."""
+ service_name = "minio-service"
+ bucket = "operator-testing"
+
+ logger.info(f"Installing MinIO: {service_name}")
+
+ subprocess.run(["helm", "uninstall", service_name], capture_output=True, check=False)
+ subprocess.run(["helm", "repo", "remove", "minio"], capture_output=True, check=False)
+ subprocess.run(["helm", "repo", "add", "minio", "https://charts.min.io/"], check=True)
+
+ endpoint = f"http://{service_name}:9000"
+ minio_ver = os.environ.get("MINIO_VER") or ""
+ minio_args = [
+ "helm",
+ "install",
+ service_name,
+ "minio/minio",
+ "--version",
+ minio_ver,
+ "--set",
+ "replicas=1",
+ "--set",
+ "mode=standalone",
+ "--set",
+ "resources.requests.memory=256Mi",
+ "--set",
+ "rootUser=rootuser",
+ "--set",
+ "rootPassword=rootpass123",
+ "--set",
+ "users[0].accessKey=some-access-key",
+ "--set",
+ "users[0].secretKey=some-secret-key",
+ "--set",
+ "users[0].policy=consoleAdmin",
+ "--set",
+ "service.type=ClusterIP",
+ "--set",
+ "configPathmc=/tmp/",
+ "--set",
+ "securityContext.enabled=false",
+ "--set",
+ "persistence.size=2G",
+ "--set",
+ f"fullnameOverride={service_name}",
+ "--set",
+ "serviceAccount.create=true",
+ "--set",
+ f"serviceAccount.name={service_name}-sa",
+ ]
+
+ retry(lambda: subprocess.run(minio_args, check=True), max_attempts=10, delay=60)
+
+ minio_pod = kubectl_bin(
+ "get",
+ "pods",
+ f"--selector=release={service_name}",
+ "-o",
+ "jsonpath={.items[].metadata.name}",
+ ).strip()
+ wait_pod(minio_pod)
+
+ operator_ns = os.environ.get("OPERATOR_NS")
+ if operator_ns:
+ namespace = kubectl_bin(
+ "config", "view", "--minify", "-o", "jsonpath={..namespace}"
+ ).strip()
+ kubectl_bin(
+ "create",
+ "svc",
+ "-n",
+ operator_ns,
+ "externalname",
+ service_name,
+ f"--external-name={service_name}.{namespace}.svc.cluster.local",
+ "--tcp=9000",
+ )
+
+ logger.info(f"Creating MinIO bucket: {bucket}")
+ kubectl_bin(
+ "run",
+ "-i",
+ "--rm",
+ "aws-cli",
+ "--image=perconalab/awscli",
+ "--restart=Never",
+ "--",
+ "bash",
+ "-c",
+ "AWS_ACCESS_KEY_ID=some-access-key "
+ "AWS_SECRET_ACCESS_KEY=some-secret-key "
+ "AWS_DEFAULT_REGION=us-east-1 "
+ f"/usr/bin/aws --no-verify-ssl --endpoint-url {endpoint} s3 mb s3://{bucket}",
+ )
+
+ yield
+
+ try:
+ subprocess.run(
+ ["helm", "uninstall", service_name, "--wait", "--timeout", "60s"],
+ check=True,
+ )
+ except subprocess.CalledProcessError as e:
+ logger.warning(f"Failed to cleanup minio: {e}")
+
+
+@pytest.fixture(scope="class")
+def psmdb_client(test_paths: Dict[str, str]) -> MongoManager:
+ """Deploy and get the client pod name."""
+ kubectl_bin("apply", "-f", f"{test_paths['conf_dir']}/client-70.yml")
+
+ result = retry(
+ lambda: kubectl_bin(
+ "get",
+ "pods",
+ "--selector=name=psmdb-client",
+ "-o",
+ "jsonpath={.items[].metadata.name}",
+ ),
+ condition=lambda result: "container not found" not in result,
+ )
+
+ pod_name = result.strip()
+ wait_pod(pod_name)
+ return MongoManager(pod_name)
diff --git a/e2e-tests/finalizer/test_finalizer.py b/e2e-tests/finalizer/test_finalizer.py
new file mode 100644
index 0000000000..f3c6896c96
--- /dev/null
+++ b/e2e-tests/finalizer/test_finalizer.py
@@ -0,0 +1,68 @@
+import logging
+from typing import Callable, Dict, TypedDict
+
+import pytest
+from lib.config import apply_cluster
+from lib.kubectl import kubectl_bin, wait_for_delete, wait_for_running
+from lib.mongo import MongoManager
+
+logger = logging.getLogger(__name__)
+
+
+class FinalizerConfig(TypedDict):
+ namespace: str
+ cluster: str
+
+
+@pytest.fixture(scope="class", autouse=True)
+def config(create_infra: Callable[[str], str]) -> FinalizerConfig:
+ """Configuration for tests"""
+ return {
+ "namespace": create_infra("finalizer"),
+ "cluster": "some-name",
+ }
+
+
+@pytest.fixture(scope="class", autouse=True)
+def setup_tests(test_paths: Dict[str, str]) -> None:
+ """Setup test environment"""
+ kubectl_bin("apply", "-f", f"{test_paths['conf_dir']}/secrets_with_tls.yml")
+
+
+class TestFinalizer:
+ """Test MongoDB cluster finalizers"""
+
+ @pytest.mark.dependency()
+ def test_create_cluster(self, config: FinalizerConfig, test_paths: Dict[str, str]) -> None:
+ apply_cluster(f"{test_paths['test_dir']}/conf/{config['cluster']}.yml")
+ wait_for_running(f"{config['cluster']}-rs0", 3, False)
+ wait_for_running(f"{config['cluster']}-cfg", 3)
+
+ @pytest.mark.dependency(depends=["TestFinalizer::test_create_cluster"])
+ def test_kill_primary_should_elect_new_one(
+ self, config: FinalizerConfig, psmdb_client: MongoManager
+ ) -> None:
+ primary = psmdb_client.get_mongo_primary(
+ f"clusterAdmin:clusterAdmin123456@{config['cluster']}-rs0.{config['namespace']}",
+ config["cluster"],
+ )
+ if primary == f"{config['cluster']}-rs0-0":
+ kubectl_bin("delete", "pod", "--grace-period=0", "--force", primary)
+ wait_for_running(f"{config['cluster']}-rs0", 3)
+ new_primary = psmdb_client.get_mongo_primary(
+ f"clusterAdmin:clusterAdmin123456@{config['cluster']}-rs0.{config['namespace']}",
+ config["cluster"],
+ )
+ assert new_primary != primary, "Primary did not change after killing the pod"
+
+ @pytest.mark.dependency(depends=["TestFinalizer::test_kill_primary_should_elect_new_one"])
+ def test_delete_cluster(self, config: FinalizerConfig) -> None:
+ kubectl_bin("delete", "psmdb", config["cluster"], "--wait=false")
+ wait_for_delete(f"psmdb/{config['cluster']}")
+
+ wait_for_delete(f"pvc/mongod-data-{config['cluster']}-cfg-0")
+ wait_for_delete(f"pvc/mongod-data-{config['cluster']}-cfg-1")
+ wait_for_delete(f"pvc/mongod-data-{config['cluster']}-cfg-2")
+ wait_for_delete(f"pvc/mongod-data-{config['cluster']}-rs0-0")
+ wait_for_delete(f"pvc/mongod-data-{config['cluster']}-rs0-1")
+ wait_for_delete(f"pvc/mongod-data-{config['cluster']}-rs0-2")
diff --git a/e2e-tests/functions b/e2e-tests/functions
index 04b6b5f88b..236016fce5 100755
--- a/e2e-tests/functions
+++ b/e2e-tests/functions
@@ -1817,6 +1817,8 @@ function setup_azure_credentials() {
create_infra() {
local ns="$1"
+ echo "$ns" > /tmp/pytest_current_namespace
+
if [[ ${DELETE_CRD_ON_START} == 1 ]]; then
delete_crd
check_crd_for_deletion "${GIT_BRANCH}"
diff --git a/e2e-tests/init-deploy/compare/find-1.json b/e2e-tests/init-deploy/compare/find-1.json
new file mode 100644
index 0000000000..2d84f1ffee
--- /dev/null
+++ b/e2e-tests/init-deploy/compare/find-1.json
@@ -0,0 +1 @@
+[ { "x": 100500 } ]
\ No newline at end of file
diff --git a/e2e-tests/init-deploy/compare/find-2.json b/e2e-tests/init-deploy/compare/find-2.json
new file mode 100644
index 0000000000..38993741e3
--- /dev/null
+++ b/e2e-tests/init-deploy/compare/find-2.json
@@ -0,0 +1 @@
+[ { "x": 100500 }, { "x" : 100501 } ]
\ No newline at end of file
diff --git a/e2e-tests/init-deploy/compare/find-3.json b/e2e-tests/init-deploy/compare/find-3.json
new file mode 100644
index 0000000000..665e842fe7
--- /dev/null
+++ b/e2e-tests/init-deploy/compare/find-3.json
@@ -0,0 +1 @@
+[ { "x": 100502 } ]
\ No newline at end of file
diff --git a/e2e-tests/init-deploy/compare/statefulset_another-name-rs0-4-oc.yml b/e2e-tests/init-deploy/compare/statefulset_another-name-rs0-4-oc.yml
index a10c5d45c7..f9461c530a 100644
--- a/e2e-tests/init-deploy/compare/statefulset_another-name-rs0-4-oc.yml
+++ b/e2e-tests/init-deploy/compare/statefulset_another-name-rs0-4-oc.yml
@@ -244,7 +244,7 @@ spec:
securityContext: {}
serviceAccount: default
serviceAccountName: default
- terminationGracePeriodSeconds: 300
+ terminationGracePeriodSeconds: 30
volumes:
- name: another-name-mongodb-keyfile
secret:
diff --git a/e2e-tests/init-deploy/compare/statefulset_another-name-rs0-oc.yml b/e2e-tests/init-deploy/compare/statefulset_another-name-rs0-oc.yml
index 9028f49d85..8c3c78c36e 100644
--- a/e2e-tests/init-deploy/compare/statefulset_another-name-rs0-oc.yml
+++ b/e2e-tests/init-deploy/compare/statefulset_another-name-rs0-oc.yml
@@ -243,7 +243,7 @@ spec:
securityContext: {}
serviceAccount: default
serviceAccountName: default
- terminationGracePeriodSeconds: 300
+ terminationGracePeriodSeconds: 30
volumes:
- name: another-name-mongodb-keyfile
secret:
diff --git a/e2e-tests/init-deploy/compare/statefulset_another-name-rs0.yml b/e2e-tests/init-deploy/compare/statefulset_another-name-rs0.yml
index 1f3d0df2f7..ee48a47c4b 100644
--- a/e2e-tests/init-deploy/compare/statefulset_another-name-rs0.yml
+++ b/e2e-tests/init-deploy/compare/statefulset_another-name-rs0.yml
@@ -297,7 +297,7 @@ spec:
fsGroup: 1001
serviceAccount: default
serviceAccountName: default
- terminationGracePeriodSeconds: 300
+ terminationGracePeriodSeconds: 30
volumes:
- name: another-name-mongodb-keyfile
secret:
diff --git a/e2e-tests/init-deploy/conf/another-name-rs0.yml b/e2e-tests/init-deploy/conf/another-name-rs0.yml
index 5258d5c3b4..a6eeb76e60 100644
--- a/e2e-tests/init-deploy/conf/another-name-rs0.yml
+++ b/e2e-tests/init-deploy/conf/another-name-rs0.yml
@@ -25,7 +25,7 @@ spec:
insecureSkipTLSVerify: false
replsets:
- name: rs0
- terminationGracePeriodSeconds: 300
+ terminationGracePeriodSeconds: 30
configuration: |
operationProfiling:
mode: slowOp
diff --git a/e2e-tests/init-deploy/test_init_deploy.py b/e2e-tests/init-deploy/test_init_deploy.py
new file mode 100644
index 0000000000..df26ffee51
--- /dev/null
+++ b/e2e-tests/init-deploy/test_init_deploy.py
@@ -0,0 +1,274 @@
+#!/usr/bin/env python3
+
+import logging
+import time
+from typing import Callable, Dict, TypedDict
+
+import pytest
+from lib.config import apply_cluster, apply_runtime_class, compare_kubectl
+from lib.kubectl import kubectl_bin, wait_for_running
+from lib.mongo import MongoManager
+from lib.secrets import apply_s3_storage_secrets, get_user_data
+from lib.utils import retry
+
+logger = logging.getLogger(__name__)
+
+
+class InitDeployConfig(TypedDict):  # shape of the shared test configuration returned by the `config` fixture
+    namespace: str  # namespace provisioned by the create_infra fixture
+    cluster: str  # name of the first PSMDB replset ("some-name-rs0")
+    cluster2: str  # name of the second PSMDB replset ("another-name-rs0")
+    max_conn: int  # max allowed current mongod connections for the first cluster
+
+
+@pytest.fixture(scope="class", autouse=True)
+def config(create_infra: Callable[[str], str]) -> InitDeployConfig:
+    """Class-scoped test configuration; create_infra provisions the namespace once per class."""
+    return {
+        "namespace": create_infra("init-deploy"),
+        "cluster": "some-name-rs0",
+        "cluster2": "another-name-rs0",
+        "max_conn": 17,
+    }
+
+
+@pytest.fixture(scope="class", autouse=True)
+def setup_tests(test_paths: Dict[str, str]) -> None:
+    """Apply the TLS user secrets and the runc runtime class once per test class."""
+    kubectl_bin("apply", "-f", f"{test_paths['conf_dir']}/secrets_with_tls.yml")
+    apply_runtime_class(test_paths["test_dir"])
+
+
+class TestInitDeploy:
+    """Init-deploy scenario: tests form an ordered chain via pytest-dependency marks"""
+
+    @pytest.mark.dependency()
+    def test_create_first_cluster(
+        self, config: InitDeployConfig, test_paths: Dict[str, str]
+    ) -> None:
+        """Create first PSMDB cluster"""
+        apply_cluster(f"{test_paths['test_dir']}/../conf/{config['cluster']}.yml")
+        wait_for_running(config["cluster"], 3)
+
+        compare_kubectl(  # compare live objects against golden files under compare/
+            test_paths["test_dir"], f"statefulset/{config['cluster']}", config["namespace"]
+        )
+        compare_kubectl(
+            test_paths["test_dir"], f"service/{config['cluster']}", config["namespace"]
+        )
+
+    @pytest.mark.dependency(depends=["TestInitDeploy::test_create_first_cluster"])
+    def test_verify_users_created(
+        self,
+        config: InitDeployConfig,
+        test_paths: Dict[str, str],
+        psmdb_client: MongoManager,
+    ) -> None:
+        """Check if users created with correct permissions"""
+        secret_name = "some-users"  # Secret holding the system users' credentials
+
+        # Test userAdmin user
+        user = get_user_data(secret_name, "MONGODB_USER_ADMIN_USER")
+        password = get_user_data(secret_name, "MONGODB_USER_ADMIN_PASSWORD")
+        psmdb_client.compare_mongo_user(
+            f"{user}:{password}@{config['cluster']}.{config['namespace']}",
+            "userAdmin",
+            test_paths["test_dir"],
+        )
+
+        # Test backup user
+        user = get_user_data(secret_name, "MONGODB_BACKUP_USER")
+        password = get_user_data(secret_name, "MONGODB_BACKUP_PASSWORD")
+        psmdb_client.compare_mongo_user(
+            f"{user}:{password}@{config['cluster']}.{config['namespace']}",
+            "backup",
+            test_paths["test_dir"],
+        )
+
+        # Test clusterAdmin user
+        user = get_user_data(secret_name, "MONGODB_CLUSTER_ADMIN_USER")
+        password = get_user_data(secret_name, "MONGODB_CLUSTER_ADMIN_PASSWORD")
+        psmdb_client.compare_mongo_user(
+            f"{user}:{password}@{config['cluster']}.{config['namespace']}",
+            "clusterAdmin",
+            test_paths["test_dir"],
+        )
+
+        # Test clusterMonitor user
+        user = get_user_data(secret_name, "MONGODB_CLUSTER_MONITOR_USER")
+        password = get_user_data(secret_name, "MONGODB_CLUSTER_MONITOR_PASSWORD")
+        psmdb_client.compare_mongo_user(
+            f"{user}:{password}@{config['cluster']}.{config['namespace']}",
+            "clusterMonitor",
+            test_paths["test_dir"],
+        )
+
+        # Test that unauthorized user is rejected
+        result = psmdb_client.run_mongosh(
+            "db.runCommand({connectionStatus:1,showPrivileges:true})",
+            f"test:test@{config['cluster']}.{config['namespace']}",
+        )
+        assert "Authentication failed" in result  # failed exec returns stderr text
+
+    @pytest.mark.dependency(depends=["TestInitDeploy::test_verify_users_created"])
+    def test_write_and_read_data(
+        self,
+        config: InitDeployConfig,
+        test_paths: Dict[str, str],
+        psmdb_client: MongoManager,
+    ) -> None:
+        """Write data and read from all nodes"""
+
+        psmdb_client.run_mongosh(
+            'db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})',
+            f"userAdmin:userAdmin123456@{config['cluster']}.{config['namespace']}",
+        )
+
+        retry(
+            lambda: psmdb_client.run_mongosh(
+                "db.getSiblingDB('myApp').test.insertOne({ x: 100500 })",
+                f"myApp:myPass@{config['cluster']}.{config['namespace']}",
+            ),
+            condition=lambda result: "acknowledged: true" in result,  # retry until the write is acknowledged
+        )
+
+        for i in range(3):  # verify the write replicated to every replset member
+            psmdb_client.compare_mongo_cmd(
+                "find({}, { _id: 0 }).toArray()",
+                f"myApp:myPass@{config['cluster']}-{i}.{config['cluster']}.{config['namespace']}",
+                test_file=f"{test_paths['test_dir']}/compare/find-1.json",
+            )
+
+    @pytest.mark.dependency(depends=["TestInitDeploy::test_write_and_read_data"])
+    def test_connection_count(self, config: InitDeployConfig, psmdb_client: MongoManager) -> None:
+        """Check number of connections doesn't exceed maximum"""
+        conn_count = int(
+            psmdb_client.run_mongosh(
+                "db.serverStatus().connections.current",
+                f"clusterAdmin:clusterAdmin123456@{config['cluster']}.{config['namespace']}",
+            ).strip()
+        )
+        assert conn_count <= config["max_conn"], (
+            f"Connection count {conn_count} exceeds maximum {config['max_conn']}"
+        )
+
+    @pytest.mark.dependency(depends=["TestInitDeploy::test_connection_count"])
+    def test_primary_failover(
+        self,
+        config: InitDeployConfig,
+        test_paths: Dict[str, str],
+        psmdb_client: MongoManager,
+    ) -> None:
+        """Kill Primary Pod, check reelection, check data"""
+        initial_primary = psmdb_client.get_mongo_primary(
+            f"clusterAdmin:clusterAdmin123456@{config['cluster']}.{config['namespace']}",
+            config["cluster"],
+        )
+        assert initial_primary, "Failed to get initial primary"
+
+        kubectl_bin(  # force-kill the primary to trigger a reelection
+            "delete",
+            "pods",
+            "--grace-period=0",
+            "--force",
+            initial_primary,
+            "-n",
+            config["namespace"],
+        )
+        wait_for_running(config["cluster"], 3)
+
+        changed_primary = psmdb_client.get_mongo_primary(
+            f"clusterAdmin:clusterAdmin123456@{config['cluster']}.{config['namespace']}",
+            config["cluster"],
+        )
+        assert initial_primary != changed_primary, "Primary didn't change after pod deletion"
+
+        psmdb_client.run_mongosh(
+            "db.getSiblingDB('myApp').test.insertOne({ x: 100501 })",
+            f"myApp:myPass@{config['cluster']}.{config['namespace']}",
+        )
+
+        for i in range(3):
+            psmdb_client.compare_mongo_cmd(
+                "find({}, { _id: 0 }).toArray()",
+                f"myApp:myPass@{config['cluster']}-{i}.{config['cluster']}.{config['namespace']}",
+                "-2nd",
+                test_file=f"{test_paths['test_dir']}/compare/find-2.json",
+            )
+
+    @pytest.mark.dependency(depends=["TestInitDeploy::test_primary_failover"])
+    def test_create_second_cluster(
+        self, config: InitDeployConfig, test_paths: Dict[str, str]
+    ) -> None:
+        """Check if possible to create second cluster"""
+        apply_s3_storage_secrets(test_paths["conf_dir"])
+        apply_cluster(f"{test_paths['test_dir']}/conf/{config['cluster2']}.yml")
+        wait_for_running(config["cluster2"], 3)
+
+        compare_kubectl(
+            test_paths["test_dir"], f"statefulset/{config['cluster2']}", config["namespace"]
+        )
+        compare_kubectl(
+            test_paths["test_dir"], f"service/{config['cluster2']}", config["namespace"]
+        )
+
+    @pytest.mark.dependency(depends=["TestInitDeploy::test_create_second_cluster"])
+    def test_second_cluster_data_operations(
+        self,
+        config: InitDeployConfig,
+        test_paths: Dict[str, str],
+        psmdb_client: MongoManager,
+    ) -> None:
+        """Write data and read from all nodes in second cluster"""
+        # Create user
+        psmdb_client.run_mongosh(
+            'db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})',
+            f"userAdmin:userAdmin123456@{config['cluster2']}.{config['namespace']}",
+        )
+
+        # Write data
+        psmdb_client.run_mongosh(
+            "db.getSiblingDB('myApp').test.insertOne({ x: 100502 })",
+            f"myApp:myPass@{config['cluster2']}.{config['namespace']}",
+        )
+
+        # Read from all nodes
+        for i in range(3):
+            psmdb_client.compare_mongo_cmd(
+                "find({}, { _id: 0 }).toArray()",
+                f"myApp:myPass@{config['cluster2']}-{i}.{config['cluster2']}.{config['namespace']}",
+                "-3rd",
+                test_file=f"{test_paths['test_dir']}/compare/find-3.json",
+            )
+
+    @pytest.mark.dependency(depends=["TestInitDeploy::test_second_cluster_data_operations"])
+    def test_connection_count_with_backup(
+        self, config: InitDeployConfig, psmdb_client: MongoManager
+    ) -> None:
+        """Check number of connections doesn't exceed maximum with backup enabled"""
+        max_conn = 50  # higher ceiling: backup agents add their own connections
+        time.sleep(300)  # Wait for backup agent connections
+
+        conn_count = int(
+            psmdb_client.run_mongosh(
+                "db.serverStatus().connections.current",
+                f"clusterAdmin:clusterAdmin123456@{config['cluster2']}.{config['namespace']}",
+            ).strip()
+        )
+        assert conn_count <= max_conn, (
+            f"Connection count {conn_count} exceeds maximum {max_conn} with backup enabled"
+        )
+
+    @pytest.mark.dependency(depends=["TestInitDeploy::test_connection_count_with_backup"])
+    def test_log_files_exist(self, config: InitDeployConfig) -> None:
+        """Check if mongod log files exist in pod"""
+        result = kubectl_bin(
+            "exec", f"{config['cluster2']}-0", "-c", "mongod", "--", "ls", "/data/db/logs"
+        )
+
+        assert "mongod.log" in result, "mongod.log not found"
+        assert "mongod.full.log" in result, "mongod.full.log not found"
+
+
+if __name__ == "__main__":  # allow running this test module directly, outside the pytest CLI
+    pytest.main([__file__, "-v"])
diff --git a/e2e-tests/lib/__init__.py b/e2e-tests/lib/__init__.py
new file mode 100644
index 0000000000..8b13789179
--- /dev/null
+++ b/e2e-tests/lib/__init__.py
@@ -0,0 +1 @@
+
diff --git a/e2e-tests/lib/bash_wrapper.py b/e2e-tests/lib/bash_wrapper.py
new file mode 100644
index 0000000000..34e19d40f3
--- /dev/null
+++ b/e2e-tests/lib/bash_wrapper.py
@@ -0,0 +1,41 @@
+import os
+import subprocess
+from pathlib import Path
+
+import pytest
+
+
+def pytest_addoption(parser: pytest.Parser) -> None:  # pytest hook: register the --test-name CLI option
+    parser.addoption("--test-name", action="store", help="Name of the bash test to run")
+
+
+def run_bash_test(test_name: str) -> None:
+    """Run e2e-tests/<test_name>/run with live output; pytest.fail on missing script or nonzero exit."""
+    script_path = Path(__file__).parent.parent / test_name / "run"
+
+    if not script_path.exists():
+        pytest.fail(f"Script not found: {script_path}")
+
+    original_cwd = os.getcwd()
+    script_dir = script_path.parent
+
+    try:
+        os.chdir(script_dir)  # scripts expect to run from their own directory
+        process = subprocess.Popen(  # stderr merged into stdout so the capture is one ordered stream
+            ["bash", "run"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True
+        )
+
+        output = []
+        if process.stdout is not None:
+            for line in iter(process.stdout.readline, ""):  # stream lines live and keep a copy for the failure report
+                print(line, end="")
+                output.append(line)
+
+        process.wait()
+
+        if process.returncode != 0:
+            error_msg = f"Test {test_name} failed with exit code {process.returncode}\n\nOUTPUT:\n{''.join(output)}"
+            pytest.fail(error_msg)
+
+    finally:
+        os.chdir(original_cwd)  # always restore the caller's working directory
diff --git a/e2e-tests/lib/config.py b/e2e-tests/lib/config.py
new file mode 100644
index 0000000000..af4bb6fbf4
--- /dev/null
+++ b/e2e-tests/lib/config.py
@@ -0,0 +1,147 @@
+import logging
+import os
+import subprocess
+
+import yaml
+from deepdiff import DeepDiff
+
+from .kubectl import kubectl_bin
+
+logger = logging.getLogger(__name__)
+
+
+def cat_config(config_file: str) -> str:
+    """Load a CR YAML and inject image/upgrade settings from env vars; returns the YAML text."""
+    with open(config_file, "r") as f:
+        config = yaml.safe_load(f)
+
+    if "spec" in config:
+        spec = config["spec"]
+
+        if "image" not in spec or spec["image"] is None:  # only fill image if the file left it unset
+            spec["image"] = os.environ.get("IMAGE_MONGOD")
+
+        if "pmm" in spec:
+            spec["pmm"]["image"] = os.environ.get("IMAGE_PMM_CLIENT")
+
+        if "initImage" in spec:
+            spec["initImage"] = os.environ.get("IMAGE")
+
+        if "backup" in spec:
+            spec["backup"]["image"] = os.environ.get("IMAGE_BACKUP")
+
+        if "upgradeOptions" not in spec:
+            spec["upgradeOptions"] = {}
+        spec["upgradeOptions"]["apply"] = "Never"  # always pin: disable automatic upgrades during tests
+
+    return yaml.dump(config)
+
+
+def apply_cluster(config_file: str) -> None:
+    """kubectl-apply the processed CR; with SKIP_BACKUPS_TO_AWS_GCP_AZURE set, keep only the first backup task."""
+    logger.info("Creating PSMDB cluster")
+    config_yaml = cat_config(config_file)
+
+    if not os.environ.get("SKIP_BACKUPS_TO_AWS_GCP_AZURE"):
+        kubectl_bin("apply", "-f", "-", input_data=config_yaml)
+    else:
+        config = yaml.safe_load(config_yaml)
+        if "spec" in config and "backup" in config["spec"] and "tasks" in config["spec"]["backup"]:
+            config["spec"]["backup"]["tasks"] = config["spec"]["backup"]["tasks"][:1]  # drop cloud-storage tasks
+        kubectl_bin("apply", "-f", "-", input_data=yaml.dump(config))
+
+
+def filter_yaml(
+    yaml_content: str, namespace: str, resource: str = "", skip_generation_check: bool = False
+) -> str:
+    """Normalize a kubectl YAML dump via yq: strip cluster-specific metadata and replace the namespace with NAME_SPACE."""
+
+    # TODO: consider using Python for filtering instead of yq
+    yq_filter = f"""
+    del(.metadata.ownerReferences[].apiVersion) |
+    del(.metadata.managedFields) |
+    del(.. | select(has("creationTimestamp")).creationTimestamp) |
+    del(.. | select(has("namespace")).namespace) |
+    del(.. | select(has("uid")).uid) |
+    del(.metadata.resourceVersion) |
+    del(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) |
+    del(.metadata.selfLink) |
+    del(.metadata.annotations."cloud.google.com/neg") |
+    del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") |
+    del(.. | select(has("image")).image) |
+    del(.. | select(has("clusterIP")).clusterIP) |
+    del(.. | select(has("clusterIPs")).clusterIPs) |
+    del(.. | select(has("dataSource")).dataSource) |
+    del(.. | select(has("procMount")).procMount) |
+    del(.. | select(has("storageClassName")).storageClassName) |
+    del(.. | select(has("finalizers")).finalizers) |
+    del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") |
+    del(.. | select(has("volumeName")).volumeName) |
+    del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") |
+    del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") |
+    del(.spec.volumeMode) |
+    del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") |
+    del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") |
+    del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") |
+    del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") |
+    del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") |
+    del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) |
+    del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) |
+    del(.. | select(has("nodePort")).nodePort) |
+    del(.status) |
+    (.. | select(tag == "!!str")) |= sub("{namespace}"; "NAME_SPACE") |
+    del(.spec.volumeClaimTemplates[].apiVersion) |
+    del(.spec.volumeClaimTemplates[].kind) |
+    del(.spec.ipFamilies) |
+    del(.spec.ipFamilyPolicy) |
+    del(.spec.persistentVolumeClaimRetentionPolicy) |
+    del(.spec.internalTrafficPolicy) |
+    del(.spec.allocateLoadBalancerNodePorts) |
+    (.. | select(. == "extensions/v1beta1")) = "apps/v1" |
+    (.. | select(. == "batch/v1beta1")) = "batch/v1"
+    """
+
+    cmd = ["yq", "eval", yq_filter.strip(), "-"]  # yq_filter is an f-string: the live namespace was interpolated into the sub() above
+    result = subprocess.run(cmd, input=yaml_content, text=True, capture_output=True, check=True)
+    filtered_yaml = result.stdout
+
+    if "cronjob" in resource.lower() or skip_generation_check:  # cronjob generations churn, so drop them
+        cmd = ["yq", "eval", "del(.metadata.generation)", "-"]
+        result = subprocess.run(
+            cmd, input=filtered_yaml, text=True, capture_output=True, check=True
+        )
+        filtered_yaml = result.stdout
+
+    return filtered_yaml
+
+
+def compare_kubectl(test_dir: str, resource: str, namespace: str, postfix: str = "") -> None:
+    """Assert a live resource matches its golden file <test_dir>/compare/<resource>{postfix}.yml after yq filtering."""
+    expected_result = f"{test_dir}/compare/{resource.replace('/', '_')}{postfix}.yml"
+
+    try:
+        actual_yaml = kubectl_bin("get", resource, "-o", "yaml")  # NOTE(review): no -n flag — presumably relies on the current kubectl context namespace; confirm
+        with open(expected_result, "r") as f:
+            expected_yaml = f.read()
+
+        filtered_actual = filter_yaml(actual_yaml, namespace)
+        filtered_expected = filter_yaml(expected_yaml, namespace)
+
+        actual_data = yaml.safe_load(filtered_actual)
+        expected_data = yaml.safe_load(filtered_expected)
+
+        diff = DeepDiff(expected_data, actual_data)  # structural compare, immune to key order
+        assert not diff, f"YAML files differ: {diff.pretty()}"
+
+    except subprocess.CalledProcessError as e:
+        raise ValueError(f"Failed to process resource {resource}: {e}")  # NOTE(review): consider `raise ... from e` to keep the traceback chain
+
+
+def apply_runtime_class(test_dir: str) -> None:
+    """Apply conf/container-rc.yaml; on EKS, swap the docker handler for runc first."""
+    logger.info("Applying runc runtime class")
+    with open(f"{test_dir}/../conf/container-rc.yaml", "r") as f:
+        content = f.read()
+    if os.environ.get("EKS"):
+        content = content.replace("docker", "runc")
+    kubectl_bin("apply", "-f", "-", input_data=content)
diff --git a/e2e-tests/lib/k8s_collector.py b/e2e-tests/lib/k8s_collector.py
new file mode 100644
index 0000000000..9024c4ae5a
--- /dev/null
+++ b/e2e-tests/lib/k8s_collector.py
@@ -0,0 +1,206 @@
+#!/usr/bin/env python3
+
+import logging
+import os
+import tarfile
+from concurrent.futures import ThreadPoolExecutor, as_completed
+from datetime import datetime
+from typing import Dict, List, Optional
+
+from lib.kubectl import kubectl_bin
+
+logger = logging.getLogger(__name__)
+
+REPORTS_DIR = os.path.join(os.path.dirname(__file__), "..", "reports")
+
+
+class K8sCollector:  # collects namespace diagnostics (get/describe/logs/events) into reports/<ns>_<timestamp>/
+    def __init__(self, namespace: str, custom_resources: Optional[List[str]] = None):
+        self.namespace = namespace
+        self.custom_resources = custom_resources or []  # extra resource types (e.g. psmdb) to dump alongside the built-ins
+        self.timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
+        self.output_dir = os.path.join(REPORTS_DIR, f"{namespace}_{self.timestamp}")
+
+    def kubectl(self, *args: str) -> str:
+        """Run kubectl command and return stdout"""
+        return kubectl_bin(*args, check=False)  # check=False: collection is best-effort, never raises
+
+    def kubectl_ns(self, *args: str) -> str:
+        """Run kubectl command with namespace flag"""
+        return kubectl_bin(*args, "-n", self.namespace, check=False)
+
+    def get_names(self, resource_type: str) -> List[str]:
+        """Get list of resource names"""
+        output = self.kubectl_ns("get", resource_type, "-o", "name")
+        return [line.split("/")[-1] for line in output.strip().split("\n") if line]  # "pod/foo" -> "foo"
+
+    def save(self, path: str, content: str) -> None:
+        """Save content to file, creating directories as needed"""
+        os.makedirs(os.path.dirname(path), exist_ok=True)
+        with open(path, "w") as f:
+            f.write(content)
+
+    def process_resource(self, resource_type: str) -> None:
+        """Process a resource type: get list, describe each, get yaml"""
+        logger.debug(f"Processing {resource_type}...")
+        base = f"{self.output_dir}/get/{resource_type}"
+
+        self.save(
+            f"{base}/{resource_type}.txt", self.kubectl_ns("get", resource_type, "-o", "wide")
+        )
+
+        for name in self.get_names(resource_type):
+            self.save(
+                f"{self.output_dir}/describe/{resource_type}_{name}.txt",
+                self.kubectl_ns("describe", resource_type, name),
+            )
+            self.save(
+                f"{base}/{name}.yaml", self.kubectl_ns("get", resource_type, name, "-o", "yaml")
+            )
+
+    def extract_pod_logs(self, pod: str) -> None:
+        """Extract logs for all containers in a pod"""
+        containers = self.kubectl_ns(
+            "get", "pod", pod, "-o", "jsonpath={.spec.containers[*].name}"
+        ).split()
+
+        for container in containers:
+            logger.debug(f"Extracting logs: {pod}/{container}")
+            logs = self.kubectl_ns("logs", pod, "-c", container)
+            self.save(f"{self.output_dir}/logs/{pod}/{container}.log", logs)
+
+    def process_pods(self) -> None:
+        """Process pods with parallel log extraction"""
+        logger.debug("Processing pods...")
+        base = f"{self.output_dir}/get/pods"
+
+        self.save(f"{base}/pods.txt", self.kubectl_ns("get", "pods", "-o", "wide"))
+
+        pods = self.get_names("pods")
+        for pod in pods:
+            self.save(
+                f"{self.output_dir}/describe/pod_{pod}.txt",
+                self.kubectl_ns("describe", "pod", pod),
+            )
+            self.save(f"{base}/{pod}.yaml", self.kubectl_ns("get", "pod", pod, "-o", "yaml"))
+
+        with ThreadPoolExecutor(max_workers=5) as executor:  # log extraction dominates runtime, so parallelize it
+            list(executor.map(self.extract_pod_logs, pods))
+
+    def extract_events(self) -> None:
+        """Extract namespace events"""
+        logger.debug("Extracting events...")
+        self.save(
+            f"{self.output_dir}/events/events.txt", self.kubectl_ns("get", "events", "-o", "wide")
+        )
+        self.save(
+            f"{self.output_dir}/events/events.json", self.kubectl_ns("get", "events", "-o", "json")
+        )
+
+    def extract_errors(self) -> None:
+        """Extract error lines from all logs into summary"""
+        logs_dir = f"{self.output_dir}/logs"
+        if not os.path.exists(logs_dir):
+            return
+
+        errors = []
+        for root, _, files in os.walk(logs_dir):
+            for file in files:
+                if not file.endswith(".log"):
+                    continue
+                path = os.path.join(root, file)
+                with open(path) as f:
+                    error_lines = [line for line in f if "error" in line.lower()]  # naive substring match, case-insensitive
+                if error_lines:
+                    rel_path = os.path.relpath(path, logs_dir)
+                    errors.append(f"=== {rel_path} ===\n" + "".join(error_lines))
+
+        if errors:  # only write the summary when something was found
+            self.save(
+                f"{self.output_dir}/error_summary.log",
+                f"Errors for {self.namespace} ({self.timestamp})\n{'=' * 50}\n\n"
+                + "\n\n".join(errors),
+            )
+
+    def capture_summary(self) -> Dict[str, str]:
+        """Capture simplified resources for HTML report. Returns dict with resources, logs, events."""
+        sections = []
+
+        sections.append("=== Nodes ===")
+        sections.append(self.kubectl("get", "nodes") or "(no output)")
+        sections.append("")
+
+        sections.append(f"=== All from namespace {self.namespace} ===")
+        sections.append(self.kubectl_ns("get", "all") or "(no output)")
+        sections.append("")
+
+        sections.append("=== Secrets ===")
+        sections.append(self.kubectl_ns("get", "secrets") or "(no output)")
+        sections.append("")
+
+        sections.append("=== PSMDB Cluster ===")
+        sections.append(
+            self.kubectl_ns(
+                "get", "psmdb", "-o", "custom-columns=NAME:.metadata.name,STATE:.status.state"
+            )
+            or "(no output)"
+        )
+        sections.append("")
+
+        sections.append("=== PSMDB Backup ===")
+        sections.append(self.kubectl_ns("get", "psmdb-backup") or "(no output)")
+        sections.append("")
+
+        sections.append("=== PSMDB Restore ===")
+        sections.append(self.kubectl_ns("get", "psmdb-restore") or "(no output)")
+
+        logs = self.kubectl_ns(
+            "logs", "-l", "app.kubernetes.io/name=percona-server-mongodb-operator", "--tail=50"
+        )
+
+        events = self.kubectl_ns("get", "events", "--sort-by=.lastTimestamp")
+
+        return {
+            "resources": "\n".join(sections),
+            "logs": f"=== PSMDB Operator Logs ===\n{logs or '(no output)'}",
+            "events": f"=== Kubernetes Events ===\n{events or '(no output)'}",
+        }
+
+    def collect_all(self) -> None:
+        """Main collection method"""
+        logger.info(f"Collecting from namespace: {self.namespace}")
+
+        self.process_pods()
+
+        resources = ["statefulsets", "deployments", "secrets", "jobs", "configmaps", "services"]
+        resources.extend(r for r in self.custom_resources if r)  # skip empty entries
+
+        with ThreadPoolExecutor(max_workers=6) as executor:
+            futures = [executor.submit(self.process_resource, r) for r in resources]
+            futures.append(executor.submit(self.extract_events))
+            for f in as_completed(futures):  # surface per-task failures without aborting the rest
+                try:
+                    f.result()
+                except Exception as e:
+                    logger.error(f"Error: {e}")
+
+        self.extract_errors()
+        logger.info(f"Done. Output: {self.output_dir}")
+
+
+def collect_resources(
+    namespace: str, custom_resources: Optional[List[str]] = None, output_dir: Optional[str] = None
+) -> None:
+    """Collect Kubernetes resources for a given namespace."""
+    collector = K8sCollector(namespace, custom_resources)
+    if output_dir:  # output_dir overrides only the directory prefix; the timestamp suffix is kept
+        collector.output_dir = os.path.join(REPORTS_DIR, f"{output_dir}_{collector.timestamp}")
+
+    os.makedirs(REPORTS_DIR, exist_ok=True)
+
+    try:
+        collector.collect_all()
+        with tarfile.open(f"{collector.output_dir}.tar.gz", "w:gz") as tar:  # archive the whole report directory
+            tar.add(collector.output_dir, arcname=os.path.basename(collector.output_dir))
+    except Exception as e:
+        logger.error(f"Error collecting from {namespace}: {e}")  # best-effort: never fail the test run over diagnostics
diff --git a/e2e-tests/lib/kubectl.py b/e2e-tests/lib/kubectl.py
new file mode 100644
index 0000000000..20807eadad
--- /dev/null
+++ b/e2e-tests/lib/kubectl.py
@@ -0,0 +1,191 @@
+import json
+import logging
+import subprocess
+import time
+
+logger = logging.getLogger(__name__)
+
+
+def kubectl_bin(*args: str, check: bool = True, input_data: str = "") -> str:
+    """Run kubectl and return stdout; with check=False a failed command with no stdout returns its stderr instead."""
+    cmd = ["kubectl"] + list(args)
+    logger.debug(" ".join(map(str, cmd)))
+    result = subprocess.run(cmd, check=check, capture_output=True, text=True, input=input_data)  # raises CalledProcessError on failure when check=True
+
+    if result.stderr:  # kubectl writes warnings to stderr even on success
+        logger.warning(f"kubectl error: {result.stderr}")
+
+    if result.returncode != 0 and not result.stdout:  # only reachable with check=False
+        return result.stderr
+
+    return result.stdout
+
+
+def wait_pod(pod_name: str, timeout: int = 360) -> None:
+    """Poll the pod's Ready condition; raise TimeoutError with the last known status after `timeout` seconds."""
+    start_time = time.time()
+    logger.info(f"Waiting for pod/{pod_name} to be ready...")
+    while time.time() - start_time < timeout:
+        try:
+            result = kubectl_bin(
+                "get",
+                "pod",
+                pod_name,
+                "-o",
+                "jsonpath={.status.conditions[?(@.type=='Ready')].status}",
+            ).strip("'")
+            if result == "True":
+                logger.info(f"Pod {pod_name} is ready")
+                return
+        except subprocess.CalledProcessError:
+            pass  # pod may not exist yet; keep polling
+        time.sleep(1)
+
+    status = (  # build a diagnostic status string for the error message
+        kubectl_bin(
+            "get",
+            "pod",
+            pod_name,
+            "-o",
+            "jsonpath={.status.phase} (Ready={.status.conditions[?(@.type=='Ready')].status})",
+            check=False,
+        ).strip()
+        or "not found"
+    )
+    raise TimeoutError(f"Timeout waiting for {pod_name} to be ready. Last status: {status}")
+
+
+def wait_for_running(
+    cluster_name: str, expected_pods: int, check_cluster_readyness: bool = True, timeout: int = 600
+) -> None:
+    """Wait by name for all replset pods (incl. arbiter/non-voting/hidden), then optionally for psmdb state 'ready'."""
+    last_pod = expected_pods - 1
+    rs_name = cluster_name.split("-")[-1]  # e.g. "some-name-rs0" -> "rs0"
+
+    for i in range(last_pod + 1):
+        if i == last_pod and get_jsonpath(cluster_name, rs_name, "arbiter.enabled") == "true":  # last slot is the arbiter when enabled
+            wait_pod(f"{cluster_name}-arbiter-0")
+        else:
+            wait_pod(f"{cluster_name}-{i}")
+
+    for pod_type, path_prefix in [("nv", "non_voting"), ("hidden", "hidden")]:
+        if get_jsonpath(cluster_name, rs_name, f"{path_prefix}.enabled") == "true":
+            size = get_jsonpath(cluster_name, rs_name, f"{path_prefix}.size")
+            if size:
+                for i in range(int(size)):
+                    wait_pod(f"{cluster_name}-{pod_type}-{i}")
+
+    cluster_name = cluster_name.replace(f"-{rs_name}", "")  # strip the replset suffix to get the CR name
+    if check_cluster_readyness:
+        start_time = time.time()
+        logger.info(f"Waiting for cluster {cluster_name} readiness")
+        while time.time() - start_time < timeout:
+            try:
+                state = kubectl_bin(
+                    "get", "psmdb", cluster_name, "-o", "jsonpath={.status.state}"
+                ).strip("'")
+                if state == "ready":
+                    logger.info(f"Cluster {cluster_name} is ready")
+                    return
+            except subprocess.CalledProcessError:
+                pass  # CR may be momentarily unavailable; keep polling
+            time.sleep(1)
+
+        state = (  # last observed state, for the error message
+            kubectl_bin(
+                "get",
+                "psmdb",
+                cluster_name,
+                "-o",
+                "jsonpath={.status.state}",
+                check=False,
+            ).strip("'")
+            or "unknown"
+        )
+        raise TimeoutError(f"Timeout waiting for {cluster_name} to be ready. Last state: {state}")
+
+
+def wait_for_delete(resource: str, timeout: int = 180) -> None:
+    """Block until the resource is gone; raise TimeoutError if it survives past `timeout` seconds."""
+    logger.info(f"Waiting for {resource} to be deleted")
+    time.sleep(1)  # brief pause before waiting — presumably lets the delete request propagate; confirm
+    try:
+        kubectl_bin("wait", "--for=delete", resource, f"--timeout={timeout}s")
+    except subprocess.CalledProcessError as e:
+        raise TimeoutError(f"Resource {resource} was not deleted within {timeout}s") from e
+    logger.info(f"{resource} was deleted")
+
+
+def get_jsonpath(cluster_name: str, rs_name: str, path: str) -> str:
+    """Read `path` from the named replset entry of a psmdb CR; returns "" on kubectl failure."""
+    jsonpath = f'{{.spec.replsets[?(@.name=="{rs_name}")].{path}}}'
+    try:
+        return kubectl_bin("get", "psmdb", cluster_name, "-o", f"jsonpath={jsonpath}")
+    except subprocess.CalledProcessError:
+        return ""
+
+
+def clean_all_namespaces() -> None:
+    """Delete all namespaces except system ones."""
+    try:
+        logger.info("Cleaning up all old namespaces")
+        result = kubectl_bin(
+            "get",
+            "ns",
+            "-o",
+            "jsonpath={range .items[*]}{.metadata.name} {.status.phase}{'\\n'}{end}",
+        )
+        excluded = ("kube-", "default", "psmdb-operator", "openshift", "gke-", "gmp-")  # substring match, not prefix-only
+
+        namespaces = [
+            parts[0]
+            for line in result.strip().splitlines()
+            if (parts := line.split())  # walrus: skip blank lines and bind "name phase" parts
+            and len(parts) == 2
+            and not any(ex in parts[0] for ex in excluded)
+            and parts[1] != "Terminating"  # skip namespaces already being deleted
+        ]
+
+        if namespaces:
+            kubectl_bin("delete", "ns", *namespaces)
+    except subprocess.CalledProcessError:
+        logger.error("Failed to clean namespaces")  # best-effort cleanup; never raises
+
+
+def detect_k8s_provider(provider: str) -> str:
+    """Return "1" if `provider` appears in the server's gitVersion, else "0" (string flags for shell compatibility)."""
+    try:
+        output = kubectl_bin("version", "-o", "json")
+        git_version = json.loads(output)["serverVersion"]["gitVersion"]
+        return "1" if provider in git_version else "0"
+    except Exception as e:
+        logger.error(f"Failed to detect Kubernetes provider: {e}")
+        return "0"
+
+
+def get_k8s_versions() -> tuple[str, str]:
+    """Get Kubernetes git version and semantic version."""
+    output = kubectl_bin("version", "-o", "json")
+    version_info = json.loads(output)["serverVersion"]
+
+    git_version = version_info["gitVersion"]
+    major = version_info["major"]
+    minor = version_info["minor"].rstrip("+")  # managed providers report e.g. "27+"
+    kube_version = f"{major}.{minor}"
+
+    return git_version, kube_version
+
+
+def is_openshift() -> str:
+    """Detect if running on OpenShift. Returns '1' or ''."""
+    try:
+        result = subprocess.run(["oc", "get", "projects"], capture_output=True)
+        return "1" if result.returncode == 0 else ""
+    except FileNotFoundError:
+        return ""  # `oc` binary not installed -> not OpenShift
+
+
+def is_minikube() -> str:
+    """Detect if running on Minikube. Returns '1' or ''."""
+    result = kubectl_bin("get", "nodes", check=False)
+    return "1" if any(line.startswith("minikube") for line in result.splitlines()) else ""  # matches on the node name column
diff --git a/e2e-tests/lib/mongo.py b/e2e-tests/lib/mongo.py
new file mode 100644
index 0000000000..f38eb0299d
--- /dev/null
+++ b/e2e-tests/lib/mongo.py
@@ -0,0 +1,154 @@
+import json
+import logging
+import os
+import re
+from pathlib import Path
+from typing import Any, Dict
+
+from deepdiff import DeepDiff
+
+from .kubectl import kubectl_bin
+from .utils import retry
+
+logger = logging.getLogger(__name__)
+
+
+class MongoManager:
+    """Runs mongosh inside the PSMDB client pod and compares the results
+    against JSON fixture files using DeepDiff."""
+
+    def __init__(self, client: str):
+        # Name of the client pod used as the `kubectl exec` target.
+        self.client = client
+
+    def run_mongosh(
+        self,
+        command: str,
+        uri: str,
+        driver: str = "mongodb+srv",
+        suffix: str = ".svc.cluster.local",
+        mongo_flag: str = "",
+        timeout: int = 30,
+    ) -> str:
+        """Execute mongosh command in PSMDB client container.
+
+        `uri` is host (optionally user:pass@host) without a scheme; the
+        scheme (`driver`) and DNS `suffix` are added here.  Returns the
+        mongosh stdout as text.
+        """
+        # A URI containing "cfg" targets the config-server replica set.
+        replica_set = "cfg" if "cfg" in uri else "rs0"
+        connection_string = f"{driver}://{uri}{suffix}/admin?ssl=false&replicaSet={replica_set}"
+        if mongo_flag:
+            # NOTE(review): the flag is appended into the same argv entry as
+            # the URI (space-separated) — confirm mongosh tolerates this.
+            connection_string += f" {mongo_flag}"
+
+        result = kubectl_bin(
+            "exec",
+            self.client,
+            "--",
+            "timeout",
+            str(timeout),
+            "mongosh",
+            f"{connection_string}",
+            "--eval",
+            command,
+            "--quiet",
+            check=False,  # non-zero exit codes are returned, not raised
+        )
+        return result
+
+    def compare_mongo_user(self, uri: str, expected_role: str, test_dir: str) -> None:
+        """Compare MongoDB user permissions against the fixture for `expected_role`."""
+
+        def get_expected_file(test_dir: str, user: str) -> Any:
+            """Get the appropriate expected file based on MongoDB version"""
+            base_path = Path(test_dir) / "compare"
+            base_file = base_path / f"{user}.json"
+
+            # Prefer a version-specific fixture (e.g. user-80.json) when the
+            # mongod image matches that major version and the file exists.
+            image_mongod = os.environ.get("IMAGE_MONGOD", "")
+            version_mappings = [("8.0", "-80"), ("7.0", "-70"), ("6.0", "-60")]
+
+            for version, suffix in version_mappings:
+                if version in image_mongod:
+                    version_file = base_path / f"{user}{suffix}.json"
+                    if version_file.exists():
+                        logger.info(f"Using version-specific file: {version_file}")
+                        with open(version_file) as f:
+                            return json.load(f)
+
+            if base_file.exists():
+                logger.info(f"Using base file: {base_file}")
+                with open(base_file) as f:
+                    return json.load(f)
+            else:
+                raise FileNotFoundError(f"Expected file not found: {base_file}")
+
+        def clean_mongo_json(data: Dict[str, Any]) -> Dict[str, Any]:
+            """Remove timestamps and metadata from MongoDB response"""
+
+            def remove_timestamps(obj: Any) -> Any:
+                # Recursively drop per-run metadata keys and blank out
+                # ISO-8601 timestamps embedded inside string values.
+                if isinstance(obj, dict):
+                    return {
+                        k: remove_timestamps(v)
+                        for k, v in obj.items()
+                        if k not in {"ok", "$clusterTime", "operationTime"}
+                    }
+                elif isinstance(obj, list):
+                    return [remove_timestamps(v) for v in obj]
+                elif isinstance(obj, str):
+                    return re.sub(
+                        r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}[\+\-]\d{4}", "", obj
+                    )
+                else:
+                    return obj
+
+            cleaned = remove_timestamps(data)
+            if not isinstance(cleaned, dict):
+                raise TypeError("Expected cleaned MongoDB response to be a dict")
+            return cleaned
+
+        # connectionStatus reports the authenticated user and its privileges.
+        result = retry(
+            lambda: self.run_mongosh(
+                "EJSON.stringify(db.runCommand({connectionStatus:1,showPrivileges:true}))",
+                uri,
+            )
+        )
+        actual_data = clean_mongo_json(json.loads(result))
+        expected_data = get_expected_file(test_dir, expected_role)
+
+        diff = DeepDiff(expected_data, actual_data, ignore_order=True)
+        assert not diff, f"MongoDB user permissions differ: {diff.pretty()}"
+
+    def compare_mongo_cmd(
+        self,
+        command: str,
+        uri: str,
+        postfix: str = "",
+        suffix: str = "",
+        database: str = "myApp",
+        collection: str = "test",
+        sort: str = "",
+        test_file: str = "",
+    ) -> None:
+        """Compare MongoDB command output against the JSON in `test_file`.
+
+        NOTE(review): `postfix` and `suffix` are currently unused here —
+        confirm whether callers still need them.
+        """
+        full_cmd = f"{collection}.{command}"
+        if sort:
+            # `sort` is a raw method-call fragment appended after a dot,
+            # e.g. "sort({_id:1})" — presumably; verify against callers.
+            full_cmd = f"{collection}.{command}.{sort}"
+
+        logger.info(f"Running: {full_cmd} on db {database}")
+
+        mongo_expr = f"EJSON.stringify(db.getSiblingDB('{database}').{full_cmd})"
+        result = json.loads(self.run_mongosh(mongo_expr, uri, "mongodb"))
+
+        logger.info(f"MongoDB output: {result}")
+
+        with open(test_file) as file:
+            expected = json.load(file)
+
+        diff = DeepDiff(expected, result)
+        assert not diff, f"MongoDB command output differs: {diff.pretty()}"
+
+    def get_mongo_primary(self, uri: str, cluster_name: str) -> str:
+        """Get current MongoDB primary node.
+
+        Returns the pod name directly when the primary endpoint contains the
+        cluster name; otherwise resolves the host through the service list.
+        Raises ValueError when no service matches.
+        """
+        primary_endpoint = self.run_mongosh("EJSON.stringify(db.hello().me)", uri)
+
+        if cluster_name in primary_endpoint:
+            # Endpoint like "cluster-rs0-0.cluster-rs0..." -> pod name.
+            return primary_endpoint.split(".")[0].replace('"', "")
+        else:
+            # Exposed endpoint: map the host back to its Kubernetes service.
+            endpoint_host = primary_endpoint.split(":")[0]
+            result = kubectl_bin("get", "service", "-o", "wide")
+
+            for line in result.splitlines():
+                if endpoint_host in line:
+                    return line.split()[0].replace('"', "")
+            raise ValueError("Primary node not found in service list")
diff --git a/e2e-tests/lib/operator.py b/e2e-tests/lib/operator.py
new file mode 100644
index 0000000000..817445885c
--- /dev/null
+++ b/e2e-tests/lib/operator.py
@@ -0,0 +1,189 @@
+import json
+import logging
+import os
+import re
+import subprocess
+from pathlib import Path
+
+import yaml
+
+from .kubectl import kubectl_bin, wait_pod
+
+logger = logging.getLogger(__name__)
+
+
+def deploy_operator(test_dir: str, src_dir: str) -> None:
+    """Deploy the operator with simplified logic."""
+    logger.info("Start PSMDB operator")
+    # OPERATOR_NS selects the cluster-wide (cw-*) deployment flavour.
+    operator_ns = os.environ.get("OPERATOR_NS")
+
+    # Prefer a test-local CRD override when the test ships one.
+    crd_file = f"{test_dir}/conf/crd.yaml"
+    if not os.path.isfile(crd_file):
+        crd_file = f"{src_dir}/deploy/crd.yaml"
+
+    kubectl_bin("apply", "--server-side", "--force-conflicts", "-f", crd_file)
+
+    rbac_type = "cw-rbac" if operator_ns else "rbac"
+    operator_file = f"{src_dir}/deploy/{'cw-' if operator_ns else ''}operator.yaml"
+
+    apply_rbac(src_dir, rbac_type)
+
+    with open(operator_file, "r") as f:
+        data = yaml.safe_load(f)
+
+    # Point every container at the image under test and make logs verbose.
+    for container in data["spec"]["template"]["spec"]["containers"]:
+        container["image"] = os.environ.get("IMAGE")
+        if "env" in container:
+            env_vars = {env["name"]: env for env in container["env"]}
+            if "DISABLE_TELEMETRY" in env_vars:
+                env_vars["DISABLE_TELEMETRY"]["value"] = "true"
+            if "LOG_LEVEL" in env_vars:
+                env_vars["LOG_LEVEL"]["value"] = "DEBUG"
+
+    yaml_content = yaml.dump(data, default_flow_style=False)
+    kubectl_bin("apply", "-f", "-", input_data=yaml_content)
+    operator_pod = get_operator_pod()
+    wait_pod(operator_pod)
+
+    # Confirm the manager actually started by scanning its logs.
+    logs = kubectl_bin("logs", operator_pod)
+    startup_logs = [line for line in logs.splitlines() if "Manager starting up" in line]
+    if startup_logs:
+        logger.info(f"Operator startup: {startup_logs[0]}")
+    else:
+        logger.warning("No 'Manager starting up' message found in logs")
+
+
+def get_operator_pod() -> str:
+    """Return the name of the single running operator pod.
+
+    Raises RuntimeError when kubectl fails, when no pod matches the operator
+    selector, or when more than one pod matches.
+    """
+    args = [
+        "get",
+        "pods",
+        "--selector=name=percona-server-mongodb-operator",
+        "-o",
+        "jsonpath={.items[].metadata.name}",
+    ]
+    operator_ns = os.environ.get("OPERATOR_NS")
+    if operator_ns:
+        # Cluster-wide mode: the operator lives in its own namespace.
+        args.extend(["-n", operator_ns])
+    try:
+        out = kubectl_bin(*args)
+    except Exception as e:
+        # Only wrap the kubectl failure itself; our own RuntimeErrors below
+        # must not be double-wrapped into "Failed to get operator pod: ...".
+        raise RuntimeError(f"Failed to get operator pod: {e}") from e
+    names = [n for n in out.strip().split() if n]
+    if not names:
+        raise RuntimeError(
+            "No Running operator pod found. Ensure the operator deployment succeeded"
+        )
+    if len(names) > 1:
+        raise RuntimeError(f"Multiple operator pods found: {names}")
+    return names[0]
+
+
+def apply_rbac(src_dir: str, rbac: str = "rbac") -> None:
+    """Apply the RBAC manifest, rewriting every `namespace:` line to OPERATOR_NS."""
+    operator_ns = os.getenv("OPERATOR_NS", "psmdb-operator")
+    rbac_yaml = (Path(src_dir) / "deploy" / f"{rbac}.yaml").read_text()
+    patched_yaml = re.sub(
+        r"^(\s*)namespace:\s*.*$",
+        rf"\1namespace: {operator_ns}",
+        rbac_yaml,
+        flags=re.MULTILINE,
+    )
+
+    if os.getenv("OPERATOR_NS"):
+        # Only pass -n explicitly when the caller actually set OPERATOR_NS.
+        kubectl_bin("apply", "-n", operator_ns, "-f", "-", input_data=patched_yaml)
+    else:
+        kubectl_bin("apply", "-f", "-", input_data=patched_yaml)
+
+
+def delete_crd_rbac(src_dir: Path) -> None:
+    """Delete the operator CRDs, force-clearing finalizers on leftover CRs.
+
+    Steps: read crd.yaml to learn CRD names and their resource kinds, issue a
+    non-blocking delete, strip finalizers from any remaining custom resources
+    so deletion can complete, then wait for each CRD to disappear.
+    """
+    logger.info("Deleting old CRDs and RBACs")
+    crd_path = (src_dir / "deploy" / "crd.yaml").resolve()
+
+    docs = list(yaml.safe_load_all(crd_path.read_text()))
+    crd_names = []
+    resource_kinds = []
+    for doc in docs:
+        if doc and doc.get("kind") == "CustomResourceDefinition":
+            crd_names.append(doc["metadata"]["name"])
+            group = doc["spec"]["group"]
+            plural = doc["spec"]["names"]["plural"]
+            # e.g. "perconaservermongodbs.psmdb.percona.com"
+            resource_kinds.append(f"{plural}.{group}")
+
+    # --wait=false so we can clear finalizers while deletion is in flight.
+    kubectl_bin("delete", "-f", str(crd_path), "--ignore-not-found", "--wait=false", check=False)
+
+    for kind in resource_kinds:
+        try:
+            items_json = kubectl_bin("get", kind, "--all-namespaces", "-o", "json")
+            data = json.loads(items_json)
+            for item in data.get("items", []):
+                ns = item["metadata"]["namespace"]
+                name = item["metadata"]["name"]
+                # Empty the finalizer list so the CR can actually be removed.
+                kubectl_bin(
+                    "patch",
+                    kind,
+                    "-n",
+                    ns,
+                    name,
+                    "--type=merge",
+                    "-p",
+                    '{"metadata":{"finalizers":[]}}',
+                )
+        except subprocess.CalledProcessError:
+            # Kind already gone or no instances left — nothing to patch.
+            pass
+
+    for name in crd_names:
+        kubectl_bin("wait", "--for=delete", "crd", name, check=False)
+
+
+def check_crd_for_deletion(file_path: str) -> None:
+    """Check and remove finalizers from CRDs to allow deletion.
+
+    For every CRD document in `file_path` whose latest status condition is
+    `Terminating`, strip the finalizers from the CRD itself and from all of
+    its custom-resource instances so Kubernetes can finish deleting them.
+    """
+    with open(file_path, "r") as f:
+        yaml_content = f.read()
+
+    try:
+        # safe_load_all handles multi-document YAML properly, unlike a naive
+        # split on "---" which breaks on document markers inside values.
+        docs = list(yaml.safe_load_all(yaml_content))
+    except yaml.YAMLError as yaml_error:
+        logger.error(f"Error parsing YAML document: {yaml_error}")
+        return
+
+    for parsed_doc in docs:
+        if not parsed_doc or "metadata" not in parsed_doc:
+            continue
+        try:
+            crd_name = parsed_doc["metadata"]["name"]
+
+            result = kubectl_bin(
+                "get",
+                f"crd/{crd_name}",
+                "-o",
+                "jsonpath={.status.conditions[-1].type}",
+                "--ignore-not-found",
+            )
+            if result.strip() != "Terminating":
+                continue
+
+            logger.info(f"Removing finalizers from CRD {crd_name} to allow deletion")
+            kubectl_bin(
+                "patch",
+                f"crd/{crd_name}",
+                "--type=merge",
+                "-p",
+                '{"metadata":{"finalizers":[]}}',
+            )
+            try:
+                # Also clear finalizers on the CR instances; they may not exist.
+                kubectl_bin(
+                    "patch",
+                    crd_name,
+                    "--all-namespaces",
+                    "--type=merge",
+                    "-p",
+                    '{"metadata":{"finalizers":[]}}',
+                )
+            except Exception as patch_error:
+                logger.warning(
+                    f"Could not patch {crd_name} instances (may not exist): {patch_error}"
+                )
+        except Exception as e:
+            logger.error(f"Error removing finalizers from CRD: {e}")
diff --git a/e2e-tests/lib/report_generator.py b/e2e-tests/lib/report_generator.py
new file mode 100644
index 0000000000..a15fc9c9aa
--- /dev/null
+++ b/e2e-tests/lib/report_generator.py
@@ -0,0 +1,144 @@
+import os
+import re
+from typing import Any, List, MutableSequence
+
+from pytest_html import extras
+
+from lib.k8s_collector import K8sCollector
+
+# Environment variables surfaced in the "Environment Variables" table of the
+# HTML test report (consumed by pytest_html_results_summary).
+ENV_VARS_TO_REPORT = [
+    "KUBE_VERSION",
+    "EKS",
+    "GKE",
+    "OPENSHIFT",
+    "MINIKUBE",
+    "GIT_COMMIT",
+    "GIT_BRANCH",
+    "OPERATOR_VERSION",
+    "OPERATOR_NS",
+    "IMAGE",
+    "IMAGE_MONGOD",
+    "IMAGE_BACKUP",
+    "IMAGE_PMM_CLIENT",
+    "IMAGE_PMM_SERVER",
+    "IMAGE_PMM3_CLIENT",
+    "IMAGE_PMM3_SERVER",
+    "CERT_MANAGER_VER",
+    "CHAOS_MESH_VER",
+    "MINIO_VER",
+    "CLEAN_NAMESPACE",
+    "DELETE_CRD_ON_START",
+    "SKIP_DELETE",
+    "SKIP_BACKUPS_TO_AWS_GCP_AZURE",
+]
+
+
+def pytest_html_results_summary(
+    prefix: MutableSequence[str], summary: MutableSequence[str], postfix: MutableSequence[str]
+) -> None:
+    """Add environment variables table to HTML report summary.
+
+    NOTE(review): the original markup and escaping were mangled (the entity
+    escapes degenerated into no-op replace calls); this reconstructs a plain
+    HTML table with properly escaped values.
+    """
+    rows = ""
+    for i, var in enumerate(ENV_VARS_TO_REPORT):
+        value = os.environ.get(var, "")
+        # Escape HTML so values cannot break the report markup.
+        escaped_value = value.replace("&", "&amp;").replace("<", "&lt;").replace(">", "&gt;")
+        # Zebra-stripe the rows for readability.
+        bg = "#f9f9f9" if i % 2 == 0 else "#ffffff"
+        rows += (
+            f'<tr style="background-color: {bg};">'
+            f"<td>{var}</td><td>{escaped_value}</td></tr>\n"
+        )
+
+    if rows:
+        table = f"""
+        <h3>Environment Variables</h3>
+        <table>
+            <thead>
+                <tr>
+                    <th>Variable</th>
+                    <th>Value</th>
+                </tr>
+            </thead>
+            <tbody>
+                {rows}
+            </tbody>
+        </table>
+        """
+        prefix.append(table)
+
+def pytest_html_results_table_header(cells: MutableSequence[str]) -> None:
+    """Add Class column to HTML report.
+
+    NOTE(review): the original cell markup lost its tags; a header cell
+    must be a <th> element for pytest-html to render it.
+    """
+    cells.insert(1, "<th>Class</th>")
+
+
+def pytest_html_results_table_row(report: Any, cells: MutableSequence[str]) -> None:
+    """Populate Class column in HTML report.
+
+    NOTE(review): the original cell markup lost its tags; a data cell must
+    be a <td> element for pytest-html to render it.
+    """
+    nodeid = report.nodeid
+    if "test_bash_wrapper[" in nodeid:
+        # Extract test name from: test_pytest_wrapper.py::test_bash_wrapper[test-name]
+        class_name = nodeid.split("[")[1].rstrip("]")
+    else:
+        # file.py::TestClass::test_name -> "Class" (Test prefix dropped).
+        parts = nodeid.split("::")
+        class_name = parts[1].replace("Test", "") if len(parts) > 2 else "-"
+    cells.insert(1, f"<td>{class_name}</td>")
+
+
+# Colors used to highlight log-level words (and Kubernetes event types
+# Normal/Warning) inside logs embedded in the HTML report.
+LOG_LEVEL_COLORS = {
+    "ERROR": "#ff6b6b",
+    "WARN": "#ffa500",
+    "INFO": "#00CC66",
+    "DEBUG": "#3B8EFF",
+    "Normal": "#00CC66",
+    "Warning": "#ffa500",
+}
+# Matches any of the level words above as a whole word.
+LOG_LEVEL_PATTERN = re.compile(r"\b(" + "|".join(LOG_LEVEL_COLORS.keys()) + r")\b")
+
+
+def highlight_log_levels(logs: str) -> str:
+    """Add basic color highlighting for common log levels.
+
+    NOTE(review): the original span markup was mangled; this wraps each
+    matched level word in a <span> colored per LOG_LEVEL_COLORS.
+    """
+    return LOG_LEVEL_PATTERN.sub(
+        lambda m: f'<span style="color: {LOG_LEVEL_COLORS[m.group(1)]};">{m.group(1)}</span>',
+        logs,
+    )
+
+
+def generate_report(namespace: str) -> List[Any]:
+    """Collect operator logs, resources and events as pytest-html extras."""
+
+    def create_collapsible_section(title: str, content: str) -> str:
+        # NOTE(review): the original markup was mangled; <details> gives a
+        # native collapsible block in the HTML report.
+        return f"""
+        <details>
+            <summary>{title}</summary>
+            <pre>{content}</pre>
+        </details>
+        """
+
+    summary = K8sCollector(namespace).capture_summary()
+
+    return [
+        extras.html(
+            create_collapsible_section("Operator Pod Logs", highlight_log_levels(summary["logs"]))
+        ),
+        extras.html(create_collapsible_section("Kubernetes Resources", summary["resources"])),
+        extras.html(
+            create_collapsible_section(
+                "Kubernetes Events", highlight_log_levels(summary["events"])
+            )
+        ),
+    ]
diff --git a/e2e-tests/lib/secrets.py b/e2e-tests/lib/secrets.py
new file mode 100644
index 0000000000..df4d26ca6d
--- /dev/null
+++ b/e2e-tests/lib/secrets.py
@@ -0,0 +1,110 @@
+import atexit
+import base64
+import logging
+import os
+import subprocess
+import tempfile
+import urllib.parse
+from pathlib import Path
+from typing import Optional
+
+from .kubectl import kubectl_bin
+
+logger = logging.getLogger(__name__)
+
+_temp_files_to_cleanup: list[str] = []
+
+
+def _cleanup_temp_files() -> None:
+    """Best-effort removal of temp files registered during the run."""
+    for temp_path in _temp_files_to_cleanup:
+        if not os.path.exists(temp_path):
+            continue
+        try:
+            os.unlink(temp_path)
+        except OSError:
+            # File may be locked or already gone — ignore either way.
+            pass
+
+
+atexit.register(_cleanup_temp_files)
+
+
+def get_secret_data(secret_name: str, data_key: str) -> str:
+    """Fetch `data_key` from a Kubernetes secret, base64-decoded.
+
+    Returns "" when kubectl fails.
+    """
+    try:
+        raw_value = kubectl_bin(
+            "get", f"secrets/{secret_name}", "-o", f"jsonpath={{.data.{data_key}}}"
+        )
+    except subprocess.CalledProcessError as e:
+        logger.error(f"Error: {e.stderr}")
+        return ""
+    return base64.b64decode(raw_value.strip()).decode("utf-8")
+
+
+def get_user_data(secret_name: str, data_key: str) -> str:
+    """Return secret data percent-encoded (safe for use inside a MongoDB URI)."""
+    return urllib.parse.quote(get_secret_data(secret_name, data_key), safe="")
+
+
+def get_cloud_secret_default(conf_dir: Optional[Path] = None) -> str:
+    """Return default for SKIP_BACKUPS_TO_AWS_GCP_AZURE based on cloud-secret.yml existence."""
+    directory = conf_dir if conf_dir is not None else Path(__file__).parent.parent / "conf"
+    # Secret file present -> don't skip cloud backups ("" is falsy).
+    return "" if (directory / "cloud-secret.yml").exists() else "1"
+
+
+def apply_s3_storage_secrets(conf_dir: str) -> None:
+    """Apply secrets for cloud storages."""
+    manifests = [f"{conf_dir}/minio-secret.yml"]
+    if os.environ.get("SKIP_BACKUPS_TO_AWS_GCP_AZURE"):
+        logger.info("Creating secrets for cloud storages (minio only)")
+    else:
+        # Cloud backups enabled: include AWS/GCP/Azure credentials too.
+        logger.info("Creating secrets for cloud storages (minio + cloud)")
+        manifests.append(f"{conf_dir}/cloud-secret.yml")
+
+    apply_args = []
+    for manifest in manifests:
+        apply_args.extend(["-f", manifest])
+    kubectl_bin("apply", *apply_args)
+
+
+def setup_gcs_credentials(secret_name: str = "gcp-cs-secret") -> bool:
+    """Setup GCS credentials from K8s secret for gsutil.
+
+    Returns True when gsutil is already authorised or a boto config was
+    written successfully; False when credentials cannot be obtained.
+    """
+    # If gsutil already works, the environment is configured — nothing to do.
+    result = subprocess.run(["gsutil", "ls"], capture_output=True, check=False)
+    if result.returncode == 0:
+        logger.info("GCS credentials already set in environment")
+        return True
+
+    logger.info(f"Setting up GCS credentials from K8s secret: {secret_name}")
+
+    # HMAC-style keys are stored under AWS_* names in the cluster secret.
+    access_key = get_secret_data(secret_name, "AWS_ACCESS_KEY_ID")
+    secret_key = get_secret_data(secret_name, "AWS_SECRET_ACCESS_KEY")
+
+    if not access_key or not secret_key:
+        logger.error("Failed to extract GCS credentials from secret")
+        return False
+
+    # Write a private boto config and point gsutil at it via BOTO_CONFIG.
+    boto_fd, boto_path = tempfile.mkstemp(prefix="boto.", suffix=".cfg")
+    try:
+        with os.fdopen(boto_fd, "w") as f:
+            f.write("[Credentials]\n")
+            f.write(f"gs_access_key_id = {access_key}\n")
+            f.write(f"gs_secret_access_key = {secret_key}\n")
+        os.chmod(boto_path, 0o600)
+        os.environ["BOTO_CONFIG"] = boto_path
+        # Removed at interpreter exit by _cleanup_temp_files().
+        _temp_files_to_cleanup.append(boto_path)
+        logger.info("GCS credentials configured successfully")
+        return True
+    except Exception as e:
+        logger.error(f"Failed to create boto config: {e}")
+        os.unlink(boto_path)
+        return False
+
+
+# TODO: implement this function
+def check_passwords_leak(namespace: Optional[str] = None) -> None:
+    """Check for password leaks in Kubernetes pod logs.
+
+    NOTE(review): not implemented yet — a deliberate no-op placeholder so
+    callers can already wire it into their flows.
+    """
+    pass
diff --git a/e2e-tests/lib/utils.py b/e2e-tests/lib/utils.py
new file mode 100644
index 0000000000..1dd3012b59
--- /dev/null
+++ b/e2e-tests/lib/utils.py
@@ -0,0 +1,98 @@
+import logging
+import os
+import re
+import subprocess
+import time
+from typing import Any, Callable, Optional
+
+from rich.highlighter import RegexHighlighter
+from rich.theme import Theme
+
+logger = logging.getLogger(__name__)
+
+
+class K8sHighlighter(RegexHighlighter):
+    """Highlight Kubernetes resources in logs.
+
+    NOTE(review): the original regexes were mangled (named groups lost their
+    `(?P<name>` part, making them invalid regex); these patterns are a
+    reconstruction that targets the theme's style names (k8s.pod, k8s.uri,
+    k8s.bad_state, k8s.state) — confirm against the intended output.
+    """
+
+    base_style = "k8s."
+    highlights = [
+        # kubectl exec <pod> -- timeout N mongosh <uri> --eval <expr> --quiet
+        r"kubectl\s+exec\s+(?P<pod>[\w-]+)\s+--\s+timeout\s+\d+\s+mongosh\s+"
+        r"(?P<uri>mongodb(?:\+srv)?://\S+)\s+--eval\s+\S.+?\s+--quiet",
+        r"(?P<bad_state>pending|failed|error|deleted)",
+        r"(?P<state>ready|running)",
+    ]
+
+
+# Rich theme mapping the K8sHighlighter group styles ("k8s.<group>") and
+# standard logging levels to terminal colors.
+k8s_theme = Theme(
+    {
+        "logging.level.debug": "blue",
+        "logging.level.info": "green",
+        "k8s.pod": "green",
+        "k8s.uri": "magenta",
+        "k8s.resource": "cyan",
+        "k8s.namespace": "green",
+        "k8s.bad_state": "red",
+        "k8s.state": "blue",
+    }
+)
+
+
+def retry(
+    func: Callable[[], Any],
+    max_attempts: int = 5,
+    delay: int = 1,
+    condition: Optional[Callable[[Any], bool]] = None,
+) -> Any:
+    """Call `func` up to `max_attempts` times, sleeping `delay` seconds between tries.
+
+    A try succeeds when `func` returns without raising and `condition`
+    (when given) accepts the result.  The last exception is re-raised;
+    a generic Exception is raised when every result failed `condition`.
+    """
+    attempt = 0
+    while attempt < max_attempts:
+        is_last_attempt = attempt == max_attempts - 1
+        try:
+            result = func()
+        except Exception:
+            if is_last_attempt:
+                raise
+        else:
+            if condition is None or condition(result):
+                return result
+        attempt += 1
+        time.sleep(delay)
+
+    raise Exception(f"Max attempts ({max_attempts}) reached")
+
+
+def get_git_commit() -> str:
+    """Return the current HEAD commit hash ("" when git produces no output)."""
+    completed = subprocess.run(["git", "rev-parse", "HEAD"], capture_output=True, text=True)
+    return completed.stdout.strip()
+
+
+def get_cr_version() -> str:
+    """Get CR version from deploy/cr.yaml.
+
+    Raises RuntimeError when the file is missing or contains no crVersion line.
+    """
+    cr_path = os.path.realpath(
+        os.path.join(os.path.dirname(__file__), "..", "..", "deploy", "cr.yaml")
+    )
+    try:
+        with open(cr_path) as f:
+            # Line looks like "  crVersion: 1.20.0" -> take the value token.
+            return next(line.split()[1] for line in f if "crVersion" in line)
+    except Exception as e:
+        # StopIteration (no matching line) and OSError both land here; the
+        # original `except (StopIteration, Exception)` was redundant.
+        logger.error(f"Failed to get CR version: {e}")
+        raise RuntimeError("CR version not found in cr.yaml") from e
+
+
+def get_git_branch() -> str:
+    """Get current git branch or version from environment variable"""
+    env_version = os.environ.get("VERSION")
+    if env_version:
+        # Explicit VERSION wins and is returned verbatim (no sanitizing).
+        return env_version
+
+    try:
+        completed = subprocess.run(
+            ["git", "rev-parse", "--abbrev-ref", "HEAD"],
+            capture_output=True,
+            text=True,
+            check=True,
+        )
+    except (subprocess.CalledProcessError, FileNotFoundError):
+        return "unknown"
+
+    # Lowercase and replace anything outside [a-z0-9-] with "-".
+    return re.sub(r"[^a-zA-Z0-9-]", "-", completed.stdout.strip().lower())
diff --git a/e2e-tests/liveness/test_liveness.py b/e2e-tests/liveness/test_liveness.py
new file mode 100644
index 0000000000..6eb0505a83
--- /dev/null
+++ b/e2e-tests/liveness/test_liveness.py
@@ -0,0 +1,93 @@
+#!/usr/bin/env python3
+
+import logging
+import re
+from typing import Any, Callable, Dict
+
+import pytest
+from lib.config import apply_cluster, compare_kubectl
+from lib.kubectl import kubectl_bin, wait_for_running
+
+logger = logging.getLogger(__name__)
+
+
+@pytest.fixture(scope="class", autouse=True)
+def config(create_infra: Callable[[str], str]) -> Dict[str, str]:
+    """Per-class test configuration: creates the infra and exposes its names."""
+    namespace = create_infra("liveness")
+    return {"namespace": namespace, "cluster": "liveness"}
+
+
+@pytest.fixture(scope="class", autouse=True)
+def setup_tests(test_paths: Dict[str, str], deploy_minio: Any) -> None:
+    """Apply the user and minio secrets before the class's tests run."""
+    conf_dir = test_paths["conf_dir"]
+    kubectl_bin(
+        "apply",
+        "-f",
+        f"{conf_dir}/secrets.yml",
+        "-f",
+        f"{conf_dir}/minio-secret.yml",
+    )
+
+
+class TestLiveness:
+    """Checks liveness-probe behaviour of a PSMDB cluster.
+
+    Tests are ordered via pytest-dependency: create the cluster, break the
+    healthcheck with an invalid flag, then change the liveness configuration.
+    """
+
+    @pytest.mark.dependency()
+    def test_create_first_cluster(
+        self, config: Dict[str, str], test_paths: Dict[str, str]
+    ) -> None:
+        """Create first PSMDB cluster"""
+        apply_cluster(f"{test_paths['test_dir']}/conf/{config['cluster']}-rs0.yml")
+        wait_for_running(f"{config['cluster']}-rs0", 3)
+
+        compare_kubectl(
+            test_paths["test_dir"], f"statefulset/{config['cluster']}-rs0", config["namespace"]
+        )
+
+    @pytest.mark.dependency(depends=["TestLiveness::test_create_first_cluster"])
+    def test_liveness_check_fails_with_invalid_ssl_option(self, config: Dict[str, str]) -> None:
+        """Run the healthcheck with a --ssl flag and verify its log files.
+
+        Expects exactly one current healthcheck log plus at least one rotated
+        (.log.gz) file in /data/db/mongod-data/logs afterwards.
+        """
+        # Expected to fail; check=False ignores the non-zero exit code.
+        kubectl_bin(
+            "exec",
+            f"{config['cluster']}-rs0-0",
+            "-c",
+            "mongod",
+            "--",
+            "bash",
+            "-c",
+            "/opt/percona/mongodb-healthcheck k8s liveness --ssl",
+            check=False,
+        )
+
+        logs_output = kubectl_bin(
+            "exec",
+            f"{config['cluster']}-rs0-0",
+            "-c",
+            "mongod",
+            "--",
+            "bash",
+            "-c",
+            "ls /data/db/mongod-data/logs",
+        )
+        log_count = logs_output.count("mongodb-healthcheck.log")
+        assert log_count == 1, f"Expected 1 healthcheck log file, got {log_count}"
+
+        rotated_count = len(re.findall(r"mongodb-healthcheck-.*\.log\.gz", logs_output))
+        assert rotated_count >= 1, f"Expected >=1 rotated logs, got {rotated_count}"
+
+    @pytest.mark.dependency(
+        depends=["TestLiveness::test_liveness_check_fails_with_invalid_ssl_option"]
+    )
+    def test_change_liveness_config(
+        self, config: Dict[str, str], test_paths: Dict[str, str]
+    ) -> None:
+        """Apply the -changed cluster config and compare the new statefulset."""
+        apply_cluster(f"{test_paths['test_dir']}/conf/{config['cluster']}-rs0-changed.yml")
+
+        wait_for_running(f"{config['cluster']}-rs0", 3)
+
+        compare_kubectl(
+            test_paths["test_dir"],
+            f"statefulset/{config['cluster']}-rs0",
+            config["namespace"],
+            "-changed",
+        )
diff --git a/e2e-tests/test_pytest_wrapper.py b/e2e-tests/test_pytest_wrapper.py
new file mode 100644
index 0000000000..ec2cca67bb
--- /dev/null
+++ b/e2e-tests/test_pytest_wrapper.py
@@ -0,0 +1,10 @@
+import pytest
+from lib.bash_wrapper import run_bash_test
+
+
+def test_bash_wrapper(request: pytest.FixtureRequest) -> None:
+    """Run a bash test script via pytest wrapper."""
+    name = request.config.getoption("--test-name")
+    if not name:
+        # Nothing selected on the command line — skip rather than fail.
+        pytest.skip("No --test-name provided")
+    run_bash_test(name)
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000000..1fa089cad3
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,33 @@
+[project]
+name = "psmdb-pytest"
+version = "0.1.0"
+description = "Tests for PSMDB Operator"
+requires-python = ">=3.13"
+dependencies = [
+ "deepdiff>=8.5.0",
+ "junitparser>=3.2.0",
+ "mypy>=1.16.0",
+ "pytest>=8.4.0",
+ "pytest-dependency>=0.6.0",
+ "pytest-html>=4.1.1",
+ "pytest-html-merger>=0.1.0",
+ "pytest-json-report>=1.5.0",
+ "pyyaml>=6.0.2",
+ "rich>=14.0.0",
+ "ruff>=0.11.12",
+ "types-pyyaml>=6.0.12.20250915",
+]
+
+[tool.pytest.ini_options]
+addopts = "-v -s --self-contained-html"
+render_collapsed = "all"
+junit_family = "xunit2"
+junit_suite_name = "psmdb-e2e"
+
+[tool.mypy]
+strict = true
+
+[[tool.mypy.overrides]]
+module = ["pytest_html.*"]
+follow_untyped_imports = true
+
+[tool.ruff]
+line-length = 99
diff --git a/uv.lock b/uv.lock
new file mode 100644
index 0000000000..b0942abf84
--- /dev/null
+++ b/uv.lock
@@ -0,0 +1,486 @@
+version = 1
+revision = 1
+requires-python = ">=3.13"
+
+[[package]]
+name = "beautifulsoup4"
+version = "4.14.3"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "soupsieve" },
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/c3/b0/1c6a16426d389813b48d95e26898aff79abbde42ad353958ad95cc8c9b21/beautifulsoup4-4.14.3.tar.gz", hash = "sha256:6292b1c5186d356bba669ef9f7f051757099565ad9ada5dd630bd9de5fa7fb86", size = 627737 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/1a/39/47f9197bdd44df24d67ac8893641e16f386c984a0619ef2ee4c51fbbc019/beautifulsoup4-4.14.3-py3-none-any.whl", hash = "sha256:0918bfe44902e6ad8d57732ba310582e98da931428d231a5ecb9e7c703a735bb", size = 107721 },
+]
+
+[[package]]
+name = "colorama"
+version = "0.4.6"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335 },
+]
+
+[[package]]
+name = "deepdiff"
+version = "8.6.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "orderly-set" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/19/76/36c9aab3d5c19a94091f7c6c6e784efca50d87b124bf026c36e94719f33c/deepdiff-8.6.1.tar.gz", hash = "sha256:ec56d7a769ca80891b5200ec7bd41eec300ced91ebcc7797b41eb2b3f3ff643a", size = 634054 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/f7/e6/efe534ef0952b531b630780e19cabd416e2032697019d5295defc6ef9bd9/deepdiff-8.6.1-py3-none-any.whl", hash = "sha256:ee8708a7f7d37fb273a541fa24ad010ed484192cd0c4ffc0fa0ed5e2d4b9e78b", size = 91378 },
+]
+
+[[package]]
+name = "iniconfig"
+version = "2.3.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484 },
+]
+
+[[package]]
+name = "jinja2"
+version = "3.1.6"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "markupsafe" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899 },
+]
+
+[[package]]
+name = "junitparser"
+version = "4.0.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/aa/97/954ee1ef04e50d8494e9f5d82d4051ed71a7618aa2c1514c1b3f24691174/junitparser-4.0.2.tar.gz", hash = "sha256:d5d07cece6d4a600ff3b7b96c8db5ffa45a91eed695cb86c45c3db113c1ca0f8", size = 25646 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/44/87/b444f934f62ee2a1be45bb52563cf17a66b0d790eba43af4df9929e7107f/junitparser-4.0.2-py3-none-any.whl", hash = "sha256:94c3570e41fcaedc64cc3c634ca99457fe41a84dd1aa8ff74e9e12e66223a155", size = 14592 },
+]
+
+[[package]]
+name = "librt"
+version = "0.7.8"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/e7/24/5f3646ff414285e0f7708fa4e946b9bf538345a41d1c375c439467721a5e/librt-0.7.8.tar.gz", hash = "sha256:1a4ede613941d9c3470b0368be851df6bb78ab218635512d0370b27a277a0862", size = 148323 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/a1/fe/b1f9de2829cf7fc7649c1dcd202cfd873837c5cc2fc9e526b0e7f716c3d2/librt-0.7.8-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4c3995abbbb60b3c129490fa985dfe6cac11d88fc3c36eeb4fb1449efbbb04fc", size = 57500 },
+ { url = "https://files.pythonhosted.org/packages/eb/d4/4a60fbe2e53b825f5d9a77325071d61cd8af8506255067bf0c8527530745/librt-0.7.8-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:44e0c2cbc9bebd074cf2cdbe472ca185e824be4e74b1c63a8e934cea674bebf2", size = 59019 },
+ { url = "https://files.pythonhosted.org/packages/6a/37/61ff80341ba5159afa524445f2d984c30e2821f31f7c73cf166dcafa5564/librt-0.7.8-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:4d2f1e492cae964b3463a03dc77a7fe8742f7855d7258c7643f0ee32b6651dd3", size = 169015 },
+ { url = "https://files.pythonhosted.org/packages/1c/86/13d4f2d6a93f181ebf2fc953868826653ede494559da8268023fe567fca3/librt-0.7.8-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:451e7ffcef8f785831fdb791bd69211f47e95dc4c6ddff68e589058806f044c6", size = 178161 },
+ { url = "https://files.pythonhosted.org/packages/88/26/e24ef01305954fc4d771f1f09f3dd682f9eb610e1bec188ffb719374d26e/librt-0.7.8-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3469e1af9f1380e093ae06bedcbdd11e407ac0b303a56bbe9afb1d6824d4982d", size = 193015 },
+ { url = "https://files.pythonhosted.org/packages/88/a0/92b6bd060e720d7a31ed474d046a69bd55334ec05e9c446d228c4b806ae3/librt-0.7.8-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f11b300027ce19a34f6d24ebb0a25fd0e24a9d53353225a5c1e6cadbf2916b2e", size = 192038 },
+ { url = "https://files.pythonhosted.org/packages/06/bb/6f4c650253704279c3a214dad188101d1b5ea23be0606628bc6739456624/librt-0.7.8-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:4adc73614f0d3c97874f02f2c7fd2a27854e7e24ad532ea6b965459c5b757eca", size = 186006 },
+ { url = "https://files.pythonhosted.org/packages/dc/00/1c409618248d43240cadf45f3efb866837fa77e9a12a71481912135eb481/librt-0.7.8-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:60c299e555f87e4c01b2eca085dfccda1dde87f5a604bb45c2906b8305819a93", size = 206888 },
+ { url = "https://files.pythonhosted.org/packages/d9/83/b2cfe8e76ff5c1c77f8a53da3d5de62d04b5ebf7cf913e37f8bca43b5d07/librt-0.7.8-cp313-cp313-win32.whl", hash = "sha256:b09c52ed43a461994716082ee7d87618096851319bf695d57ec123f2ab708951", size = 44126 },
+ { url = "https://files.pythonhosted.org/packages/a9/0b/c59d45de56a51bd2d3a401fc63449c0ac163e4ef7f523ea8b0c0dee86ec5/librt-0.7.8-cp313-cp313-win_amd64.whl", hash = "sha256:f8f4a901a3fa28969d6e4519deceab56c55a09d691ea7b12ca830e2fa3461e34", size = 50262 },
+ { url = "https://files.pythonhosted.org/packages/fc/b9/973455cec0a1ec592395250c474164c4a58ebf3e0651ee920fef1a2623f1/librt-0.7.8-cp313-cp313-win_arm64.whl", hash = "sha256:43d4e71b50763fcdcf64725ac680d8cfa1706c928b844794a7aa0fa9ac8e5f09", size = 43600 },
+ { url = "https://files.pythonhosted.org/packages/1a/73/fa8814c6ce2d49c3827829cadaa1589b0bf4391660bd4510899393a23ebc/librt-0.7.8-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:be927c3c94c74b05128089a955fba86501c3b544d1d300282cc1b4bd370cb418", size = 57049 },
+ { url = "https://files.pythonhosted.org/packages/53/fe/f6c70956da23ea235fd2e3cc16f4f0b4ebdfd72252b02d1164dd58b4e6c3/librt-0.7.8-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:7b0803e9008c62a7ef79058233db7ff6f37a9933b8f2573c05b07ddafa226611", size = 58689 },
+ { url = "https://files.pythonhosted.org/packages/1f/4d/7a2481444ac5fba63050d9abe823e6bc16896f575bfc9c1e5068d516cdce/librt-0.7.8-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:79feb4d00b2a4e0e05c9c56df707934f41fcb5fe53fd9efb7549068d0495b758", size = 166808 },
+ { url = "https://files.pythonhosted.org/packages/ac/3c/10901d9e18639f8953f57c8986796cfbf4c1c514844a41c9197cf87cb707/librt-0.7.8-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b9122094e3f24aa759c38f46bd8863433820654927370250f460ae75488b66ea", size = 175614 },
+ { url = "https://files.pythonhosted.org/packages/db/01/5cbdde0951a5090a80e5ba44e6357d375048123c572a23eecfb9326993a7/librt-0.7.8-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7e03bea66af33c95ce3addf87a9bf1fcad8d33e757bc479957ddbc0e4f7207ac", size = 189955 },
+ { url = "https://files.pythonhosted.org/packages/6a/b4/e80528d2f4b7eaf1d437fcbd6fc6ba4cbeb3e2a0cb9ed5a79f47c7318706/librt-0.7.8-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:f1ade7f31675db00b514b98f9ab9a7698c7282dad4be7492589109471852d398", size = 189370 },
+ { url = "https://files.pythonhosted.org/packages/c1/ab/938368f8ce31a9787ecd4becb1e795954782e4312095daf8fd22420227c8/librt-0.7.8-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:a14229ac62adcf1b90a15992f1ab9c69ae8b99ffb23cb64a90878a6e8a2f5b81", size = 183224 },
+ { url = "https://files.pythonhosted.org/packages/3c/10/559c310e7a6e4014ac44867d359ef8238465fb499e7eb31b6bfe3e3f86f5/librt-0.7.8-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5bcaaf624fd24e6a0cb14beac37677f90793a96864c67c064a91458611446e83", size = 203541 },
+ { url = "https://files.pythonhosted.org/packages/f8/db/a0db7acdb6290c215f343835c6efda5b491bb05c3ddc675af558f50fdba3/librt-0.7.8-cp314-cp314-win32.whl", hash = "sha256:7aa7d5457b6c542ecaed79cec4ad98534373c9757383973e638ccced0f11f46d", size = 40657 },
+ { url = "https://files.pythonhosted.org/packages/72/e0/4f9bdc2a98a798511e81edcd6b54fe82767a715e05d1921115ac70717f6f/librt-0.7.8-cp314-cp314-win_amd64.whl", hash = "sha256:3d1322800771bee4a91f3b4bd4e49abc7d35e65166821086e5afd1e6c0d9be44", size = 46835 },
+ { url = "https://files.pythonhosted.org/packages/f9/3d/59c6402e3dec2719655a41ad027a7371f8e2334aa794ed11533ad5f34969/librt-0.7.8-cp314-cp314-win_arm64.whl", hash = "sha256:5363427bc6a8c3b1719f8f3845ea53553d301382928a86e8fab7984426949bce", size = 39885 },
+ { url = "https://files.pythonhosted.org/packages/4e/9c/2481d80950b83085fb14ba3c595db56330d21bbc7d88a19f20165f3538db/librt-0.7.8-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:ca916919793a77e4a98d4a1701e345d337ce53be4a16620f063191f7322ac80f", size = 59161 },
+ { url = "https://files.pythonhosted.org/packages/96/79/108df2cfc4e672336765d54e3ff887294c1cc36ea4335c73588875775527/librt-0.7.8-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:54feb7b4f2f6706bb82325e836a01be805770443e2400f706e824e91f6441dde", size = 61008 },
+ { url = "https://files.pythonhosted.org/packages/46/f2/30179898f9994a5637459d6e169b6abdc982012c0a4b2d4c26f50c06f911/librt-0.7.8-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:39a4c76fee41007070f872b648cc2f711f9abf9a13d0c7162478043377b52c8e", size = 187199 },
+ { url = "https://files.pythonhosted.org/packages/b4/da/f7563db55cebdc884f518ba3791ad033becc25ff68eb70902b1747dc0d70/librt-0.7.8-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ac9c8a458245c7de80bc1b9765b177055efff5803f08e548dd4bb9ab9a8d789b", size = 198317 },
+ { url = "https://files.pythonhosted.org/packages/b3/6c/4289acf076ad371471fa86718c30ae353e690d3de6167f7db36f429272f1/librt-0.7.8-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:95b67aa7eff150f075fda09d11f6bfb26edffd300f6ab1666759547581e8f666", size = 210334 },
+ { url = "https://files.pythonhosted.org/packages/4a/7f/377521ac25b78ac0a5ff44127a0360ee6d5ddd3ce7327949876a30533daa/librt-0.7.8-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:535929b6eff670c593c34ff435d5440c3096f20fa72d63444608a5aef64dd581", size = 211031 },
+ { url = "https://files.pythonhosted.org/packages/c5/b1/e1e96c3e20b23d00cf90f4aad48f0deb4cdfec2f0ed8380d0d85acf98bbf/librt-0.7.8-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:63937bd0f4d1cb56653dc7ae900d6c52c41f0015e25aaf9902481ee79943b33a", size = 204581 },
+ { url = "https://files.pythonhosted.org/packages/43/71/0f5d010e92ed9747e14bef35e91b6580533510f1e36a8a09eb79ee70b2f0/librt-0.7.8-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:cf243da9e42d914036fd362ac3fa77d80a41cadcd11ad789b1b5eec4daaf67ca", size = 224731 },
+ { url = "https://files.pythonhosted.org/packages/22/f0/07fb6ab5c39a4ca9af3e37554f9d42f25c464829254d72e4ebbd81da351c/librt-0.7.8-cp314-cp314t-win32.whl", hash = "sha256:171ca3a0a06c643bd0a2f62a8944e1902c94aa8e5da4db1ea9a8daf872685365", size = 41173 },
+ { url = "https://files.pythonhosted.org/packages/24/d4/7e4be20993dc6a782639625bd2f97f3c66125c7aa80c82426956811cfccf/librt-0.7.8-cp314-cp314t-win_amd64.whl", hash = "sha256:445b7304145e24c60288a2f172b5ce2ca35c0f81605f5299f3fa567e189d2e32", size = 47668 },
+ { url = "https://files.pythonhosted.org/packages/fc/85/69f92b2a7b3c0f88ffe107c86b952b397004b5b8ea5a81da3d9c04c04422/librt-0.7.8-cp314-cp314t-win_arm64.whl", hash = "sha256:8766ece9de08527deabcd7cb1b4f1a967a385d26e33e536d6d8913db6ef74f06", size = 40550 },
+]
+
+[[package]]
+name = "markdown-it-py"
+version = "4.0.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "mdurl" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/5b/f5/4ec618ed16cc4f8fb3b701563655a69816155e79e24a17b651541804721d/markdown_it_py-4.0.0.tar.gz", hash = "sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3", size = 73070 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl", hash = "sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147", size = 87321 },
+]
+
+[[package]]
+name = "markupsafe"
+version = "3.0.3"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/7e/99/7690b6d4034fffd95959cbe0c02de8deb3098cc577c67bb6a24fe5d7caa7/markupsafe-3.0.3.tar.gz", hash = "sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698", size = 80313 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/38/2f/907b9c7bbba283e68f20259574b13d005c121a0fa4c175f9bed27c4597ff/markupsafe-3.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e1cf1972137e83c5d4c136c43ced9ac51d0e124706ee1c8aa8532c1287fa8795", size = 11622 },
+ { url = "https://files.pythonhosted.org/packages/9c/d9/5f7756922cdd676869eca1c4e3c0cd0df60ed30199ffd775e319089cb3ed/markupsafe-3.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:116bb52f642a37c115f517494ea5feb03889e04df47eeff5b130b1808ce7c219", size = 12029 },
+ { url = "https://files.pythonhosted.org/packages/00/07/575a68c754943058c78f30db02ee03a64b3c638586fba6a6dd56830b30a3/markupsafe-3.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:133a43e73a802c5562be9bbcd03d090aa5a1fe899db609c29e8c8d815c5f6de6", size = 24374 },
+ { url = "https://files.pythonhosted.org/packages/a9/21/9b05698b46f218fc0e118e1f8168395c65c8a2c750ae2bab54fc4bd4e0e8/markupsafe-3.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfcd093f13f0f0b7fdd0f198b90053bf7b2f02a3927a30e63f3ccc9df56b676", size = 22980 },
+ { url = "https://files.pythonhosted.org/packages/7f/71/544260864f893f18b6827315b988c146b559391e6e7e8f7252839b1b846a/markupsafe-3.0.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:509fa21c6deb7a7a273d629cf5ec029bc209d1a51178615ddf718f5918992ab9", size = 21990 },
+ { url = "https://files.pythonhosted.org/packages/c2/28/b50fc2f74d1ad761af2f5dcce7492648b983d00a65b8c0e0cb457c82ebbe/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4afe79fb3de0b7097d81da19090f4df4f8d3a2b3adaa8764138aac2e44f3af1", size = 23784 },
+ { url = "https://files.pythonhosted.org/packages/ed/76/104b2aa106a208da8b17a2fb72e033a5a9d7073c68f7e508b94916ed47a9/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:795e7751525cae078558e679d646ae45574b47ed6e7771863fcc079a6171a0fc", size = 21588 },
+ { url = "https://files.pythonhosted.org/packages/b5/99/16a5eb2d140087ebd97180d95249b00a03aa87e29cc224056274f2e45fd6/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8485f406a96febb5140bfeca44a73e3ce5116b2501ac54fe953e488fb1d03b12", size = 23041 },
+ { url = "https://files.pythonhosted.org/packages/19/bc/e7140ed90c5d61d77cea142eed9f9c303f4c4806f60a1044c13e3f1471d0/markupsafe-3.0.3-cp313-cp313-win32.whl", hash = "sha256:bdd37121970bfd8be76c5fb069c7751683bdf373db1ed6c010162b2a130248ed", size = 14543 },
+ { url = "https://files.pythonhosted.org/packages/05/73/c4abe620b841b6b791f2edc248f556900667a5a1cf023a6646967ae98335/markupsafe-3.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:9a1abfdc021a164803f4d485104931fb8f8c1efd55bc6b748d2f5774e78b62c5", size = 15113 },
+ { url = "https://files.pythonhosted.org/packages/f0/3a/fa34a0f7cfef23cf9500d68cb7c32dd64ffd58a12b09225fb03dd37d5b80/markupsafe-3.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:7e68f88e5b8799aa49c85cd116c932a1ac15caaa3f5db09087854d218359e485", size = 13911 },
+ { url = "https://files.pythonhosted.org/packages/e4/d7/e05cd7efe43a88a17a37b3ae96e79a19e846f3f456fe79c57ca61356ef01/markupsafe-3.0.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:218551f6df4868a8d527e3062d0fb968682fe92054e89978594c28e642c43a73", size = 11658 },
+ { url = "https://files.pythonhosted.org/packages/99/9e/e412117548182ce2148bdeacdda3bb494260c0b0184360fe0d56389b523b/markupsafe-3.0.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3524b778fe5cfb3452a09d31e7b5adefeea8c5be1d43c4f810ba09f2ceb29d37", size = 12066 },
+ { url = "https://files.pythonhosted.org/packages/bc/e6/fa0ffcda717ef64a5108eaa7b4f5ed28d56122c9a6d70ab8b72f9f715c80/markupsafe-3.0.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4e885a3d1efa2eadc93c894a21770e4bc67899e3543680313b09f139e149ab19", size = 25639 },
+ { url = "https://files.pythonhosted.org/packages/96/ec/2102e881fe9d25fc16cb4b25d5f5cde50970967ffa5dddafdb771237062d/markupsafe-3.0.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8709b08f4a89aa7586de0aadc8da56180242ee0ada3999749b183aa23df95025", size = 23569 },
+ { url = "https://files.pythonhosted.org/packages/4b/30/6f2fce1f1f205fc9323255b216ca8a235b15860c34b6798f810f05828e32/markupsafe-3.0.3-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b8512a91625c9b3da6f127803b166b629725e68af71f8184ae7e7d54686a56d6", size = 23284 },
+ { url = "https://files.pythonhosted.org/packages/58/47/4a0ccea4ab9f5dcb6f79c0236d954acb382202721e704223a8aafa38b5c8/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9b79b7a16f7fedff2495d684f2b59b0457c3b493778c9eed31111be64d58279f", size = 24801 },
+ { url = "https://files.pythonhosted.org/packages/6a/70/3780e9b72180b6fecb83a4814d84c3bf4b4ae4bf0b19c27196104149734c/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:12c63dfb4a98206f045aa9563db46507995f7ef6d83b2f68eda65c307c6829eb", size = 22769 },
+ { url = "https://files.pythonhosted.org/packages/98/c5/c03c7f4125180fc215220c035beac6b9cb684bc7a067c84fc69414d315f5/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8f71bc33915be5186016f675cd83a1e08523649b0e33efdb898db577ef5bb009", size = 23642 },
+ { url = "https://files.pythonhosted.org/packages/80/d6/2d1b89f6ca4bff1036499b1e29a1d02d282259f3681540e16563f27ebc23/markupsafe-3.0.3-cp313-cp313t-win32.whl", hash = "sha256:69c0b73548bc525c8cb9a251cddf1931d1db4d2258e9599c28c07ef3580ef354", size = 14612 },
+ { url = "https://files.pythonhosted.org/packages/2b/98/e48a4bfba0a0ffcf9925fe2d69240bfaa19c6f7507b8cd09c70684a53c1e/markupsafe-3.0.3-cp313-cp313t-win_amd64.whl", hash = "sha256:1b4b79e8ebf6b55351f0d91fe80f893b4743f104bff22e90697db1590e47a218", size = 15200 },
+ { url = "https://files.pythonhosted.org/packages/0e/72/e3cc540f351f316e9ed0f092757459afbc595824ca724cbc5a5d4263713f/markupsafe-3.0.3-cp313-cp313t-win_arm64.whl", hash = "sha256:ad2cf8aa28b8c020ab2fc8287b0f823d0a7d8630784c31e9ee5edea20f406287", size = 13973 },
+ { url = "https://files.pythonhosted.org/packages/33/8a/8e42d4838cd89b7dde187011e97fe6c3af66d8c044997d2183fbd6d31352/markupsafe-3.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:eaa9599de571d72e2daf60164784109f19978b327a3910d3e9de8c97b5b70cfe", size = 11619 },
+ { url = "https://files.pythonhosted.org/packages/b5/64/7660f8a4a8e53c924d0fa05dc3a55c9cee10bbd82b11c5afb27d44b096ce/markupsafe-3.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c47a551199eb8eb2121d4f0f15ae0f923d31350ab9280078d1e5f12b249e0026", size = 12029 },
+ { url = "https://files.pythonhosted.org/packages/da/ef/e648bfd021127bef5fa12e1720ffed0c6cbb8310c8d9bea7266337ff06de/markupsafe-3.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f34c41761022dd093b4b6896d4810782ffbabe30f2d443ff5f083e0cbbb8c737", size = 24408 },
+ { url = "https://files.pythonhosted.org/packages/41/3c/a36c2450754618e62008bf7435ccb0f88053e07592e6028a34776213d877/markupsafe-3.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:457a69a9577064c05a97c41f4e65148652db078a3a509039e64d3467b9e7ef97", size = 23005 },
+ { url = "https://files.pythonhosted.org/packages/bc/20/b7fdf89a8456b099837cd1dc21974632a02a999ec9bf7ca3e490aacd98e7/markupsafe-3.0.3-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e8afc3f2ccfa24215f8cb28dcf43f0113ac3c37c2f0f0806d8c70e4228c5cf4d", size = 22048 },
+ { url = "https://files.pythonhosted.org/packages/9a/a7/591f592afdc734f47db08a75793a55d7fbcc6902a723ae4cfbab61010cc5/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ec15a59cf5af7be74194f7ab02d0f59a62bdcf1a537677ce67a2537c9b87fcda", size = 23821 },
+ { url = "https://files.pythonhosted.org/packages/7d/33/45b24e4f44195b26521bc6f1a82197118f74df348556594bd2262bda1038/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:0eb9ff8191e8498cca014656ae6b8d61f39da5f95b488805da4bb029cccbfbaf", size = 21606 },
+ { url = "https://files.pythonhosted.org/packages/ff/0e/53dfaca23a69fbfbbf17a4b64072090e70717344c52eaaaa9c5ddff1e5f0/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2713baf880df847f2bece4230d4d094280f4e67b1e813eec43b4c0e144a34ffe", size = 23043 },
+ { url = "https://files.pythonhosted.org/packages/46/11/f333a06fc16236d5238bfe74daccbca41459dcd8d1fa952e8fbd5dccfb70/markupsafe-3.0.3-cp314-cp314-win32.whl", hash = "sha256:729586769a26dbceff69f7a7dbbf59ab6572b99d94576a5592625d5b411576b9", size = 14747 },
+ { url = "https://files.pythonhosted.org/packages/28/52/182836104b33b444e400b14f797212f720cbc9ed6ba34c800639d154e821/markupsafe-3.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:bdc919ead48f234740ad807933cdf545180bfbe9342c2bb451556db2ed958581", size = 15341 },
+ { url = "https://files.pythonhosted.org/packages/6f/18/acf23e91bd94fd7b3031558b1f013adfa21a8e407a3fdb32745538730382/markupsafe-3.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:5a7d5dc5140555cf21a6fefbdbf8723f06fcd2f63ef108f2854de715e4422cb4", size = 14073 },
+ { url = "https://files.pythonhosted.org/packages/3c/f0/57689aa4076e1b43b15fdfa646b04653969d50cf30c32a102762be2485da/markupsafe-3.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:1353ef0c1b138e1907ae78e2f6c63ff67501122006b0f9abad68fda5f4ffc6ab", size = 11661 },
+ { url = "https://files.pythonhosted.org/packages/89/c3/2e67a7ca217c6912985ec766c6393b636fb0c2344443ff9d91404dc4c79f/markupsafe-3.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1085e7fbddd3be5f89cc898938f42c0b3c711fdcb37d75221de2666af647c175", size = 12069 },
+ { url = "https://files.pythonhosted.org/packages/f0/00/be561dce4e6ca66b15276e184ce4b8aec61fe83662cce2f7d72bd3249d28/markupsafe-3.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1b52b4fb9df4eb9ae465f8d0c228a00624de2334f216f178a995ccdcf82c4634", size = 25670 },
+ { url = "https://files.pythonhosted.org/packages/50/09/c419f6f5a92e5fadde27efd190eca90f05e1261b10dbd8cbcb39cd8ea1dc/markupsafe-3.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fed51ac40f757d41b7c48425901843666a6677e3e8eb0abcff09e4ba6e664f50", size = 23598 },
+ { url = "https://files.pythonhosted.org/packages/22/44/a0681611106e0b2921b3033fc19bc53323e0b50bc70cffdd19f7d679bb66/markupsafe-3.0.3-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f190daf01f13c72eac4efd5c430a8de82489d9cff23c364c3ea822545032993e", size = 23261 },
+ { url = "https://files.pythonhosted.org/packages/5f/57/1b0b3f100259dc9fffe780cfb60d4be71375510e435efec3d116b6436d43/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e56b7d45a839a697b5eb268c82a71bd8c7f6c94d6fd50c3d577fa39a9f1409f5", size = 24835 },
+ { url = "https://files.pythonhosted.org/packages/26/6a/4bf6d0c97c4920f1597cc14dd720705eca0bf7c787aebc6bb4d1bead5388/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:f3e98bb3798ead92273dc0e5fd0f31ade220f59a266ffd8a4f6065e0a3ce0523", size = 22733 },
+ { url = "https://files.pythonhosted.org/packages/14/c7/ca723101509b518797fedc2fdf79ba57f886b4aca8a7d31857ba3ee8281f/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5678211cb9333a6468fb8d8be0305520aa073f50d17f089b5b4b477ea6e67fdc", size = 23672 },
+ { url = "https://files.pythonhosted.org/packages/fb/df/5bd7a48c256faecd1d36edc13133e51397e41b73bb77e1a69deab746ebac/markupsafe-3.0.3-cp314-cp314t-win32.whl", hash = "sha256:915c04ba3851909ce68ccc2b8e2cd691618c4dc4c4232fb7982bca3f41fd8c3d", size = 14819 },
+ { url = "https://files.pythonhosted.org/packages/1a/8a/0402ba61a2f16038b48b39bccca271134be00c5c9f0f623208399333c448/markupsafe-3.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4faffd047e07c38848ce017e8725090413cd80cbc23d86e55c587bf979e579c9", size = 15426 },
+ { url = "https://files.pythonhosted.org/packages/70/bc/6f1c2f612465f5fa89b95bead1f44dcb607670fd42891d8fdcd5d039f4f4/markupsafe-3.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:32001d6a8fc98c8cb5c947787c5d08b0a50663d139f1305bac5885d98d9b40fa", size = 14146 },
+]
+
+[[package]]
+name = "mdurl"
+version = "0.1.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979 },
+]
+
+[[package]]
+name = "mypy"
+version = "1.19.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "librt", marker = "platform_python_implementation != 'PyPy'" },
+ { name = "mypy-extensions" },
+ { name = "pathspec" },
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/f5/db/4efed9504bc01309ab9c2da7e352cc223569f05478012b5d9ece38fd44d2/mypy-1.19.1.tar.gz", hash = "sha256:19d88bb05303fe63f71dd2c6270daca27cb9401c4ca8255fe50d1d920e0eb9ba", size = 3582404 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/de/9f/a6abae693f7a0c697dbb435aac52e958dc8da44e92e08ba88d2e42326176/mypy-1.19.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e3157c7594ff2ef1634ee058aafc56a82db665c9438fd41b390f3bde1ab12250", size = 13201927 },
+ { url = "https://files.pythonhosted.org/packages/9a/a4/45c35ccf6e1c65afc23a069f50e2c66f46bd3798cbe0d680c12d12935caa/mypy-1.19.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:bdb12f69bcc02700c2b47e070238f42cb87f18c0bc1fc4cdb4fb2bc5fd7a3b8b", size = 12206730 },
+ { url = "https://files.pythonhosted.org/packages/05/bb/cdcf89678e26b187650512620eec8368fded4cfd99cfcb431e4cdfd19dec/mypy-1.19.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f859fb09d9583a985be9a493d5cfc5515b56b08f7447759a0c5deaf68d80506e", size = 12724581 },
+ { url = "https://files.pythonhosted.org/packages/d1/32/dd260d52babf67bad8e6770f8e1102021877ce0edea106e72df5626bb0ec/mypy-1.19.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c9a6538e0415310aad77cb94004ca6482330fece18036b5f360b62c45814c4ef", size = 13616252 },
+ { url = "https://files.pythonhosted.org/packages/71/d0/5e60a9d2e3bd48432ae2b454b7ef2b62a960ab51292b1eda2a95edd78198/mypy-1.19.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:da4869fc5e7f62a88f3fe0b5c919d1d9f7ea3cef92d3689de2823fd27e40aa75", size = 13840848 },
+ { url = "https://files.pythonhosted.org/packages/98/76/d32051fa65ecf6cc8c6610956473abdc9b4c43301107476ac03559507843/mypy-1.19.1-cp313-cp313-win_amd64.whl", hash = "sha256:016f2246209095e8eda7538944daa1d60e1e8134d98983b9fc1e92c1fc0cb8dd", size = 10135510 },
+ { url = "https://files.pythonhosted.org/packages/de/eb/b83e75f4c820c4247a58580ef86fcd35165028f191e7e1ba57128c52782d/mypy-1.19.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:06e6170bd5836770e8104c8fdd58e5e725cfeb309f0a6c681a811f557e97eac1", size = 13199744 },
+ { url = "https://files.pythonhosted.org/packages/94/28/52785ab7bfa165f87fcbb61547a93f98bb20e7f82f90f165a1f69bce7b3d/mypy-1.19.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:804bd67b8054a85447c8954215a906d6eff9cabeabe493fb6334b24f4bfff718", size = 12215815 },
+ { url = "https://files.pythonhosted.org/packages/0a/c6/bdd60774a0dbfb05122e3e925f2e9e846c009e479dcec4821dad881f5b52/mypy-1.19.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:21761006a7f497cb0d4de3d8ef4ca70532256688b0523eee02baf9eec895e27b", size = 12740047 },
+ { url = "https://files.pythonhosted.org/packages/32/2a/66ba933fe6c76bd40d1fe916a83f04fed253152f451a877520b3c4a5e41e/mypy-1.19.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:28902ee51f12e0f19e1e16fbe2f8f06b6637f482c459dd393efddd0ec7f82045", size = 13601998 },
+ { url = "https://files.pythonhosted.org/packages/e3/da/5055c63e377c5c2418760411fd6a63ee2b96cf95397259038756c042574f/mypy-1.19.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:481daf36a4c443332e2ae9c137dfee878fcea781a2e3f895d54bd3002a900957", size = 13807476 },
+ { url = "https://files.pythonhosted.org/packages/cd/09/4ebd873390a063176f06b0dbf1f7783dd87bd120eae7727fa4ae4179b685/mypy-1.19.1-cp314-cp314-win_amd64.whl", hash = "sha256:8bb5c6f6d043655e055be9b542aa5f3bdd30e4f3589163e85f93f3640060509f", size = 10281872 },
+ { url = "https://files.pythonhosted.org/packages/8d/f4/4ce9a05ce5ded1de3ec1c1d96cf9f9504a04e54ce0ed55cfa38619a32b8d/mypy-1.19.1-py3-none-any.whl", hash = "sha256:f1235f5ea01b7db5468d53ece6aaddf1ad0b88d9e7462b86ef96fe04995d7247", size = 2471239 },
+]
+
+[[package]]
+name = "mypy-extensions"
+version = "1.1.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/a2/6e/371856a3fb9d31ca8dac321cda606860fa4548858c0cc45d9d1d4ca2628b/mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558", size = 6343 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505", size = 4963 },
+]
+
+[[package]]
+name = "orderly-set"
+version = "5.5.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/4a/88/39c83c35d5e97cc203e9e77a4f93bf87ec89cf6a22ac4818fdcc65d66584/orderly_set-5.5.0.tar.gz", hash = "sha256:e87185c8e4d8afa64e7f8160ee2c542a475b738bc891dc3f58102e654125e6ce", size = 27414 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/12/27/fb8d7338b4d551900fa3e580acbe7a0cf655d940e164cb5c00ec31961094/orderly_set-5.5.0-py3-none-any.whl", hash = "sha256:46f0b801948e98f427b412fcabb831677194c05c3b699b80de260374baa0b1e7", size = 13068 },
+]
+
+[[package]]
+name = "packaging"
+version = "26.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/65/ee/299d360cdc32edc7d2cf530f3accf79c4fca01e96ffc950d8a52213bd8e4/packaging-26.0.tar.gz", hash = "sha256:00243ae351a257117b6a241061796684b084ed1c516a08c48a3f7e147a9d80b4", size = 143416 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/b7/b9/c538f279a4e237a006a2c98387d081e9eb060d203d8ed34467cc0f0b9b53/packaging-26.0-py3-none-any.whl", hash = "sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529", size = 74366 },
+]
+
+[[package]]
+name = "pathspec"
+version = "1.0.4"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/fa/36/e27608899f9b8d4dff0617b2d9ab17ca5608956ca44461ac14ac48b44015/pathspec-1.0.4.tar.gz", hash = "sha256:0210e2ae8a21a9137c0d470578cb0e595af87edaa6ebf12ff176f14a02e0e645", size = 131200 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/ef/3c/2c197d226f9ea224a9ab8d197933f9da0ae0aac5b6e0f884e2b8d9c8e9f7/pathspec-1.0.4-py3-none-any.whl", hash = "sha256:fb6ae2fd4e7c921a165808a552060e722767cfa526f99ca5156ed2ce45a5c723", size = 55206 },
+]
+
+[[package]]
+name = "pluggy"
+version = "1.6.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538 },
+]
+
+[[package]]
+name = "psmdb-pytest"
+version = "0.1.0"
+source = { virtual = "." }
+dependencies = [
+ { name = "deepdiff" },
+ { name = "junitparser" },
+ { name = "mypy" },
+ { name = "pytest" },
+ { name = "pytest-dependency" },
+ { name = "pytest-html" },
+ { name = "pytest-html-merger" },
+ { name = "pytest-json-report" },
+ { name = "pyyaml" },
+ { name = "rich" },
+ { name = "ruff" },
+ { name = "types-pyyaml" },
+]
+
+[package.metadata]
+requires-dist = [
+ { name = "deepdiff", specifier = ">=8.5.0" },
+ { name = "junitparser", specifier = ">=3.2.0" },
+ { name = "mypy", specifier = ">=1.16.0" },
+ { name = "pytest", specifier = ">=8.4.0" },
+ { name = "pytest-dependency", specifier = ">=0.6.0" },
+ { name = "pytest-html", specifier = ">=4.1.1" },
+ { name = "pytest-html-merger", specifier = ">=0.1.0" },
+ { name = "pytest-json-report", specifier = ">=1.5.0" },
+ { name = "pyyaml", specifier = ">=6.0.2" },
+ { name = "rich", specifier = ">=14.0.0" },
+ { name = "ruff", specifier = ">=0.11.12" },
+ { name = "types-pyyaml", specifier = ">=6.0.12.20250915" },
+]
+
+[[package]]
+name = "pygments"
+version = "2.19.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217 },
+]
+
+[[package]]
+name = "pytest"
+version = "9.0.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "colorama", marker = "sys_platform == 'win32'" },
+ { name = "iniconfig" },
+ { name = "packaging" },
+ { name = "pluggy" },
+ { name = "pygments" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/d1/db/7ef3487e0fb0049ddb5ce41d3a49c235bf9ad299b6a25d5780a89f19230f/pytest-9.0.2.tar.gz", hash = "sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11", size = 1568901 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/3b/ab/b3226f0bd7cdcf710fbede2b3548584366da3b19b5021e74f5bde2a8fa3f/pytest-9.0.2-py3-none-any.whl", hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b", size = 374801 },
+]
+
+[[package]]
+name = "pytest-dependency"
+version = "0.6.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "pytest" },
+ { name = "setuptools" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/7e/3b/317cc04e77d707d338540ca67b619df8f247f3f4c9f40e67bf5ea503ad94/pytest-dependency-0.6.0.tar.gz", hash = "sha256:934b0e6a39d95995062c193f7eaeed8a8ffa06ff1bcef4b62b0dc74a708bacc1", size = 19499 }
+
+[[package]]
+name = "pytest-html"
+version = "4.2.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "jinja2" },
+ { name = "pytest" },
+ { name = "pytest-metadata" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/c4/08/2076aa09507e51c1119d16a84c6307354d16270558f1a44fc9a2c99fdf1d/pytest_html-4.2.0.tar.gz", hash = "sha256:b6a88cba507500d8709959201e2e757d3941e859fd17cfd4ed87b16fc0c67912", size = 108634 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/84/47/07046e0acedc12fe2bae79cf6c73ad67f51ae9d67df64d06b0f3eac73d36/pytest_html-4.2.0-py3-none-any.whl", hash = "sha256:ff5caf3e17a974008e5816edda61168e6c3da442b078a44f8744865862a85636", size = 23801 },
+]
+
+[[package]]
+name = "pytest-html-merger"
+version = "0.1.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "beautifulsoup4" },
+ { name = "packaging" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/72/8d/8f1bc3282c636bf29c88579a136ae6add64ddf5f239d8bec52d7434e163c/pytest_html_merger-0.1.0.tar.gz", hash = "sha256:497b1e9c99c12eb06eee5fdf9abad42c10fec78524d740737def9d85b8f995e4", size = 17814 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/d2/33/6ebce34cf14de51d2a422e5f77493468e19afe042e35c3f78bebfa5275d6/pytest_html_merger-0.1.0-py3-none-any.whl", hash = "sha256:c1bf0574245dd67481b21630d68168fdf26a779f55b939f37e7aff4c438ea61b", size = 17200 },
+]
+
+[[package]]
+name = "pytest-json-report"
+version = "1.5.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "pytest" },
+ { name = "pytest-metadata" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/4f/d3/765dae9712fcd68d820338908c1337e077d5fdadccd5cacf95b9b0bea278/pytest-json-report-1.5.0.tar.gz", hash = "sha256:2dde3c647851a19b5f3700729e8310a6e66efb2077d674f27ddea3d34dc615de", size = 21241 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/81/35/d07400c715bf8a88aa0c1ee9c9eb6050ca7fe5b39981f0eea773feeb0681/pytest_json_report-1.5.0-py3-none-any.whl", hash = "sha256:9897b68c910b12a2e48dd849f9a284b2c79a732a8a9cb398452ddd23d3c8c325", size = 13222 },
+]
+
+[[package]]
+name = "pytest-metadata"
+version = "3.1.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "pytest" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/a6/85/8c969f8bec4e559f8f2b958a15229a35495f5b4ce499f6b865eac54b878d/pytest_metadata-3.1.1.tar.gz", hash = "sha256:d2a29b0355fbc03f168aa96d41ff88b1a3b44a3b02acbe491801c98a048017c8", size = 9952 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/3e/43/7e7b2ec865caa92f67b8f0e9231a798d102724ca4c0e1f414316be1c1ef2/pytest_metadata-3.1.1-py3-none-any.whl", hash = "sha256:c8e0844db684ee1c798cfa38908d20d67d0463ecb6137c72e91f418558dd5f4b", size = 11428 },
+]
+
+[[package]]
+name = "pyyaml"
+version = "6.0.3"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", size = 130960 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/d1/11/0fd08f8192109f7169db964b5707a2f1e8b745d4e239b784a5a1dd80d1db/pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8", size = 181669 },
+ { url = "https://files.pythonhosted.org/packages/b1/16/95309993f1d3748cd644e02e38b75d50cbc0d9561d21f390a76242ce073f/pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1", size = 173252 },
+ { url = "https://files.pythonhosted.org/packages/50/31/b20f376d3f810b9b2371e72ef5adb33879b25edb7a6d072cb7ca0c486398/pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c", size = 767081 },
+ { url = "https://files.pythonhosted.org/packages/49/1e/a55ca81e949270d5d4432fbbd19dfea5321eda7c41a849d443dc92fd1ff7/pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5", size = 841159 },
+ { url = "https://files.pythonhosted.org/packages/74/27/e5b8f34d02d9995b80abcef563ea1f8b56d20134d8f4e5e81733b1feceb2/pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6", size = 801626 },
+ { url = "https://files.pythonhosted.org/packages/f9/11/ba845c23988798f40e52ba45f34849aa8a1f2d4af4b798588010792ebad6/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6", size = 753613 },
+ { url = "https://files.pythonhosted.org/packages/3d/e0/7966e1a7bfc0a45bf0a7fb6b98ea03fc9b8d84fa7f2229e9659680b69ee3/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be", size = 794115 },
+ { url = "https://files.pythonhosted.org/packages/de/94/980b50a6531b3019e45ddeada0626d45fa85cbe22300844a7983285bed3b/pyyaml-6.0.3-cp313-cp313-win32.whl", hash = "sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26", size = 137427 },
+ { url = "https://files.pythonhosted.org/packages/97/c9/39d5b874e8b28845e4ec2202b5da735d0199dbe5b8fb85f91398814a9a46/pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c", size = 154090 },
+ { url = "https://files.pythonhosted.org/packages/73/e8/2bdf3ca2090f68bb3d75b44da7bbc71843b19c9f2b9cb9b0f4ab7a5a4329/pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb", size = 140246 },
+ { url = "https://files.pythonhosted.org/packages/9d/8c/f4bd7f6465179953d3ac9bc44ac1a8a3e6122cf8ada906b4f96c60172d43/pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac", size = 181814 },
+ { url = "https://files.pythonhosted.org/packages/bd/9c/4d95bb87eb2063d20db7b60faa3840c1b18025517ae857371c4dd55a6b3a/pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310", size = 173809 },
+ { url = "https://files.pythonhosted.org/packages/92/b5/47e807c2623074914e29dabd16cbbdd4bf5e9b2db9f8090fa64411fc5382/pyyaml-6.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7", size = 766454 },
+ { url = "https://files.pythonhosted.org/packages/02/9e/e5e9b168be58564121efb3de6859c452fccde0ab093d8438905899a3a483/pyyaml-6.0.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788", size = 836355 },
+ { url = "https://files.pythonhosted.org/packages/88/f9/16491d7ed2a919954993e48aa941b200f38040928474c9e85ea9e64222c3/pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5", size = 794175 },
+ { url = "https://files.pythonhosted.org/packages/dd/3f/5989debef34dc6397317802b527dbbafb2b4760878a53d4166579111411e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764", size = 755228 },
+ { url = "https://files.pythonhosted.org/packages/d7/ce/af88a49043cd2e265be63d083fc75b27b6ed062f5f9fd6cdc223ad62f03e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35", size = 789194 },
+ { url = "https://files.pythonhosted.org/packages/23/20/bb6982b26a40bb43951265ba29d4c246ef0ff59c9fdcdf0ed04e0687de4d/pyyaml-6.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac", size = 156429 },
+ { url = "https://files.pythonhosted.org/packages/f4/f4/a4541072bb9422c8a883ab55255f918fa378ecf083f5b85e87fc2b4eda1b/pyyaml-6.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3", size = 143912 },
+ { url = "https://files.pythonhosted.org/packages/7c/f9/07dd09ae774e4616edf6cda684ee78f97777bdd15847253637a6f052a62f/pyyaml-6.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3", size = 189108 },
+ { url = "https://files.pythonhosted.org/packages/4e/78/8d08c9fb7ce09ad8c38ad533c1191cf27f7ae1effe5bb9400a46d9437fcf/pyyaml-6.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba", size = 183641 },
+ { url = "https://files.pythonhosted.org/packages/7b/5b/3babb19104a46945cf816d047db2788bcaf8c94527a805610b0289a01c6b/pyyaml-6.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c", size = 831901 },
+ { url = "https://files.pythonhosted.org/packages/8b/cc/dff0684d8dc44da4d22a13f35f073d558c268780ce3c6ba1b87055bb0b87/pyyaml-6.0.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702", size = 861132 },
+ { url = "https://files.pythonhosted.org/packages/b1/5e/f77dc6b9036943e285ba76b49e118d9ea929885becb0a29ba8a7c75e29fe/pyyaml-6.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c", size = 839261 },
+ { url = "https://files.pythonhosted.org/packages/ce/88/a9db1376aa2a228197c58b37302f284b5617f56a5d959fd1763fb1675ce6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065", size = 805272 },
+ { url = "https://files.pythonhosted.org/packages/da/92/1446574745d74df0c92e6aa4a7b0b3130706a4142b2d1a5869f2eaa423c6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65", size = 829923 },
+ { url = "https://files.pythonhosted.org/packages/f0/7a/1c7270340330e575b92f397352af856a8c06f230aa3e76f86b39d01b416a/pyyaml-6.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9", size = 174062 },
+ { url = "https://files.pythonhosted.org/packages/f1/12/de94a39c2ef588c7e6455cfbe7343d3b2dc9d6b6b2f40c4c6565744c873d/pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b", size = 149341 },
+]
+
+[[package]]
+name = "rich"
+version = "14.3.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "markdown-it-py" },
+ { name = "pygments" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/a1/84/4831f881aa6ff3c976f6d6809b58cdfa350593ffc0dc3c58f5f6586780fb/rich-14.3.1.tar.gz", hash = "sha256:b8c5f568a3a749f9290ec6bddedf835cec33696bfc1e48bcfecb276c7386e4b8", size = 230125 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/87/2a/a1810c8627b9ec8c57ec5ec325d306701ae7be50235e8fd81266e002a3cc/rich-14.3.1-py3-none-any.whl", hash = "sha256:da750b1aebbff0b372557426fb3f35ba56de8ef954b3190315eb64076d6fb54e", size = 309952 },
+]
+
+[[package]]
+name = "ruff"
+version = "0.14.14"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/2e/06/f71e3a86b2df0dfa2d2f72195941cd09b44f87711cb7fa5193732cb9a5fc/ruff-0.14.14.tar.gz", hash = "sha256:2d0f819c9a90205f3a867dbbd0be083bee9912e170fd7d9704cc8ae45824896b", size = 4515732 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/d2/89/20a12e97bc6b9f9f68343952da08a8099c57237aef953a56b82711d55edd/ruff-0.14.14-py3-none-linux_armv6l.whl", hash = "sha256:7cfe36b56e8489dee8fbc777c61959f60ec0f1f11817e8f2415f429552846aed", size = 10467650 },
+ { url = "https://files.pythonhosted.org/packages/a3/b1/c5de3fd2d5a831fcae21beda5e3589c0ba67eec8202e992388e4b17a6040/ruff-0.14.14-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:6006a0082336e7920b9573ef8a7f52eec837add1265cc74e04ea8a4368cd704c", size = 10883245 },
+ { url = "https://files.pythonhosted.org/packages/b8/7c/3c1db59a10e7490f8f6f8559d1db8636cbb13dccebf18686f4e3c9d7c772/ruff-0.14.14-py3-none-macosx_11_0_arm64.whl", hash = "sha256:026c1d25996818f0bf498636686199d9bd0d9d6341c9c2c3b62e2a0198b758de", size = 10231273 },
+ { url = "https://files.pythonhosted.org/packages/a1/6e/5e0e0d9674be0f8581d1f5e0f0a04761203affce3232c1a1189d0e3b4dad/ruff-0.14.14-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f666445819d31210b71e0a6d1c01e24447a20b85458eea25a25fe8142210ae0e", size = 10585753 },
+ { url = "https://files.pythonhosted.org/packages/23/09/754ab09f46ff1884d422dc26d59ba18b4e5d355be147721bb2518aa2a014/ruff-0.14.14-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3c0f18b922c6d2ff9a5e6c3ee16259adc513ca775bcf82c67ebab7cbd9da5bc8", size = 10286052 },
+ { url = "https://files.pythonhosted.org/packages/c8/cc/e71f88dd2a12afb5f50733851729d6b571a7c3a35bfdb16c3035132675a0/ruff-0.14.14-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1629e67489c2dea43e8658c3dba659edbfd87361624b4040d1df04c9740ae906", size = 11043637 },
+ { url = "https://files.pythonhosted.org/packages/67/b2/397245026352494497dac935d7f00f1468c03a23a0c5db6ad8fc49ca3fb2/ruff-0.14.14-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:27493a2131ea0f899057d49d303e4292b2cae2bb57253c1ed1f256fbcd1da480", size = 12194761 },
+ { url = "https://files.pythonhosted.org/packages/5b/06/06ef271459f778323112c51b7587ce85230785cd64e91772034ddb88f200/ruff-0.14.14-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:01ff589aab3f5b539e35db38425da31a57521efd1e4ad1ae08fc34dbe30bd7df", size = 12005701 },
+ { url = "https://files.pythonhosted.org/packages/41/d6/99364514541cf811ccc5ac44362f88df66373e9fec1b9d1c4cc830593fe7/ruff-0.14.14-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1cc12d74eef0f29f51775f5b755913eb523546b88e2d733e1d701fe65144e89b", size = 11282455 },
+ { url = "https://files.pythonhosted.org/packages/ca/71/37daa46f89475f8582b7762ecd2722492df26421714a33e72ccc9a84d7a5/ruff-0.14.14-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb8481604b7a9e75eff53772496201690ce2687067e038b3cc31aaf16aa0b974", size = 11215882 },
+ { url = "https://files.pythonhosted.org/packages/2c/10/a31f86169ec91c0705e618443ee74ede0bdd94da0a57b28e72db68b2dbac/ruff-0.14.14-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:14649acb1cf7b5d2d283ebd2f58d56b75836ed8c6f329664fa91cdea19e76e66", size = 11180549 },
+ { url = "https://files.pythonhosted.org/packages/fd/1e/c723f20536b5163adf79bdd10c5f093414293cdf567eed9bdb7b83940f3f/ruff-0.14.14-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:e8058d2145566510790eab4e2fad186002e288dec5e0d343a92fe7b0bc1b3e13", size = 10543416 },
+ { url = "https://files.pythonhosted.org/packages/3e/34/8a84cea7e42c2d94ba5bde1d7a4fae164d6318f13f933d92da6d7c2041ff/ruff-0.14.14-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:e651e977a79e4c758eb807f0481d673a67ffe53cfa92209781dfa3a996cf8412", size = 10285491 },
+ { url = "https://files.pythonhosted.org/packages/55/ef/b7c5ea0be82518906c978e365e56a77f8de7678c8bb6651ccfbdc178c29f/ruff-0.14.14-py3-none-musllinux_1_2_i686.whl", hash = "sha256:cc8b22da8d9d6fdd844a68ae937e2a0adf9b16514e9a97cc60355e2d4b219fc3", size = 10733525 },
+ { url = "https://files.pythonhosted.org/packages/6a/5b/aaf1dfbcc53a2811f6cc0a1759de24e4b03e02ba8762daabd9b6bd8c59e3/ruff-0.14.14-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:16bc890fb4cc9781bb05beb5ab4cd51be9e7cb376bf1dd3580512b24eb3fda2b", size = 11315626 },
+ { url = "https://files.pythonhosted.org/packages/2c/aa/9f89c719c467dfaf8ad799b9bae0df494513fb21d31a6059cb5870e57e74/ruff-0.14.14-py3-none-win32.whl", hash = "sha256:b530c191970b143375b6a68e6f743800b2b786bbcf03a7965b06c4bf04568167", size = 10502442 },
+ { url = "https://files.pythonhosted.org/packages/87/44/90fa543014c45560cae1fffc63ea059fb3575ee6e1cb654562197e5d16fb/ruff-0.14.14-py3-none-win_amd64.whl", hash = "sha256:3dde1435e6b6fe5b66506c1dff67a421d0b7f6488d466f651c07f4cab3bf20fd", size = 11630486 },
+ { url = "https://files.pythonhosted.org/packages/9e/6a/40fee331a52339926a92e17ae748827270b288a35ef4a15c9c8f2ec54715/ruff-0.14.14-py3-none-win_arm64.whl", hash = "sha256:56e6981a98b13a32236a72a8da421d7839221fa308b223b9283312312e5ac76c", size = 10920448 },
+]
+
+[[package]]
+name = "setuptools"
+version = "80.10.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/76/95/faf61eb8363f26aa7e1d762267a8d602a1b26d4f3a1e758e92cb3cb8b054/setuptools-80.10.2.tar.gz", hash = "sha256:8b0e9d10c784bf7d262c4e5ec5d4ec94127ce206e8738f29a437945fbc219b70", size = 1200343 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/94/b8/f1f62a5e3c0ad2ff1d189590bfa4c46b4f3b6e49cef6f26c6ee4e575394d/setuptools-80.10.2-py3-none-any.whl", hash = "sha256:95b30ddfb717250edb492926c92b5221f7ef3fbcc2b07579bcd4a27da21d0173", size = 1064234 },
+]
+
+[[package]]
+name = "soupsieve"
+version = "2.8.3"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/7b/ae/2d9c981590ed9999a0d91755b47fc74f74de286b0f5cee14c9269041e6c4/soupsieve-2.8.3.tar.gz", hash = "sha256:3267f1eeea4251fb42728b6dfb746edc9acaffc4a45b27e19450b676586e8349", size = 118627 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/46/2c/1462b1d0a634697ae9e55b3cecdcb64788e8b7d63f54d923fcd0bb140aed/soupsieve-2.8.3-py3-none-any.whl", hash = "sha256:ed64f2ba4eebeab06cc4962affce381647455978ffc1e36bb79a545b91f45a95", size = 37016 },
+]
+
+[[package]]
+name = "types-pyyaml"
+version = "6.0.12.20250915"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/7e/69/3c51b36d04da19b92f9e815be12753125bd8bc247ba0470a982e6979e71c/types_pyyaml-6.0.12.20250915.tar.gz", hash = "sha256:0f8b54a528c303f0e6f7165687dd33fafa81c807fcac23f632b63aa624ced1d3", size = 17522 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/bd/e0/1eed384f02555dde685fff1a1ac805c1c7dcb6dd019c916fe659b1c1f9ec/types_pyyaml-6.0.12.20250915-py3-none-any.whl", hash = "sha256:e7d4d9e064e89a3b3cae120b4990cd370874d2bf12fa5f46c97018dd5d3c9ab6", size = 20338 },
+]
+
+[[package]]
+name = "typing-extensions"
+version = "4.15.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614 },
+]