[pull] dev from home-assistant:dev #1025
| name: CI | |
| run-name: "${{ github.event_name == 'workflow_dispatch' && format('CI: {0}', github.ref_name) || '' }}" | |
| # yamllint disable-line rule:truthy | |
| on: | |
| push: | |
| branches: | |
| - dev | |
| - rc | |
| - master | |
| pull_request: ~ | |
| workflow_dispatch: | |
| inputs: | |
| full: | |
| description: "Full run (regardless of changes)" | |
| default: false | |
| type: boolean | |
| lint-only: | |
| description: "Skip pytest" | |
| default: false | |
| type: boolean | |
| skip-coverage: | |
| description: "Skip coverage" | |
| default: false | |
| type: boolean | |
| pylint-only: | |
| description: "Only run pylint" | |
| default: false | |
| type: boolean | |
| mypy-only: | |
| description: "Only run mypy" | |
| default: false | |
| type: boolean | |
| audit-licenses-only: | |
| description: "Only run audit licenses" | |
| default: false | |
| type: boolean | |
| env: | |
| CACHE_VERSION: 1 | |
| UV_CACHE_VERSION: 1 | |
| MYPY_CACHE_VERSION: 1 | |
| HA_SHORT_VERSION: "2025.11" | |
| DEFAULT_PYTHON: "3.13" | |
| ALL_PYTHON_VERSIONS: "['3.13', '3.14']" | |
| # 10.3 is the oldest supported version | |
| # - 10.3.32 is the version currently shipped with Synology (as of 17 Feb 2022) | |
| # 10.6 is the current long-term-support | |
| # - 10.6.10 is the version currently shipped with the Add-on (as of 31 Jan 2023) | |
| # 10.10 is the latest short-term-support | |
| # - 10.10.3 is the latest (as of 6 Feb 2023) | |
| # 10.11 is the latest long-term-support | |
| # - 10.11.2 is the version currently shipped with Synology (as of 11 Oct 2023) | |
| # mysql 8.0.32 does not always behave the same as MariaDB | |
| # and some queries that work on MariaDB do not work on MySQL | |
| MARIADB_VERSIONS: "['mariadb:10.3.32','mariadb:10.6.10','mariadb:10.10.3','mariadb:10.11.2','mysql:8.0.32']" | |
| # 12 is the oldest supported version | |
| # - 12.14 is the latest (as of 9 Feb 2023) | |
| # 15 is the latest version | |
| # - 15.2 is the latest (as of 9 Feb 2023) | |
| POSTGRESQL_VERSIONS: "['postgres:12.14','postgres:15.2']" | |
| PRE_COMMIT_CACHE: ~/.cache/pre-commit | |
| UV_CACHE_DIR: /tmp/uv-cache | |
| APT_CACHE_BASE: /home/runner/work/apt | |
| APT_CACHE_DIR: /home/runner/work/apt/cache | |
| APT_LIST_CACHE_DIR: /home/runner/work/apt/lists | |
| SQLALCHEMY_WARN_20: 1 | |
| PYTHONASYNCIODEBUG: 1 | |
| HASS_CI: 1 | |
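| # Cancel in-progress runs for the same workflow and pull request (or ref) when new commits arrive | |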
| concurrency: | |
| group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} | |
| cancel-in-progress: true | |
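| # YAML anchors (&name) and aliases (*name) are used below to share runner labels, steps, and cache keys between jobs | |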
| jobs: | |
| info: | |
| name: Collect information & changes data | |
| runs-on: &runs-on-ubuntu ubuntu-24.04 | |
| outputs: | |
| # In case of issues with the partial run, use the following line instead: | |
| # test_full_suite: 'true' | |
| core: ${{ steps.core.outputs.changes }} | |
| integrations_glob: ${{ steps.info.outputs.integrations_glob }} | |
| integrations: ${{ steps.integrations.outputs.changes }} | |
| apt_cache_key: ${{ steps.generate_apt_cache_key.outputs.key }} | |
| pre-commit_cache_key: ${{ steps.generate_pre-commit_cache_key.outputs.key }} | |
| python_cache_key: ${{ steps.generate_python_cache_key.outputs.key }} | |
| requirements: ${{ steps.core.outputs.requirements }} | |
| mariadb_groups: ${{ steps.info.outputs.mariadb_groups }} | |
| postgresql_groups: ${{ steps.info.outputs.postgresql_groups }} | |
| python_versions: ${{ steps.info.outputs.python_versions }} | |
| test_full_suite: ${{ steps.info.outputs.test_full_suite }} | |
| test_group_count: ${{ steps.info.outputs.test_group_count }} | |
| test_groups: ${{ steps.info.outputs.test_groups }} | |
| tests_glob: ${{ steps.info.outputs.tests_glob }} | |
| tests: ${{ steps.info.outputs.tests }} | |
| lint_only: ${{ steps.info.outputs.lint_only }} | |
| skip_coverage: ${{ steps.info.outputs.skip_coverage }} | |
| steps: | |
| - &checkout | |
| name: Check out code from GitHub | |
| uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 | |
| - name: Generate partial Python venv restore key | |
| id: generate_python_cache_key | |
| run: | | |
| # Include HA_SHORT_VERSION to force the immediate creation | |
| # of a new uv cache entry after a version bump. | |
| echo "key=venv-${{ env.CACHE_VERSION }}-${{ env.HA_SHORT_VERSION }}-${{ | |
| hashFiles('requirements_test.txt', 'requirements_test_pre_commit.txt') }}-${{ | |
| hashFiles('requirements.txt') }}-${{ | |
| hashFiles('requirements_all.txt') }}-${{ | |
| hashFiles('homeassistant/package_constraints.txt') }}-${{ | |
| hashFiles('script/gen_requirements_all.py') }}" >> $GITHUB_OUTPUT | |
| - name: Generate partial pre-commit restore key | |
| id: generate_pre-commit_cache_key | |
| run: >- | |
| echo "key=pre-commit-${{ env.CACHE_VERSION }}-${{ | |
| hashFiles('.pre-commit-config.yaml') }}" >> $GITHUB_OUTPUT | |
| - name: Generate partial apt restore key | |
| id: generate_apt_cache_key | |
| run: | | |
| echo "key=$(lsb_release -rs)-apt-${{ env.CACHE_VERSION }}-${{ env.HA_SHORT_VERSION }}" >> $GITHUB_OUTPUT | |
| - name: Filter for core changes | |
| uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2 | |
| id: core | |
| with: | |
| filters: .core_files.yaml | |
| - name: Create a list of integrations to filter for changes | |
| run: | | |
| integrations=$(ls -Ad ./homeassistant/components/[!_]* | xargs -n 1 basename) | |
| touch .integration_paths.yaml | |
| for integration in $integrations; do | |
| echo "${integration}: [homeassistant/components/${integration}/**, tests/components/${integration}/**]" \ | |
| >> .integration_paths.yaml; | |
| done | |
| echo "Result:" | |
| cat .integration_paths.yaml | |
| - name: Filter for integration changes | |
| uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2 | |
| id: integrations | |
| with: | |
| filters: .integration_paths.yaml | |
| - name: Collect additional information | |
| id: info | |
| run: | | |
| # Defaults | |
| integrations_glob="" | |
| mariadb_groups=${MARIADB_VERSIONS} | |
| postgresql_groups=${POSTGRESQL_VERSIONS} | |
| test_full_suite="true" | |
| test_groups="[1, 2, 3, 4, 5, 6, 7, 8, 9, 10]" | |
| test_group_count=10 | |
| tests="[]" | |
| tests_glob="" | |
| lint_only="" | |
| skip_coverage="" | |
| if [[ "${{ steps.integrations.outputs.changes }}" != "[]" ]]; | |
| then | |
| # Create a file glob for the integrations | |
| integrations_glob=$(echo '${{ steps.integrations.outputs.changes }}' | jq -cSr '. | join(",")') | |
| [[ "${integrations_glob}" == *","* ]] && integrations_glob="{${integrations_glob}}" | |
| # Create list of testable integrations | |
| possible_integrations=$(echo '${{ steps.integrations.outputs.changes }}' | jq -cSr '.[]') | |
| tests=$( | |
| for integration in ${possible_integrations}; | |
| do | |
| if [[ -d "tests/components/${integration}" ]]; then | |
| echo -n "\"${integration}\","; | |
| fi; | |
| done | |
| ) | |
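| # Strip the trailing comma and wrap in brackets to form a JSON array, e.g. ["hue","zwave_js"] | |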
| [[ ! -z "${tests}" ]] && tests="${tests::-1}" | |
| tests="[${tests}]" | |
| test_groups="${tests}" | |
| # Test group count should be 1; we don't split partial tests | |
| test_group_count=1 | |
| # Create a file glob for the integrations tests | |
| tests_glob=$(echo "${tests}" | jq -cSr '. | join(",")') | |
| [[ "${tests_glob}" == *","* ]] && tests_glob="{${tests_glob}}" | |
| mariadb_groups="[]" | |
| postgresql_groups="[]" | |
| test_full_suite="false" | |
| fi | |
| # Run the full suite on the dev, rc and master branches, | |
| # when core files are touched, or when a full run is requested. | |
| if [[ "${{ github.ref }}" == "refs/heads/dev" ]] \ | |
| || [[ "${{ github.ref }}" == "refs/heads/master" ]] \ | |
| || [[ "${{ github.ref }}" == "refs/heads/rc" ]] \ | |
| || [[ "${{ steps.core.outputs.any }}" == "true" ]] \ | |
| || [[ "${{ github.event.inputs.full }}" == "true" ]] \ | |
| || [[ "${{ contains(github.event.pull_request.labels.*.name, 'ci-full-run') }}" == "true" ]]; | |
| then | |
| mariadb_groups=${MARIADB_VERSIONS} | |
| postgresql_groups=${POSTGRESQL_VERSIONS} | |
| test_groups="[1, 2, 3, 4, 5, 6, 7, 8, 9, 10]" | |
| test_group_count=10 | |
| test_full_suite="true" | |
| fi | |
| if [[ "${{ github.event.inputs.lint-only }}" == "true" ]] \ | |
| || [[ "${{ github.event.inputs.pylint-only }}" == "true" ]] \ | |
| || [[ "${{ github.event.inputs.mypy-only }}" == "true" ]] \ | |
| || [[ "${{ github.event.inputs.audit-licenses-only }}" == "true" ]] \ | |
| || [[ "${{ github.event_name }}" == "push" \ | |
| && "${{ github.event.repository.full_name }}" != "home-assistant/core" ]]; | |
| then | |
| lint_only="true" | |
| skip_coverage="true" | |
| fi | |
| if [[ "${{ github.event.inputs.skip-coverage }}" == "true" ]] \ | |
| || [[ "${{ contains(github.event.pull_request.labels.*.name, 'ci-skip-coverage') }}" == "true" ]]; | |
| then | |
| skip_coverage="true" | |
| fi | |
| # Output & sent to GitHub Actions | |
| echo "mariadb_groups: ${mariadb_groups}" | |
| echo "mariadb_groups=${mariadb_groups}" >> $GITHUB_OUTPUT | |
| echo "postgresql_groups: ${postgresql_groups}" | |
| echo "postgresql_groups=${postgresql_groups}" >> $GITHUB_OUTPUT | |
| echo "python_versions: ${ALL_PYTHON_VERSIONS}" | |
| echo "python_versions=${ALL_PYTHON_VERSIONS}" >> $GITHUB_OUTPUT | |
| echo "test_full_suite: ${test_full_suite}" | |
| echo "test_full_suite=${test_full_suite}" >> $GITHUB_OUTPUT | |
| echo "integrations_glob: ${integrations_glob}" | |
| echo "integrations_glob=${integrations_glob}" >> $GITHUB_OUTPUT | |
| echo "test_group_count: ${test_group_count}" | |
| echo "test_group_count=${test_group_count}" >> $GITHUB_OUTPUT | |
| echo "test_groups: ${test_groups}" | |
| echo "test_groups=${test_groups}" >> $GITHUB_OUTPUT | |
| echo "tests: ${tests}" | |
| echo "tests=${tests}" >> $GITHUB_OUTPUT | |
| echo "tests_glob: ${tests_glob}" | |
| echo "tests_glob=${tests_glob}" >> $GITHUB_OUTPUT | |
| echo "lint_only": ${lint_only} | |
| echo "lint_only=${lint_only}" >> $GITHUB_OUTPUT | |
| echo "skip_coverage: ${skip_coverage}" | |
| echo "skip_coverage=${skip_coverage}" >> $GITHUB_OUTPUT | |
| pre-commit: | |
| name: Prepare pre-commit base | |
| runs-on: *runs-on-ubuntu | |
| needs: [info] | |
| if: | | |
| github.event.inputs.pylint-only != 'true' | |
| && github.event.inputs.mypy-only != 'true' | |
| && github.event.inputs.audit-licenses-only != 'true' | |
| steps: | |
| - *checkout | |
| - &setup-python-default | |
| name: Set up Python ${{ env.DEFAULT_PYTHON }} | |
| id: python | |
| uses: &actions-setup-python actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0 | |
| with: | |
| python-version: ${{ env.DEFAULT_PYTHON }} | |
| check-latest: true | |
| - name: Restore base Python virtual environment | |
| id: cache-venv | |
| uses: &actions-cache actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 | |
| with: | |
| path: venv | |
| key: &key-pre-commit-venv >- | |
| ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-venv-${{ | |
| needs.info.outputs.pre-commit_cache_key }} | |
| - name: Create Python virtual environment | |
| if: steps.cache-venv.outputs.cache-hit != 'true' | |
| run: | | |
| python -m venv venv | |
| . venv/bin/activate | |
| python --version | |
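| # Bootstrap: install the uv version pinned in requirements.txt, then use uv to install pre-commit from requirements_test.txt | |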
| pip install "$(grep '^uv' < requirements.txt)" | |
| uv pip install "$(cat requirements_test.txt | grep pre-commit)" | |
| - name: Restore pre-commit environment from cache | |
| id: cache-precommit | |
| uses: *actions-cache | |
| with: | |
| path: ${{ env.PRE_COMMIT_CACHE }} | |
| lookup-only: true | |
| key: &key-pre-commit-env >- | |
| ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{ | |
| needs.info.outputs.pre-commit_cache_key }} | |
| - name: Install pre-commit dependencies | |
| if: steps.cache-precommit.outputs.cache-hit != 'true' | |
| run: | | |
| . venv/bin/activate | |
| pre-commit install-hooks | |
| lint-ruff-format: | |
| name: Check ruff-format | |
| runs-on: *runs-on-ubuntu | |
| needs: &needs-pre-commit | |
| - info | |
| - pre-commit | |
| steps: | |
| - *checkout | |
| - *setup-python-default | |
| - &cache-restore-pre-commit-venv | |
| name: Restore base Python virtual environment | |
| id: cache-venv | |
| uses: &actions-cache-restore actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 | |
| with: | |
| path: venv | |
| fail-on-cache-miss: true | |
| key: *key-pre-commit-venv | |
| - &cache-restore-pre-commit-env | |
| name: Restore pre-commit environment from cache | |
| id: cache-precommit | |
| uses: *actions-cache-restore | |
| with: | |
| path: ${{ env.PRE_COMMIT_CACHE }} | |
| fail-on-cache-miss: true | |
| key: *key-pre-commit-env | |
| - name: Run ruff-format | |
| run: | | |
| . venv/bin/activate | |
| pre-commit run --hook-stage manual ruff-format --all-files --show-diff-on-failure | |
| env: | |
| RUFF_OUTPUT_FORMAT: github | |
| lint-ruff: | |
| name: Check ruff | |
| runs-on: *runs-on-ubuntu | |
| needs: *needs-pre-commit | |
| steps: | |
| - *checkout | |
| - *setup-python-default | |
| - *cache-restore-pre-commit-venv | |
| - *cache-restore-pre-commit-env | |
| - name: Run ruff | |
| run: | | |
| . venv/bin/activate | |
| pre-commit run --hook-stage manual ruff-check --all-files --show-diff-on-failure | |
| env: | |
| RUFF_OUTPUT_FORMAT: github | |
| lint-other: | |
| name: Check other linters | |
| runs-on: *runs-on-ubuntu | |
| needs: *needs-pre-commit | |
| steps: | |
| - *checkout | |
| - *setup-python-default | |
| - *cache-restore-pre-commit-venv | |
| - *cache-restore-pre-commit-env | |
| - name: Register yamllint problem matcher | |
| run: | | |
| echo "::add-matcher::.github/workflows/matchers/yamllint.json" | |
| - name: Run yamllint | |
| run: | | |
| . venv/bin/activate | |
| pre-commit run --hook-stage manual yamllint --all-files --show-diff-on-failure | |
| - name: Register check-json problem matcher | |
| run: | | |
| echo "::add-matcher::.github/workflows/matchers/check-json.json" | |
| - name: Run check-json | |
| run: | | |
| . venv/bin/activate | |
| pre-commit run --hook-stage manual check-json --all-files | |
| - name: Run prettier (fully) | |
| if: needs.info.outputs.test_full_suite == 'true' | |
| run: | | |
| . venv/bin/activate | |
| pre-commit run --hook-stage manual prettier --all-files | |
| - name: Run prettier (partially) | |
| if: needs.info.outputs.test_full_suite == 'false' | |
| shell: bash | |
| run: | | |
| . venv/bin/activate | |
| shopt -s globstar | |
| pre-commit run --hook-stage manual prettier --files {homeassistant,tests}/components/${{ needs.info.outputs.integrations_glob }}/{*,**/*} | |
| - name: Register check executables problem matcher | |
| run: | | |
| echo "::add-matcher::.github/workflows/matchers/check-executables-have-shebangs.json" | |
| - name: Run executables check | |
| run: | | |
| . venv/bin/activate | |
| pre-commit run --hook-stage manual check-executables-have-shebangs --all-files | |
| - name: Register codespell problem matcher | |
| run: | | |
| echo "::add-matcher::.github/workflows/matchers/codespell.json" | |
| - name: Run codespell | |
| run: | | |
| . venv/bin/activate | |
| pre-commit run --show-diff-on-failure --hook-stage manual codespell --all-files | |
| lint-hadolint: | |
| name: Check ${{ matrix.file }} | |
| runs-on: *runs-on-ubuntu | |
| needs: [info] | |
| if: | | |
| github.event.inputs.pylint-only != 'true' | |
| && github.event.inputs.mypy-only != 'true' | |
| && github.event.inputs.audit-licenses-only != 'true' | |
| strategy: | |
| fail-fast: false | |
| matrix: | |
| file: | |
| - Dockerfile | |
| - Dockerfile.dev | |
| - script/hassfest/docker/Dockerfile | |
| steps: | |
| - *checkout | |
| - name: Register hadolint problem matcher | |
| run: | | |
| echo "::add-matcher::.github/workflows/matchers/hadolint.json" | |
| - name: Check ${{ matrix.file }} | |
| uses: docker://hadolint/hadolint:v2.12.0 | |
| with: | |
| args: hadolint ${{ matrix.file }} | |
| base: | |
| name: Prepare dependencies | |
| runs-on: *runs-on-ubuntu | |
| needs: [info] | |
| timeout-minutes: 60 | |
| strategy: | |
| matrix: | |
| python-version: &matrix-python ${{ fromJson(needs.info.outputs.python_versions) }} | |
| steps: | |
| - *checkout | |
| - &setup-python-matrix | |
| name: Set up Python ${{ matrix.python-version }} | |
| id: python | |
| uses: *actions-setup-python | |
| with: | |
| python-version: ${{ matrix.python-version }} | |
| check-latest: true | |
| - name: Generate partial uv restore key | |
| id: generate-uv-key | |
| run: | | |
| uv_version=$(cat requirements.txt | grep uv | cut -d '=' -f 3) | |
| echo "version=${uv_version}" >> $GITHUB_OUTPUT | |
| echo "key=uv-${{ env.UV_CACHE_VERSION }}-${uv_version}-${{ | |
| env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT | |
| - name: Restore base Python virtual environment | |
| id: cache-venv | |
| uses: *actions-cache | |
| with: | |
| path: venv | |
| key: &key-python-venv >- | |
| ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{ | |
| needs.info.outputs.python_cache_key }} | |
| - name: Restore uv wheel cache | |
| if: steps.cache-venv.outputs.cache-hit != 'true' | |
| uses: *actions-cache | |
| with: | |
| path: ${{ env.UV_CACHE_DIR }} | |
| key: >- | |
| ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{ | |
| steps.generate-uv-key.outputs.key }} | |
| restore-keys: | | |
| ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-uv-${{ | |
| env.UV_CACHE_VERSION }}-${{ steps.generate-uv-key.outputs.version }}-${{ | |
| env.HA_SHORT_VERSION }}- | |
| - name: Check if apt cache exists | |
| id: cache-apt-check | |
| uses: *actions-cache | |
| with: | |
| lookup-only: ${{ steps.cache-venv.outputs.cache-hit == 'true' }} | |
| path: &path-apt-cache | | |
| ${{ env.APT_CACHE_DIR }} | |
| ${{ env.APT_LIST_CACHE_DIR }} | |
| key: &key-apt-cache >- | |
| ${{ runner.os }}-${{ runner.arch }}-${{ needs.info.outputs.apt_cache_key }} | |
| - name: Install additional OS dependencies | |
| if: | | |
| steps.cache-venv.outputs.cache-hit != 'true' | |
| || steps.cache-apt-check.outputs.cache-hit != 'true' | |
| timeout-minutes: 10 | |
| run: | | |
| sudo rm /etc/apt/sources.list.d/microsoft-prod.list | |
| if [[ "${{ steps.cache-apt-check.outputs.cache-hit }}" != 'true' ]]; then | |
| mkdir -p ${{ env.APT_CACHE_DIR }} | |
| mkdir -p ${{ env.APT_LIST_CACHE_DIR }} | |
| fi | |
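| # Redirect apt's package and list caches into the workspace so actions/cache can persist them between runs | |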
| sudo apt-get update \ | |
| -o Dir::Cache=${{ env.APT_CACHE_DIR }} \ | |
| -o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }} | |
| sudo apt-get -y install \ | |
| -o Dir::Cache=${{ env.APT_CACHE_DIR }} \ | |
| -o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }} \ | |
| bluez \ | |
| ffmpeg \ | |
| libturbojpeg \ | |
| libxml2-utils \ | |
| libavcodec-dev \ | |
| libavdevice-dev \ | |
| libavfilter-dev \ | |
| libavformat-dev \ | |
| libavutil-dev \ | |
| libgammu-dev \ | |
| libswresample-dev \ | |
| libswscale-dev \ | |
| libudev-dev | |
| if [[ "${{ steps.cache-apt-check.outputs.cache-hit }}" != 'true' ]]; then | |
| sudo chmod -R 755 ${{ env.APT_CACHE_BASE }} | |
| fi | |
| - name: Save apt cache | |
| if: steps.cache-apt-check.outputs.cache-hit != 'true' | |
| uses: &actions-cache-save actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 | |
| with: | |
| path: *path-apt-cache | |
| key: *key-apt-cache | |
| - name: Create Python virtual environment | |
| if: steps.cache-venv.outputs.cache-hit != 'true' | |
| run: | | |
| python -m venv venv | |
| . venv/bin/activate | |
| python --version | |
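| # Install the pinned uv, then use it to install core, generated CI, and test requirements plus Home Assistant in editable mode | |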
| pip install "$(grep '^uv' < requirements.txt)" | |
| uv pip install -U "pip>=25.2" | |
| uv pip install -r requirements.txt | |
| python -m script.gen_requirements_all ci | |
| uv pip install -r requirements_all_pytest.txt -r requirements_test.txt | |
| uv pip install -e . --config-settings editable_mode=compat | |
| - name: Dump pip freeze | |
| run: | | |
| python -m venv venv | |
| . venv/bin/activate | |
| python --version | |
| uv pip freeze >> pip_freeze.txt | |
| - name: Upload pip_freeze artifact | |
| uses: &actions-upload-artifact actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 | |
| with: | |
| name: pip-freeze-${{ matrix.python-version }} | |
| path: pip_freeze.txt | |
| overwrite: true | |
| - name: Remove pip_freeze | |
| run: rm pip_freeze.txt | |
| - name: Remove generated requirements_all | |
| if: steps.cache-venv.outputs.cache-hit != 'true' | |
| run: rm requirements_all_pytest.txt requirements_all_wheels_*.txt | |
| - &check-dirty | |
| name: Check dirty | |
| run: | | |
| ./script/check_dirty | |
| hassfest: | |
| name: Check hassfest | |
| runs-on: *runs-on-ubuntu | |
| needs: &needs-base | |
| - info | |
| - base | |
| if: | | |
| github.event.inputs.pylint-only != 'true' | |
| && github.event.inputs.mypy-only != 'true' | |
| && github.event.inputs.audit-licenses-only != 'true' | |
| steps: | |
| - &cache-restore-apt | |
| name: Restore apt cache | |
| uses: *actions-cache-restore | |
| with: | |
| path: *path-apt-cache | |
| fail-on-cache-miss: true | |
| key: *key-apt-cache | |
| - name: Install additional OS dependencies | |
| timeout-minutes: 10 | |
| run: | | |
| sudo rm /etc/apt/sources.list.d/microsoft-prod.list | |
| sudo apt-get update \ | |
| -o Dir::Cache=${{ env.APT_CACHE_DIR }} \ | |
| -o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }} | |
| sudo apt-get -y install \ | |
| -o Dir::Cache=${{ env.APT_CACHE_DIR }} \ | |
| -o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }} \ | |
| libturbojpeg | |
| - *checkout | |
| - *setup-python-default | |
| - &cache-restore-python-default | |
| name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment | |
| id: cache-venv | |
| uses: *actions-cache-restore | |
| with: | |
| path: venv | |
| fail-on-cache-miss: true | |
| key: *key-python-venv | |
| - name: Run hassfest | |
| run: | | |
| . venv/bin/activate | |
| python -m script.hassfest --requirements --action validate | |
| gen-requirements-all: | |
| name: Check all requirements | |
| runs-on: *runs-on-ubuntu | |
| needs: *needs-base | |
| if: | | |
| github.event.inputs.pylint-only != 'true' | |
| && github.event.inputs.mypy-only != 'true' | |
| && github.event.inputs.audit-licenses-only != 'true' | |
| steps: | |
| - *checkout | |
| - *setup-python-default | |
| - *cache-restore-python-default | |
| - name: Run gen_requirements_all.py | |
| run: | | |
| . venv/bin/activate | |
| python -m script.gen_requirements_all validate | |
| dependency-review: | |
| name: Dependency review | |
| runs-on: *runs-on-ubuntu | |
| needs: *needs-base | |
| if: | | |
| github.event.inputs.pylint-only != 'true' | |
| && github.event.inputs.mypy-only != 'true' | |
| && needs.info.outputs.requirements == 'true' | |
| && github.event_name == 'pull_request' | |
| steps: | |
| - *checkout | |
| - name: Dependency review | |
| uses: actions/dependency-review-action@40c09b7dc99638e5ddb0bfd91c1673effc064d8a # v4.8.1 | |
| with: | |
| license-check: false # We use our own license audit checks | |
| audit-licenses: | |
| name: Audit licenses | |
| runs-on: *runs-on-ubuntu | |
| needs: *needs-base | |
| if: | | |
| (github.event.inputs.pylint-only != 'true' | |
| && github.event.inputs.mypy-only != 'true' | |
| || github.event.inputs.audit-licenses-only == 'true') | |
| && needs.info.outputs.requirements == 'true' | |
| strategy: | |
| fail-fast: false | |
| matrix: | |
| python-version: *matrix-python | |
| steps: | |
| - *checkout | |
| - *setup-python-matrix | |
| - &cache-restore-python-matrix | |
| name: Restore full Python ${{ matrix.python-version }} virtual environment | |
| id: cache-venv | |
| uses: *actions-cache-restore | |
| with: | |
| path: venv | |
| fail-on-cache-miss: true | |
| key: *key-python-venv | |
| - name: Extract license data | |
| run: | | |
| . venv/bin/activate | |
| python -m script.licenses extract --output-file=licenses-${{ matrix.python-version }}.json | |
| - name: Upload licenses | |
| uses: *actions-upload-artifact | |
| with: | |
| name: licenses-${{ github.run_number }}-${{ matrix.python-version }} | |
| path: licenses-${{ matrix.python-version }}.json | |
| - name: Check licenses | |
| run: | | |
| . venv/bin/activate | |
| python -m script.licenses check licenses-${{ matrix.python-version }}.json | |
| pylint: | |
| name: Check pylint | |
| runs-on: *runs-on-ubuntu | |
| needs: *needs-base | |
| timeout-minutes: 20 | |
| if: | | |
| github.event.inputs.mypy-only != 'true' | |
| && github.event.inputs.audit-licenses-only != 'true' | |
| || github.event.inputs.pylint-only == 'true' | |
| steps: | |
| - *checkout | |
| - *setup-python-default | |
| - *cache-restore-python-default | |
| - &problem-matcher-pylint | |
| name: Register pylint problem matcher | |
| run: | | |
| echo "::add-matcher::.github/workflows/matchers/pylint.json" | |
| - name: Run pylint (fully) | |
| if: needs.info.outputs.test_full_suite == 'true' | |
| run: | | |
| . venv/bin/activate | |
| python --version | |
| pylint --ignore-missing-annotations=y homeassistant | |
| - name: Run pylint (partially) | |
| if: needs.info.outputs.test_full_suite == 'false' | |
| shell: bash | |
| run: | | |
| . venv/bin/activate | |
| python --version | |
| pylint --ignore-missing-annotations=y homeassistant/components/${{ needs.info.outputs.integrations_glob }} | |
| pylint-tests: | |
| name: Check pylint on tests | |
| runs-on: *runs-on-ubuntu | |
| needs: *needs-base | |
| timeout-minutes: 20 | |
| if: | | |
| (github.event.inputs.mypy-only != 'true' | |
| && github.event.inputs.audit-licenses-only != 'true' | |
| || github.event.inputs.pylint-only == 'true') | |
| && (needs.info.outputs.tests_glob || needs.info.outputs.test_full_suite == 'true') | |
| steps: | |
| - *checkout | |
| - *setup-python-default | |
| - *cache-restore-python-default | |
| - *problem-matcher-pylint | |
| - name: Run pylint (fully) | |
| if: needs.info.outputs.test_full_suite == 'true' | |
| run: | | |
| . venv/bin/activate | |
| python --version | |
| pylint tests | |
| - name: Run pylint (partially) | |
| if: needs.info.outputs.test_full_suite == 'false' | |
| shell: bash | |
| run: | | |
| . venv/bin/activate | |
| python --version | |
| pylint tests/components/${{ needs.info.outputs.tests_glob }} | |
| mypy: | |
| name: Check mypy | |
| runs-on: *runs-on-ubuntu | |
| needs: *needs-base | |
| if: | | |
| github.event.inputs.pylint-only != 'true' | |
| && github.event.inputs.audit-licenses-only != 'true' | |
| || github.event.inputs.mypy-only == 'true' | |
| steps: | |
| - *checkout | |
| - *setup-python-default | |
| - name: Generate partial mypy restore key | |
| id: generate-mypy-key | |
| run: | | |
| mypy_version=$(cat requirements_test.txt | grep 'mypy.*=' | cut -d '=' -f 3) | |
| echo "version=$mypy_version" >> $GITHUB_OUTPUT | |
| echo "key=mypy-${{ env.MYPY_CACHE_VERSION }}-$mypy_version-${{ | |
| env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT | |
| - *cache-restore-python-default | |
| - name: Restore mypy cache | |
| uses: *actions-cache | |
| with: | |
| path: .mypy_cache | |
| key: >- | |
| ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{ | |
| steps.generate-mypy-key.outputs.key }} | |
| restore-keys: | | |
| ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-mypy-${{ | |
| env.MYPY_CACHE_VERSION }}-${{ steps.generate-mypy-key.outputs.version }}-${{ | |
| env.HA_SHORT_VERSION }}- | |
| - name: Register mypy problem matcher | |
| run: | | |
| echo "::add-matcher::.github/workflows/matchers/mypy.json" | |
| - name: Run mypy (fully) | |
| if: needs.info.outputs.test_full_suite == 'true' | |
| run: | | |
| . venv/bin/activate | |
| python --version | |
| mypy homeassistant pylint | |
| - name: Run mypy (partially) | |
| if: needs.info.outputs.test_full_suite == 'false' | |
| shell: bash | |
| run: | | |
| . venv/bin/activate | |
| python --version | |
| mypy homeassistant/components/${{ needs.info.outputs.integrations_glob }} | |
| prepare-pytest-full: | |
| name: Split tests for full run | |
| runs-on: *runs-on-ubuntu | |
| if: | | |
| needs.info.outputs.lint_only != 'true' | |
| && needs.info.outputs.test_full_suite == 'true' | |
| needs: | |
| - info | |
| - base | |
| - gen-requirements-all | |
| - hassfest | |
| - lint-other | |
| - lint-ruff | |
| - lint-ruff-format | |
| - mypy | |
| steps: | |
| - *cache-restore-apt | |
| - name: Install additional OS dependencies | |
| timeout-minutes: 10 | |
| run: | | |
| sudo rm /etc/apt/sources.list.d/microsoft-prod.list | |
| sudo apt-get update \ | |
| -o Dir::Cache=${{ env.APT_CACHE_DIR }} \ | |
| -o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }} | |
| sudo apt-get -y install \ | |
| -o Dir::Cache=${{ env.APT_CACHE_DIR }} \ | |
| -o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }} \ | |
| bluez \ | |
| ffmpeg \ | |
| libturbojpeg \ | |
| libgammu-dev | |
| - *checkout | |
| - *setup-python-default | |
| - *cache-restore-python-default | |
| - name: Run split_tests.py | |
| run: | | |
| . venv/bin/activate | |
| python -m script.split_tests ${{ needs.info.outputs.test_group_count }} tests | |
| - name: Upload pytest_buckets | |
| uses: *actions-upload-artifact | |
| with: | |
| name: pytest_buckets | |
| path: pytest_buckets.txt | |
| overwrite: true | |
| pytest-full: | |
| name: Run tests Python ${{ matrix.python-version }} (${{ matrix.group }}) | |
| runs-on: *runs-on-ubuntu | |
| needs: | |
| - info | |
| - base | |
| - gen-requirements-all | |
| - hassfest | |
| - lint-other | |
| - lint-ruff | |
| - lint-ruff-format | |
| - mypy | |
| - prepare-pytest-full | |
| if: | | |
| needs.info.outputs.lint_only != 'true' | |
| && needs.info.outputs.test_full_suite == 'true' | |
| strategy: | |
| fail-fast: false | |
| matrix: | |
| python-version: *matrix-python | |
| group: &matrix-group ${{ fromJson(needs.info.outputs.test_groups) }} | |
| steps: | |
| - *cache-restore-apt | |
| - name: Install additional OS dependencies | |
| timeout-minutes: 10 | |
| run: | | |
| sudo rm /etc/apt/sources.list.d/microsoft-prod.list | |
| sudo apt-get update \ | |
| -o Dir::Cache=${{ env.APT_CACHE_DIR }} \ | |
| -o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }} | |
| sudo apt-get -y install \ | |
| -o Dir::Cache=${{ env.APT_CACHE_DIR }} \ | |
| -o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }} \ | |
| bluez \ | |
| ffmpeg \ | |
| libturbojpeg \ | |
| libgammu-dev \ | |
| libxml2-utils | |
| - *checkout | |
| - *setup-python-matrix | |
| - *cache-restore-python-matrix | |
| - &problem-matcher-python | |
| name: Register Python problem matcher | |
| run: | | |
| echo "::add-matcher::.github/workflows/matchers/python.json" | |
| - &problem-matcher-pytest-slow | |
| name: Register pytest slow test problem matcher | |
| run: | | |
| echo "::add-matcher::.github/workflows/matchers/pytest-slow.json" | |
| - name: Download pytest_buckets | |
| uses: &actions-download-artifact actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 | |
| with: | |
| name: pytest_buckets | |
| - &compile-english-translations | |
| name: Compile English translations | |
| run: | | |
| . venv/bin/activate | |
| python3 -m script.translations develop --all | |
| - name: Run pytest | |
| timeout-minutes: 60 | |
| id: pytest-full | |
| env: | |
| PYTHONDONTWRITEBYTECODE: 1 | |
| run: | | |
| . venv/bin/activate | |
| python --version | |
| set -o pipefail | |
| cov_params=() | |
| if [[ "${{ needs.info.outputs.skip_coverage }}" != "true" ]]; then | |
| cov_params+=(--cov="homeassistant") | |
| cov_params+=(--cov-report=xml) | |
| cov_params+=(--junitxml=junit.xml -o junit_family=legacy) | |
| fi | |
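| # sed -n "N,1p" prints only line N of pytest_buckets.txt, i.e. the test paths assigned to this group | |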
| echo "Test group ${{ matrix.group }}: $(sed -n "${{ matrix.group }},1p" pytest_buckets.txt)" | |
| python3 -b -X dev -m pytest \ | |
| -qq \ | |
| --timeout=9 \ | |
| --durations=10 \ | |
| --numprocesses auto \ | |
| --snapshot-details \ | |
| --dist=loadfile \ | |
| ${cov_params[@]} \ | |
| -o console_output_style=count \ | |
| -p no:sugar \ | |
| --exclude-warning-annotations \ | |
| $(sed -n "${{ matrix.group }},1p" pytest_buckets.txt) \ | |
| 2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt | |
| - name: Upload pytest output | |
| if: success() || failure() && steps.pytest-full.conclusion == 'failure' | |
| uses: *actions-upload-artifact | |
| with: | |
| name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }} | |
| path: pytest-*.txt | |
| overwrite: true | |
| - name: Upload coverage artifact | |
| if: needs.info.outputs.skip_coverage != 'true' | |
| uses: *actions-upload-artifact | |
| with: | |
| name: coverage-${{ matrix.python-version }}-${{ matrix.group }} | |
| path: coverage.xml | |
| overwrite: true | |
| - &beautify-test-results | |
| name: Beautify test results | |
| # For easier identification of parsing errors | |
| if: needs.info.outputs.skip_coverage != 'true' | |
| run: | | |
| xmllint --format "junit.xml" > "junit.xml-tmp" | |
| mv "junit.xml-tmp" "junit.xml" | |
| - name: Upload test results artifact | |
| if: needs.info.outputs.skip_coverage != 'true' && !cancelled() | |
| uses: *actions-upload-artifact | |
| with: | |
| name: test-results-full-${{ matrix.python-version }}-${{ matrix.group }} | |
| path: junit.xml | |
| - name: Remove pytest_buckets | |
| run: rm pytest_buckets.txt | |
| - *check-dirty | |
| pytest-mariadb: | |
| name: Run ${{ matrix.mariadb-group }} tests Python ${{ matrix.python-version }} | |
| runs-on: *runs-on-ubuntu | |
| services: | |
| mariadb: | |
| image: ${{ matrix.mariadb-group }} | |
| ports: | |
| - 3306:3306 | |
| env: | |
| MYSQL_ROOT_PASSWORD: password | |
| options: --health-cmd="mysqladmin ping -uroot -ppassword" --health-interval=5s --health-timeout=2s --health-retries=3 | |
| needs: | |
| - info | |
| - base | |
| - gen-requirements-all | |
| - hassfest | |
| - lint-other | |
| - lint-ruff | |
| - lint-ruff-format | |
| - mypy | |
| if: | | |
| needs.info.outputs.lint_only != 'true' | |
| && needs.info.outputs.mariadb_groups != '[]' | |
| strategy: | |
| fail-fast: false | |
| matrix: | |
| python-version: *matrix-python | |
| mariadb-group: ${{ fromJson(needs.info.outputs.mariadb_groups) }} | |
| steps: | |
| - *cache-restore-apt | |
| - name: Install additional OS dependencies | |
| timeout-minutes: 10 | |
| run: | | |
| sudo rm /etc/apt/sources.list.d/microsoft-prod.list | |
| sudo apt-get update \ | |
| -o Dir::Cache=${{ env.APT_CACHE_DIR }} \ | |
| -o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }} | |
| sudo apt-get -y install \ | |
| -o Dir::Cache=${{ env.APT_CACHE_DIR }} \ | |
| -o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }} \ | |
| bluez \ | |
| ffmpeg \ | |
| libturbojpeg \ | |
| libmariadb-dev-compat \ | |
| libxml2-utils | |
| - *checkout | |
| - *setup-python-matrix | |
| - *cache-restore-python-matrix | |
| - *problem-matcher-python | |
| - *problem-matcher-pytest-slow | |
| - name: Install SQL Python libraries | |
| run: | | |
| . venv/bin/activate | |
| uv pip install mysqlclient sqlalchemy_utils | |
| - *compile-english-translations | |
| - name: Run pytest (partially) | |
| timeout-minutes: 20 | |
| id: pytest-partial | |
| shell: bash | |
| env: | |
| PYTHONDONTWRITEBYTECODE: 1 | |
| run: | | |
| . venv/bin/activate | |
| python --version | |
| set -o pipefail | |
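| # Derive an artifact-safe name from the image tag, e.g. mariadb:10.6.10 -> mariadb-10.6.10 | |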
| mariadb=$(echo "${{ matrix.mariadb-group }}" | sed "s/:/-/g") | |
| echo "mariadb=${mariadb}" >> $GITHUB_OUTPUT | |
| cov_params=() | |
| if [[ "${{ needs.info.outputs.skip_coverage }}" != "true" ]]; then | |
| cov_params+=(--cov="homeassistant.components.recorder") | |
| cov_params+=(--cov-report=xml) | |
| cov_params+=(--cov-report=term-missing) | |
| cov_params+=(--junitxml=junit.xml -o junit_family=legacy) | |
| fi | |
| python3 -b -X dev -m pytest \ | |
| -qq \ | |
| --timeout=20 \ | |
| --numprocesses 1 \ | |
| --snapshot-details \ | |
| ${cov_params[@]} \ | |
| -o console_output_style=count \ | |
| --durations=10 \ | |
| -p no:sugar \ | |
| --exclude-warning-annotations \ | |
| --dburl=mysql://root:[email protected]/homeassistant-test \ | |
| tests/components/history \ | |
| tests/components/logbook \ | |
| tests/components/recorder \ | |
| tests/components/sensor \ | |
| 2>&1 | tee pytest-${{ matrix.python-version }}-${mariadb}.txt | |
| - name: Upload pytest output | |
| if: success() || failure() && steps.pytest-partial.conclusion == 'failure' | |
| uses: *actions-upload-artifact | |
| with: | |
| name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ | |
| steps.pytest-partial.outputs.mariadb }} | |
| path: pytest-*.txt | |
| overwrite: true | |
| - name: Upload coverage artifact | |
| if: needs.info.outputs.skip_coverage != 'true' | |
| uses: *actions-upload-artifact | |
| with: | |
| name: coverage-${{ matrix.python-version }}-${{ | |
| steps.pytest-partial.outputs.mariadb }} | |
| path: coverage.xml | |
| overwrite: true | |
| - *beautify-test-results | |
| - name: Upload test results artifact | |
| if: needs.info.outputs.skip_coverage != 'true' && !cancelled() | |
| uses: *actions-upload-artifact | |
| with: | |
| name: test-results-mariadb-${{ matrix.python-version }}-${{ | |
| steps.pytest-partial.outputs.mariadb }} | |
| path: junit.xml | |
| - *check-dirty | |
| pytest-postgres: | |
| name: Run ${{ matrix.postgresql-group }} tests Python ${{ matrix.python-version }} | |
| runs-on: *runs-on-ubuntu | |
| services: | |
| postgres: | |
| image: ${{ matrix.postgresql-group }} | |
| ports: | |
| - 5432:5432 | |
| env: | |
| POSTGRES_PASSWORD: password | |
| options: --health-cmd="pg_isready -hlocalhost -Upostgres" --health-interval=5s --health-timeout=2s --health-retries=3 | |
| needs: | |
| - info | |
| - base | |
| - gen-requirements-all | |
| - hassfest | |
| - lint-other | |
| - lint-ruff | |
| - lint-ruff-format | |
| - mypy | |
| if: | | |
| needs.info.outputs.lint_only != 'true' | |
| && needs.info.outputs.postgresql_groups != '[]' | |
| strategy: | |
| fail-fast: false | |
| matrix: | |
| python-version: *matrix-python | |
| postgresql-group: ${{ fromJson(needs.info.outputs.postgresql_groups) }} | |
| steps: | |
| - *cache-restore-apt | |
| - name: Install additional OS dependencies | |
| timeout-minutes: 10 | |
| run: | | |
| sudo rm /etc/apt/sources.list.d/microsoft-prod.list | |
| sudo apt-get update \ | |
| -o Dir::Cache=${{ env.APT_CACHE_DIR }} \ | |
| -o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }} | |
| sudo apt-get -y install \ | |
| -o Dir::Cache=${{ env.APT_CACHE_DIR }} \ | |
| -o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }} \ | |
| bluez \ | |
| ffmpeg \ | |
| libturbojpeg \ | |
| libxml2-utils | |
| sudo /usr/share/postgresql-common/pgdg/apt.postgresql.org.sh -y | |
| sudo apt-get -y install \ | |
| postgresql-server-dev-14 | |
| - *checkout | |
| - *setup-python-matrix | |
| - *cache-restore-python-matrix | |
| - *problem-matcher-python | |
| - *problem-matcher-pytest-slow | |
| - name: Install SQL Python libraries | |
| run: | | |
| . venv/bin/activate | |
| uv pip install psycopg2 sqlalchemy_utils | |
| - *compile-english-translations | |
| - name: Run pytest (partially) | |
| timeout-minutes: 20 | |
| id: pytest-partial | |
| shell: bash | |
| env: | |
| PYTHONDONTWRITEBYTECODE: 1 | |
| run: | | |
| . venv/bin/activate | |
| python --version | |
| set -o pipefail | |
| postgresql=$(echo "${{ matrix.postgresql-group }}" | sed "s/:/-/g") | |
| echo "postgresql=${postgresql}" >> $GITHUB_OUTPUT | |
| cov_params=() | |
| if [[ "${{ needs.info.outputs.skip_coverage }}" != "true" ]]; then | |
| cov_params+=(--cov="homeassistant.components.recorder") | |
| cov_params+=(--cov-report=xml) | |
| cov_params+=(--cov-report=term-missing) | |
| cov_params+=(--junitxml=junit.xml -o junit_family=legacy) | |
| fi | |
| python3 -b -X dev -m pytest \ | |
| -qq \ | |
| --timeout=9 \ | |
| --numprocesses 1 \ | |
| --snapshot-details \ | |
| ${cov_params[@]} \ | |
| -o console_output_style=count \ | |
| --durations=0 \ | |
| --durations-min=10 \ | |
| -p no:sugar \ | |
| --exclude-warning-annotations \ | |
| --dburl=postgresql://postgres:[email protected]/homeassistant-test \ | |
| tests/components/history \ | |
| tests/components/logbook \ | |
| tests/components/recorder \ | |
| tests/components/sensor \ | |
| 2>&1 | tee pytest-${{ matrix.python-version }}-${postgresql}.txt | |
| - name: Upload pytest output | |
| if: success() || failure() && steps.pytest-partial.conclusion == 'failure' | |
| uses: *actions-upload-artifact | |
| with: | |
| name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ | |
| steps.pytest-partial.outputs.postgresql }} | |
| path: pytest-*.txt | |
| overwrite: true | |
| - name: Upload coverage artifact | |
| if: needs.info.outputs.skip_coverage != 'true' | |
| uses: *actions-upload-artifact | |
| with: | |
| name: coverage-${{ matrix.python-version }}-${{ | |
| steps.pytest-partial.outputs.postgresql }} | |
| path: coverage.xml | |
| overwrite: true | |
| - *beautify-test-results | |
| - name: Upload test results artifact | |
| if: needs.info.outputs.skip_coverage != 'true' && !cancelled() | |
| uses: *actions-upload-artifact | |
| with: | |
| name: test-results-postgres-${{ matrix.python-version }}-${{ | |
| steps.pytest-partial.outputs.postgresql }} | |
| path: junit.xml | |
| - *check-dirty | |
| coverage-full: | |
| name: Upload test coverage to Codecov (full suite) | |
| runs-on: *runs-on-ubuntu | |
| needs: | |
| - info | |
| - pytest-full | |
| - pytest-postgres | |
| - pytest-mariadb | |
| timeout-minutes: 10 | |
| if: needs.info.outputs.skip_coverage != 'true' | |
| steps: | |
| - *checkout | |
| - name: Download all coverage artifacts | |
| uses: *actions-download-artifact | |
| with: | |
| pattern: coverage-* | |
| - name: Upload coverage to Codecov | |
| if: needs.info.outputs.test_full_suite == 'true' | |
| uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 # v5.5.1 | |
| with: | |
| fail_ci_if_error: true | |
| flags: full-suite | |
| token: ${{ secrets.CODECOV_TOKEN }} | |
| pytest-partial: | |
| name: Run tests Python ${{ matrix.python-version }} (${{ matrix.group }}) | |
| runs-on: *runs-on-ubuntu | |
| needs: | |
| - info | |
| - base | |
| - gen-requirements-all | |
| - hassfest | |
| - lint-other | |
| - lint-ruff | |
| - lint-ruff-format | |
| - mypy | |
| if: | | |
| needs.info.outputs.lint_only != 'true' | |
| && needs.info.outputs.tests_glob | |
| && needs.info.outputs.test_full_suite == 'false' | |
| strategy: | |
| fail-fast: false | |
| matrix: | |
| python-version: *matrix-python | |
| group: *matrix-group | |
| steps: | |
| - *cache-restore-apt | |
| - name: Install additional OS dependencies | |
| timeout-minutes: 10 | |
| run: | | |
| sudo rm /etc/apt/sources.list.d/microsoft-prod.list | |
| sudo apt-get update \ | |
| -o Dir::Cache=${{ env.APT_CACHE_DIR }} \ | |
| -o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }} | |
| sudo apt-get -y install \ | |
| -o Dir::Cache=${{ env.APT_CACHE_DIR }} \ | |
| -o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }} \ | |
| bluez \ | |
| ffmpeg \ | |
| libturbojpeg \ | |
| libgammu-dev \ | |
| libxml2-utils | |
| - *checkout | |
| - *setup-python-matrix | |
| - *cache-restore-python-matrix | |
| - *problem-matcher-python | |
| - *problem-matcher-pytest-slow | |
| - *compile-english-translations | |
| - name: Run pytest | |
| timeout-minutes: 10 | |
| id: pytest-partial | |
| shell: bash | |
| env: | |
| PYTHONDONTWRITEBYTECODE: 1 | |
| run: | | |
| . venv/bin/activate | |
| python --version | |
| set -o pipefail | |
| if [[ ! -f "tests/components/${{ matrix.group }}/__init__.py" ]]; then | |
| echo "::error:: missing file tests/components/${{ matrix.group }}/__init__.py" | |
| exit 1 | |
| fi | |
| cov_params=() | |
| if [[ "${{ needs.info.outputs.skip_coverage }}" != "true" ]]; then | |
| cov_params+=(--cov="homeassistant.components.${{ matrix.group }}") | |
| cov_params+=(--cov-report=xml) | |
| cov_params+=(--cov-report=term-missing) | |
| cov_params+=(--junitxml=junit.xml -o junit_family=legacy) | |
| fi | |
| python3 -b -X dev -m pytest \ | |
| -qq \ | |
| --timeout=9 \ | |
| --numprocesses auto \ | |
| --snapshot-details \ | |
| ${cov_params[@]} \ | |
| -o console_output_style=count \ | |
| --durations=0 \ | |
| --durations-min=1 \ | |
| -p no:sugar \ | |
| --exclude-warning-annotations \ | |
| tests/components/${{ matrix.group }} \ | |
| 2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt | |
| - name: Upload pytest output | |
| if: success() || failure() && steps.pytest-partial.conclusion == 'failure' | |
| uses: *actions-upload-artifact | |
| with: | |
| name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }} | |
| path: pytest-*.txt | |
| overwrite: true | |
| - name: Upload coverage artifact | |
| if: needs.info.outputs.skip_coverage != 'true' | |
| uses: *actions-upload-artifact | |
| with: | |
| name: coverage-${{ matrix.python-version }}-${{ matrix.group }} | |
| path: coverage.xml | |
| overwrite: true | |
| - *beautify-test-results | |
| - name: Upload test results artifact | |
| if: needs.info.outputs.skip_coverage != 'true' && !cancelled() | |
| uses: *actions-upload-artifact | |
| with: | |
| name: test-results-partial-${{ matrix.python-version }}-${{ matrix.group }} | |
| path: junit.xml | |
| - *check-dirty | |
| coverage-partial: | |
| name: Upload test coverage to Codecov (partial suite) | |
| if: needs.info.outputs.skip_coverage != 'true' | |
| runs-on: *runs-on-ubuntu | |
| timeout-minutes: 10 | |
| needs: | |
| - info | |
| - pytest-partial | |
| steps: | |
| - *checkout | |
| - name: Download all coverage artifacts | |
| uses: *actions-download-artifact | |
| with: | |
| pattern: coverage-* | |
| - name: Upload coverage to Codecov | |
| if: needs.info.outputs.test_full_suite == 'false' | |
| uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 # v5.5.1 | |
| with: | |
| fail_ci_if_error: true | |
| token: ${{ secrets.CODECOV_TOKEN }} | |
| upload-test-results: | |
| name: Upload test results to Codecov | |
| runs-on: *runs-on-ubuntu | |
| needs: | |
| - info | |
| - pytest-partial | |
| - pytest-full | |
| - pytest-postgres | |
| - pytest-mariadb | |
| timeout-minutes: 10 | |
| # codecov/test-results-action currently doesn't support tokenless uploads, | |
| # so we can't run it on forks | |
| if: | | |
| (github.event_name != 'pull_request' || !github.event.pull_request.head.repo.fork) | |
| && needs.info.outputs.skip_coverage != 'true' && !cancelled() | |
| steps: | |
| - name: Download all coverage artifacts | |
| uses: *actions-download-artifact | |
| with: | |
| pattern: test-results-* | |
| - name: Upload test results to Codecov | |
| uses: codecov/test-results-action@47f89e9acb64b76debcd5ea40642d25a4adced9f # v1.1.1 | |
| with: | |
| fail_ci_if_error: true | |
| verbose: true | |
| token: ${{ secrets.CODECOV_TOKEN }} |