diff --git a/.github/dependabot.yml b/.github/dependabot.yml
index 92370eb4..c877d70a 100644
--- a/.github/dependabot.yml
+++ b/.github/dependabot.yml
@@ -18,9 +18,9 @@ updates:
     # typically except libraries that don't have a stable release yet (v0.x.x
     # branch), so we make some exceptions for them.
     # Major updates and dependencies excluded by the above groups are still
-    # managed, but they'll create one PR per dependency, as breaking is
-    # expected so it might need manual intervention.
-    # Finally we group some dependencies that are related to each other, and
+    # managed, but they'll create one PR per dependency, as breakage is
+    # expected, so it might need manual intervention.
+    # Finally, we group some dependencies that are related to each other, and
     # usually need to be updated together.
     groups:
       patch:
@@ -33,6 +33,7 @@ updates:
         update-types:
           - "minor"
         exclude-patterns:
+          - "async-solipsism"
           - "frequenz-repo-config*"
           - "markdown-callouts"
           - "mkdocs-gen-files"
@@ -40,6 +41,7 @@ updates:
           - "mkdocstrings*"
           - "pydoclint"
           - "pymdownx-superfence-filter-lines"
+          - "pytest-asyncio"
       # We group repo-config updates as it uses optional dependencies that are
       # considered different dependencies otherwise, and will create one PR for
       # each if we don't group them.
diff --git a/.github/workflows/ci-pr.yaml b/.github/workflows/ci-pr.yaml
new file mode 100644
index 00000000..2c74ec0c
--- /dev/null
+++ b/.github/workflows/ci-pr.yaml
@@ -0,0 +1,55 @@
+name: Test PR
+
+on:
+  pull_request:
+
+env:
+  # Please keep this in sync with the Python versions used in `ci.yaml`'s
+  # `matrix`, as `matrix` sections can't use `env` and must be entered manually
+  DEFAULT_PYTHON_VERSION: '3.11'
+  # It would be nice to be able to also define a DEFAULT_UBUNTU_VERSION
+  # but sadly `env` can't be used either in `runs-on`.
+
+jobs:
+  nox:
+    name: Test with nox
+    runs-on: ubuntu-24.04
+
+    steps:
+      - name: Run nox
+        uses: frequenz-floss/gh-action-nox@v1.0.0
+        with:
+          python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
+          nox-session: ci_checks_max
+
+  test-docs:
+    name: Test documentation website generation
+    runs-on: ubuntu-24.04
+    steps:
+      - name: Setup Git
+        uses: frequenz-floss/gh-action-setup-git@v1.0.0
+
+      - name: Fetch sources
+        uses: actions/checkout@v4
+        with:
+          submodules: true
+
+      - name: Setup Python
+        uses: frequenz-floss/gh-action-setup-python-with-deps@v1.0.0
+        with:
+          python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
+          dependencies: .[dev-mkdocs]
+
+      - name: Generate the documentation
+        env:
+          MIKE_VERSION: gh-${{ github.job }}
+        run: |
+          mike deploy $MIKE_VERSION
+          mike set-default $MIKE_VERSION
+
+      - name: Upload site
+        uses: actions/upload-artifact@v4
+        with:
+          name: docs-site
+          path: site/
+          if-no-files-found: error
diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index 38b4f22f..55dddda6 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -2,7 +2,6 @@ name: CI
 
 on:
   merge_group:
-  pull_request:
   push:
     # We need to explicitly include tags because otherwise when adding
     # `branches-ignore` it will only trigger on branches.
@@ -29,59 +28,27 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
+        arch:
+          - amd64
+          - arm
         os:
-          - ubuntu-20.04
+          - ubuntu-24.04
         python:
           - "3.11"
+          - "3.12"
         nox-session:
           # To speed things up a bit we use the special ci_checks_max session
           # that uses the same venv to run multiple linting sessions
           - "ci_checks_max"
           - "pytest_min"
-    runs-on: ${{ matrix.os }}
+    runs-on: ${{ matrix.os }}${{ matrix.arch != 'amd64' && format('-{0}', matrix.arch) || '' }}
 
     steps:
-      - name: Setup Git
-        uses: frequenz-floss/gh-action-setup-git@v0.x.x
-
-      - name: Print environment (debug)
-        run: env
-
-      - name: Fetch sources
-        uses: actions/checkout@v4
-        with:
-          submodules: true
-
-      - name: Set up Python
-        uses: actions/setup-python@v5
+      - name: Run nox
+        uses: frequenz-floss/gh-action-nox@v1.0.0
         with:
           python-version: ${{ matrix.python }}
-          cache: 'pip'
-
-      - name: Install required Python packages
-        run: |
-          python -m pip install --upgrade pip
-          python -m pip install -e .[dev-noxfile]
-          pip freeze
-
-      - name: Create nox venv
-        env:
-          NOX_SESSION: ${{ matrix.nox-session }}
-        run: nox --install-only -e "$NOX_SESSION"
-
-      - name: Print pip freeze for nox venv (debug)
-        env:
-          NOX_SESSION: ${{ matrix.nox-session }}
-        run: |
-          . ".nox/$NOX_SESSION/bin/activate"
-          pip freeze
-          deactivate
-
-      - name: Run nox
-        env:
-          NOX_SESSION: ${{ matrix.nox-session }}
-        run: nox -R -e "$NOX_SESSION"
-        timeout-minutes: 10
+          nox-session: ${{ matrix.nox-session }}
 
   # This job runs if all the `nox` matrix jobs ran and succeeded.
   # It is only used to have a single job that we can require in branch
@@ -93,157 +60,34 @@ jobs:
     needs: ["nox"]
     # We skip this job only if nox was also skipped
     if: always() && needs.nox.result != 'skipped'
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-24.04
     env:
       DEPS_RESULT: ${{ needs.nox.result }}
     steps:
       - name: Check matrix job result
         run: test "$DEPS_RESULT" = "success"
 
-  nox-cross-arch:
-    name: Cross-arch tests with nox
-    if: github.event_name != 'pull_request'
-    strategy:
-      fail-fast: false
-      # Before adding new items to this matrix, make sure that a dockerfile
-      # exists for the combination of items in the matrix.
-      # Refer to .github/containers/nox-cross-arch/README.md to learn how to
-      # add and name new dockerfiles.
-      matrix:
-        arch:
-          - arm64
-        os:
-          - ubuntu-20.04
-        python:
-          - "3.11"
-        nox-session:
-          - "pytest_min"
-          - "pytest_max"
-    runs-on: ${{ matrix.os }}
-
-    steps:
-      - name: Setup Git
-        uses: frequenz-floss/gh-action-setup-git@v0.x.x
-
-      - name: Fetch sources
-        uses: actions/checkout@v4
-        with:
-          submodules: true
-
-      - name: Set up QEMU
-        uses: docker/setup-qemu-action@v3
-        with:
-          platforms: linux/${{ matrix.arch }}
-
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v3
-
-      # This is a workaround to prevent the cache from growing indefinitely.
-      # https://docs.docker.com/build/ci/github-actions/cache/#local-cache
-      # https://github.com/docker/build-push-action/issues/252
-      # https://github.com/moby/buildkit/issues/1896
-      - name: Cache container layers
-        uses: actions/cache@v4
-        with:
-          path: /tmp/.buildx-cache
-          key: ${{ runner.os }}-buildx-nox-${{ matrix.arch }}-${{ matrix.os }}-${{ matrix.python }}
-
-      - name: Build image
-        uses: docker/build-push-action@v6
-        with:
-          context: .github/containers/nox-cross-arch
-          file: .github/containers/nox-cross-arch/${{ matrix.arch }}-${{ matrix.os }}-python-${{ matrix.python }}.Dockerfile
-          platforms: linux/${{ matrix.arch }}
-          tags: localhost/nox-cross-arch:latest
-          push: false
-          load: true
-          cache-from: type=local,src=/tmp/.buildx-cache
-          cache-to: type=local,dest=/tmp/.buildx-cache-new,mode=max
-
-      # Refer to the workaround mentioned above
-      - name: Move cache
-        run: |
-          rm -rf /tmp/.buildx-cache
-          mv /tmp/.buildx-cache-new /tmp/.buildx-cache
-
-      # Cache pip downloads
-      - name: Cache pip downloads
-        uses: actions/cache@v4
-        with:
-          path: /tmp/pip-cache
-          key: nox-${{ matrix.nox-session }}-${{ matrix.arch }}-${{ matrix.os }}-${{ matrix.python }}-${{ hashFiles('pyproject.toml') }}
-
-      # This ensures that the docker container has access to the pip cache.
-      # Changing the user in the docker-run step causes it to fail due to
-      # incorrect permissions. Setting the ownership of the pip cache to root
-      # before running is a workaround to this issue.
-      - name: Set pip cache owners to root for docker
-        run: if [[ -e /tmp/pip-cache ]]; then sudo chown -R root:root /tmp/pip-cache; fi
-
-      - name: Run nox
-        run: |
-          docker run \
-            --rm \
-            -v $(pwd):/${{ github.workspace }} \
-            -v /tmp/pip-cache:/root/.cache/pip \
-            -w ${{ github.workspace }} \
-            --net=host \
-            --platform linux/${{ matrix.arch }} \
-            localhost/nox-cross-arch:latest \
-            bash -c "pip install -e .[dev-noxfile]; nox --install-only -e ${{ matrix.nox-session }}; pip freeze; nox -e ${{ matrix.nox-session }}"
-        timeout-minutes: 30
-
-      # This ensures that the runner has access to the pip cache.
-      - name: Reset pip cache ownership
-        if: always()
-        run: sudo chown -R $USER:$USER /tmp/pip-cache
-
-  # This job runs if all the `nox-cross-arch` matrix jobs ran and succeeded.
-  # As the `nox-all` job, its main purpose is to provide a single point of
-  # reference in branch protection rules, similar to how `nox-all` operates.
-  # However, there's a crucial difference: the `nox-cross-arch` job is omitted
-  # in PRs. Without the `nox-cross-arch-all` job, the inner matrix wouldn't be
-  # expanded in such scenarios. This would lead to the CI indefinitely waiting
-  # for these jobs to complete due to the branch protection rules, essentially
-  # causing it to hang. This behavior is tied to a recognized GitHub matrices
-  # issue when certain jobs are skipped. For a deeper understanding, refer to:
-  # https://github.com/orgs/community/discussions/9141
-  nox-cross-arch-all:
-    # The job name should match the name of the `nox-cross-arch` job.
-    name: Cross-arch tests with nox
-    needs: ["nox-cross-arch"]
-    # We skip this job only if nox-cross-arch was also skipped
-    if: always() && needs.nox-cross-arch.result != 'skipped'
-    runs-on: ubuntu-20.04
-    env:
-      DEPS_RESULT: ${{ needs.nox-cross-arch.result }}
-    steps:
-      - name: Check matrix job result
-        run: test "$DEPS_RESULT" = "success"
-
   build:
     name: Build distribution packages
-    runs-on: ubuntu-20.04
+    # Since this is a pure Python package, we only need to build it once. If it
+    # had any architecture specific code, we would need to build it for each
+    # architecture.
+    runs-on: ubuntu-24.04
+
     steps:
       - name: Setup Git
-        uses: frequenz-floss/gh-action-setup-git@v0.x.x
+        uses: frequenz-floss/gh-action-setup-git@v1.0.0
 
       - name: Fetch sources
         uses: actions/checkout@v4
         with:
           submodules: true
 
-      - name: Set up Python
-        uses: actions/setup-python@v5
+      - name: Setup Python
+        uses: frequenz-floss/gh-action-setup-python-with-deps@v1.0.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
-          cache: 'pip'
-
-      - name: Install required Python packages
-        run: |
-          python -m pip install -U pip
-          python -m pip install -U build
-          pip freeze
+          dependencies: build
 
       - name: Build the source and binary distribution
         run: python -m build
@@ -256,15 +100,27 @@ jobs:
           if-no-files-found: error
 
   test-installation:
-    name: Test package installation in different architectures
+    name: Test package installation
     needs: ["build"]
-    runs-on: ubuntu-20.04
+    strategy:
+      fail-fast: false
+      matrix:
+        arch:
+          - amd64
+          - arm
+        os:
+          - ubuntu-24.04
+        python:
+          - "3.11"
+          - "3.12"
+    runs-on: ${{ matrix.os }}${{ matrix.arch != 'amd64' && format('-{0}', matrix.arch) || '' }}
+
     steps:
       - name: Setup Git
-        uses: frequenz-floss/gh-action-setup-git@v0.x.x
+        uses: frequenz-floss/gh-action-setup-git@v1.0.0
 
-      - name: Fetch sources
-        uses: actions/checkout@v4
+      - name: Print environment (debug)
+        run: env
 
       - name: Download package
         uses: actions/download-artifact@v4
@@ -272,45 +128,64 @@ jobs:
           name: dist-packages
           path: dist
 
-      - name: Set up QEMU
-        uses: docker/setup-qemu-action@v3
-
-      - name: Set up docker-buildx
-        uses: docker/setup-buildx-action@v3
+      # This is necessary for the `pip` caching in the setup-python action to work
+      - name: Fetch the pyproject.toml file for this action hash
+        env:
+          GH_TOKEN: ${{ github.token }}
+          REPO: ${{ github.repository }}
+          REF: ${{ github.sha }}
+        run: |
+          set -ux
+          gh api \
+              -X GET \
+              -H "Accept: application/vnd.github.raw" \
+              "/repos/$REPO/contents/pyproject.toml?ref=$REF" \
+            > pyproject.toml
 
-      - name: Test Installation
-        uses: docker/build-push-action@v6
+      - name: Setup Python
+        uses: frequenz-floss/gh-action-setup-python-with-deps@v1.0.0
         with:
-          context: .
-          file: .github/containers/test-installation/Dockerfile
-          platforms: linux/amd64,linux/arm64
-          tags: localhost/test-installation
-          push: false
+          python-version: ${{ matrix.python }}
+          dependencies: dist/*.whl
+
+      - name: Print installed packages (debug)
+        run: python -m pip freeze
+
+  # This job runs if all the `test-installation` matrix jobs ran and succeeded.
+  # It is only used to have a single job that we can require in branch
+  # protection rules, so we don't have to update the protection rules each time
+  # we add or remove a job from the matrix.
+  test-installation-all:
+    # The job name should match the name of the `test-installation` job.
+    name: Test package installation
+    needs: ["test-installation"]
+    # We skip this job only if test-installation was also skipped
+    if: always() && needs.test-installation.result != 'skipped'
+    runs-on: ubuntu-24.04
+    env:
+      DEPS_RESULT: ${{ needs.test-installation.result }}
+    steps:
+      - name: Check matrix job result
+        run: test "$DEPS_RESULT" = "success"
 
   test-docs:
     name: Test documentation website generation
     if: github.event_name != 'push'
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-24.04
     steps:
       - name: Setup Git
-        uses: frequenz-floss/gh-action-setup-git@v0.x.x
+        uses: frequenz-floss/gh-action-setup-git@v1.0.0
 
       - name: Fetch sources
         uses: actions/checkout@v4
         with:
           submodules: true
 
-      - name: Set up Python
-        uses: actions/setup-python@v5
+      - name: Setup Python
+        uses: frequenz-floss/gh-action-setup-python-with-deps@v1.0.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
-          cache: 'pip'
-
-      - name: Install build dependencies
-        run: |
-          python -m pip install -U pip
-          python -m pip install .[dev-mkdocs]
-          pip freeze
+          dependencies: .[dev-mkdocs]
 
       - name: Generate the documentation
         env:
@@ -328,31 +203,25 @@ jobs:
 
   publish-docs:
     name: Publish documentation website to GitHub pages
-    needs: ["nox-all", "nox-cross-arch-all", "test-installation"]
+    needs: ["nox-all", "test-installation-all"]
     if: github.event_name == 'push'
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-24.04
     permissions:
       contents: write
     steps:
       - name: Setup Git
-        uses: frequenz-floss/gh-action-setup-git@v0.x.x
+        uses: frequenz-floss/gh-action-setup-git@v1.0.0
 
       - name: Fetch sources
         uses: actions/checkout@v4
         with:
           submodules: true
 
-      - name: Set up Python
-        uses: actions/setup-python@v5
+      - name: Setup Python
+        uses: frequenz-floss/gh-action-setup-python-with-deps@v1.0.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
-          cache: 'pip'
-
-      - name: Install build dependencies
-        run: |
-          python -m pip install -U pip
-          python -m pip install .[dev-mkdocs]
-          pip freeze
+          dependencies: .[dev-mkdocs]
 
       - name: Calculate and check version
         id: mike-version
@@ -407,7 +276,7 @@ jobs:
       # discussions to create the release announcement in the discussion forums
       contents: write
       discussions: write
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-24.04
     steps:
       - name: Download distribution files
         uses: actions/download-artifact@v4
@@ -449,7 +318,7 @@ jobs:
   publish-to-pypi:
     name: Publish packages to PyPI
     needs: ["create-github-release"]
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-24.04
     permissions:
       # For trusted publishing. See:
       # https://blog.pypi.org/posts/2023-04-20-introducing-trusted-publishers/
diff --git a/RELEASE_NOTES.md b/RELEASE_NOTES.md
index 96a0240b..2d97259b 100644
--- a/RELEASE_NOTES.md
+++ b/RELEASE_NOTES.md
@@ -6,7 +6,7 @@
 
 ## Upgrading
 
-
+- The minimum required versions of some dependencies have been bumped, so you might need to adjust your own dependencies accordingly.
 
 ## New Features
 
diff --git a/docs/_scripts/macros.py b/docs/_scripts/macros.py
index 85600826..e7bc762d 100644
--- a/docs/_scripts/macros.py
+++ b/docs/_scripts/macros.py
@@ -3,79 +3,18 @@
 
 """This module defines macros for use in Markdown files."""
 
-from typing import Any
-
-import markdown as md
+from frequenz.repo.config.mkdocs.mkdocstrings_macros import hook_env_with_everything
 from griffe import ModulesCollection, Object
-from markdown.extensions import toc
-from mkdocs_macros import plugin as macros
+from mkdocs_macros.plugin import MacrosPlugin
 from mkdocstrings_handlers.python.handler import PythonHandler
 
-_CODE_ANNOTATION_MARKER: str = (
-    r'<span class="md-annotation">'
-    r'<span class="md-annotation__index" tabindex="-1">'
-    r'<span data-md-annotation-id="1"></span>'
-    r"</span>"
-    r"</span>"
-)
-
-
-def _slugify(text: str) -> str:
-    """Slugify a text.
-
-    Args:
-        text: The text to slugify.
-
-    Returns:
-        The slugified text.
-    """
-    return toc.slugify_unicode(text, "-")
-
-
-def _hook_macros_plugin(env: macros.MacrosPlugin) -> None:
-    """Integrate the `mkdocs-macros` plugin into `mkdocstrings`.
-
-    This is a temporary workaround to make `mkdocs-macros` work with
-    `mkdocstrings` until a proper `mkdocs-macros` *pluglet* is available. See
-    https://github.com/mkdocstrings/mkdocstrings/issues/615 for details.
-
-    Args:
-        env: The environment to hook the plugin into.
-    """
-    # get mkdocstrings' Python handler
-    python_handler = env.conf["plugins"]["mkdocstrings"].get_handler("python")
-
-    # get the `update_env` method of the Python handler
-    update_env = python_handler.update_env
-
-    # override the `update_env` method of the Python handler
-    def patched_update_env(markdown: md.Markdown, config: dict[str, Any]) -> None:
-        update_env(markdown, config)
-
-        # get the `convert_markdown` filter of the env
-        convert_markdown = python_handler.env.filters["convert_markdown"]
 
-        # build a chimera made of macros+mkdocstrings
-        def render_convert(markdown: str, *args: Any, **kwargs: Any) -> Any:
-            return convert_markdown(env.render(markdown), *args, **kwargs)
-
-        # patch the filter
-        python_handler.env.filters["convert_markdown"] = render_convert
-
-    # patch the method
-    python_handler.update_env = patched_update_env
-
-
-def define_env(env: macros.MacrosPlugin) -> None:
+def define_env(env: MacrosPlugin) -> None:
     """Define the hook to create macro functions for use in Markdown.
 
     Args:
         env: The environment to define the macro functions in.
     """
-    # A variable to easily show an example code annotation from mkdocs-material.
-    # https://squidfunk.github.io/mkdocs-material/reference/code-blocks/#adding-annotations
-    env.variables["code_annotation_marker"] = _CODE_ANNOTATION_MARKER
-
     python_handler = env.conf["plugins"]["mkdocstrings"].get_handler("python")
     assert isinstance(python_handler, PythonHandler)
 
@@ -108,9 +47,10 @@ def docstring_summary(symbol: str) -> str:
         """
         docstring = _get_docstring(symbol)
         summary = docstring.splitlines(keepends=False)[0]
-        return python_handler.do_convert_markdown(
+        # The python_handler is untyped here, so ignore the type
+        return python_handler.do_convert_markdown(  # type: ignore[no-any-return]
             summary, heading_level=1, strip_paragraph=True
         )
 
-    # This hook needs to be done at the end of the `define_env` function.
-    _hook_macros_plugin(env)
+    # This must be at the end to enable all standard features
+    hook_env_with_everything(env)
diff --git a/pyproject.toml b/pyproject.toml
index 0bbd312b..325f74a0 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -5,7 +5,7 @@
 requires = [
   "setuptools == 78.1.0",
   "setuptools_scm[toml] == 8.2.0",
-  "frequenz-repo-config[lib] == 0.11.0",
+  "frequenz-repo-config[lib] == 0.13.1",
 ]
 build-backend = "setuptools.build_meta"
 
@@ -26,7 +26,7 @@ classifiers = [
 ]
 requires-python = ">= 3.11, < 4"
 dependencies = [
-  "typing-extensions >= 4.5.0, < 5",
+  "typing-extensions >= 4.6.0, < 5",
   "watchfiles >= 0.15.0, < 1.1.0",
 ]
 dynamic = ["version"]
@@ -39,7 +39,7 @@ email = "floss@frequenz.com"
 dev-flake8 = [
   "flake8 == 7.2.0",
   "flake8-docstrings == 1.7.0",
-  "flake8-pyproject == 1.2.3",  # For reading the flake8 config from pyproject.toml
+  "flake8-pyproject == 1.2.3", # For reading the flake8 config from pyproject.toml
   "pydoclint == 0.6.5",
   "pydocstyle == 6.3.0",
 ]
@@ -47,7 +47,7 @@ dev-formatting = ["black == 25.1.0", "isort == 6.0.1"]
 dev-mkdocs = [
   "Markdown == 3.7",
   "black == 25.1.0",
-  "frequenz-repo-config[lib] == 0.11.0",
+  "frequenz-repo-config[lib] == 0.13.1",
   "markdown-callouts == 0.4.0",
   "markdown-svgbob == 202406.1023",
   "mike == 2.1.3",
@@ -56,8 +56,8 @@ dev-mkdocs = [
   "mkdocs-literate-nav == 0.6.2",
   "mkdocs-macros-plugin == 1.3.7",
   "mkdocs-material == 9.6.11",
-  "mkdocstrings[python] == 0.27.0",
-  "mkdocstrings-python == 1.13.0",
+  "mkdocstrings[python] == 0.29.0",
+  "mkdocstrings-python == 1.16.2",
   "pymdownx-superfence-filter-lines == 0.1.0",
 ]
 dev-mypy = [
@@ -66,7 +66,7 @@ dev-mypy = [
   "mypy == 1.15.0",
   "types-Markdown == 3.7.0.20250322",
 ]
-dev-noxfile = ["nox == 2025.2.9", "frequenz-repo-config[lib] == 0.11.0"]
+dev-noxfile = ["nox == 2025.2.9", "frequenz-repo-config[lib] == 0.13.1"]
 dev-pylint = [
   # For checking the noxfile, docs/ script, and tests
   "frequenz-channels[dev-mkdocs,dev-noxfile,dev-pytest]",
@@ -74,7 +74,7 @@ dev-pylint = [
 ]
 dev-pytest = [
   "async-solipsism == 0.7",
-  "frequenz-repo-config[extra-lint-examples] == 0.11.0",
+  "frequenz-repo-config[extra-lint-examples] == 0.13.1",
   "hypothesis == 6.130.6",
   "pytest == 8.3.5",
   "pytest-asyncio == 0.26.0",
@@ -148,6 +148,7 @@ disable = [
 ]
 
 [tool.pytest.ini_options]
+addopts = "-W=all -Werror -Wdefault::DeprecationWarning -Wdefault::PendingDeprecationWarning -vv"
 testpaths = ["tests", "src"]
 asyncio_mode = "auto"
 asyncio_default_fixture_loop_scope = "function"
diff --git a/tests/test_select_integration.py b/tests/test_select_integration.py
index 52779fc1..e0618f58 100644
--- a/tests/test_select_integration.py
+++ b/tests/test_select_integration.py
@@ -9,7 +9,7 @@ class at a time.
 """
 
 import asyncio
-from collections.abc import AsyncIterator, Iterator
+from collections.abc import AsyncIterator
 from typing import Any
 
 import async_solipsism
@@ -33,17 +33,11 @@ class TestSelect:
     recv1: Event
     recv2: Event
     recv3: Event
-    loop: async_solipsism.EventLoop
 
     @pytest.fixture(autouse=True)
-    def event_loop(
-        self, request: pytest.FixtureRequest
-    ) -> Iterator[async_solipsism.EventLoop]:
-        """Replace the loop with one that doesn't interact with the outside world."""
-        loop = async_solipsism.EventLoop()
-        request.cls.loop = loop
-        yield loop
-        loop.close()
+    def event_loop_policy(self) -> async_solipsism.EventLoopPolicy:
+        """Return an event loop policy that uses the async solipsism event loop."""
+        return async_solipsism.EventLoopPolicy()
 
     @pytest.fixture()
     async def start_run_ordered_sequence(self) -> AsyncIterator[asyncio.Task[None]]:
@@ -92,10 +86,16 @@ def assert_received_from(
         assert selected.exception is None
         assert not selected.was_stopped
         if expected_pending_tasks > 0:
-            assert len(asyncio.all_tasks(self.loop)) == expected_pending_tasks
+            assert (
+                len(asyncio.all_tasks(asyncio.get_event_loop()))
+                == expected_pending_tasks
+            )
         elif expected_pending_tasks < 0:
-            assert len(asyncio.all_tasks(self.loop)) > expected_pending_tasks
-        assert self.loop.time() == at_time
+            assert (
+                len(asyncio.all_tasks(asyncio.get_event_loop()))
+                > expected_pending_tasks
+            )
+        assert asyncio.get_event_loop().time() == at_time
 
     def assert_receiver_stopped(
         self,
@@ -125,10 +125,16 @@ def assert_receiver_stopped(
         assert isinstance(selected.exception, ReceiverStoppedError)
         assert selected.exception.receiver is receiver
         if expected_pending_tasks > 0:
-            assert len(asyncio.all_tasks(self.loop)) == expected_pending_tasks
+            assert (
+                len(asyncio.all_tasks(asyncio.get_event_loop()))
+                == expected_pending_tasks
+            )
         elif expected_pending_tasks < 0:
-            assert len(asyncio.all_tasks(self.loop)) > expected_pending_tasks
-        assert self.loop.time() == at_time
+            assert (
+                len(asyncio.all_tasks(asyncio.get_event_loop()))
+                > expected_pending_tasks
+            )
+        assert asyncio.get_event_loop().time() == at_time
 
     # We use the loop time (and the sleeps in the run_ordered_sequence method) mainly to
     # ensure we are processing the events in the correct order and we are really
@@ -362,11 +368,11 @@ async def test_multiple_ready(
         Also test that the loop waits forever if there are no more receivers ready.
         """
         received: set[str] = set()
-        last_time: float = self.loop.time()
+        last_time: float = asyncio.get_event_loop().time()
         try:
             async with asyncio.timeout(15):
                 async for selected in select(self.recv1, self.recv2, self.recv3):
-                    now = self.loop.time()
+                    now = asyncio.get_event_loop().time()
                     if now != last_time:  # Only check when there was a jump in time
                         match now:
                             case 1:
@@ -401,7 +407,7 @@ async def test_multiple_ready(
                     else:
                         assert False, "Should not reach this point"
         except asyncio.TimeoutError:
-            assert self.loop.time() == 15
+            assert asyncio.get_event_loop().time() == 15
             # This happened after time == 3, but the loop never resumes because
             # there is nothing ready, so we need to check it after the timeout.
             assert received == {
diff --git a/tests/test_timer.py b/tests/test_timer.py
index cad7c4d9..176b98b6 100644
--- a/tests/test_timer.py
+++ b/tests/test_timer.py
@@ -58,6 +58,10 @@ def _assert_tick_is_aligned(
     assert (next_tick_time - scheduled_tick_time) % interval == pytest.approx(0.0)
 
 
+# https://github.com/frequenz-floss/frequenz-channels-python/issues/405
+@pytest.mark.filterwarnings(
+    r"default:Exception ignored in. <.*>:pytest.PytestUnraisableExceptionWarning"
+)
 def test_timer_construction_no_async() -> None:
     """Test the construction outside of async (using a custom loop)."""
     loop = async_solipsism.EventLoop()
-    timer = Timer(timedelta(seconds=1.0), TriggerAllMissed(), loop=loop)
-    assert timer.interval == timedelta(seconds=1.0)
-    assert isinstance(timer.missed_tick_policy, TriggerAllMissed)
-    assert timer.loop is loop
-    assert timer.is_running is True
+    try:
+        timer = Timer(timedelta(seconds=1.0), TriggerAllMissed(), loop=loop)
+        assert timer.interval == timedelta(seconds=1.0)
+        assert isinstance(timer.missed_tick_policy, TriggerAllMissed)
+        assert timer.loop is loop
+        assert timer.is_running is True
+    finally:
+        loop.close()
 
 
 def test_timer_construction_no_event_loop() -> None:
diff --git a/tests/test_timer_integration.py b/tests/test_timer_integration.py
index 27e028cb..92f248f4 100644
--- a/tests/test_timer_integration.py
+++ b/tests/test_timer_integration.py
@@ -13,11 +13,17 @@
 from frequenz.channels.timer import SkipMissedAndDrift, Timer
 
 
+@pytest.fixture(autouse=True)
+def event_loop_policy() -> async_solipsism.EventLoopPolicy:
+    """Return an event loop policy that uses the async solipsism event loop."""
+    return async_solipsism.EventLoopPolicy()
+
+
 @pytest.mark.integration
-async def test_timer_timeout_reset(
-    event_loop: async_solipsism.EventLoop,  # pylint: disable=redefined-outer-name
-) -> None:
+@pytest.mark.asyncio(loop_scope="function")
+async def test_timer_timeout_reset() -> None:
     """Test that the receiving is properly adjusted after a reset."""
+    event_loop = asyncio.get_running_loop()
 
     async def timer_wait(timer: Timer) -> None:
         await timer.receive()