Remove dynamic memory allocations in low-level matrix assembly functions #4347

Workflow file for this run
name: CI (macOS)
on:
  pull_request:
    branches:
      - main
  # push:
  #   branches:
  #     - "main"
  #     - "**"
  #   tags:
  #     - "v*"
  merge_group:
    branches:
      - main
  workflow_dispatch:
jobs:
  mac-os-build:
    name: macOS Homebrew install and test
    runs-on: macos-15
    env:
      PETSC_ARCH: arch-darwin-c-opt
      PETSC_DIR: ${{ github.workspace }}/petsc
      PETSC_VERSION: 3.23.6
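    # PETSC_DIR, PETSC_ARCH and PETSC_VERSION are consumed by the clone, configure,
    # build and cache key in the "Install minimal PETSc and petsc4py" steps below.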
    steps:
      - uses: actions/checkout@v5
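      # fenicsx-refs.env pins the UFL/Basix/FFCx repositories and refs installed in the
      # "Install FEniCSx dependencies" step below; DYLD_LIBRARY_PATH lets the dynamic
      # linker find libraries installed under /usr/local/lib.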
      - name: Load environment variables
        run: |
          cat .github/workflows/fenicsx-refs.env >> $GITHUB_ENV
          echo DYLD_LIBRARY_PATH=/usr/local/lib >> $GITHUB_ENV
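      # The step id "cp3" exposes the resolved Python version, which is part of the
      # PETSc cache key below.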
      - name: Set up Python
        uses: actions/setup-python@v6
        id: cp3
        with:
          python-version: "3.12"
      - name: Install Homebrew dependencies
        run: |
          brew install adios2 boost catch2 hdf5-mpi make ninja open-mpi pkgconf pugixml spdlog # FEniCS
          brew install bison flex gfortran scalapack scotch # PETSc
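      # Build and runtime dependencies for petsc4py; they must already be present because
      # the petsc4py install below uses --no-build-isolation.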
      - name: Install Python dependencies (petsc4py)
        run: |
          pip install cython setuptools wheel
          pip install mpi4py numpy
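      # Cache the PETSc build tree and the user-site Python packages; a cache hit skips
      # the "Install minimal PETSc and petsc4py" step via its cache-hit condition.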
      - name: Cache PETSc and petsc4py
        id: cache-petsc
        uses: actions/cache@v4
        with:
          path: |
            ${{ env.PETSC_DIR }}
            ~/Library/Python/
          key: ${{ runner.os }}-petsc-${{ env.PETSC_VERSION }}-${{ steps.cp3.outputs.python-version }}
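      # Only runs on a cache miss. petsc4py is installed with --user, so it lands under
      # ~/Library/Python/ and is captured by the cache path above.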
      - name: Install minimal PETSc and petsc4py
        if: steps.cache-petsc.outputs.cache-hit != 'true'
        run: |
          export PATH="$(brew --prefix gfortran)/bin:$(brew --prefix bison)/bin:$PATH"
          export PATH="$(brew --prefix make)/libexec/gnubin:$PATH"
          git clone --depth 1 --branch v${PETSC_VERSION} https://gitlab.com/petsc/petsc.git petsc
          cd petsc
          ./configure \
            --with-64-bit-indices=no \
            --with-debugging=no \
            --with-fortran-bindings=no \
            --with-scalar-type=real \
            --with-shared-libraries \
            --with-ptscotch-dir=$(brew --prefix scotch) \
            --with-scalapack-dir=$(brew --prefix scalapack) \
            --download-metis \
            --download-parmetis \
            --download-mumps \
            --download-mumps-avoid-mpi-in-place
          make all
          cd src/binding/petsc4py
          arch -arm64 pip -v install --user --no-build-isolation .
      - name: Install FEniCSx dependencies
        run: |
          pip install git+https://github.com/${{ env.ufl_repository }}.git@${{ env.ufl_ref }}
          pip install git+https://github.com/${{ env.basix_repository }}.git@${{ env.basix_ref }}
          pip install git+https://github.com/${{ env.ffcx_repository }}.git@${{ env.ffcx_ref }}
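      # -Werror=dev turns CMake developer warnings into errors; BUILD_TESTING enables the
      # C++ unit tests that are run with ctest below.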
      - name: Configure (C++)
        working-directory: cpp
        run: cmake -B build -S . -Werror=dev --warn-uninitialized -G Ninja -DBUILD_TESTING=true
      - name: Build and install (C++)
        working-directory: cpp/build
        run: |
          cmake --build .
          sudo cmake --install .
      - name: Run tests (C++)
        working-directory: cpp/build
        run: |
          mpiexec -np 1 ctest -V --output-on-failure -R unittests
          mpiexec -np 3 ctest -V --output-on-failure -R unittests
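      # The Python build requirements are installed up front because the install below
      # uses --no-build-isolation; --check-build-dependencies makes pip verify that they
      # are all satisfied.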
      - name: Build and install DOLFINx Python interface
        run: |
          pip install -r python/build-requirements.txt
          pip install --check-build-dependencies --no-build-isolation 'python/[test]'
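      # Smoke test: create a small mesh in serial and on two MPI ranks before running the
      # full unit test suite.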
      - name: Basic test
        run: |
          mpiexec -np 1 python -c "import dolfinx; from mpi4py import MPI; dolfinx.mesh.create_rectangle(comm=MPI.COMM_WORLD, points=((0, 0), (2, 1)), n=(32, 16))"
          mpiexec -np 2 python -c "import dolfinx; from mpi4py import MPI; dolfinx.mesh.create_rectangle(comm=MPI.COMM_WORLD, points=((0, 0), (2, 1)), n=(32, 16))"
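      # The JIT options file passes "-g0 -O0" to the CFFI-compiled kernels, trading runtime
      # speed for faster JIT compilation during the tests. The serial run parallelises
      # pytest with xdist (-n auto); the 3-rank MPI run does not.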
      - name: Run tests (Python)
        run: |
          pip install pytest-xdist
          mkdir -p ~/.config/dolfinx
          echo '{ "cffi_extra_compile_args": ["-g0", "-O0" ] }' > ~/.config/dolfinx/dolfinx_jit_options.json
          mpiexec -np 1 python -m pytest -n=auto --durations=50 python/test/unit/
          mpiexec -np 3 python -m pytest python/test/unit/