
Commit 6930829

Merge pull request #95 from gridap/hpddm
HPDDM support
2 parents: 1cfc2f0 + 174b381

File tree: 10 files changed, +251 −9 lines changed


.github/workflows/CI_libs.yml

Lines changed: 4 additions & 3 deletions

@@ -24,7 +24,7 @@ jobs:
     runs-on: ${{ matrix.os }}
     env:
       P4EST_ROOT_DIR: "/opt/p4est/2.8.5/"
-      JULIA_PETSC_LIBRARY: "/opt/petsc/3.22.4/lib/libpetsc"
+      JULIA_PETSC_LIBRARY: "/opt/petsc/3.23.4/lib/libpetsc"
     strategy:
       fail-fast: false
       matrix:
@@ -67,7 +67,7 @@ jobs:
         run: |
           CURR_DIR=$(pwd)
           PACKAGE=petsc
-          VERSION=3.22.4
+          VERSION=3.23.4
           INSTALL_ROOT=/opt
           PETSC_INSTALL=$INSTALL_ROOT/$PACKAGE/$VERSION
           TAR_FILE=$PACKAGE-$VERSION.tar.gz
@@ -81,7 +81,8 @@ jobs:
           cd $SOURCES_DIR
           ./configure --prefix=$PETSC_INSTALL --with-cc=mpicc --with-cxx=mpicxx --with-fc=mpif90 \
             --download-mumps --download-scalapack --download-parmetis --download-metis \
-            --download-fblaslapack --download-ptscotch --with-debugging --with-x=0 --with-shared=1 \
+            --download-fblaslapack --download-ptscotch --download-hpddm --download-slepc \
+            --with-debugging --with-x=0 --with-shared=1 \
             --with-mpi=1 --with-64-bit-indices
           make
           make install

NEWS.md

Lines changed: 6 additions & 0 deletions

@@ -5,6 +5,12 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 
+## [0.6.1] - 2025-07-25
+
+### Added
+
+- Added support for the `HPDDM` library, with a new solver `HPDDMLinearSolver`. Since PR[#95](https://github.com/gridap/GridapSolvers.jl/pull/95).
+
 ## [0.6.0] - 2025-06-13
 
 ### Added

Project.toml

Lines changed: 2 additions & 2 deletions

@@ -1,7 +1,7 @@
 name = "GridapSolvers"
 uuid = "6d3209ee-5e3c-4db7-a716-942eb12ed534"
 authors = ["Santiago Badia <santiago.badia@monash.edu>", "Jordi Manyer <jordi.manyer@monash.edu>", "Alberto F. Martin <alberto.f.martin@anu.edu.au>", "Javier Principe <principe@cimne.upc.edu>"]
-version = "0.6.0"
+version = "0.6.1"
 
 [deps]
 AbstractTrees = "1520ce14-60c1-5f80-bbc7-55ef81b5835c"
@@ -19,8 +19,8 @@ SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf"
 SparseMatricesCSR = "a0a7dd2c-ebf4-11e9-1f05-cf50bc540ca1"
 
 [weakdeps]
-GridapP4est = "c2c8e14b-f5fd-423d-9666-1dd9ad120af9"
 GridapPETSc = "bcdc36c2-0c3e-11ea-095a-c9dadae499f1"
+GridapP4est = "c2c8e14b-f5fd-423d-9666-1dd9ad120af9"
 IterativeSolvers = "42fd0dbc-a981-5370-80f2-aaf504508153"
 Pardiso = "46dd5b70-b6fb-5a00-ae2d-e8fea33afaf2"
 

docs/src/Extensions/GridapPETSc.md

Lines changed: 22 additions & 1 deletion

@@ -4,10 +4,31 @@
 CurrentModule = Base.get_extension(GridapSolvers,:GridapPETScExt)
 ```
 
-Building on top of [GridapPETSc.jl](https://github.com/gridap/GridapPETSc.jl), GridapSolvers provides specific solvers for some particularly complex PDEs:
+Building on top of [GridapPETSc.jl](https://github.com/gridap/GridapPETSc.jl), GridapSolvers provides examples of complex solvers one can build.
+
+## Elasticity solver
 
 ```@docs
 PETScElasticitySolver
 PETScElasticitySolver(::FESpace)
+```
+
+An example on how to use it on an elasticity problem can be found in ['test/Applications/Elasticity.jl'](https://github.com/gridap/GridapSolvers.jl/tree/main/test/Applications/Elasticity.jl).
+
+## HPDDM solver
+
+We also provide support for the [HPDDM library](https://github.com/hpddm/hpddm) through PETSc's [`PCHPDDM` preconditioner](https://petsc.org/main/manualpages/PC/PCHPDDM/):
+
+```@docs
+HPDDMLinearSolver
+```
+
+An example on how to use it on a Poisson problem can be found in ['test/ExtLibraries/drivers/HPDDMTests.jl'](https://github.com/gridap/GridapSolvers.jl/tree/main/test/ExtLibraries/drivers/HPDDMTests.jl).
+
+## Caching PETSc solvers
+
+To have zero allocations when solving linear systems, one needs to pre-allocate PETSc arrays for the solution and right-hand side. We provide a way to automate this process:
+
+```@docs
 CachedPETScNS
 ```
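
For readers of the diff, here is a minimal usage sketch of the new HPDDM solver, assembled from the docstring and test added in this PR. Names such as `model`, `V`, `qdegree` and the operator `op` are assumed to come from a standard distributed Poisson setup and are not defined here:

```julia
# Minimal sketch based on the docstring and test introduced in this PR.
# Assumed: `model` is a distributed CartesianDiscreteModel, `V` a FESpace,
# `qdegree` a quadrature degree and `op` the globally assembled AffineFEOperator.
Ωg  = Triangulation(with_ghost, model)   # include ghost cells so the local
dΩg = Measure(Ωg, qdegree)               # Neumann problems overlap
a_g(u, v) = ∫(∇(u)⋅∇(v))dΩg

options = "-pc_hpddm_levels_1_eps_nev 10 -pc_hpddm_define_subdomains"
GridapPETSc.with(args = split(options)) do
  solver = HPDDMLinearSolver(V, a_g)  # defaults read KSP/PC options from the command line
  uh = solve(solver, op)
end
```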

ext/GridapPETScExt/GridapPETScExt.jl

Lines changed: 6 additions & 2 deletions

@@ -11,20 +11,24 @@ using Gridap.Arrays, Gridap.FESpaces, Gridap.MultiField
 using Gridap.Fields, Gridap.ReferenceFEs
 
 using GridapDistributed
+
 using PartitionedArrays
+using PartitionedArrays: getany
+
 using GridapPETSc
+using GridapPETSc.PETSC
+using GridapPETSc: PETScLinearSolverNS
 
 using GridapSolvers
 using GridapSolvers.MultilevelTools
 using GridapSolvers.SolverInterfaces
 using GridapSolvers.LinearSolvers
 using GridapSolvers.PatchBasedSmoothers
 
-using PartitionedArrays: getany
-
 include("PETScUtils.jl")
 include("PETScCaches.jl")
 include("ElasticitySolvers.jl")
+include("HPDDMLinearSolvers.jl")
 include("HipmairXuSolvers.jl")
 
 end # module

ext/GridapPETScExt/HPDDMLinearSolvers.jl (new file)

Lines changed: 143 additions & 0 deletions

@@ -0,0 +1,143 @@

struct _HPDDMLinearSolver <: LinearSolver
  ranks     :: MPIArray
  mat       :: PETScMatrix
  is        :: PETScIndexSet
  ksp_setup :: Function
  pc_setup  :: Function
  tols      :: SolverTolerances{Float64}
end

SolverInterfaces.get_solver_tolerances(s::_HPDDMLinearSolver) = s.tols

"""
    HPDDMLinearSolver(
      ranks::MPIArray,mat::PETScMatrix,is::PETScIndexSet[,ksp_setup[,pc_setup]];
      maxiter=500, atol=1.e-12, rtol=1.e-8
    )

Wrapper for a `PETScLinearSolver` preconditioned with the HPDDM library.

# Arguments

- `indices::MPIArray`: For each rank, the local-to-global index map for the matrix rows/cols.
- `mats::MPIArray`: For each rank, the matrix for the local overlapping Neumann problem.
- `ksp_setup::Function`: PETSc setup options for the KSP solver.
- `pc_setup::Function`: Extra setup options for the PCHPDDM preconditioner.

The defaults for `ksp_setup` and `pc_setup` set options from the command line, using the
following functions:

```julia
function hpddm_default_setup_ksp(ksp::Ref{PETSC.KSP})
  @check_error_code GridapPETSc.PETSC.KSPSetFromOptions(ksp[])
end

function hpddm_default_setup_pc(pc::Ref{PETSC.PC})
  @check_error_code PETSC.PCSetFromOptions(pc[])
end
```

To modify the default setup, you can pass your own functions (with the same signatures)
as arguments to the constructor.
"""
function GridapSolvers.HPDDMLinearSolver(
  indices::MPIArray,mats::MPIArray,
  ksp_setup::Function = hpddm_default_setup_ksp,
  pc_setup::Function = hpddm_default_setup_pc;
  maxiter=500, atol=1.e-12, rtol=1.e-8
)
  ranks = linear_indices(mats)
  is = PETScIndexSet(PartitionedArrays.getany(indices))
  mat = PETScMatrix(PartitionedArrays.getany(mats))
  tols = SolverTolerances{Float64}(;maxiter=maxiter,atol=atol,rtol=rtol)
  _HPDDMLinearSolver(ranks,mat,is,ksp_setup,pc_setup,tols)
end

"""
    HPDDMLinearSolver(space::FESpace,biform::Function[,args...];kwargs...)

Creates a `HPDDMLinearSolver` from a finite element space and a bilinear form for the local overlapping
Neumann problems. The extra arguments are the same as for the low-level constructor.

To have overlapping Neumann problems, the `Measure` has to be modified to include ghost cells.
For instance, for a Poisson problem we would have:

```julia
Ωg = Triangulation(with_ghost,model)
dΩg = Measure(Ωg,qdegree)
a(u,v) = ∫(∇(u)⋅∇(v))dΩg
```
"""
function GridapSolvers.HPDDMLinearSolver(
  space::FESpace,biform::Function,
  ksp_setup::Function = hpddm_default_setup_ksp,
  pc_setup::Function = hpddm_default_setup_pc;
  kwargs...
)
  assems = map(local_views(space)) do space
    SparseMatrixAssembler(
      SparseMatrixCSR{0,PetscScalar,PetscInt},Vector{PetscScalar},space,space
    )
  end
  indices, mats = subassemble_matrices(space,biform,assems)
  HPDDMLinearSolver(indices,mats,ksp_setup,pc_setup;kwargs...)
end

function subassemble_matrices(space,biform,assems)

  u, v = get_trial_fe_basis(space), get_fe_basis(space)
  data = collect_cell_matrix(space,space,biform(u,v))

  indices = local_to_global(get_free_dof_ids(space))
  mats = map(assemble_matrix, assems, data)

  return indices, mats
end

struct HPDDMLinearSolverSS <: SymbolicSetup
  solver::_HPDDMLinearSolver
end

function Algebra.symbolic_setup(solver::_HPDDMLinearSolver,mat::AbstractMatrix)
  HPDDMLinearSolverSS(solver)
end

function Algebra.numerical_setup(ss::HPDDMLinearSolverSS,A::AbstractMatrix)
  B = convert(PETScMatrix,A)
  ns = PETScLinearSolverNS(A,B)
  @check_error_code PETSC.KSPCreate(B.comm,ns.ksp)
  @check_error_code PETSC.KSPSetOperators(ns.ksp[],ns.B.mat[],ns.B.mat[])
  hpddm_setup(ss.solver,ns.ksp)
  @check_error_code PETSC.KSPSetUp(ns.ksp[])
  GridapPETSc.Init(ns)
end

function hpddm_default_setup_ksp(ksp)
  @check_error_code PETSC.KSPSetFromOptions(ksp[])
end

function hpddm_default_setup_pc(pc)
  @check_error_code PETSC.PCSetFromOptions(pc[])
end

function hpddm_setup(solver::_HPDDMLinearSolver,ksp)
  solver.ksp_setup(ksp)

  tols = solver.tols
  rtol = PetscScalar(tols.rtol)
  atol = PetscScalar(tols.atol)
  dtol = PetscScalar(tols.dtol)
  maxits = PetscInt(tols.maxiter)
  @check_error_code PETSC.KSPSetTolerances(ksp[], rtol, atol, dtol, maxits)

  pc = Ref{PETSC.PC}()
  mat, is = solver.mat.mat, solver.is.is
  @check_error_code PETSC.KSPGetPC(ksp[],pc)
  @check_error_code PETSC.PCSetType(pc[],PETSC.PCHPDDM)
  @check_error_code PETSC.PCHPDDMSetAuxiliaryMat(pc[],is[],mat[],C_NULL,C_NULL)
  @check_error_code PETSC.PCHPDDMHasNeumannMat(pc[],PETSC.PETSC_TRUE)
  @check_error_code PETSC.PCHPDDMSetSTShareSubKSP(pc[],PETSC.PETSC_TRUE)

  solver.pc_setup(pc)
end
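
The docstring above notes that the command-line defaults can be replaced by user-provided setup functions with the same signatures. A minimal sketch of what that could look like, using only calls that already appear in this file; the names `my_ksp_setup`, `my_pc_setup`, `V` and `a_g` are illustrative and not part of the PR:

```julia
# Hypothetical overrides of the default setup functions (same signatures as
# hpddm_default_setup_ksp / hpddm_default_setup_pc above).
function my_ksp_setup(ksp)
  @check_error_code PETSC.KSPSetFromOptions(ksp[])  # keep command-line options
  # further PETSC.KSP* calls could be added here
end

function my_pc_setup(pc)
  @check_error_code PETSC.PCSetFromOptions(pc[])
end

# Assumed: `V` is a FESpace and `a_g` a bilinear form on a ghosted Measure.
# The keyword arguments are forwarded to the low-level constructor.
solver = HPDDMLinearSolver(V, a_g, my_ksp_setup, my_pc_setup; maxiter=200, rtol=1.e-10)
```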

src/extensions.jl

Lines changed: 2 additions & 0 deletions

@@ -22,9 +22,11 @@ function P4estCartesianModelHierarchy end
 # GridapPETScExt
 
 export PETScElasticitySolver
+export HPDDMLinearSolver
 export CachedPETScNS
 
 function PETScElasticitySolver end
+function HPDDMLinearSolver end
 function CachedPETScNS end
 
 # PardisoExt

test/Applications/Elasticity.jl

Lines changed: 3 additions & 0 deletions

@@ -1,3 +1,4 @@
+module PETScElasticitySolverTests
 
 using Gridap
 using Gridap.Geometry, Gridap.Algebra
@@ -50,3 +51,5 @@ function main(distribute,np)
 
   uh = FEFunction(U,x)
 end
+
+end # module

test/ExtLibraries/GridapPETScExtTests.jl

Lines changed: 7 additions & 1 deletion

@@ -3,5 +3,11 @@ using PartitionedArrays
 include("../Applications/Elasticity.jl")
 
 with_mpi() do distribute
-  main(distribute,(2,2))
+  PETScElasticitySolverTests.main(distribute,(2,2))
+end
+
+include("drivers/HPDDMTests.jl")
+
+with_mpi() do distribute
+  HPDDMTests.main(distribute,(2,2))
 end

test/ExtLibraries/drivers/HPDDMTests.jl (new file)

Lines changed: 56 additions & 0 deletions

@@ -0,0 +1,56 @@

module HPDDMTests

using Test
using Gridap
using GridapDistributed
using PartitionedArrays
using SparseMatricesCSR, SparseArrays
using GridapPETSc
using GridapSolvers

function main(distribute,np)
  u(x) = x[1] + x[2]

  ranks = distribute(LinearIndices((prod(np),)))
  model = CartesianDiscreteModel(ranks,np,(0,1,0,1),(16,16))

  order = 1
  reffe = ReferenceFE(lagrangian,Float64,order)
  V = FESpace(model,reffe;dirichlet_tags="boundary")
  U = TrialFESpace(V,u)

  # Global assembled problem
  qdegree = 2*(order-1)
  Ω = Triangulation(model)
  dΩ = Measure(Ω,qdegree)

  f(x) = -Δ(u)(x)
  a(u,v) = ∫(∇(u)⋅∇(v))dΩ
  l(v) = ∫(f⋅v)dΩ

  assem = SparseMatrixAssembler(
    SparseMatrixCSR{0,PetscScalar,PetscInt},Vector{PetscScalar},U,V
  )
  op = AffineFEOperator(a,l,U,V,assem)

  # Overlapping Neumann problems require ghost cells in the measure
  Ωg = Triangulation(with_ghost,model)
  dΩg = Measure(Ωg,qdegree)
  a_g(u,v) = ∫(∇(u)⋅∇(v))dΩg

  options = "-ksp_error_if_not_converged true -ksp_converged_reason -ksp_monitor -pc_hpddm_levels_1_eps_nev 10 -pc_hpddm_levels_1_sub_pc_type cholesky -pc_hpddm_define_subdomains"
  GridapPETSc.with(args=split(options)) do
    solver = HPDDMLinearSolver(V,a_g)
    uh = solve(solver,op)

    eh = u - uh
    err_l2 = sqrt(sum(∫(eh⋅eh)dΩ))
    if i_am_main(ranks)
      @info "L2 error: $err_l2"
      @test err_l2 < 1e-6
    end
  end
end

end
