|
61 | 61 |
|
# Solve the linear system with the already set-up KSP solver, writing the
# solution into `x`. All three arguments wrap native PETSc objects, so the
# call goes straight to PETSc's KSPSolve with no copies.
function Algebra.solve!(x::PETScVector,ns::PETScLinearSolverNS,b::PETScVector)
  @check_error_code PETSC.KSPSolve(ns.ksp[],b.vec[],x.vec[])
  return x
end
|
# Solve with a plain Julia vector as right-hand side: wrap `b` in a temporary
# PETScVector, solve, and explicitly free the temporary afterwards.
#
# NOTE(review): an earlier version ran a global garbage collection of PETSc
# objects here (`gridap_petsc_gc()`) when `x.comm != MPI.COMM_SELF`. That could
# destroy PETSc objects still in use and led to PETSc error 62 inside KSPSolve.
# We therefore finalize only the temporaries we create ourselves, instead of
# collecting globally.
#
# if (x.comm != MPI.COMM_SELF)
#   gridap_petsc_gc() # Do garbage collection of PETSc objects
# end
function Algebra.solve!(x::PETScVector,ns::PETScLinearSolverNS,b::AbstractVector)
  rhs = convert(PETScVector,b)
  solve!(x,ns,rhs)
  GridapPETSc.Finalize(rhs)
  return x
end
|
# Generic fallback for arbitrary Julia vectors: copy `x` into a PETSc vector,
# solve, copy the solution back into `x`, and free the PETSc temporary.
function Algebra.solve!(x::AbstractVector,ns::PETScLinearSolverNS,b::AbstractVector)
  px = convert(PETScVector,x)
  solve!(px,ns,b)
  copy!(x,px)
  GridapPETSc.Finalize(px)
  return x
end
|
# Specialization for `Vector{PetscScalar}`: `convert` aliases the Julia memory
# with the underlying PETSc Vec, so the solution lands directly in `x` and no
# copy back is needed.
function Algebra.solve!(x::Vector{PetscScalar},ns::PETScLinearSolverNS,b::AbstractVector)
  px = convert(PETScVector,x)
  solve!(px,ns,b)
  return x
end
|
# For PVectors the ghost layout of `x` and `b` must match the column/row
# partitions of the operator `ns.A`. When a layout differs (checked by object
# identity on the partitions), we solve on a re-partitioned copy — less
# efficient, but necessary for correctness.
function Algebra.solve!(x::PVector,ns::PETScLinearSolverNS,b::PVector)
  rows, cols = axes(ns.A)

  # Re-partition the solution vector only if its ghost layout differs.
  sol = x
  if partition(axes(x,1)) !== partition(cols)
    sol = pzeros(PetscScalar,partition(cols))
    copy!(sol,x)
  end

  # Likewise for the right-hand side.
  rhs = b
  if partition(axes(b,1)) !== partition(rows)
    rhs = pzeros(PetscScalar,partition(rows))
    copy!(rhs,b)
  end

  X = convert(PETScVector,sol)
  solve!(X,ns,rhs)
  copy!(x,X)
  GridapPETSc.Finalize(X)
  return x
end
|
# Re-build the numerical setup for a new matrix `A`: store it, convert it to a
# PETSc matrix, hand it to the KSP solver, and trigger the (possibly expensive)
# KSP setup phase.
function Algebra.numerical_setup!(ns::PETScLinearSolverNS,A::AbstractMatrix)
  ns.A = A
  ns.B = convert(PETScMatrix,A)
  # The same matrix is used both as the operator and to build the preconditioner.
  @check_error_code PETSC.KSPSetOperators(ns.ksp[],ns.B.mat[],ns.B.mat[])
  @check_error_code PETSC.KSPSetUp(ns.ksp[])
  return ns
end
0 commit comments