
Commit 86beacf

Add needs_square_A trait

1 parent a880003 · commit 86beacf

3 files changed: +48 −1 lines changed

Project.toml

Lines changed: 1 addition & 1 deletion

```diff
@@ -1,7 +1,7 @@
 name = "LinearSolve"
 uuid = "7ed4a6bd-45f5-4d41-b270-4a48e9bafcae"
 authors = ["SciML"]
-version = "2.11.1"
+version = "2.12.0"
 
 [deps]
 ArrayInterface = "4fba245c-0d91-5ea0-9b3e-6abc04ee57a9"
```

src/LinearSolve.jl

Lines changed: 40 additions & 0 deletions

```diff
@@ -143,6 +143,46 @@ end
     include("factorization_sparse.jl")
 end
 
+# Solver Specific Traits
+## Needs Square Matrix
+"""
+    needs_square_A(alg)
+
+Returns `true` if the algorithm requires a square matrix.
+
+Note that this checks whether the implementation of the algorithm needs a square matrix by
+trying to solve a small non-square system. It is recommended to add a dispatch to this
+function for custom algorithms!
+"""
+needs_square_A(::Nothing) = false # LinearSolve will automatically choose a suitable algorithm!
+function needs_square_A(alg::SciMLLinearSolveAlgorithm)
+    try
+        A = [1.0 2.0;
+             3.0 4.0;
+             5.0 6.0]
+        b = ones(Float64, 3)
+        solve(LinearProblem(A, b), alg)
+        return false
+    catch err
+        return true
+    end
+end
+for alg in (:QRFactorization, :FastQRFactorization, :NormalCholeskyFactorization,
+    :NormalBunchKaufmanFactorization)
+    @eval needs_square_A(::$(alg)) = false
+end
+for kralg in (Krylov.lsmr!, Krylov.craigmr!)
+    @eval needs_square_A(::KrylovJL{$(typeof(kralg))}) = false
+end
+for alg in (:LUFactorization, :FastLUFactorization, :SVDFactorization,
+    :GenericFactorization, :GenericLUFactorization, :SimpleLUFactorization,
+    :RFLUFactorization, :UMFPACKFactorization, :KLUFactorization, :SparspakFactorization,
+    :DiagonalFactorization, :CholeskyFactorization, :BunchKaufmanFactorization,
+    :CHOLMODFactorization, :LDLtFactorization, :AppleAccelerateLUFactorization,
+    :MKLLUFactorization, :MetalLUFactorization)
+    @eval needs_square_A(::$(alg)) = true
+end
+
 const IS_OPENBLAS = Ref(true)
 isopenblas() = IS_OPENBLAS[]
 
```
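The generic fallback above determines the trait at runtime by attempting a small 3×2 solve, which is why the docstring recommends that custom algorithms define their own dispatch. Below is a minimal sketch of such a dispatch; the `MyLeastSquaresAlg` type and its claimed support for rectangular matrices are illustrative assumptions, not part of this commit:

```julia
using LinearSolve

# Hypothetical user-defined algorithm type (illustrative only, not part of LinearSolve).
struct MyLeastSquaresAlg <: LinearSolve.SciMLLinearSolveAlgorithm end

# Declare the trait directly so the 3×2 probe in the generic fallback never has to run.
LinearSolve.needs_square_A(::MyLeastSquaresAlg) = false
```

The built-in algorithms receive the same treatment via the `@eval` loops in the diff, so only genuinely new algorithm types need an explicit method.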

test/nonsquare.jl

Lines changed: 7 additions & 0 deletions

```diff
@@ -8,7 +8,11 @@ b = rand(m)
 prob = LinearProblem(A, b)
 res = A \ b
 @test solve(prob).u ≈ res
+@test !LinearSolve.needs_square_A(QRFactorization())
 @test solve(prob, QRFactorization()) ≈ res
+@test !LinearSolve.needs_square_A(FastQRFactorization())
+@test solve(prob, FastQRFactorization()) ≈ res
+@test !LinearSolve.needs_square_A(KrylovJL_LSMR())
 @test solve(prob, KrylovJL_LSMR()) ≈ res
 
 A = sprand(m, n, 0.5)
@@ -23,6 +27,7 @@ A = sprand(n, m, 0.5)
 b = rand(n)
 prob = LinearProblem(A, b)
 res = Matrix(A) \ b
+@test !LinearSolve.needs_square_A(KrylovJL_CRAIGMR())
 @test solve(prob, KrylovJL_CRAIGMR()) ≈ res
 
 A = sprandn(1000, 100, 0.1)
@@ -35,7 +40,9 @@ A = randn(1000, 100)
 b = randn(1000)
 @test isapprox(solve(LinearProblem(A, b)).u, Symmetric(A' * A) \ (A' * b))
 solve(LinearProblem(A, b)).u;
+@test !LinearSolve.needs_square_A(NormalCholeskyFactorization())
 solve(LinearProblem(A, b), (LinearSolve.NormalCholeskyFactorization())).u;
+@test !LinearSolve.needs_square_A(NormalBunchKaufmanFactorization())
 solve(LinearProblem(A, b), (LinearSolve.NormalBunchKaufmanFactorization())).u;
 solve(LinearProblem(A, b),
     assumptions = (OperatorAssumptions(false;
```
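For context, here is a rough sketch of how downstream code might consult the new trait when handed a possibly non-square operator, falling back to QR when the preferred algorithm requires a square matrix. The `pick_alg` helper is an assumption for this example and not an API of LinearSolve:

```julia
using LinearSolve

# Illustrative helper (not part of LinearSolve): swap in QR when the preferred
# algorithm declares it needs a square A but the matrix is rectangular.
function pick_alg(A, preferred)
    if size(A, 1) != size(A, 2) && LinearSolve.needs_square_A(preferred)
        return QRFactorization()
    end
    return preferred
end

A = [1.0 2.0; 3.0 4.0; 5.0 6.0]       # 3×2, non-square
b = ones(3)
alg = pick_alg(A, LUFactorization())  # LU needs a square A, so QR is chosen
sol = solve(LinearProblem(A, b), alg) # least-squares solution via QR
```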
