src/KLU/klu.jl (14 changes: 11 additions & 3 deletions)
@@ -14,7 +14,8 @@ import Base: (\), size, getproperty, setproperty!, propertynames, show,
copy, eachindex, view, sortperm, unsafe_load, zeros, convert, eltype,
length, parent, stride, finalizer, Complex, complex, imag, real, map!,
summary, println, oneunit, sizeof, isdefined, setfield!, getfield,
OutOfMemoryError, ArgumentError, OverflowError, ErrorException, DimensionMismatch
OutOfMemoryError, ArgumentError, OverflowError, ErrorException,
DimensionMismatch

# Convert from 1-based to 0-based indices
function decrement!(A::AbstractArray{T}) where {T <: Integer}
@@ -166,6 +167,7 @@ function _free_symbolic(K::AbstractKLUFactorization{Tv, Ti}) where {Ti <: KLUITy
end

for Ti in KLUIndexTypes, Tv in KLUValueTypes

klufree = _klu_name("free_numeric", Tv, Ti)
ptr = _klu_name("numeric", :Float64, Ti)
@eval begin
@@ -218,6 +220,7 @@ end
# Certain sets of inputs must be non-null *together*:
# [Lp, Li, Lx], [Up, Ui, Ux], [Fp, Fi, Fx]
for Tv in KLUValueTypes, Ti in KLUIndexTypes

extract = _klu_name("extract", Tv, Ti)
sort = _klu_name("sort", Tv, Ti)
if Tv === :ComplexF64
@@ -436,6 +439,7 @@ function klu_analyze!(K::KLUFactorization{Tv, Ti}, P::Vector{Ti},
end

for Tv in KLUValueTypes, Ti in KLUIndexTypes

factor = _klu_name("factor", Tv, Ti)
@eval begin
function klu_factor!(
@@ -464,6 +468,7 @@ for Tv in KLUValueTypes, Ti in KLUIndexTypes
end

for Tv in KLUValueTypes, Ti in KLUIndexTypes

rgrowth = _klu_name("rgrowth", Tv, Ti)
rcond = _klu_name("rcond", Tv, Ti)
condest = _klu_name("condest", Tv, Ti)
@@ -639,6 +644,7 @@ See also: [`klu`](@ref)
klu!

for Tv in KLUValueTypes, Ti in KLUIndexTypes

refactor = _klu_name("refactor", Tv, Ti)
@eval begin
function klu!(K::KLUFactorization{$Tv, $Ti}, nzval::Vector{$Tv};
@@ -676,8 +682,8 @@ function klu!(K::KLUFactorization{U}, S::SparseMatrixCSC{U};
# what should happen here when check = false? This is not really a KLU error code.
K.colptr == S.colptr && K.rowval == S.rowval ||
(decrement!(K.colptr);
decrement!(K.rowval);
throw(ArgumentError("The pattern of the original matrix must match the pattern of the refactor."))
decrement!(K.rowval);
throw(ArgumentError("The pattern of the original matrix must match the pattern of the refactor."))
)
decrement!(K.colptr)
decrement!(K.rowval)
@@ -710,6 +716,7 @@ This function overwrites `B` with the solution `X`, for a new solution vector `X
"""
solve!
for Tv in KLUValueTypes, Ti in KLUIndexTypes

solve = _klu_name("solve", Tv, Ti)
@eval begin
function solve!(klu::AbstractKLUFactorization{$Tv, $Ti},
@@ -726,6 +733,7 @@ for Tv in KLUValueTypes, Ti in KLUIndexTypes
end

for Tv in KLUValueTypes, Ti in KLUIndexTypes

tsolve = _klu_name("tsolve", Tv, Ti)
if Tv === :ComplexF64
call = :($tsolve(
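For context on the entry points touched in the hunks above, here is a minimal sketch of the factorize/refactorize/solve path. It is written against the standalone KLU.jl package and assumes the same `klu`, `klu!`, and `solve!` names that this vendored module defines; treat it as an illustration, not part of the diff.

```julia
using SparseArrays, KLU

A = sparse([1.0 0.0 2.0; 0.0 3.0 0.0; 4.0 0.0 5.0])
b = [1.0, 2.0, 3.0]

K = klu(A)            # symbolic + numeric factorization
x = copy(b)
KLU.solve!(K, x)      # overwrites x with the solution of A * x = b

# Refactorize with new values but the *same* sparsity pattern;
# a pattern mismatch throws the ArgumentError seen in the hunk above.
A2 = copy(A)
nonzeros(A2) .*= 2.0
KLU.klu!(K, A2)
x2 = K \ b
```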
src/LinearSolve.jl (5 changes: 3 additions & 2 deletions)
@@ -176,9 +176,10 @@ include("adjoint.jl")
end
end

@inline function _notsuccessful(F::LinearAlgebra.QRCompactWY{T, A}) where {T,A<:GPUArraysCore.AnyGPUArray}
@inline function _notsuccessful(F::LinearAlgebra.QRCompactWY{
T, A}) where {T, A <: GPUArraysCore.AnyGPUArray}
hasmethod(LinearAlgebra.issuccess, (typeof(F),)) ?
!LinearAlgebra.issuccess(F) : false
!LinearAlgebra.issuccess(F) : false
end

@inline function _notsuccessful(F::LinearAlgebra.QRCompactWY)
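The hunk above only rewraps `_notsuccessful` for GPU-backed `QRCompactWY` factorizations, but the guard it wraps is worth spelling out: `issuccess` is called only when a method exists for that factorization type, and the factorization is otherwise assumed not to have failed. A small stand-alone sketch of the same pattern; the `notsuccessful` helper below is hypothetical and not part of LinearSolve:

```julia
using LinearAlgebra

# Hypothetical helper mirroring _notsuccessful: report a failure only when
# issuccess has a method for this factorization type and returns false.
notsuccessful(F) = hasmethod(LinearAlgebra.issuccess, (typeof(F),)) ?
                   !LinearAlgebra.issuccess(F) : false

F = qr(rand(4, 4))       # QRCompactWY on the CPU
@show notsuccessful(F)   # false for a usable factorization
```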
src/factorization.jl (8 changes: 4 additions & 4 deletions)
@@ -861,8 +861,8 @@ patterns with “more structure”.
!!! note

By default, the SparseArrays.jl are implemented for efficiency by caching the
symbolic factorization. If the sparsity pattern of `A` may change between solves, set `reuse_symbolic=false`.
If the pattern is assumed or known to be constant, set `reuse_symbolic=true` to avoid
symbolic factorization. If the sparsity pattern of `A` may change between solves, set `reuse_symbolic=false`.
If the pattern is assumed or known to be constant, set `reuse_symbolic=true` to avoid
unnecessary recomputation. To further reduce computational overhead, you can disable
pattern checks entirely by setting `check_pattern = false`. Note that this may error
if the sparsity pattern does change unexpectedly.
@@ -887,8 +887,8 @@ A fast sparse LU-factorization which specializes on sparsity patterns with “less structure”.
!!! note

By default, the SparseArrays.jl are implemented for efficiency by caching the
symbolic factorization. If the sparsity pattern of `A` may change between solves, set `reuse_symbolic=false`.
If the pattern is assumed or known to be constant, set `reuse_symbolic=true` to avoid
symbolic factorization. If the sparsity pattern of `A` may change between solves, set `reuse_symbolic=false`.
If the pattern is assumed or known to be constant, set `reuse_symbolic=true` to avoid
unnecessary recomputation. To further reduce computational overhead, you can disable
pattern checks entirely by setting `check_pattern = false`. Note that this may error
if the sparsity pattern does change unexpectedly.
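The note above describes the two keyword arguments these docstrings document. As a usage illustration (not part of the diff), a minimal LinearSolve.jl sketch that passes both; the problem data here is made up:

```julia
using LinearSolve, SparseArrays, LinearAlgebra

A = sprand(100, 100, 0.05) + I   # random sparse, nonzero diagonal
b = rand(100)
prob = LinearProblem(A, b)

# Defaults shown explicitly: cache and reuse the symbolic factorization and
# check the sparsity pattern on refactorization. Set reuse_symbolic = false
# if the pattern of A may change between solves, or check_pattern = false to
# skip the pattern comparison once the pattern is known to be fixed.
sol = solve(prob, KLUFactorization(reuse_symbolic = true, check_pattern = true))
sol.u   # the solution vector
```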