diff --git a/docs/Project.toml b/docs/Project.toml
index b6eb11d..989e28b 100644
--- a/docs/Project.toml
+++ b/docs/Project.toml
@@ -3,4 +3,7 @@ Documenter = "e30172f5-a6a5-5a46-863b-614d45cd2de4"
FiniteDiff = "6a86dc24-6348-571c-b903-95158fe2bd41"
[compat]
-Documenter = "0.27"
+Documenter = "1"
+
+[sources]
+FiniteDiff = {path = ".."}
diff --git a/docs/make.jl b/docs/make.jl
index 0e1bf07..7e00e64 100644
--- a/docs/make.jl
+++ b/docs/make.jl
@@ -23,7 +23,7 @@ open(joinpath(@__DIR__, "src", "index.md"), "w") do io
for line in eachline(joinpath(dirname(@__DIR__), "README.md"))
println(io, line)
end
-
+
for line in eachline(joinpath(@__DIR__, "src", "reproducibility.md"))
println(io, line)
end
@@ -38,6 +38,7 @@ makedocs(sitename="FiniteDiff.jl",
doctest=false,
format=Documenter.HTML(assets=["assets/favicon.ico"],
canonical="https://docs.sciml.ai/FiniteDiff/stable/"),
+ warnonly=[:missing_docs],
pages=pages)
deploydocs(repo="github.com/JuliaDiff/FiniteDiff.jl.git"; push_preview=true)
diff --git a/docs/src/assets/Project.toml b/docs/src/assets/Project.toml
index b6eb11d..989e28b 100644
--- a/docs/src/assets/Project.toml
+++ b/docs/src/assets/Project.toml
@@ -3,4 +3,7 @@ Documenter = "e30172f5-a6a5-5a46-863b-614d45cd2de4"
FiniteDiff = "6a86dc24-6348-571c-b903-95158fe2bd41"
[compat]
-Documenter = "0.27"
+Documenter = "1"
+
+[sources]
+FiniteDiff = {path = ".."}
diff --git a/docs/src/reproducibility.md b/docs/src/reproducibility.md
index 333d67b..552f499 100644
--- a/docs/src/reproducibility.md
+++ b/docs/src/reproducibility.md
@@ -35,20 +35,22 @@ You can also download the
manifest file and the
project file.
diff --git a/docs/src/tutorials.md b/docs/src/tutorials.md
index e3a2989..ee0d417 100644
--- a/docs/src/tutorials.md
+++ b/docs/src/tutorials.md
@@ -10,21 +10,21 @@ using FiniteDiff, StaticArrays
fcalls = 0
function f(dx,x) # in-place
- global fcalls += 1
- for i in 2:length(x)-1
- dx[i] = x[i-1] - 2x[i] + x[i+1]
- end
- dx[1] = -2x[1] + x[2]
- dx[end] = x[end-1] - 2x[end]
- nothing
+ global fcalls += 1
+ for i in 2:length(x)-1
+ dx[i] = x[i-1] - 2x[i] + x[i+1]
+ end
+ dx[1] = -2x[1] + x[2]
+ dx[end] = x[end-1] - 2x[end]
+ nothing
end
const N = 10
handleleft(x,i) = i==1 ? zero(eltype(x)) : x[i-1]
handleright(x,i) = i==length(x) ? zero(eltype(x)) : x[i+1]
function g(x) # out-of-place
- global fcalls += 1
- @SVector [handleleft(x,i) - 2x[i] + handleright(x,i) for i in 1:N]
+ global fcalls += 1
+ @SVector [handleleft(x,i) - 2x[i] + handleright(x,i) for i in 1:N]
end
```
@@ -37,7 +37,7 @@ x = @SVector rand(N)
FiniteDiff.finite_difference_jacobian(g,x)
#=
-10×10 SArray{Tuple{10,10},Float64,2,100} with indices SOneTo(10)×SOneTo(10):
+10×10 SMatrix{10, 10, Float64, 100} with indices SOneTo(10)×SOneTo(10):
-2.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0
1.0 -2.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0
0.0 1.0 -2.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0
@@ -65,7 +65,7 @@ FiniteDiff.finite_difference_jacobian!(output,f,x)
output
#=
-10×10 Array{Float64,2}:
+10×10 Matrix{Float64}:
-2.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0
1.0 -2.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0
0.0 1.0 -2.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0
@@ -175,8 +175,8 @@ we get the analytical solution to the optimal matrix colors for our structured
Jacobian. Now we can use this in our differencing routines:
```julia
-tridiagcache = FiniteDiff.JacobianCache(x,colorvec=colors,sparsity=tridiagjac)
-FiniteDiff.finite_difference_jacobian!(tridiagjac,f,x,tridiagcache)
+tridiagcache = FiniteDiff.JacobianCache(x, colorvec=colors, sparsity=tridiagjac)
+FiniteDiff.finite_difference_jacobian!(tridiagjac, f, x, tridiagcache)
```
It'll use a special iteration scheme dependent on the matrix type to accelerate
@@ -189,14 +189,16 @@ differential equations, with a function like:
```julia
function pde(out, x)
- x = reshape(x, 100, 100)
- out = reshape(out, 100, 100)
- for i in 1:100
- for j in 1:100
- out[i, j] = x[i, j] + x[max(i -1, 1), j] + x[min(i+1, size(x, 1)), j] + x[i, max(j-1, 1)] + x[i, min(j+1, size(x, 2))]
- end
- end
- return vec(out)
+ x = reshape(x, 100, 100)
+ out = reshape(out, 100, 100)
+ m = size(x, 1)
+ n = size(x, 2)
+ for i in 1:100
+ for j in 1:100
+ out[i, j] = x[i, j] + x[max(i-1, 1), j] + x[min(i+1, m), j] + x[i, max(j-1, 1)] + x[i, min(j+1, n)]
+ end
+ end
+ return vec(out)
end
x = rand(10000)
```
@@ -212,4 +214,4 @@ bbbcache = FiniteDiff.JacobianCache(x,colorvec=colorsbbb,sparsity=Jbbb)
FiniteDiff.finite_difference_jacobian!(Jbbb, pde, x, bbbcache)
```
-And boom, a fast Jacobian filling algorithm on your special matrix.
\ No newline at end of file
+And boom, a fast Jacobian filling algorithm on your special matrix.
diff --git a/src/derivatives.jl b/src/derivatives.jl
index 04cc7ff..222ff69 100644
--- a/src/derivatives.jl
+++ b/src/derivatives.jl
@@ -1,9 +1,10 @@
"""
FiniteDiff.finite_difference_derivative(
- f, x::T,
- fdtype::Type{T1}=Val{:central},
- returntype::Type{T2}=eltype(x),
- f_x::Union{Nothing,T}=nothing)
+ f,
+ x :: T,
+ fdtype :: Type{T1} = Val{:central},
+ returntype :: Type{T2} = eltype(x),
+ f_x :: Union{Nothing,T} = nothing)
Single-point derivative of scalar->scalar maps.
"""
@@ -43,7 +44,7 @@ end
FiniteDiff.DerivativeCache(
x :: AbstractArray{<:Number},
fx :: Union{Nothing,AbstractArray{<:Number}} = nothing,
- epsilon :: Union{Nothing,AbstractArray{<:Real}} = nothing,
+ epsilon :: Union{Nothing,AbstractArray{<:Real}} = nothing,
fdtype :: Type{T1} = Val{:central},
returntype :: Type{T2} = eltype(x))
@@ -146,14 +147,14 @@ end
"""
FiniteDiff.finite_difference_derivative!(
- df::AbstractArray{<:Number},
+ df :: AbstractArray{<:Number},
f,
- x::AbstractArray{<:Number},
- cache::DerivativeCache{T1,T2,fdtype,returntype};
- relstep=default_relstep(fdtype, eltype(x)),
- absstep=relstep,
- dir=true)
-
+ x :: AbstractArray{<:Number},
+ cache :: DerivativeCache{T1,T2,fdtype,returntype};
+ relstep = default_relstep(fdtype, eltype(x)),
+ absstep = relstep,
+ dir = true)
+
Compute the derivative `df` of a scalar-valued map `f` at a collection of points `x`.
Cached.
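For quick reference while reviewing, the derivative API documented in the hunks above can be exercised as follows; this is an illustrative sketch built only from the signatures shown, and the numeric comments are approximate:

```julia
using FiniteDiff

# Scalar -> scalar, single point; central differences by default.
d = FiniteDiff.finite_difference_derivative(sin, 1.0)
# d ≈ cos(1.0) up to finite-difference error

# Pointwise derivatives over a vector of points, reusing a DerivativeCache.
x = collect(range(0.0, 1.0; length = 5))
df = similar(x)
cache = FiniteDiff.DerivativeCache(x)
FiniteDiff.finite_difference_derivative!(df, sin, x, cache)
# df ≈ cos.(x) elementwise
```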
diff --git a/src/gradients.jl b/src/gradients.jl
index 40bbc39..9458142 100644
--- a/src/gradients.jl
+++ b/src/gradients.jl
@@ -85,7 +85,7 @@ Non-Allocating Cache Constructor
# Output
The output is a [`GradientCache`](@ref) struct.
-```julia
+```julia-repl
julia> x = [1.0, 3.0]
2-element Vector{Float64}:
1.0
@@ -116,12 +116,12 @@ end
FiniteDiff.finite_difference_gradient(
f,
x,
- fdtype::Type{T1}=Val{:central},
- returntype::Type{T2}=eltype(x),
- inplace::Type{Val{T3}}=Val{true};
- relstep=default_relstep(fdtype, eltype(x)),
- absstep=relstep,
- dir=true)
+ fdtype :: Type{T1} = Val{:central},
+ returntype :: Type{T2} = eltype(x),
+ inplace :: Type{Val{T3}} = Val{true};
+ relstep = default_relstep(fdtype, eltype(x)),
+ absstep = relstep,
+ dir = true)
Compute the gradient of function `f` at point `x` using finite differences.
@@ -235,13 +235,13 @@ end
"""
FiniteDiff.finite_difference_gradient!(
- df::AbstractArray{<:Number},
+ df :: AbstractArray{<:Number},
f,
- x::AbstractArray{<:Number},
- cache::GradientCache;
- relstep=default_relstep(fdtype, eltype(x)),
- absstep=relstep
- dir=true)
+ x :: AbstractArray{<:Number},
+ cache :: GradientCache;
+ relstep = default_relstep(fdtype, eltype(x)),
+        absstep = relstep,
+ dir = true)
Gradients are either a vector->scalar map `f(x)`, or a scalar->vector map `f(fx,x)` if `inplace=Val{true}` and `fx=f(x)` if `inplace=Val{false}`.
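Similarly, a small sketch of the gradient API whose docstrings are reformatted above (illustrative only; built from the cache-less signatures shown, with approximate values in the comments):

```julia
using FiniteDiff

f(x) = sum(abs2, x)        # vector -> scalar map
x = [1.0, 3.0]

# Cache-less, out-of-place; central differences by default.
g = FiniteDiff.finite_difference_gradient(f, x)
# g ≈ [2.0, 6.0], the analytic gradient 2x, up to finite-difference error

# Cache-less, in-place variant from the finite_difference_gradient! docstring.
df = similar(x)
FiniteDiff.finite_difference_gradient!(df, f, x)
```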
diff --git a/src/hessians.jl b/src/hessians.jl
index 46b1678..562874f 100644
--- a/src/hessians.jl
+++ b/src/hessians.jl
@@ -58,12 +58,9 @@ end
"""
HessianCache(
- xpp,
- xpm,
- xmp,
- xmm,
- fdtype::Type{T1}=Val{:hcentral},
- inplace::Type{Val{T2}} = x isa StaticArray ? Val{true} : Val{false})
+ xpp, xpm, xmp, xmm,
+ fdtype :: Type{T1} = Val{:hcentral},
+ inplace :: Type{Val{T2}} = x isa StaticArray ? Val{true} : Val{false})
Non-allocating cache constructor.
"""
@@ -78,8 +75,8 @@ end
"""
HessianCache(
x,
- fdtype::Type{T1}=Val{:hcentral},
- inplace::Type{Val{T2}} = x isa StaticArray ? Val{true} : Val{false})
+ fdtype :: Type{T1} = Val{:hcentral},
+ inplace :: Type{Val{T2}} = x isa StaticArray ? Val{true} : Val{false})
Allocating cache constructor.
"""
@@ -94,11 +91,11 @@ end
"""
finite_difference_hessian(
f,
- x::AbstractArray{<:Number},
- fdtype::Type{T1}=Val{:hcentral},
- inplace::Type{Val{T2}} = x isa StaticArray ? Val{true} : Val{false};
- relstep=default_relstep(fdtype, eltype(x)),
- absstep=relstep)
+ x :: AbstractArray{<:Number},
+ fdtype :: Type{T1} = Val{:hcentral},
+ inplace :: Type{Val{T2}} = x isa StaticArray ? Val{true} : Val{false};
+ relstep = default_relstep(fdtype, eltype(x)),
+ absstep = relstep)
Compute the Hessian matrix of scalar function `f` at point `x` using finite differences.
@@ -171,13 +168,13 @@ end
"""
finite_difference_hessian!(
- H::AbstractMatrix,
+ H :: AbstractMatrix,
f,
- x::AbstractArray{<:Number},
- fdtype :: Type{T1}=Val{:hcentral},
+ x :: AbstractArray{<:Number},
+ fdtype :: Type{T1} = Val{:hcentral},
inplace :: Type{Val{T2}} = x isa StaticArray ? Val{true} : Val{false};
- relstep=default_relstep(fdtype, eltype(x)),
- absstep=relstep)
+ relstep = default_relstep(fdtype, eltype(x)),
+ absstep = relstep)
Cache-less.
"""
diff --git a/src/jacobians.jl b/src/jacobians.jl
index 0793269..e1b742c 100644
--- a/src/jacobians.jl
+++ b/src/jacobians.jl
@@ -81,11 +81,11 @@ end
"""
FiniteDiff.JacobianCache(
- x1 ,
- fx ,
+ x1,
+ fx,
fx1,
fdtype :: Type{T1} = Val{:central},
- returntype :: Type{T2} = eltype(fx),
+ returntype :: Type{T2} = eltype(fx);
colorvec = 1:length(x1),
sparsity = nothing)
@@ -189,16 +189,16 @@ end
"""
FiniteDiff.finite_difference_jacobian(
f,
- x::AbstractArray{<:Number},
- fdtype::Type{T1}=Val{:forward},
- returntype::Type{T2}=eltype(x),
- f_in=nothing;
- relstep=default_relstep(fdtype, eltype(x)),
- absstep=relstep,
- colorvec=1:length(x),
- sparsity=nothing,
- jac_prototype=nothing,
- dir=true)
+ x :: AbstractArray{<:Number},
+ fdtype :: Type{T1} = Val{:forward},
+ returntype :: Type{T2} = eltype(x),
+ f_in = nothing;
+ relstep = default_relstep(fdtype, eltype(x)),
+ absstep = relstep,
+ colorvec = 1:length(x),
+ sparsity = nothing,
+ jac_prototype = nothing,
+ dir = true)
Compute the Jacobian matrix of function `f` at point `x` using finite differences.
@@ -265,12 +265,12 @@ void_setindex!(args...) = (setindex!(args...); return)
f,
x,
cache::JacobianCache;
- relstep=default_relstep(fdtype, eltype(x)),
- absstep=relstep,
- colorvec = cache.colorvec,
- sparsity = cache.sparsity,
+ relstep = default_relstep(fdtype, eltype(x)),
+ absstep = relstep,
+ colorvec = cache.colorvec,
+ sparsity = cache.sparsity,
jac_prototype = nothing,
- dir=true)
+ dir = true)
Cached.
"""
@@ -421,14 +421,14 @@ end
"""
finite_difference_jacobian!(
- J::AbstractMatrix,
+ J :: AbstractMatrix,
f,
- x::AbstractArray{<:Number},
- fdtype :: Type{T1}=Val{:forward},
- returntype :: Type{T2}=eltype(x),
+ x :: AbstractArray{<:Number},
+ fdtype :: Type{T1} = Val{:forward},
+ returntype :: Type{T2} = eltype(x),
f_in :: Union{T2,Nothing}=nothing;
- relstep=default_relstep(fdtype, eltype(x)),
- absstep=relstep,
+ relstep = default_relstep(fdtype, eltype(x)),
+ absstep = relstep,
colorvec = 1:length(x),
sparsity = ArrayInterfaceCore.has_sparsestruct(J) ? J : nothing)
@@ -480,15 +480,15 @@ end
"""
FiniteDiff.finite_difference_jacobian!(
- J::AbstractMatrix{<:Number},
+ J :: AbstractMatrix{<:Number},
f,
- x::AbstractArray{<:Number},
- cache::JacobianCache;
- relstep=default_relstep(fdtype, eltype(x)),
- absstep=relstep,
+ x :: AbstractArray{<:Number},
+ cache :: JacobianCache;
+ relstep = default_relstep(fdtype, eltype(x)),
+ absstep = relstep,
colorvec = cache.colorvec,
sparsity = cache.sparsity,
- dir=true)
+ dir = true)
Cached.
"""
diff --git a/src/jvp.jl b/src/jvp.jl
index 7493aa2..fa940aa 100644
--- a/src/jvp.jl
+++ b/src/jvp.jl
@@ -17,9 +17,7 @@ mutable struct JVPCache{X1, FX1, FDType}
end
"""
- FiniteDiff.JVPCache(
- x,
- fdtype::Type{T1} = Val{:forward})
+ FiniteDiff.JVPCache(x, fdtype::Type{T1} = Val{:forward})
Allocating cache constructor for Jacobian-vector product computations.
@@ -47,10 +45,7 @@ function JVPCache(
end
"""
- FiniteDiff.JVPCache(
- x,
- fx1,
- fdtype::Type{T1} = Val{:forward})
+ FiniteDiff.JVPCache(x, fx1, fdtype::Type{T1} = Val{:forward})
Non-allocating cache constructor for Jacobian-vector product computations.
@@ -87,12 +82,12 @@ end
"""
FiniteDiff.finite_difference_jvp(
f,
- x::AbstractArray{<:Number},
- v::AbstractArray{<:Number},
- fdtype::Type{T1}=Val{:forward},
- f_in=nothing;
- relstep=default_relstep(fdtype, eltype(x)),
- absstep=relstep)
+ x :: AbstractArray{<:Number},
+ v :: AbstractArray{<:Number},
+ fdtype :: Type{T1} = Val{:forward},
+ f_in = nothing;
+ relstep = default_relstep(fdtype, eltype(x)),
+ absstep = relstep)
Compute the Jacobian-vector product `J(x) * v` using finite differences.
@@ -156,8 +151,8 @@ end
x,
v,
cache::JVPCache;
- relstep=default_relstep(fdtype, eltype(x)),
- absstep=relstep,
+ relstep = default_relstep(fdtype, eltype(x)),
+ absstep = relstep)
Cached.
"""
@@ -195,15 +190,15 @@ end
"""
finite_difference_jvp!(
- jvp::AbstractArray{<:Number},
+ jvp :: AbstractArray{<:Number},
f,
- x::AbstractArray{<:Number},
- v::AbstractArray{<:Number},
- fdtype :: Type{T1}=Val{:forward},
- returntype :: Type{T2}=eltype(x),
- f_in :: Union{T2,Nothing}=nothing;
- relstep=default_relstep(fdtype, eltype(x)),
- absstep=relstep)
+ x :: AbstractArray{<:Number},
+ v :: AbstractArray{<:Number},
+ fdtype :: Type{T1} = Val{:forward},
+ returntype :: Type{T2} = eltype(x),
+ f_in :: Union{T2,Nothing} = nothing;
+ relstep = default_relstep(fdtype, eltype(x)),
+ absstep = relstep)
Cache-less.
"""
@@ -229,14 +224,14 @@ end
"""
FiniteDiff.finite_difference_jvp!(
- jvp::AbstractArray{<:Number},
+ jvp :: AbstractArray{<:Number},
f,
- x::AbstractArray{<:Number},
- v::AbstractArray{<:Number},
- cache::JVPCache;
- relstep=default_relstep(fdtype, eltype(x)),
- absstep=relstep,
- dir=true)
+ x :: AbstractArray{<:Number},
+ v :: AbstractArray{<:Number},
+ cache :: JVPCache;
+ relstep = default_relstep(fdtype, eltype(x)),
+ absstep = relstep,
+ dir = true)
Cached.
"""