Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 4 additions & 1 deletion docs/Project.toml
Original file line number Diff line number Diff line change
Expand Up @@ -3,4 +3,7 @@ Documenter = "e30172f5-a6a5-5a46-863b-614d45cd2de4"
FiniteDiff = "6a86dc24-6348-571c-b903-95158fe2bd41"

[compat]
Documenter = "0.27"
Documenter = "1"

[sources]
FiniteDiff = {path = ".."}
3 changes: 2 additions & 1 deletion docs/make.jl
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ open(joinpath(@__DIR__, "src", "index.md"), "w") do io
for line in eachline(joinpath(dirname(@__DIR__), "README.md"))
println(io, line)
end

for line in eachline(joinpath(@__DIR__, "src", "reproducibility.md"))
println(io, line)
end
Expand All @@ -38,6 +38,7 @@ makedocs(sitename="FiniteDiff.jl",
doctest=false,
format=Documenter.HTML(assets=["assets/favicon.ico"],
canonical="https://docs.sciml.ai/FiniteDiff/stable/"),
warnonly=[:missing_docs],
pages=pages)

deploydocs(repo="github.com/JuliaDiff/FiniteDiff.jl.git"; push_preview=true)
5 changes: 4 additions & 1 deletion docs/src/assets/Project.toml
Original file line number Diff line number Diff line change
Expand Up @@ -3,4 +3,7 @@ Documenter = "e30172f5-a6a5-5a46-863b-614d45cd2de4"
FiniteDiff = "6a86dc24-6348-571c-b903-95158fe2bd41"

[compat]
Documenter = "0.27"
Documenter = "1"

[sources]
FiniteDiff = {path = ".."}
6 changes: 4 additions & 2 deletions docs/src/reproducibility.md
Original file line number Diff line number Diff line change
Expand Up @@ -35,20 +35,22 @@ You can also download the
<a href="
```
```@eval
using TOML
using TOML, Markdown
version = TOML.parse(read("../../Project.toml",String))["version"]
name = TOML.parse(read("../../Project.toml",String))["name"]
link = "https://github.com/SciML/"*name*".jl/tree/gh-pages/v"*version*"/assets/Manifest.toml"
Markdown.MD(link)
```
```@raw html
">manifest</a> file and the
<a href="
```
```@eval
using TOML
using TOML, Markdown
version = TOML.parse(read("../../Project.toml",String))["version"]
name = TOML.parse(read("../../Project.toml",String))["name"]
link = "https://github.com/SciML/"*name*".jl/tree/gh-pages/v"*version*"/assets/Project.toml"
Markdown.MD(link)
```
```@raw html
">project</a> file.
Expand Down
46 changes: 24 additions & 22 deletions docs/src/tutorials.md
Original file line number Diff line number Diff line change
Expand Up @@ -10,21 +10,21 @@ using FiniteDiff, StaticArrays

fcalls = 0
function f(dx,x) # in-place
global fcalls += 1
for i in 2:length(x)-1
dx[i] = x[i-1] - 2x[i] + x[i+1]
end
dx[1] = -2x[1] + x[2]
dx[end] = x[end-1] - 2x[end]
nothing
global fcalls += 1
for i in 2:length(x)-1
dx[i] = x[i-1] - 2x[i] + x[i+1]
end
dx[1] = -2x[1] + x[2]
dx[end] = x[end-1] - 2x[end]
nothing
end

const N = 10
handleleft(x,i) = i==1 ? zero(eltype(x)) : x[i-1]
handleright(x,i) = i==length(x) ? zero(eltype(x)) : x[i+1]
function g(x) # out-of-place
global fcalls += 1
@SVector [handleleft(x,i) - 2x[i] + handleright(x,i) for i in 1:N]
global fcalls += 1
@SVector [handleleft(x,i) - 2x[i] + handleright(x,i) for i in 1:N]
end
```

Expand All @@ -37,7 +37,7 @@ x = @SVector rand(N)
FiniteDiff.finite_difference_jacobian(g,x)

#=
10×10 SArray{Tuple{10,10},Float64,2,100} with indices SOneTo(10)×SOneTo(10):
10×10 SMatrix{10, 10, Float64, 100} with indices SOneTo(10)×SOneTo(10):
-2.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0
1.0 -2.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0
0.0 1.0 -2.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0
Expand Down Expand Up @@ -65,7 +65,7 @@ FiniteDiff.finite_difference_jacobian!(output,f,x)
output

#=
10×10 Array{Float64,2}:
10×10 Matrix{Float64}:
-2.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0
1.0 -2.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0
0.0 1.0 -2.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0
Expand Down Expand Up @@ -175,8 +175,8 @@ we get the analytical solution to the optimal matrix colors for our structured
Jacobian. Now we can use this in our differencing routines:

```julia
tridiagcache = FiniteDiff.JacobianCache(x,colorvec=colors,sparsity=tridiagjac)
FiniteDiff.finite_difference_jacobian!(tridiagjac,f,x,tridiagcache)
tridiagcache = FiniteDiff.JacobianCache(x, colorvec=colors, sparsity=tridiagjac)
FiniteDiff.finite_difference_jacobian!(tridiagjac, f, x, tridiagcache)
```

It'll use a special iteration scheme dependent on the matrix type to accelerate
Expand All @@ -189,14 +189,16 @@ differential equations, with a function like:

```julia
function pde(out, x)
x = reshape(x, 100, 100)
out = reshape(out, 100, 100)
for i in 1:100
for j in 1:100
out[i, j] = x[i, j] + x[max(i -1, 1), j] + x[min(i+1, size(x, 1)), j] + x[i, max(j-1, 1)] + x[i, min(j+1, size(x, 2))]
end
end
return vec(out)
x = reshape(x, 100, 100)
out = reshape(out, 100, 100)
m = size(x, 1)
n = size(x, 2)
for i in 1:100
for j in 1:100
out[i, j] = x[i, j] + x[max(i-1, 1), j] + x[min(i+1, m), j] + x[i, max(j-1, 1)] + x[i, min(j+1, n)]
end
end
return vec(out)
end
x = rand(10000)
```
Expand All @@ -212,4 +214,4 @@ bbbcache = FiniteDiff.JacobianCache(x,colorvec=colorsbbb,sparsity=Jbbb)
FiniteDiff.finite_difference_jacobian!(Jbbb, pde, x, bbbcache)
```

And boom, a fast Jacobian filling algorithm on your special matrix.
And boom, a fast Jacobian filling algorithm on your special matrix.
25 changes: 13 additions & 12 deletions src/derivatives.jl
Original file line number Diff line number Diff line change
@@ -1,9 +1,10 @@
"""
FiniteDiff.finite_difference_derivative(
f, x::T,
fdtype::Type{T1}=Val{:central},
returntype::Type{T2}=eltype(x),
f_x::Union{Nothing,T}=nothing)
f,
x :: T,
fdtype :: Type{T1} = Val{:central},
returntype :: Type{T2} = eltype(x),
f_x :: Union{Nothing,T} = nothing)

Single-point derivative of scalar->scalar maps.
"""
Expand Down Expand Up @@ -43,7 +44,7 @@ end
FiniteDiff.DerivativeCache(
x :: AbstractArray{<:Number},
fx :: Union{Nothing,AbstractArray{<:Number}} = nothing,
epsilon :: Union{Nothing,AbstractArray{<:Real}} = nothing,
epsilon :: Union{Nothing,AbstractArray{<:Real}} = nothing,
fdtype :: Type{T1} = Val{:central},
returntype :: Type{T2} = eltype(x))

Expand Down Expand Up @@ -146,14 +147,14 @@ end

"""
FiniteDiff.finite_difference_derivative!(
df::AbstractArray{<:Number},
df :: AbstractArray{<:Number},
f,
x::AbstractArray{<:Number},
cache::DerivativeCache{T1,T2,fdtype,returntype};
relstep=default_relstep(fdtype, eltype(x)),
absstep=relstep,
dir=true)
x :: AbstractArray{<:Number},
cache :: DerivativeCache{T1,T2,fdtype,returntype};
relstep = default_relstep(fdtype, eltype(x)),
absstep = relstep,
dir = true)

Compute the derivative `df` of a scalar-valued map `f` at a collection of points `x`.

Cached.
Expand Down
26 changes: 13 additions & 13 deletions src/gradients.jl
Original file line number Diff line number Diff line change
Expand Up @@ -85,7 +85,7 @@ Non-Allocating Cache Constructor
# Output
The output is a [`GradientCache`](@ref) struct.

```julia
```julia-repl
julia> x = [1.0, 3.0]
2-element Vector{Float64}:
1.0
Expand Down Expand Up @@ -116,12 +116,12 @@ end
FiniteDiff.finite_difference_gradient(
f,
x,
fdtype::Type{T1}=Val{:central},
returntype::Type{T2}=eltype(x),
inplace::Type{Val{T3}}=Val{true};
relstep=default_relstep(fdtype, eltype(x)),
absstep=relstep,
dir=true)
fdtype :: Type{T1} = Val{:central},
returntype :: Type{T2} = eltype(x),
inplace :: Type{Val{T3}} = Val{true};
relstep = default_relstep(fdtype, eltype(x)),
absstep = relstep,
dir = true)

Compute the gradient of function `f` at point `x` using finite differences.

Expand Down Expand Up @@ -235,13 +235,13 @@ end

"""
FiniteDiff.finite_difference_gradient!(
df::AbstractArray{<:Number},
df :: AbstractArray{<:Number},
f,
x::AbstractArray{<:Number},
cache::GradientCache;
relstep=default_relstep(fdtype, eltype(x)),
absstep=relstep
dir=true)
x :: AbstractArray{<:Number},
cache :: GradientCache;
relstep = default_relstep(fdtype, eltype(x)),
absstep = relstep
dir = true)

Gradients are either a vector->scalar map `f(x)`, or a scalar->vector map `f(fx,x)` if `inplace=Val{true}` and `fx=f(x)` if `inplace=Val{false}`.

Expand Down
33 changes: 15 additions & 18 deletions src/hessians.jl
Original file line number Diff line number Diff line change
Expand Up @@ -58,12 +58,9 @@ end

"""
HessianCache(
xpp,
xpm,
xmp,
xmm,
fdtype::Type{T1}=Val{:hcentral},
inplace::Type{Val{T2}} = x isa StaticArray ? Val{true} : Val{false})
xpp, xpm, xmp, xmm,
fdtype :: Type{T1} = Val{:hcentral},
inplace :: Type{Val{T2}} = x isa StaticArray ? Val{true} : Val{false})

Non-allocating cache constructor.
"""
Expand All @@ -78,8 +75,8 @@ end
"""
HessianCache(
x,
fdtype::Type{T1}=Val{:hcentral},
inplace::Type{Val{T2}} = x isa StaticArray ? Val{true} : Val{false})
fdtype :: Type{T1} = Val{:hcentral},
inplace :: Type{Val{T2}} = x isa StaticArray ? Val{true} : Val{false})

Allocating cache constructor.
"""
Expand All @@ -94,11 +91,11 @@ end
"""
finite_difference_hessian(
f,
x::AbstractArray{<:Number},
fdtype::Type{T1}=Val{:hcentral},
inplace::Type{Val{T2}} = x isa StaticArray ? Val{true} : Val{false};
relstep=default_relstep(fdtype, eltype(x)),
absstep=relstep)
x :: AbstractArray{<:Number},
fdtype :: Type{T1} = Val{:hcentral},
inplace :: Type{Val{T2}} = x isa StaticArray ? Val{true} : Val{false};
relstep = default_relstep(fdtype, eltype(x)),
absstep = relstep)

Compute the Hessian matrix of scalar function `f` at point `x` using finite differences.

Expand Down Expand Up @@ -171,13 +168,13 @@ end

"""
finite_difference_hessian!(
H::AbstractMatrix,
H :: AbstractMatrix,
f,
x::AbstractArray{<:Number},
fdtype :: Type{T1}=Val{:hcentral},
x :: AbstractArray{<:Number},
fdtype :: Type{T1} = Val{:hcentral},
inplace :: Type{Val{T2}} = x isa StaticArray ? Val{true} : Val{false};
relstep=default_relstep(fdtype, eltype(x)),
absstep=relstep)
relstep = default_relstep(fdtype, eltype(x)),
absstep = relstep)

Cache-less.
"""
Expand Down
Loading
Loading