
New High Level Interface for Sparse Jacobian Computation #253

Merged

merged 15 commits into from Aug 25, 2023
2 changes: 2 additions & 0 deletions .gitignore
@@ -6,3 +6,5 @@ Manifest.toml

docs/build/
docs/site/

.vscode
18 changes: 13 additions & 5 deletions Project.toml
@@ -1,7 +1,7 @@
name = "SparseDiffTools"
uuid = "47a9eef4-7e08-11e9-0b38-333d64bd3804"
authors = ["Pankaj Mishra <[email protected]>", "Chris Rackauckas <[email protected]>"]
version = "2.4.1"
version = "2.5.0"

[deps]
ADTypes = "47edcb42-4c32-4615-8424-f2b9edc5f35b"
@@ -13,38 +13,45 @@ FiniteDiff = "6a86dc24-6348-571c-b903-95158fe2bd41"
ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210"
Graphs = "86223c79-3864-5bf0-83f7-82e725a168b6"
LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
PackageExtensionCompat = "65ce6f38-6b18-4e1d-a461-8949797d7930"
Reexport = "189a3867-3050-52da-a836-e630ba90ab69"
Requires = "ae029012-a4dd-5104-9daa-d747884805df"
SciMLOperators = "c0aeaf25-5076-4817-a8d5-81caf7dfa961"
Setfield = "efcf1570-3423-57d1-acb7-fd33fddbac46"
SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf"
StaticArrayInterface = "0d7ed370-da01-4f52-bd93-41d350b8b718"
StaticArrays = "90137ffa-7385-5640-81b9-e52037218182"
Tricks = "410a4b4d-49e4-4fbc-ab6d-cb71b17b3775"
UnPack = "3a884ed6-31ef-47d7-9d2a-63182c4928ed"
VertexSafeGraphs = "19fa3120-7c27-5ec5-8db8-b0b0aa330d6f"

[weakdeps]
Enzyme = "7da242da-08ed-463a-9acd-ee780be4f1d9"
Symbolics = "0c5d862f-8b57-4792-8d23-62f2024744c7"
Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f"

[extensions]
SparseDiffToolsEnzymeExt = "Enzyme"
SparseDiffToolsSymbolicsExt = "Symbolics"
SparseDiffToolsZygoteExt = "Zygote"

[compat]
ADTypes = "0.1"
ADTypes = "0.2.1"
Adapt = "3.0"
ArrayInterface = "7.4.2"
Compat = "4"
DataStructures = "0.18"
Enzyme = "0.11"
FiniteDiff = "2.8.1"
ForwardDiff = "0.10"
Graphs = "1"
Reexport = "1"
Requires = "1"
SciMLOperators = "0.2.11, 0.3"
Setfield = "1"
StaticArrayInterface = "1.3"
StaticArrays = "1"
Symbolics = "5.5"
Tricks = "0.1.6"
UnPack = "1"
VertexSafeGraphs = "0.2"
Zygote = "0.6"
julia = "1.6"
@@ -54,6 +61,7 @@ ArrayInterfaceBandedMatrices = "2e50d22c-5be1-4042-81b1-c572ed69783d"
ArrayInterfaceBlockBandedMatrices = "5331f1e9-51c7-46b0-a9b0-df4434785e0a"
BandedMatrices = "aae01518-5342-5314-be14-df237901396f"
BlockBandedMatrices = "ffab5731-97b5-5995-9138-79e8c1846df0"
Enzyme = "7da242da-08ed-463a-9acd-ee780be4f1d9"
IterativeSolvers = "42fd0dbc-a981-5370-80f2-aaf504508153"
Pkg = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f"
Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
@@ -64,4 +72,4 @@ Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f"

[targets]
test = ["Test", "ArrayInterfaceBandedMatrices", "ArrayInterfaceBlockBandedMatrices", "BandedMatrices", "BlockBandedMatrices", "IterativeSolvers", "Pkg", "Random", "SafeTestsets", "Symbolics", "Zygote", "StaticArrays"]
test = ["Test", "ArrayInterfaceBandedMatrices", "ArrayInterfaceBlockBandedMatrices", "BandedMatrices", "BlockBandedMatrices", "Enzyme", "IterativeSolvers", "Pkg", "Random", "SafeTestsets", "Symbolics", "Zygote", "StaticArrays"]
61 changes: 61 additions & 0 deletions README.md
@@ -47,6 +47,67 @@ function g(x) # out-of-place
end
```

## High Level API

Using the high level API of SparseDiffTools involves the following steps:

1. Specify a sparsity detection algorithm. There are currently three choices:
   1. `NoSparsityDetection`: skips sparsity detection and computes a dense Jacobian,
      regardless of the AD backend chosen.
   2. `JacPrototypeSparsityDetection`: if the sparsity pattern is already known, specify it
      as `JacPrototypeSparsityDetection(; jac_prototype=<sparsity pattern>)`.
   3. `SymbolicsSparsityDetection`: uses `Symbolics.jl` to detect the sparsity pattern
      automatically. (`Symbolics.jl` must be loaded explicitly before using this option.)
2. Choose an AD backend from `ADTypes.jl`:
   1. If a non-`*Sparse*` type is used, sparsity detection is skipped.
   2. All `*Sparse*` AD types internally compute the sparsity pattern and try to exploit it.
3. Finally, there are two ways to compute the Jacobian:
   1. Precompute a cache with `sparse_jacobian_cache` and pass it to `sparse_jacobian` or
      `sparse_jacobian!`. This is recommended when the Jacobian of the same function is
      computed repeatedly.
   2. Call `sparse_jacobian` or `sparse_jacobian!` directly. Use this when the Jacobian is
      only computed once.

```julia
using Symbolics

sd = SymbolicsSparsityDetection()
adtype = AutoSparseFiniteDiff()
x = rand(30)
y = similar(x)

# Option 1
## OOP Function
cache = sparse_jacobian_cache(adtype, sd, g, x; fx=y) # Passing `fx` is needed if size(y) != size(x)
J = sparse_jacobian(adtype, cache, g, x)
### Or
J_preallocated = similar(J)
sparse_jacobian!(J_preallocated, adtype, cache, g, x)

## IIP Function
cache = sparse_jacobian_cache(adtype, sd, f, y, x)
J = sparse_jacobian(adtype, cache, f, y, x)
### Or
J_preallocated = similar(J)
sparse_jacobian!(J_preallocated, adtype, cache, f, y, x)

# Option 2
## OOP Function
J = sparse_jacobian(adtype, sd, g, x)
### Or
J_preallocated = similar(J)
sparse_jacobian!(J_preallocated, adtype, sd, g, x)

## IIP Function
J = sparse_jacobian(adtype, sd, f, y, x)
### Or
J_preallocated = similar(J)
sparse_jacobian!(J_preallocated, adtype, sd, f, y, x)
```
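
If the sparsity pattern is already known, the detection step can be skipped entirely via
`JacPrototypeSparsityDetection`. A minimal sketch, reusing `adtype`, `g`, and `x` from the
snippet above; the tridiagonal `jac_prototype` below is purely illustrative:

```julia
using SparseArrays

# Illustrative tridiagonal prototype for the length-30 problem above; any matrix
# with the correct structural non-zeros can serve as `jac_prototype`.
jac_prototype = spdiagm(-1 => ones(29), 0 => ones(30), 1 => ones(29))
sd_known = JacPrototypeSparsityDetection(; jac_prototype)

J = sparse_jacobian(adtype, sd_known, g, x)
```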

## Lower Level API

For this function, we know that the sparsity pattern of the Jacobian is a
`Tridiagonal` matrix. However, if we didn't know the sparsity pattern for
the Jacobian, we could use the `Symbolics.jacobian_sparsity` function to automatically
16 changes: 8 additions & 8 deletions docs/make.jl
@@ -3,13 +3,13 @@ using Documenter, SparseDiffTools
include("pages.jl")

makedocs(sitename = "SparseDiffTools.jl",
authors = "Chris Rackauckas",
modules = [SparseDiffTools],
clean = true,
doctest = false,
format = Documenter.HTML(assets = ["assets/favicon.ico"],
canonical = "https://docs.sciml.ai/SparseDiffTools/stable/"),
pages = pages)
authors = "Chris Rackauckas",
modules = [SparseDiffTools],
clean = true,
doctest = false,
format = Documenter.HTML(assets = ["assets/favicon.ico"],
canonical = "https://docs.sciml.ai/SparseDiffTools/stable/"),
pages = pages)

deploydocs(repo = "github.com/JuliaDiff/SparseDiffTools.jl.git";
push_preview = true)
push_preview = true)
61 changes: 61 additions & 0 deletions docs/src/index.md
@@ -37,6 +37,67 @@ function g(x) # out-of-place
end
```

## High Level API

Using the high level API of SparseDiffTools involves the following steps:

1. Specify a sparsity detection algorithm. There are currently three choices:
   1. `NoSparsityDetection`: skips sparsity detection and computes a dense Jacobian,
      regardless of the AD backend chosen.
   2. `JacPrototypeSparsityDetection`: if the sparsity pattern is already known, specify it
      as `JacPrototypeSparsityDetection(; jac_prototype=<sparsity pattern>)`.
   3. `SymbolicsSparsityDetection`: uses `Symbolics.jl` to detect the sparsity pattern
      automatically. (`Symbolics.jl` must be loaded explicitly before using this option.)
2. Choose an AD backend from `ADTypes.jl`:
   1. If a non-`*Sparse*` type is used, sparsity detection is skipped.
   2. All `*Sparse*` AD types internally compute the sparsity pattern and try to exploit it.
3. Finally, there are two ways to compute the Jacobian:
   1. Precompute a cache with `sparse_jacobian_cache` and pass it to `sparse_jacobian` or
      `sparse_jacobian!`. This is recommended when the Jacobian of the same function is
      computed repeatedly.
   2. Call `sparse_jacobian` or `sparse_jacobian!` directly. Use this when the Jacobian is
      only computed once.

```julia
using Symbolics

sd = SymbolicsSparsityDetection()
adtype = AutoSparseFiniteDiff()
x = rand(30)
y = similar(x)

# Option 1
## OOP Function
cache = sparse_jacobian_cache(adtype, sd, g, x; fx=y) # Passing `fx` is needed if size(y) != size(x)
J = sparse_jacobian(adtype, cache, g, x)
### Or
J_preallocated = similar(J)
sparse_jacobian!(J_preallocated, adtype, cache, g, x)

## IIP Function
cache = sparse_jacobian_cache(adtype, sd, f, y, x)
J = sparse_jacobian(adtype, cache, f, y, x)
### Or
J_preallocated = similar(J)
sparse_jacobian!(J_preallocated, adtype, cache, f, y, x)

# Option 2
## OOP Function
J = sparse_jacobian(adtype, sd, g, x)
### Or
J_preallocated = similar(J)
sparse_jacobian!(J_preallocated, adtype, sd, g, x)

## IIP Function
J = sparse_jacobian(adtype, sd, f, y, x)
### Or
J_preallocated = similar(J)
sparse_jacobian!(J_preallocated, adtype, sd, f, y, x)
```
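
When the Jacobian of the same function is needed many times (e.g. inside a nonlinear
solver), Option 1 amortizes the sparsity detection and matrix coloring over all
evaluations. A minimal sketch reusing `adtype`, `sd`, `g`, and `x` from the snippet above;
the loop and the re-randomized input are purely illustrative:

```julia
# Detection and coloring happen once, when the cache is built.
cache = sparse_jacobian_cache(adtype, sd, g, x)
J = sparse_jacobian(adtype, cache, g, x)

# Later evaluations reuse the cache and overwrite the same matrix.
for _ in 1:100
    x .= rand(length(x))   # new input with the same sparsity structure
    sparse_jacobian!(J, adtype, cache, g, x)
end
```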

## Lower Level API

For this function, we know that the sparsity pattern of the Jacobian is a
`Tridiagonal` matrix. However, if we didn't know the sparsity pattern for
the Jacobian, we could use the `Symbolics.jacobian_sparsity` function to automatically
61 changes: 61 additions & 0 deletions ext/SparseDiffToolsEnzymeExt.jl
@@ -0,0 +1,61 @@
module SparseDiffToolsEnzymeExt

import ArrayInterface: fast_scalar_indexing
import SparseDiffTools: __f̂,
__maybe_copy_x, __jacobian!, __gradient, __gradient!, AutoSparseEnzyme
# FIXME: For Enzyme we currently assume reverse mode
import ADTypes: AutoEnzyme
using Enzyme

using ForwardDiff

## Satisfying High-Level Interface for Sparse Jacobians
function __gradient(::Union{AutoSparseEnzyme, AutoEnzyme}, f, x, cols)
dx = zero(x)
autodiff(Reverse, __f̂, Const(f), Duplicated(x, dx), Const(cols))
return vec(dx)
end

function __gradient!(::Union{AutoSparseEnzyme, AutoEnzyme}, f!, fx, x, cols)
dx = zero(x)
dfx = zero(fx)
autodiff(Reverse, __f̂, Active, Const(f!), Duplicated(fx, dfx), Duplicated(x, dx),
Const(cols))
return dx
end

function __jacobian!(J::AbstractMatrix, ::Union{AutoSparseEnzyme, AutoEnzyme}, f, x)
J .= jacobian(Reverse, f, x, Val(size(J, 1)))
return J
end

@views function __jacobian!(J, ad::Union{AutoSparseEnzyme, AutoEnzyme}, f!, fx, x)
    # This path is slow: lacking a better approach for in-place functions, we run one
    # reverse-mode pass per output row.
    @warn "Current code for computing Jacobians of in-place functions with Enzyme is slow." maxlog=1
dfx = zero(fx)
dx = zero(x)

function __f_row_idx(f!, fx, x, row_idx)
f!(fx, x)
if fast_scalar_indexing(fx)
return fx[row_idx]
else
# Avoid the GPU Arrays scalar indexing error
return sum(selectdim(fx, 1, row_idx:row_idx))
end
end

for row_idx in 1:size(J, 1)
autodiff(Reverse, __f_row_idx, Const(f!), DuplicatedNoNeed(fx, dfx),
Duplicated(x, dx), Const(row_idx))
J[row_idx, :] .= dx
fill!(dfx, 0)
fill!(dx, 0)
end

return J
end

__maybe_copy_x(::Union{AutoSparseEnzyme, AutoEnzyme}, x::SubArray) = copy(x)

end
21 changes: 21 additions & 0 deletions ext/SparseDiffToolsSymbolicsExt.jl
@@ -0,0 +1,21 @@
module SparseDiffToolsSymbolicsExt

using SparseDiffTools, Symbolics
import SparseDiffTools: AbstractSparseADType

function (alg::SymbolicsSparsityDetection)(ad::AbstractSparseADType, f, x; fx = nothing,
kwargs...)
    # If no output prototype was supplied, materialize one by calling `f` once
    fx = fx === nothing ? similar(f(x)) : fx
    # Wrap the out-of-place function so the in-place sparsity detection can trace it
    f!(y, x) = (y .= f(x))
    J = Symbolics.jacobian_sparsity(f!, fx, x)
_alg = JacPrototypeSparsityDetection(J, alg.alg)
return _alg(ad, f, x; fx, kwargs...)
end

function (alg::SymbolicsSparsityDetection)(ad::AbstractSparseADType, f!, fx, x; kwargs...)
J = Symbolics.jacobian_sparsity(f!, fx, x)
_alg = JacPrototypeSparsityDetection(J, alg.alg)
return _alg(ad, f!, fx, x; kwargs...)
end

end