|
## Right now we hardcode it to use `ForwardDiff`
function (alg::ApproximateJacobianSparsity)(ad::AbstractSparseADType, f::F, x; fx = nothing,
        kwargs...) where {F}
    # Only a ForwardDiff-backed probe is implemented; warn once for other backends
    # and fall through to ForwardDiff anyway.
    if !(ad isa AutoSparseForwardDiff)
        @warn "$(ad) support for approximate jacobian not implemented. Using ForwardDiff instead." maxlog=1
    end
    @unpack ntrials, rng = alg
    if fx === nothing
        fx = f(x)
    end
    cfg = ForwardDiff.JacobianConfig(f, x)
    # Accumulate |J| over `ntrials` random probe points: an entry that is ever
    # nonzero is treated as structurally nonzero in the sparsity pattern.
    J = fill!(similar(fx, length(fx), length(x)), 0)
    Jtmp = similar(J)
    xprobe = similar(x)
    for _ in 1:ntrials
        randn!(rng, xprobe)
        ForwardDiff.jacobian!(Jtmp, f, xprobe, cfg)
        J .+= abs.(Jtmp)
    end
    # Hand the sparsified accumulated pattern to the prototype-based detector.
    detector = JacPrototypeSparsityDetection(; jac_prototype = sparse(J), alg.alg)
    return detector(ad, f, x; fx, kwargs...)
end
| 54 | + |
# In-place variant: `f` is a mutating residual function `f(fx, x)` that writes
# into `fx` (see the 5-argument `ForwardDiff.jacobian!` call below).
function (alg::ApproximateJacobianSparsity)(ad::AbstractSparseADType, f::F, fx, x;
        kwargs...) where {F}
    # Only a ForwardDiff-backed probe is implemented; warn once for other backends.
    if !(ad isa AutoSparseForwardDiff)
        @warn "$(ad) support for approximate jacobian not implemented. Using ForwardDiff instead." maxlog=1
    end
    @unpack ntrials, rng = alg
    cfg = ForwardDiff.JacobianConfig(f, fx, x)
    # Accumulate |J| over `ntrials` random probe points: an entry that is ever
    # nonzero is treated as structurally nonzero in the sparsity pattern.
    J = fill!(similar(fx, length(fx), length(x)), 0)
    J_cache = similar(J)
    x_ = similar(x)
    for _ in 1:ntrials
        randn!(rng, x_)
        ForwardDiff.jacobian!(J_cache, f, fx, x_, cfg)
        @. J += abs(J_cache)
    end
    # BUGFIX: forward to the in-place `(ad, f, fx, x; kwargs...)` entry point.
    # The previous `(ad, f, x; fx, kwargs...)` call dispatched to the
    # out-of-place method, which would evaluate `f(x)` on a mutating function.
    # This also matches the in-place FiniteDiff method's forwarding.
    return (JacPrototypeSparsityDetection(; jac_prototype = sparse(J), alg.alg))(ad, f, fx,
        x; kwargs...)
end
| 73 | + |
# Out-of-place FiniteDiff backend: approximate the sparsity pattern by
# accumulating |J| from finite-difference Jacobians at random probe points.
function (alg::ApproximateJacobianSparsity)(ad::AutoSparseFiniteDiff, f::F, x; fx = nothing,
        kwargs...) where {F}
    @unpack ntrials, rng = alg
    fx = fx === nothing ? f(x) : fx
    cache = FiniteDiff.JacobianCache(x, fx)
    # An entry that is ever nonzero across trials is treated as structurally
    # nonzero in the sparsity pattern.
    J = fill!(similar(fx, length(fx), length(x)), 0)
    x_ = similar(x)
    for _ in 1:ntrials
        randn!(rng, x_)
        # BUGFIX: differentiate at the freshly randomized probe point `x_`, not
        # at `x` — evaluating at `x` made every one of the `ntrials` iterations
        # compute the identical Jacobian, defeating the randomized probing.
        J_cache = FiniteDiff.finite_difference_jacobian(f, x_, cache)
        @. J += abs(J_cache)
    end
    return (JacPrototypeSparsityDetection(; jac_prototype = sparse(J), alg.alg))(ad, f, x;
        fx, kwargs...)
end
49 | 89 |
|
50 | | -function (alg::ApproximateJacobianSparsity)(ad::AbstractSparseADType, f!::F, fx, x; |
| 90 | +function (alg::ApproximateJacobianSparsity)(ad::AutoSparseFiniteDiff, f!::F, fx, x; |
51 | 91 | kwargs...) where {F} |
52 | 92 | @unpack ntrials, rng = alg |
53 | | - cfg = ForwardDiff.JacobianConfig(f!, fx, x) |
| 93 | + cache = FiniteDiff.JacobianCache(x, fx) |
54 | 94 | J = fill!(similar(fx, length(fx), length(x)), 0) |
| 95 | + J_cache = similar(J) |
| 96 | + x_ = similar(x) |
55 | 97 | for _ in 1:ntrials |
56 | | - x_ = similar(x) |
57 | 98 | randn!(rng, x_) |
58 | | - J .+= abs.(ForwardDiff.jacobian(f!, fx, x_, cfg)) |
| 99 | + FiniteDiff.finite_difference_jacobian!(J_cache, f!, x_, cache) |
| 100 | + @. J += abs(J_cache) |
59 | 101 | end |
60 | 102 | return (JacPrototypeSparsityDetection(; jac_prototype = sparse(J), alg.alg))(ad, f!, fx, |
61 | 103 | x; kwargs...) |
|
0 commit comments