
Commit 4fe1f54

Flesh out sparsity and second-order handling more, change the extension structure
1 parent ad7ca08

10 files changed (+112 −113 lines)

Project.toml (+6 −5)

@@ -7,6 +7,7 @@ version = "1.3.3"
 ADTypes = "47edcb42-4c32-4615-8424-f2b9edc5f35b"
 ArrayInterface = "4fba245c-0d91-5ea0-9b3e-6abc04ee57a9"
 DocStringExtensions = "ffbed154-4ef7-542d-bbb7-c09d3a79fcae"
+DifferentiationInterface = "a0c0ee7d-e4b9-4e03-894e-1c5f64a51d63"
 LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
 Manifolds = "1cead3c2-87b3-11e9-0ccd-23c62b72b94e"
 PDMats = "90014a1f-27ba-587c-ab20-58faa44d9150"
@@ -21,18 +22,20 @@ SparseConnectivityTracer = "9f842d2f-2579-4b1d-911e-f412cf18a3f5"
 SparseMatrixColorings = "0a514795-09f3-496d-8182-132a7b665d35"

 [weakdeps]
-DifferentiationInterface = "a0c0ee7d-e4b9-4e03-894e-1c5f64a51d63"
 Enzyme = "7da242da-08ed-463a-9acd-ee780be4f1d9"
 ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210"
+FiniteDiff = "6a86dc24-6348-571c-b903-95158fe2bd41"
 ModelingToolkit = "961ee093-0014-501f-94e3-6117800e7a78"
 ReverseDiff = "37e2e3b7-166d-5795-8a7a-e32c996b4267"
 Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f"

 [extensions]
-OptimizationDIExt = ["DifferentiationInterface", "ForwardDiff", "ReverseDiff"]
+OptimizationForwardDiffExt = "ForwardDiff"
+OptimizationFiniteDiffExt = "FiniteDiff"
+OptimizationReverseDiffExt = "ReverseDiff"
 OptimizationEnzymeExt = "Enzyme"
 OptimizationMTKExt = "ModelingToolkit"
-OptimizationZygoteExt = ["Zygote", "DifferentiationInterface"]
+OptimizationZygoteExt = "Zygote"

 [compat]
 ADTypes = "1.3"
@@ -44,11 +47,9 @@ ModelingToolkit = "9"
 Reexport = "1.2"
 Requires = "1"
 SciMLBase = "2"
-SparseDiffTools = "2.14"
 SymbolicAnalysis = "0.1, 0.2"
 SymbolicIndexingInterface = "0.3"
 Symbolics = "5.12"
-Tracker = "0.2.29"
 Zygote = "0.6.67"
 julia = "1.10"
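
Note: DifferentiationInterface moves from [weakdeps] to the hard dependencies, so the DI-based code can live in src/ unconditionally, while the old monolithic OptimizationDIExt is split into one extension per AD backend, each gated on a single trigger package. A minimal sketch of the resulting loading behavior (hypothetical REPL session; Julia >= 1.9 extension mechanism):

    using OptimizationBase
    using DifferentiationInterface   # now a hard dependency, always loadable

    # Each extension activates only once its single trigger package is loaded:
    Base.get_extension(OptimizationBase, :OptimizationZygoteExt) === nothing  # true so far
    using Zygote                     # trigger package for OptimizationZygoteExt
    Base.get_extension(OptimizationBase, :OptimizationZygoteExt) isa Module   # now true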

ext/OptimizationFiniteDiffExt.jl (new file, +5)

@@ -0,0 +1,5 @@
+module OptimizationFiniteDiffExt
+
+using DifferentiationInterface, FiniteDiff
+
+end

ext/OptimizationForwardDiffExt.jl (new file, +5)

@@ -0,0 +1,5 @@
+module OptimizationForwardDiffExt
+
+using DifferentiationInterface, ForwardDiff
+
+end

ext/OptimizationMTKExt.jl (+3 −3)

@@ -7,8 +7,8 @@ import OptimizationBase.ADTypes: AutoModelingToolkit, AutoSymbolics, AutoSparse
 using ModelingToolkit

 function OptimizationBase.instantiate_function(
-        f::OptimizationFunction{true}, x, adtype::AutoSparse{<:AutoSymbolics, S, C}, p,
-        num_cons = 0) where {S, C}
+        f::OptimizationFunction{true}, x, adtype::AutoSparse{<:AutoSymbolics}, p,
+        num_cons = 0)
     p = isnothing(p) ? SciMLBase.NullParameters() : p

     sys = complete(ModelingToolkit.modelingtoolkitize(OptimizationProblem(f, x, p;
@@ -53,7 +53,7 @@ function OptimizationBase.instantiate_function(
 end

 function OptimizationBase.instantiate_function(f::OptimizationFunction{true}, cache::OptimizationBase.ReInitCache,
-        adtype::AutoSparse{<:AutoSymbolics, S, C}, num_cons = 0) where {S, C}
+        adtype::AutoSparse{<:AutoSymbolics}, num_cons = 0)
     p = isnothing(cache.p) ? SciMLBase.NullParameters() : cache.p

     sys = complete(ModelingToolkit.modelingtoolkitize(OptimizationProblem(f, cache.u0,
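
Dropping the S, C parameters works because AutoSparse carries three type parameters (dense backend, sparsity detector, coloring algorithm), and AutoSparse{<:AutoSymbolics} leaves the trailing two free, so the method matches any detector/coloring combination without a where clause. A small dispatch sketch (assuming ADTypes' exported constructors):

    using ADTypes

    ad = AutoSparse(AutoSymbolics())    # defaults: NoSparsityDetector, NoColoringAlgorithm
    ad isa AutoSparse{<:AutoSymbolics}  # true: the free parameters match anything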

ext/OptimizationReverseDiffExt.jl (new file, +5)

@@ -0,0 +1,5 @@
+module OptimizationReverseDiffExt
+
+using DifferentiationInterface, ReverseDiff
+
+end
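
The three stub extensions above define no methods; their apparent purpose is simply to load DifferentiationInterface together with the trigger backend so that DI can drive it. A hedged sketch of the effect once FiniteDiff is in the session:

    using OptimizationBase, ADTypes
    using FiniteDiff                      # trigger: loads OptimizationFiniteDiffExt
    import DifferentiationInterface as DI

    # With both packages loaded, DI's FiniteDiff backend is usable:
    g = DI.gradient(x -> sum(abs2, x), AutoFiniteDiff(), ones(3))
    # g ≈ [2.0, 2.0, 2.0]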

src/OptimizationBase.jl (+2)

@@ -32,6 +32,8 @@ Base.length(::NullData) = 0
 include("adtypes.jl")
 include("cache.jl")
 include("function.jl")
+include("OptimizationDIExt.jl")
+include("OptimizationDISparseExt.jl")

 export solve, OptimizationCache, DEFAULT_CALLBACK, DEFAULT_DATA
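
With these includes, the dense and sparse DifferentiationInterface code paths ship inside the package itself instead of behind a weak extension, so the generic AbstractADType methods of instantiate_function exist as soon as OptimizationBase loads. A hedged usage sketch (hypothetical objective; ForwardDiff and ReverseDiff are both loaded because the second-order path below pairs them):

    using OptimizationBase, SciMLBase, ADTypes, ForwardDiff, ReverseDiff

    rosen(u, p) = (1 - u[1])^2 + 100 * (u[2] - u[1]^2)^2
    optf = SciMLBase.OptimizationFunction{true}(rosen)

    # Served by src/OptimizationDIExt.jl via DifferentiationInterface:
    inst = OptimizationBase.instantiate_function(optf, zeros(2), AutoForwardDiff(),
        SciMLBase.NullParameters())
    G = zeros(2)
    inst.grad(G, [0.5, 0.5])  # fills G with the ForwardDiff gradient of rosen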

ext/OptimizationDIExt.jl renamed to src/OptimizationDIExt.jl (+30 −19)

@@ -1,20 +1,18 @@
-module OptimizationDIExt
-
-import OptimizationBase, OptimizationBase.ArrayInterface
+using OptimizationBase
+import OptimizationBase.ArrayInterface
 import OptimizationBase.SciMLBase: OptimizationFunction
 import OptimizationBase.LinearAlgebra: I
 import DifferentiationInterface
 import DifferentiationInterface: prepare_gradient, prepare_hessian, prepare_hvp, prepare_jacobian,
     gradient!, hessian!, hvp!, jacobian!, gradient, hessian, hvp, jacobian
-using ADTypes
-import ForwardDiff, ReverseDiff
+using ADTypes, SciMLBase

 function OptimizationBase.instantiate_function(f::OptimizationFunction{true}, x, adtype::ADTypes.AbstractADType, p = SciMLBase.NullParameters(), num_cons = 0)
     _f = (θ, args...) -> first(f.f(θ, p, args...))

-    if ADTypes.mode(adtype) isa ADTypes.ForwardMode
-        soadtype = DifferentiationInterface.SecondOrder(adtype, AutoReverseDiff())
-    elseif ADTypes.mode(adtype) isa ADTypes.ReverseMode
+    if !(adtype isa SciMLBase.NoAD) && ADTypes.mode(adtype) isa ADTypes.ForwardMode
+        soadtype = DifferentiationInterface.SecondOrder(adtype, AutoReverseDiff()) #make zygote?
+    elseif !(adtype isa SciMLBase.NoAD) && ADTypes.mode(adtype) isa ADTypes.ReverseMode
         soadtype = DifferentiationInterface.SecondOrder(AutoForwardDiff(), adtype)
     end

@@ -32,7 +30,7 @@ function OptimizationBase.instantiate_function(f::OptimizationFunction{true}, x,
     if f.hess === nothing
         extras_hess = prepare_hessian(_f, soadtype, x)
         function hess(res, θ, args...)
-            hessian!(_f, res, adtype, θ, extras_hess)
+            hessian!(_f, res, soadtype, θ, extras_hess)
         end
     else
         hess = (H, θ, args...) -> f.hess(H, θ, p, args...)
@@ -79,7 +77,7 @@ function OptimizationBase.instantiate_function(f::OptimizationFunction{true}, x,

         function cons_h(H, θ)
             for i in 1:num_cons
-                hessian!(fncs[i], H[i], adtype, θ, extras_cons_hess[i])
+                hessian!(fncs[i], H[i], soadtype, θ, extras_cons_hess[i])
             end
         end
     else
@@ -106,7 +104,12 @@ function OptimizationBase.instantiate_function(f::OptimizationFunction{true}, ca
     x = cache.u0
     p = cache.p
     _f = (θ, args...) -> first(f.f(θ, p, args...))
-    soadtype = DifferentiationInterface.SecondOrder(adtype, adtype)
+
+    if !(adtype isa SciMLBase.NoAD) && ADTypes.mode(adtype) isa ADTypes.ForwardMode
+        soadtype = DifferentiationInterface.SecondOrder(adtype, AutoReverseDiff()) #make zygote?
+    elseif !(adtype isa SciMLBase.NoAD) && ADTypes.mode(adtype) isa ADTypes.ReverseMode
+        soadtype = DifferentiationInterface.SecondOrder(AutoForwardDiff(), adtype)
+    end

     if f.grad === nothing
         extras_grad = prepare_gradient(_f, adtype, x)
@@ -169,7 +172,7 @@ function OptimizationBase.instantiate_function(f::OptimizationFunction{true}, ca

         function cons_h(H, θ)
             for i in 1:num_cons
-                hessian!(fncs[i], H[i], adtype, θ, extras_cons_hess[i])
+                hessian!(fncs[i], H[i], soadtype, θ, extras_cons_hess[i])
             end
         end
     else
@@ -195,7 +198,12 @@ end

 function OptimizationBase.instantiate_function(f::OptimizationFunction{false}, x, adtype::ADTypes.AbstractADType, p = SciMLBase.NullParameters(), num_cons = 0)
     _f = (θ, args...) -> first(f.f(θ, p, args...))
-    soadtype = DifferentiationInterface.SecondOrder(adtype, adtype)
+
+    if !(adtype isa SciMLBase.NoAD) && ADTypes.mode(adtype) isa ADTypes.ForwardMode
+        soadtype = DifferentiationInterface.SecondOrder(adtype, AutoReverseDiff()) #make zygote?
+    elseif !(adtype isa SciMLBase.NoAD) && ADTypes.mode(adtype) isa ADTypes.ReverseMode
+        soadtype = DifferentiationInterface.SecondOrder(AutoForwardDiff(), adtype)
+    end

     if f.grad === nothing
         extras_grad = prepare_gradient(_f, adtype, x)
@@ -211,7 +219,7 @@ function OptimizationBase.instantiate_function(f::OptimizationFunction{false}, x
     if f.hess === nothing
         extras_hess = prepare_hessian(_f, soadtype, x) #placeholder logic, can be made much better
         function hess(θ, args...)
-            hessian(_f, adtype, θ, extras_hess)
+            hessian(_f, soadtype, θ, extras_hess)
         end
     else
         hess = (θ, args...) -> f.hess(θ, p, args...)
@@ -259,7 +267,7 @@ function OptimizationBase.instantiate_function(f::OptimizationFunction{false}, x

         function cons_h(θ)
             H = map(1:num_cons) do i
-                hessian(fncs[i], adtype, θ, extras_cons_hess[i])
+                hessian(fncs[i], soadtype, θ, extras_cons_hess[i])
             end
             return H
         end
@@ -287,7 +295,12 @@ function OptimizationBase.instantiate_function(f::OptimizationFunction{false}, c
     x = cache.u0
     p = cache.p
     _f = (θ, args...) -> first(f.f(θ, p, args...))
-    soadtype = DifferentiationInterface.SecondOrder(adtype, adtype)
+
+    if !(adtype isa SciMLBase.NoAD) && ADTypes.mode(adtype) isa ADTypes.ForwardMode
+        soadtype = DifferentiationInterface.SecondOrder(adtype, AutoReverseDiff()) #make zygote?
+    elseif !(adtype isa SciMLBase.NoAD) && ADTypes.mode(adtype) isa ADTypes.ReverseMode
+        soadtype = DifferentiationInterface.SecondOrder(AutoForwardDiff(), adtype)
+    end

     if f.grad === nothing
         extras_grad = prepare_gradient(_f, adtype, x)
@@ -351,7 +364,7 @@ function OptimizationBase.instantiate_function(f::OptimizationFunction{false}, c

         function cons_h(θ)
             H = map(1:num_cons) do i
-                hessian(fncs[i], adtype, θ, extras_cons_hess[i])
+                hessian(fncs[i], soadtype, θ, extras_cons_hess[i])
             end
             return H
         end
@@ -374,5 +387,3 @@ function OptimizationBase.instantiate_function(f::OptimizationFunction{false}, c
         cons_hess_colorvec = conshess_colors,
         lag_h, f.lag_hess_prototype)
 end
-
-end
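
Every method in this file now builds its second-order backend the same way: a forward-mode first-order backend is paired with a reverse-mode inner backend (and vice versa), the usual efficient pairing for Hessians. A self-contained sketch of the SecondOrder usage, matching the prepare_hessian/hessian! call order used in this diff:

    using DifferentiationInterface, ADTypes, ForwardDiff, ReverseDiff

    f(θ) = sum(abs2, θ) + prod(θ)

    adtype = AutoForwardDiff()                         # first-order backend
    soadtype = SecondOrder(adtype, AutoReverseDiff())  # outer forward over inner reverse

    x = [0.5, 1.0, 2.0]
    extras = prepare_hessian(f, soadtype, x)  # one-time preparation, reused per call
    H = zeros(3, 3)
    hessian!(f, H, soadtype, x, extras)       # in-place Hessian of f at x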

ext/OptimizationDISparseExt.jl renamed to src/OptimizationDISparseExt.jl (+50 −23)

@@ -1,6 +1,5 @@
-module OptimizationDIExt
-
-import OptimizationBase, OptimizationBase.ArrayInterface
+using OptimizationBase
+import OptimizationBase.ArrayInterface
 import OptimizationBase.SciMLBase: OptimizationFunction
 import OptimizationBase.LinearAlgebra: I
 import DifferentiationInterface
@@ -9,21 +8,48 @@ import DifferentiationInterface: prepare_gradient, prepare_hessian, prepare_hvp,
 using ADTypes
 using SparseConnectivityTracer, SparseMatrixColorings

-function OptimizationBase.instantiate_function(f::OptimizationFunction{true}, x, adtype::ADTypes.AutoSparse, p = SciMLBase.NullParameters(), num_cons = 0)
-    _f = (θ, args...) -> first(f.f(θ, p, args...))
-
+function generate_sparse_adtype(adtype)
     if adtype.sparsity_detector isa ADTypes.NoSparsityDetector && adtype.coloring_algorithm isa ADTypes.NoColoringAlgorithm
         adtype = AutoSparse(adtype.dense_ad; sparsity_detector = TracerLocalSparsityDetector(), coloring_algorithm = GreedyColoringAlgorithm())
-    elseif adtype.sparsity_detector isa ADTypes.NoSparsityDetector && !(adtype.coloring_algorithm isa AbstractADTypes.NoColoringAlgorithm)
+        if !(adtype.dense_ad isa SciMLBase.NoAD) && ADTypes.mode(adtype.dense_ad) isa ADTypes.ForwardMode
+            soadtype = AutoSparse(DifferentiationInterface.SecondOrder(adtype.dense_ad, AutoReverseDiff()), sparsity_detector = TracerLocalSparsityDetector(), coloring_algorithm = GreedyColoringAlgorithm()) #make zygote?
+        elseif !(adtype isa SciMLBase.NoAD) && ADTypes.mode(adtype) isa ADTypes.ReverseMode
+            soadtype = AutoSparse(DifferentiationInterface.SecondOrder(AutoForwardDiff(), adtype), sparsity_detector = TracerLocalSparsityDetector(), coloring_algorithm = GreedyColoringAlgorithm())
+        end
+    elseif adtype.sparsity_detector isa ADTypes.NoSparsityDetector && !(adtype.coloring_algorithm isa ADTypes.NoColoringAlgorithm)
         adtype = AutoSparse(adtype.dense_ad; sparsity_detector = TracerLocalSparsityDetector(), coloring_algorithm = adtype.coloring_algorithm)
+        if !(adtype.dense_ad isa SciMLBase.NoAD) && ADTypes.mode(adtype.dense_ad) isa ADTypes.ForwardMode
+            soadtype = AutoSparse(DifferentiationInterface.SecondOrder(adtype.dense_ad, AutoReverseDiff()), sparsity_detector = TracerLocalSparsityDetector(), coloring_algorithm = adtype.coloring_algorithm)
+        elseif !(adtype isa SciMLBase.NoAD) && ADTypes.mode(adtype) isa ADTypes.ReverseMode
+            soadtype = AutoSparse(DifferentiationInterface.SecondOrder(AutoForwardDiff(), adtype), sparsity_detector = TracerLocalSparsityDetector(), coloring_algorithm = adtype.coloring_algorithm)
+        end
     elseif !(adtype.sparsity_detector isa ADTypes.NoSparsityDetector) && adtype.coloring_algorithm isa ADTypes.NoColoringAlgorithm
         adtype = AutoSparse(adtype.dense_ad; sparsity_detector = adtype.sparsity_detector, coloring_algorithm = GreedyColoringAlgorithm())
+        if !(adtype.dense_ad isa SciMLBase.NoAD) && ADTypes.mode(adtype.dense_ad) isa ADTypes.ForwardMode
+            soadtype = AutoSparse(DifferentiationInterface.SecondOrder(adtype.dense_ad, AutoReverseDiff()), sparsity_detector = adtype.sparsity_detector, coloring_algorithm = GreedyColoringAlgorithm())
+        elseif !(adtype isa SciMLBase.NoAD) && ADTypes.mode(adtype) isa ADTypes.ReverseMode
+            soadtype = AutoSparse(DifferentiationInterface.SecondOrder(AutoForwardDiff(), adtype), sparsity_detector = adtype.sparsity_detector, coloring_algorithm = GreedyColoringAlgorithm())
+        end
+    else
+        if !(adtype.dense_ad isa SciMLBase.NoAD) && ADTypes.mode(adtype.dense_ad) isa ADTypes.ForwardMode
+            soadtype = AutoSparse(DifferentiationInterface.SecondOrder(adtype.dense_ad, AutoReverseDiff()), sparsity_detector = adtype.sparsity_detector, coloring_algorithm = adtype.coloring_algorithm)
+        elseif !(adtype isa SciMLBase.NoAD) && ADTypes.mode(adtype) isa ADTypes.ReverseMode
+            soadtype = AutoSparse(DifferentiationInterface.SecondOrder(AutoForwardDiff(), adtype), sparsity_detector = adtype.sparsity_detector, coloring_algorithm = adtype.coloring_algorithm)
+        end
     end
+    return adtype,soadtype
+end
+
+
+function OptimizationBase.instantiate_function(f::OptimizationFunction{true}, x, adtype::ADTypes.AutoSparse{<:AbstractADType}, p = SciMLBase.NullParameters(), num_cons = 0)
+    _f = (θ, args...) -> first(f.f(θ, p, args...))
+
+    adtype, soadtype = generate_sparse_adtype(adtype)

     if f.grad === nothing
-        extras_grad = prepare_gradient(_f, adtype, x)
+        extras_grad = prepare_gradient(_f, adtype.dense_ad, x)
         function grad(res, θ)
-            gradient!(_f, res, adtype, θ, extras_grad)
+            gradient!(_f, res, adtype.dense_ad, θ, extras_grad)
         end
     else
         grad = (G, θ, args...) -> f.grad(G, θ, p, args...)
@@ -34,7 +60,7 @@ function OptimizationBase.instantiate_function(f::OptimizationFunction{true}, x,
     if f.hess === nothing
         extras_hess = prepare_hessian(_f, soadtype, x) #placeholder logic, can be made much better
         function hess(res, θ, args...)
-            hessian!(_f, res, adtype, θ, extras_hess)
+            hessian!(_f, res, soadtype, θ, extras_hess)
         end
     else
         hess = (H, θ, args...) -> f.hess(H, θ, p, args...)
@@ -81,7 +107,7 @@ function OptimizationBase.instantiate_function(f::OptimizationFunction{true}, x,

         function cons_h(H, θ)
             for i in 1:num_cons
-                hessian!(fncs[i], H[i], adtype, θ, extras_cons_hess[i])
+                hessian!(fncs[i], H[i], soadtype, θ, extras_cons_hess[i])
             end
         end
     else
@@ -104,11 +130,12 @@ function OptimizationBase.instantiate_function(f::OptimizationFunction{true}, x,
         lag_h, f.lag_hess_prototype)
 end

-function OptimizationBase.instantiate_function(f::OptimizationFunction{true}, cache::OptimizationBase.ReInitCache, adtype::ADTypes.AbstractADType, num_cons = 0)
+function OptimizationBase.instantiate_function(f::OptimizationFunction{true}, cache::OptimizationBase.ReInitCache, adtype::ADTypes.AutoSparse{<:AbstractADType}, num_cons = 0)
     x = cache.u0
     p = cache.p
     _f = (θ, args...) -> first(f.f(θ, p, args...))
-    soadtype = DifferentiationInterface.SecondOrder(adtype, adtype)
+
+    adtype, soadtype = generate_sparse_adtype(adtype)

     if f.grad === nothing
         extras_grad = prepare_gradient(_f, adtype, x)
@@ -171,7 +198,7 @@ function OptimizationBase.instantiate_function(f::OptimizationFunction{true}, ca

         function cons_h(H, θ)
             for i in 1:num_cons
-                hessian!(fncs[i], H[i], adtype, θ, extras_cons_hess[i])
+                hessian!(fncs[i], H[i], soadtype, θ, extras_cons_hess[i])
             end
         end
     else
@@ -195,9 +222,10 @@ function OptimizationBase.instantiate_function(f::OptimizationFunction{true}, ca
 end


-function OptimizationBase.instantiate_function(f::OptimizationFunction{false}, x, adtype::ADTypes.AbstractADType, p = SciMLBase.NullParameters(), num_cons = 0)
+function OptimizationBase.instantiate_function(f::OptimizationFunction{false}, x, adtype::ADTypes.AutoSparse{<:AbstractADType}, p = SciMLBase.NullParameters(), num_cons = 0)
     _f = (θ, args...) -> first(f.f(θ, p, args...))
-    soadtype = DifferentiationInterface.SecondOrder(adtype, adtype)
+
+    adtype, soadtype = generate_sparse_adtype(adtype)

     if f.grad === nothing
         extras_grad = prepare_gradient(_f, adtype, x)
@@ -213,7 +241,7 @@ function OptimizationBase.instantiate_function(f::OptimizationFunction{false}, x
     if f.hess === nothing
         extras_hess = prepare_hessian(_f, soadtype, x) #placeholder logic, can be made much better
         function hess(θ, args...)
-            hessian(_f, adtype, θ, extras_hess)
+            hessian(_f, soadtype, θ, extras_hess)
         end
     else
         hess = (θ, args...) -> f.hess(θ, p, args...)
@@ -261,7 +289,7 @@ function OptimizationBase.instantiate_function(f::OptimizationFunction{false}, x

         function cons_h(θ)
             H = map(1:num_cons) do i
-                hessian(fncs[i], adtype, θ, extras_cons_hess[i])
+                hessian(fncs[i], soadtype, θ, extras_cons_hess[i])
             end
             return H
         end
@@ -285,11 +313,12 @@ function OptimizationBase.instantiate_function(f::OptimizationFunction{false}, x
         lag_h, f.lag_hess_prototype)
 end

-function OptimizationBase.instantiate_function(f::OptimizationFunction{false}, cache::OptimizationBase.ReInitCache, adtype::ADTypes.AbstractADType, num_cons = 0)
+function OptimizationBase.instantiate_function(f::OptimizationFunction{false}, cache::OptimizationBase.ReInitCache, adtype::ADTypes.AutoSparse{<:AbstractADType}, num_cons = 0)
     x = cache.u0
     p = cache.p
     _f = (θ, args...) -> first(f.f(θ, p, args...))
-    soadtype = DifferentiationInterface.SecondOrder(adtype, adtype)
+
+    adtype, soadtype = generate_sparse_adtype(adtype)

     if f.grad === nothing
         extras_grad = prepare_gradient(_f, adtype, x)
@@ -353,7 +382,7 @@ function OptimizationBase.instantiate_function(f::OptimizationFunction{false}, c

         function cons_h(θ)
             H = map(1:num_cons) do i
-                hessian(fncs[i], adtype, θ, extras_cons_hess[i])
+                hessian(fncs[i], soadtype, θ, extras_cons_hess[i])
             end
             return H
         end
@@ -376,5 +405,3 @@ function OptimizationBase.instantiate_function(f::OptimizationFunction{false}, c
         cons_hess_colorvec = conshess_colors,
         lag_h, f.lag_hess_prototype)
 end
-
-end
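
generate_sparse_adtype normalizes a user-supplied AutoSparse wrapper: a missing sparsity detector defaults to TracerLocalSparsityDetector(), a missing coloring algorithm to GreedyColoringAlgorithm(), and a matching sparse SecondOrder backend is built for the Hessian paths. A hedged sketch of what the first (all-defaults) branch produces for a forward-mode dense backend:

    using ADTypes, ForwardDiff, ReverseDiff
    using SparseConnectivityTracer, SparseMatrixColorings
    import DifferentiationInterface as DI

    user = AutoSparse(AutoForwardDiff())  # detector and coloring left at their No* defaults

    # Equivalent of the first branch of generate_sparse_adtype for `user`:
    adtype = AutoSparse(user.dense_ad;
        sparsity_detector = TracerLocalSparsityDetector(),
        coloring_algorithm = GreedyColoringAlgorithm())
    soadtype = AutoSparse(DI.SecondOrder(user.dense_ad, AutoReverseDiff());
        sparsity_detector = TracerLocalSparsityDetector(),
        coloring_algorithm = GreedyColoringAlgorithm())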
