@@ -2,30 +2,52 @@ using Enzyme
 using ManualNLPModels
 using MadNLP
 
-function Control_Core(md::model, femmodel::FemModel)
+function Control_Core(md::model, femmodel::FemModel, solutionstring::Symbol) #{{{
     # independent variable
     α = md.inversion.independent
     # initialize derivative as 0
-    ∂J_∂α = zero(α)
+    ∂J_∂α = make_zero(α)
     if md.inversion.onlygrad
         # only compute the gradient
-        ComputeGradient(∂J_∂α, α, femmodel)
+        ComputeGradient!(∂J_∂α, α, femmodel)
         # Put gradient in results
         InputUpdateFromVectorx(femmodel, ∂J_∂α, GradientEnum, VertexSIdEnum)
         RequestedOutputsx(femmodel, [GradientEnum])
     else
         # optimization
-        # use user defined grad, errors!
-        #optprob = OptimizationFunction(costfunction, Optimization.AutoEnzyme())
-        #prob = Optimization.OptimizationProblem(optprob, α, femmodel, lb=md.inversion.min_parameters, ub=md.inversion.max_parameters)
-        #sol = Optimization.solve(prob, Optim.LBFGS())
+        # define cost function and gradient
+        # need to build connection between md and x
+        f(x) = begin
+            fem = DJUICE.ModelProcessor(md, solutionstring)
+            DJUICE.CostFunction(x, fem)
+        end
+
+        g!(gx, x) = begin
+            fem = DJUICE.ModelProcessor(md, solutionstring)
+            DJUICE.ComputeGradient!(gx, x, fem)
+        end
+        nlp = NLPModel(
+            α,
+            f,
+            grad = g!,
+            lvar = md.inversion.min_parameters,
+            uvar = md.inversion.max_parameters,
+        )
+
+        results_qn = madnlp(
+            nlp;
+            linear_solver = LapackCPUSolver,
+            hessian_approximation = MadNLP.CompactLBFGS,
+            tol = md.inversion.tol,
+            max_iter = md.inversion.maxiter,
+        )
+
         independent_enum = StringToEnum(md.inversion.independent_string)
-        InputUpdateFromVectorx(femmodel, sol.u, independent_enum, VertexSIdEnum)
+        InputUpdateFromVectorx(femmodel, results_qn.solution, independent_enum, VertexSIdEnum)
         RequestedOutputsx(femmodel, [independent_enum])
     end
 end #}}}
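The `NLPModel`/`madnlp` pattern above can be exercised in isolation. The following is a minimal, self-contained sketch of the same pattern on a toy objective, assuming only the ManualNLPModels and MadNLP keyword interfaces used in this commit; the objective, gradient, bounds, and tolerances are illustrative, not DJUICE code.

```julia
using ManualNLPModels, MadNLP

# toy objective and in-place gradient (illustrative only)
f(x) = (x[1] - 1.0)^2 + 4.0 * (x[2] - x[1]^2)^2
function g!(gx, x)
    gx[1] = 2.0 * (x[1] - 1.0) - 16.0 * x[1] * (x[2] - x[1]^2)
    gx[2] = 8.0 * (x[2] - x[1]^2)
    return gx
end

x0 = [-1.2, 1.0]
# bound-constrained model built from plain Julia callables
nlp = NLPModel(x0, f; grad = g!, lvar = fill(-10.0, 2), uvar = fill(10.0, 2))

# quasi-Newton interior-point solve, mirroring Control_Core above
results = madnlp(nlp;
    linear_solver = LapackCPUSolver,
    hessian_approximation = MadNLP.CompactLBFGS,
    tol = 1e-8, max_iter = 100)
println(results.solution)
```

`results.solution` is the same field `Control_Core` feeds back into `InputUpdateFromVectorx`.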
-
-function ComputeGradient(∂J_∂α::Vector{Float64}, α::Vector{Float64}, femmodel::FemModel) #{{{
+function ComputeGradient!(∂J_∂α::Vector{Float64}, α::Vector{Float64}, femmodel::FemModel) #{{{
     # zero ALL depth of the model, make sure we get correct gradient
     dfemmodel = make_zero(Base.Core.Typeof(femmodel), IdDict(), femmodel)
     # zero the gradient
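For context, `make_zero` is Enzyme's recursive zero-initializer: it produces a shadow of an arbitrarily nested, possibly mutable structure with every differentiable leaf set to zero, and reverse-mode `autodiff` then accumulates derivatives into that shadow. A minimal sketch of the pattern, using an illustrative stand-in struct rather than `FemModel`:

```julia
using Enzyme

# illustrative stand-in for a nested, mutable model structure
mutable struct Toy
    a::Vector{Float64}
end

loss(x::Vector{Float64}, m::Toy) = sum(m.a .* x .^ 2)

x  = [1.0, 2.0]
dx = make_zero(x)        # zeroed shadow for the control
m  = Toy([3.0, 4.0])
dm = make_zero(m)        # zeroed shadow for every field of m

# reverse-mode AD: derivatives accumulate into the shadow arguments
autodiff(Reverse, loss, Active, Duplicated(x, dx), Duplicated(m, dm))
println(dx)  # ∂loss/∂x = 2 .* m.a .* x = [6.0, 16.0]
```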
@@ -54,6 +76,7 @@ function CostFunction(α::Vector{Float64}, femmodel::FemModel) #{{{
     # compute cost function
     # TODO: loop through all controls with respect to all the components in the cost function
     solutionstring = FindParam(Symbol, femmodel.parameters, SolutionTypeEnum)
+    # return J
     CostFunctionx(femmodel, α, controlvar_enum, VertexSIdEnum, cost_enum_list, Val(solutionstring))
 end #}}}
 function CostFunctionx(femmodel::FemModel, α::Vector{Float64}, controlvar_enum::IssmEnum, SId_enum::IssmEnum, cost_enum_list::Vector{IssmEnum}, ::Val{solutionstring}) where solutionstring #{{{
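`Val(solutionstring)` lifts the runtime `Symbol` recovered by `FindParam` into the type domain, so `CostFunctionx` dispatches to a solution-specific method that the compiler (and Enzyme) can specialize. A minimal sketch of that dispatch pattern, with illustrative method bodies and solution names:

```julia
# dispatch on a solution symbol lifted into a type parameter
run_core(::Val{:StressbalanceSolution}) = println("run stress balance core")
run_core(::Val{:TransientSolution})     = println("run transient core")

solutionstring = :StressbalanceSolution  # normally a runtime value
run_core(Val(solutionstring))            # selects the matching method
```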