
Commit b60af6c

Commit message: solvers
Parent commit: a63e448
File tree

10 files changed: +486 -3 lines


.github/workflows/CI.yml

Lines changed: 3 additions & 0 deletions
@@ -10,3 +10,6 @@ on:
 jobs:
   call:
     uses: control-toolbox/CTActions/.github/workflows/ci.yml@main
+    with:
+      runners: '["ubuntu-latest"]'
+      versions: '["1.12"]'

Project.toml

Lines changed: 12 additions & 0 deletions
@@ -5,11 +5,23 @@ authors = ["Olivier Cots <olivier.cots@toulouse-inp.fr>"]

 [deps]
 ADNLPModels = "54578032-b7ea-4c30-94aa-7cbd1cce6c9a"
+CommonSolve = "38540f10-b2f7-11e9-35d8-d573e4eb0ff2"
 ExaModels = "1037b233-b668-4ce9-9b63-f9f681f55dd2"
 KernelAbstractions = "63c18a36-062a-441e-b654-da1e3ab1ce7c"
+MadNLP = "2621e9c9-9eb4-46b1-8089-e8c72242dfb6"
+MadNLPMumps = "3b83494e-c0a4-4895-918b-9157a7a085a1"
+NLPModels = "a4795742-8479-5a88-8948-cc11e1c8c1a6"
+NLPModelsIpopt = "f4238b75-b362-5c4c-b852-0801c9a21d71"
+SolverCore = "ff4d7338-4cf1-434d-91df-b86cb86fb843"

 [compat]
 ADNLPModels = "0.8.13"
+CommonSolve = "0.2.4"
 ExaModels = "0.9.2"
 KernelAbstractions = "0.9.39"
+MadNLP = "0.8.12"
+MadNLPMumps = "0.5.1"
+NLPModels = "0.21.5"
+NLPModelsIpopt = "0.11.0"
+SolverCore = "0.3.8"
 julia = "1.11"

src/CTSolvers.jl

Lines changed: 7 additions & 0 deletions
@@ -3,7 +3,14 @@ module CTSolvers
 using ADNLPModels
 using ExaModels
 using KernelAbstractions
+using SolverCore
+using NLPModels
+using NLPModelsIpopt
+using MadNLP
+using MadNLPMumps
+using CommonSolve: CommonSolve, solve

 include("models.jl")
+include("solvers.jl")

 end

src/models.jl

Lines changed: 107 additions & 0 deletions
@@ -0,0 +1,107 @@
# ------------------------------------------------------------------------------
# Problems definition
struct ADNLPProblem
    build_adnlp_model::Function
end
function (prob::ADNLPProblem)(
    initial_guess::AbstractVector;
    kwargs...
)::ADNLPModels.ADNLPModel
    return prob.build_adnlp_model(initial_guess; kwargs...)
end

struct ExaProblem
    build_exa_model::Function
end
function (prob::ExaProblem)(
    ::Type{BaseType},
    initial_guess;
    kwargs...
)::ExaModels.ExaModel where {BaseType<:AbstractFloat}
    return prob.build_exa_model(BaseType, initial_guess; kwargs...)
end

abstract type AbstractOptimizationProblem end

# ------------------------------------------------------------------------------
# Generic model builders with ADNLPModels and ExaModels backends
abstract type AbstractNLPModelBackend end

# ------------------------------------------------------------------------------
# ADNLPModels
struct ADNLPModelBackend <: AbstractNLPModelBackend
    # attributes
    show_time::Bool
    backend::Symbol
    empty_backends::NamedTuple
    kwargs

    # constructor
    function ADNLPModelBackend(;
        show_time::Bool=false,
        backend::Symbol=:optimized,
        empty_backends::NamedTuple=(
            hprod_backend=ADNLPModels.EmptyADbackend,
            jtprod_backend=ADNLPModels.EmptyADbackend,
            jprod_backend=ADNLPModels.EmptyADbackend,
            ghjvprod_backend=ADNLPModels.EmptyADbackend,
        ),
        kwargs...,
    )
        return new(show_time, backend, empty_backends, kwargs)
    end
end

function build_model(
    prob::AbstractOptimizationProblem,
    initial_guess::AbstractVector,
    modeler::ADNLPModelBackend,
)

    # build the backend options
    backend_options = if modeler.backend == :manual # we define the AD backend manually with sparsity pattern (OCP)
        (
            gradient_backend=ADNLPModels.ReverseDiffADGradient,
            jacobian_backend=ADNLPModels.SparseADJacobian,
            hessian_backend=ADNLPModels.SparseReverseADHessian,
            modeler.empty_backends...
        )
    else
        (backend=modeler.backend, modeler.empty_backends...)
    end

    # build the model
    return prob.build_adnlp_model(initial_guess;
        show_time=modeler.show_time,
        backend_options...,
        modeler.kwargs...,
    )
end;

# ------------------------------------------------------------------------------
# ExaModels
struct ExaModelBackend{
    BackendType<:Union{Nothing, KernelAbstractions.Backend}
} <: AbstractNLPModelBackend
    base_type::DataType
    backend::BackendType
    kwargs
    function ExaModelBackend(;
        base_type::DataType=Float64,
        backend::Union{Nothing, KernelAbstractions.Backend}=nothing,
        kwargs...,
    )
        return new{typeof(backend)}(base_type, backend, kwargs)
    end
end

function build_model(
    prob::AbstractOptimizationProblem,
    initial_guess,
    modeler::ExaModelBackend,
)
    return prob.build_exa_model(modeler.base_type, initial_guess;
        backend=modeler.backend,
        modeler.kwargs...,
    )
end;
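
For context, a minimal usage sketch of these builders (not part of the commit): it assumes the Rosenbrock test problem and initial guess defined later in test/rosenbrock.jl, and builds the same NLP through both backends with their default options.

# Minimal sketch, assuming rosenbrock_prob and rosenbrock_init from test/rosenbrock.jl
using NLPModels

nlp_ad  = CTSolvers.build_model(rosenbrock_prob, rosenbrock_init, CTSolvers.ADNLPModelBackend())
nlp_exa = CTSolvers.build_model(rosenbrock_prob, rosenbrock_init, CTSolvers.ExaModelBackend())

# both models evaluate the same objective at the initial guess
NLPModels.obj(nlp_ad, rosenbrock_init) ≈ NLPModels.obj(nlp_exa, rosenbrock_init)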

src/solvers.jl

Lines changed: 117 additions & 0 deletions
@@ -0,0 +1,117 @@
# ------------------------------------------------------------------------------
# ------------------------------------------------------------------------------
# Solvers on CPU: NLPModelsIpopt, MadNLP, MadNCL

# NLPModelsIpopt
function solve_with_ipopt(
    nlp::NLPModels.AbstractNLPModel;
    kwargs...,
)::SolverCore.GenericExecutionStats
    solver = NLPModelsIpopt.IpoptSolver(nlp)
    return NLPModelsIpopt.solve!(solver, nlp; kwargs...)
end

# MadNLP
function solve_with_madnlp(
    nlp::NLPModels.AbstractNLPModel;
    kwargs...,
)::MadNLP.MadNLPExecutionStats
    solver = MadNLP.MadNLPSolver(nlp; kwargs...)
    return MadNLP.solve!(solver)
end

# ------------------------------------------------------------------------------
# ------------------------------------------------------------------------------
# Generic solvers
abstract type AbstractNLPSolverBackend end

# ------------------------------------------------------------------------------
# NLPModelsIpopt
struct NLPModelsIpoptBackend <: AbstractNLPSolverBackend
    # attributes
    max_iter::Int
    tol::Float64
    print_level::Int
    mu_strategy::String
    linear_solver::String
    sb::String
    kwargs

    # constructor
    function NLPModelsIpoptBackend(;
        max_iter::Int=100,
        tol::Float64=1e-6,
        print_level::Int=5,
        mu_strategy::String="adaptive",
        linear_solver::String="Mumps",
        sb::String="yes",
        kwargs...,
    )
        return new(max_iter, tol, print_level, mu_strategy, linear_solver, sb, kwargs)
    end
end

function CommonSolve.solve(
    prob::AbstractOptimizationProblem,
    initial_guess,
    modeler::AbstractNLPModelBackend,
    solver::NLPModelsIpoptBackend;
)::SolverCore.GenericExecutionStats

    # build the model
    nlp = build_model(prob, initial_guess, modeler)

    # solve the problem
    return solve_with_ipopt(nlp;
        max_iter=solver.max_iter,
        tol=solver.tol,
        print_level=solver.print_level,
        mu_strategy=solver.mu_strategy,
        linear_solver=solver.linear_solver,
        sb=solver.sb,
        solver.kwargs...
    )
end

# ------------------------------------------------------------------------------
# MadNLP
struct MadNLPBackend <: AbstractNLPSolverBackend
    # attributes
    max_iter::Int
    tol::Float64
    print_level::MadNLP.LogLevels
    linear_solver::Type{<:MadNLP.AbstractLinearSolver}
    kwargs

    # constructor
    function MadNLPBackend(;
        max_iter::Int=100,
        tol::Float64=1e-6,
        print_level::MadNLP.LogLevels=MadNLP.INFO,
        linear_solver::Type{<:MadNLP.AbstractLinearSolver}=MadNLPMumps.MumpsSolver,
        kwargs...,
    )
        return new(max_iter, tol, print_level, linear_solver, kwargs)
    end
end

function CommonSolve.solve(
    prob::AbstractOptimizationProblem,
    initial_guess,
    modeler::AbstractNLPModelBackend,
    solver::MadNLPBackend;
)::MadNLP.MadNLPExecutionStats

    # build the model
    nlp = build_model(prob, initial_guess, modeler)

    # solve the problem
    return solve_with_madnlp(nlp;
        max_iter=solver.max_iter,
        tol=solver.tol,
        print_level=solver.print_level,
        linear_solver=solver.linear_solver,
        solver.kwargs...
    )
end
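
A minimal sketch of the resulting call chain (not part of this commit); it assumes the Rosenbrock test problem from test/rosenbrock.jl and uses only the backends defined above, with quieter print levels than the defaults.

using CommonSolve: solve

# ADNLPModels model + Ipopt (Mumps linear solver by default)
stats_ipopt = solve(
    rosenbrock_prob, rosenbrock_init,
    CTSolvers.ADNLPModelBackend(),
    CTSolvers.NLPModelsIpoptBackend(print_level=0),
)

# ExaModels model (CPU) + MadNLP (MadNLPMumps linear solver by default)
stats_madnlp = solve(
    rosenbrock_prob, rosenbrock_init,
    CTSolvers.ExaModelBackend(),
    CTSolvers.MadNLPBackend(print_level=MadNLP.ERROR),
)

# both runs should converge to the same minimizer, up to the solver tolerances
stats_ipopt.solution ≈ stats_madnlp.solution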

test/Project.toml

Lines changed: 3 additions & 0 deletions
@@ -1,7 +1,10 @@
 [deps]
 ADNLPModels = "54578032-b7ea-4c30-94aa-7cbd1cce6c9a"
 Aqua = "4c88cf16-eb10-579e-8560-4a9242c79595"
+CommonSolve = "38540f10-b2f7-11e9-35d8-d573e4eb0ff2"
 ExaModels = "1037b233-b668-4ce9-9b63-f9f681f55dd2"
+MadNLP = "2621e9c9-9eb4-46b1-8089-e8c72242dfb6"
+MadNLPMumps = "3b83494e-c0a4-4895-918b-9157a7a085a1"
 NLPModels = "a4795742-8479-5a88-8948-cc11e1c8c1a6"
 Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"

test/rosenbrock.jl

Lines changed: 57 additions & 0 deletions
@@ -0,0 +1,57 @@
function rosenbrock_objective(x)
    return (x[1] - 1.0)^2 + 100*(x[2] - x[1]^2)^2
end
function rosenbrock_constraint(x)
    return x[1]
end
function rosenbrock_is_minimize()
    return true
end

function Rosenbrock()
    # define common functions
    F(x) = rosenbrock_objective(x)
    c(x) = rosenbrock_constraint(x)
    lcon = [-Inf]
    ucon = [10.0]
    minimize = rosenbrock_is_minimize()

    # define ADNLPModels builder
    function build_adnlp_model(
        initial_guess::AbstractVector;
        kwargs...
    )::ADNLPModels.ADNLPModel
        return ADNLPModels.ADNLPModel(
            F,
            initial_guess,
            c,
            lcon,
            ucon;
            minimize=minimize,
            kwargs...
        )
    end

    # define ExaModels builder
    function build_exa_model(
        ::Type{BaseType},
        initial_guess::AbstractVector;
        kwargs...
    )::ExaModels.ExaModel where {BaseType<:AbstractFloat}
        m = ExaModels.ExaCore(BaseType; minimize=minimize, kwargs...)
        x = ExaModels.variable(m, length(initial_guess); start=initial_guess)
        ExaModels.objective(m, F(x))
        ExaModels.constraint(m, c(x); lcon=lcon, ucon=ucon)
        return ExaModels.ExaModel(m)
    end

    # return the problem
    return OptimizationProblem(
        CTSolvers.ADNLPProblem(build_adnlp_model),
        CTSolvers.ExaProblem(build_exa_model),
    )
end

const rosenbrock_prob = Rosenbrock()
const rosenbrock_init = [-1.2; 1.0]
const rosenbrock_solu = [1.0; 1.0]
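
As a quick check (a sketch, not part of the commit), the two stored builders can be called directly through the functor wrappers from src/models.jl, and the known solution satisfies the single constraint:

# direct calls through the ADNLPProblem / ExaProblem functors
nlp_ad  = rosenbrock_prob.build_adnlp_model(rosenbrock_init)          # ADNLPModels.ADNLPModel
nlp_exa = rosenbrock_prob.build_exa_model(Float64, rosenbrock_init)   # ExaModels.ExaModel

# the objective vanishes at the known solution and the constraint x[1] <= 10 is inactive there
rosenbrock_objective(rosenbrock_solu) == 0.0
rosenbrock_constraint(rosenbrock_solu) <= 10.0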

test/runtests.jl

Lines changed: 8 additions & 3 deletions
@@ -6,25 +6,30 @@ using CTSolvers
 using ADNLPModels
 using ExaModels
 using NLPModels
+using CommonSolve
+using MadNLP
+using MadNLPMumps

 # ------------------------------------------------------------------------------
 # Problems definition
 struct OptimizationProblem <: CTSolvers.AbstractOptimizationProblem
     build_adnlp_model::CTSolvers.ADNLPProblem
     build_exa_model::CTSolvers.ExaProblem
 end
-
 include("rosenbrock.jl")

 # ------------------------------------------------------------------------------
 # Tests
 const VERBOSE = true
-@testset verbose = VERBOSE showtiming = true "CTSolvers tests" begin
+const SHOWTIMING = true
+
+@testset verbose=VERBOSE showtiming=SHOWTIMING "CTSolvers tests" begin
     for name in (
         # :aqua,
         :models,
+        :solvers,
     )
-        @testset "$(name)" begin
+        @testset "$(name)" verbose=VERBOSE showtiming=SHOWTIMING begin
             test_name = Symbol(:test_, name)
             include("$(test_name).jl")
             @eval $test_name()
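
The file test/test_solvers.jl that the new :solvers entry includes is not shown in this diff view; the following is only a hypothetical sketch of what it could run, built from names defined in the files above.

# Hypothetical sketch of test/test_solvers.jl (the actual file is not shown in this commit view)
function test_solvers()
    for modeler in (CTSolvers.ADNLPModelBackend(), CTSolvers.ExaModelBackend())
        for solver in (
            CTSolvers.NLPModelsIpoptBackend(print_level=0),
            CTSolvers.MadNLPBackend(print_level=MadNLP.ERROR),
        )
            stats = solve(rosenbrock_prob, rosenbrock_init, modeler, solver)
            @test stats.solution ≈ rosenbrock_solu atol=1e-4
        end
    end
end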
