Skip to content

Commit 3d53d56

Browse files
committed
redesign IpoptProb
1 parent 4c44a68 commit 3d53d56

File tree

4 files changed

+57
-23
lines changed

4 files changed

+57
-23
lines changed

src/Drivers/hiopbbpy/BODriverEX.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -78,7 +78,8 @@ def con_jac_ineq(x):
7878
'bo_maxiter': 10,
7979
'opt_solver': 'IPOPT', #"SLSQP" "IPOPT"
8080
'solver_options': {
81-
'maxiter': 200
81+
'max_iter': 200,
82+
'print_level': 1
8283
}
8384
}
8485

src/hiopbbpy/opt/__init__.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,12 +1,12 @@
11
from .boalgorithm import (BOAlgorithmBase, BOAlgorithm)
22
from .acquisition import (acquisition, LCBacquisition, EIacquisition)
3-
from .optproblem import (IpoptProbFromScipy)
3+
from .optproblem import (IpoptProb)
44

55
__all__ = [
66
"BOAlgorithmBase"
77
"BOAlgorithm"
88
"acquisition"
99
"LCBacquisition"
1010
"EIacquisition"
11-
"IpoptProbFromScipy"
11+
"IpoptProb"
1212
]

src/hiopbbpy/opt/boalgorithm.py

Lines changed: 28 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -9,11 +9,11 @@
99
from numpy.random import uniform
1010
from scipy.optimize import minimize
1111
from scipy.stats import qmc
12-
from cyipopt import Problem as IpoptProb
12+
import warnings
1313
from ..surrogate_modeling.gp import GaussianProcess
1414
from .acquisition import LCBacquisition, EIacquisition
1515
from ..problems.problem import Problem
16-
from .optproblem import IpoptProbFromScipy
16+
from .optproblem import IpoptProb
1717

1818
# A base class defining a general framework for Bayesian Optimization
1919
class BOAlgorithmBase:
@@ -83,9 +83,9 @@ def __init__(self, gpsurrogate, xtrain, ytrain,
8383
assert self.bo_maxiter > 0, f"Invalid bo_maxiter: {self.bo_maxiter }"
8484

8585
if options and 'solver_options' in options:
86-
self.options = options['solver_options']
86+
self.solver_options = options['solver_options']
8787
else:
88-
self.options = {"maxiter": 200}
88+
self.solver_options = {"maxiter": 200}
8989

9090
if options and 'acquisition_type' in options:
9191
acquisition_type = options['acquisition_type']
@@ -110,7 +110,7 @@ def __init__(self, gpsurrogate, xtrain, ytrain,
110110

111111
# Method to set up a callback function to minimize the acquisition function
112112
def _setup_acqf_minimizer_callback(self):
113-
self.acqf_minimizer_callback = lambda fun, x0: pyminimize(fun, x0, self.opt_solver, self.bounds, self.constraints, self.options)
113+
self.acqf_minimizer_callback = lambda fun, x0: minimizer(fun, x0, self.opt_solver, self.bounds, self.constraints, self.solver_options)
114114

115115
# Method to train the GP model
116116
def _train_surrogate(self, x_train, y_train):
@@ -143,11 +143,14 @@ def _find_best_point(self, x_train, y_train, x0 = None):
143143
else:
144144
x0 = np.array([uniform(b[0], b[1]) for b in self.bounds])
145145
xopt, yout, success = self.acqf_minimizer_callback(acqf_callback, x0)
146-
146+
147147
if success:
148148
x_all.append(xopt)
149149
y_all.append(yout)
150-
150+
151+
if not x_all:
152+
raise RuntimeError("Optimization failed for all initial points — no solution found.")
153+
151154
best_xopt = x_all[np.argmin(np.array(y_all))]
152155

153156
return best_xopt
@@ -156,9 +159,9 @@ def _find_best_point(self, x_train, y_train, x0 = None):
156159
def set_method(self, method):
157160
self.opt_solver = method
158161

159-
# Set the user options for the Bayesian optimization
160-
def set_options(self, options):
161-
self.options = options
162+
# Set the options for the internal optimization solver
163+
def set_options(self, solver_options):
164+
self.solver_options = solver_options
162165

163166
# Method to perform Bayesian optimization
164167
def optimize(self, prob:Problem):
@@ -211,24 +214,31 @@ def optimize(self, prob:Problem):
211214
print()
212215

213216
# Find the minimum of the input objective `fun`, using the minimize function from SciPy.
214-
def pyminimize(fun, x0, method, bounds, constraints, options):
217+
def minimizer(fun, x0, method, bounds, constraints, solver_options):
215218
if method != "IPOPT":
216219
if 'grad' in fun:
217220
y = minimize(fun['obj'], x0, method=method, bounds=bounds, jac=fun['grad'], constraints=constraints, options=options)
218221
else:
219-
y = minimize(fun['obj'], x0, method=method, bounds=bounds, constraints=constraints, options=options)
222+
y = minimize(fun['obj'], x0, method=method, bounds=bounds, constraints=constraints, options=solver_options)
220223
success = y.success
221224
if not success:
222225
print(y.message)
223226
xopt = y.x
224227
yopt = y.fun
225228
else:
226-
ipopt_prob = IpoptProbFromScipy(fun['obj'], fun['grad'], constraints, bounds)
227-
nlp = IpoptProb(n=ipopt_prob.nvar, m=ipopt_prob.ncon, problem_obj=ipopt_prob, lb=ipopt_prob.xl, ub=ipopt_prob.xu, cl=ipopt_prob.cl, cu=ipopt_prob.cu)
229+
ipopt_prob = IpoptProb(fun['obj'], fun['grad'], constraints, bounds, solver_options)
230+
sol, info = ipopt_prob.solve(x0)
231+
232+
status = info.get('status', -1)
233+
msg = info.get('status_msg', -1)
234+
if status == 0:
235+
# ipopt returns 0 as success
236+
success = True
237+
else:
238+
warnings.warn(f"Ipopt failed to solve the problem. Status msg: {msg}")
239+
success = False
228240

229-
sol = nlp.solve(x0)
230-
success = not sol[1]['status'] # ipopt returns 0 as success
231-
yopt = sol[1]['obj_val']
232-
xopt = sol[0]
241+
yopt = info['obj_val']
242+
xopt = sol
233243

234244
return xopt, yopt, success

src/hiopbbpy/opt/optproblem.py

Lines changed: 25 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
import numpy as np
2+
import cyipopt
23

34
"""
45
Convert a Scipy optimization problem to an Ipopt problem.
@@ -12,8 +13,8 @@
1213
Returns:
1314
Ipopt-compatible prob and bounds
1415
"""
15-
class IpoptProbFromScipy:
16-
def __init__(self, objective, gradient, constraints_list, xbounds):
16+
class IpoptProb:
17+
def __init__(self, objective, gradient, constraints_list, xbounds, solver_options=None):
1718
self.constraints_list = constraints_list
1819
self.eval_f = objective
1920
self.eval_g = gradient
@@ -23,6 +24,7 @@ def __init__(self, objective, gradient, constraints_list, xbounds):
2324
self.cu = []
2425
self.nvar = len(xbounds)
2526
self.ncon = len(self.constraints_list)
27+
self.ipopt_options = solver_options
2628

2729
for con in constraints_list:
2830
if con['type'] == 'eq':
@@ -34,6 +36,16 @@ def __init__(self, objective, gradient, constraints_list, xbounds):
3436
else:
3537
raise ValueError(f"Unknown constraint type: {con['type']}")
3638

39+
self.nlp = cyipopt.Problem(
40+
n=self.nvar,
41+
m=self.ncon,
42+
problem_obj=self,
43+
lb=self.xl,
44+
ub=self.xu,
45+
cl=self.cl,
46+
cu=self.xu
47+
)
48+
3749
def objective(self, x):
3850
return self.eval_f(x)
3951

@@ -51,3 +63,14 @@ def jacobian(self, x):
5163
else:
5264
raise ValueError("Jacobian not provided for constraint.")
5365
return np.vstack(jacs)
66+
67+
def solve(self, x0, solver_options=None):
68+
ipopt_options = self.ipopt_options
69+
if solver_options is not None:
70+
ipopt_options = solver_options
71+
if ipopt_options is not None:
72+
for key, value in ipopt_options.items():
73+
self.nlp.add_option(key, value)
74+
75+
# Solve the optimization problem
76+
return self.nlp.solve(x0)

0 commit comments

Comments (0)