Skip to content

Commit 02b105e

Browse files
authored
Add support to use ipopt to solve constrained problem (#722)
* add support to use ipopt to solve constrained problem * fix CI redesign IpoptProb * add authors * move constraints into Problem class * update CI
1 parent f2c38f7 commit 02b105e

File tree

13 files changed

+418
-75
lines changed

13 files changed

+418
-75
lines changed

.github/workflows/test_hiopbbpy.yml

Lines changed: 26 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -14,19 +14,39 @@ jobs:
1414
uses: actions/checkout@v4
1515

1616
- name: Set up Python
17-
uses: actions/setup-python@v4 # Set up Python environment
17+
uses: actions/setup-python@v4
1818
with:
19-
python-version: '3.13'
19+
python-version: "3.13"
2020

21+
- name: Set up Conda
22+
uses: conda-incubator/setup-miniconda@v2
23+
with:
24+
auto-activate-base: true
25+
channels: conda-forge
26+
27+
- name: Install IPOPT via conda
28+
run: |
29+
conda init bash
30+
conda install -c conda-forge cyipopt pkg-config
31+
export LD_LIBRARY_PATH=$CONDA_PREFIX/lib:$LD_LIBRARY_PATH
2132
22-
- name: Install HiOpBBPy and its dependencies
33+
- name: Verify cyipopt installation
2334
run: |
35+
source ~/.bashrc
36+
conda activate base
37+
python -c "import cyipopt; print('cyipopt OK')"
38+
39+
- name: Install HiOpBBPy
40+
run: |
41+
source ~/.bashrc
42+
conda activate base
43+
export SKIP_CYIPOPT=1
2444
python -m pip install --upgrade pip
2545
pip install .
2646
2747
- name: Run Tests
2848
run: |
29-
python src/Drivers/hiopbbpy/BODriver.py 10
49+
source ~/.bashrc
50+
conda activate base
51+
python src/Drivers/hiopbbpy/BODriverCI.py 10
3052
continue-on-error: false
31-
32-

setup.py

Lines changed: 8 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -6,20 +6,26 @@
66
'''
77

88
import sys
9+
import os
910
import numpy as np
1011
from setuptools import setup, find_packages
1112

1213

14+
install_requires = ["smt"]
15+
16+
if os.getenv("SKIP_CYIPOPT", "0") != "1":
17+
install_requires.append("cyipopt")
18+
1319
metadata = dict(
1420
name="hiopbbpy",
15-
version="0.0.3",
21+
version="0.0.4",
1622
description="HiOp black box optimization (hiopbbpy)",
1723
author="Tucker hartland et al.",
1824
author_email="hartland1@llnl.gov",
1925
license="BSD-3",
2026
packages=find_packages(where="src"),
2127
package_dir={"": "src"},
22-
install_requires=["smt"],
28+
install_requires=install_requires,
2329
python_requires=">=3.9",
2430
zip_safe=False,
2531
url="https://github.com/LLNL/hiop",
Lines changed: 11 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -70,6 +70,15 @@
7070
min_obj[prob_type][acq_type] = np.inf
7171
y_opt[prob_type][acq_type] = np.zeros(num_repeat)
7272

73+
options = {
74+
'acquisition_type': acq_type,
75+
'bo_maxiter': 20,
76+
'opt_solver': 'SLSQP', #"SLSQP" "IPOPT"
77+
'solver_options': {
78+
'maxiter': 200
79+
}
80+
}
81+
7382
print("Problem name: ", problem.name)
7483
print("Acquisition type: ", acq_type)
7584

@@ -84,8 +93,8 @@
8493
gp_model.train(x_train, y_train)
8594

8695
# Instantiate and run Bayesian Optimization
87-
bo = BOAlgorithm(gp_model, x_train, y_train, acquisition_type = acq_type) #EI or LCB
88-
bo.optimize(problem)
96+
bo = BOAlgorithm(problem, gp_model, x_train, y_train, options = options)
97+
bo.optimize()
8998

9099
# Retrieve optimal objective
91100
y_opt[prob_type][acq_type][n_repeat] = bo.getOptimalObjective()

src/Drivers/hiopbbpy/BODriverEX.py

Lines changed: 95 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,95 @@
1+
"""
2+
Code description:
3+
for a 2D example LpNormProblem
4+
1) randomly sample training points
5+
2) define a Kriging-based Gaussian-process (smt backend)
6+
trained on said data
7+
3) determine the minimizer via BOAlgorithm
8+
9+
Authors: Tucker Hartland <hartland1@llnl.gov>
10+
Nai-Yuan Chiang <chiang7@llnl.gov>
11+
"""
12+
13+
import sys
14+
import os
15+
import numpy as np
16+
import matplotlib.pyplot as plt
17+
import warnings
18+
warnings.filterwarnings("ignore")
19+
from LpNormProblem import LpNormProblem
20+
from hiopbbpy.surrogate_modeling import smtKRG
21+
from hiopbbpy.opt import BOAlgorithm
22+
from hiopbbpy.problems import BraninProblem
23+
24+
25+
# Get user input for the number of repetitions from command-line arguments
26+
if len(sys.argv) != 2 or int(sys.argv[1]) < 0:
27+
num_repeat = 1
28+
else:
29+
num_repeat = int(sys.argv[1])
30+
31+
### parameters
32+
n_samples = 5 # number of the initial samples to train GP
33+
theta = 1.e-2 # hyperparameter for GP kernel
34+
nx = 2 # dimension of the problem
35+
xlimits = np.array([[-5, 5], [-5, 5]]) # bounds on optimization variable
36+
37+
prob_type_l = ["LpNorm"] # ["LpNorm", "Branin"]
38+
acq_type_l = ["LCB"] # ["LCB", "EI"]
39+
40+
def con_eq(x):
41+
return x[0] + x[1] - 4
42+
43+
def con_jac_eq(x):
44+
return np.array([1.0, 1.0])
45+
46+
def con_ineq(x):
47+
return x[0] - x[1]
48+
49+
def con_jac_ineq(x):
50+
return np.array([1.0, -1.0])
51+
52+
# only 'trust-constr' method supports vector-valued constraints
53+
user_constraint = [{'type': 'ineq', 'fun': con_ineq, 'jac': con_jac_ineq},
54+
{'type': 'eq', 'fun': con_eq, 'jac': con_jac_eq}]
55+
56+
retval = 0
57+
for prob_type in prob_type_l:
58+
print()
59+
if prob_type == "LpNorm":
60+
problem = LpNormProblem(nx, xlimits, constraints=None)
61+
else:
62+
problem = BraninProblem(constraints=None)
63+
problem.set_constraints(user_constraint)
64+
65+
for acq_type in acq_type_l:
66+
print("Problem name: ", problem.name)
67+
print("Acquisition type: ", acq_type)
68+
69+
### initial training set
70+
x_train = problem.sample(n_samples)
71+
y_train = problem.evaluate(x_train)
72+
73+
### Define the GP surrogate model
74+
gp_model = smtKRG(theta, xlimits, nx)
75+
gp_model.train(x_train, y_train)
76+
77+
options = {
78+
'acquisition_type': acq_type,
79+
'bo_maxiter': 10,
80+
'opt_solver': 'IPOPT', #"SLSQP" "IPOPT"
81+
'solver_options': {
82+
'max_iter': 200,
83+
'print_level': 1
84+
}
85+
}
86+
87+
# Instantiate and run Bayesian Optimization
88+
bo = BOAlgorithm(problem, gp_model, x_train, y_train, options = options) #EI or LCB
89+
bo.optimize()
90+
91+
sys.exit(retval)
92+
93+
94+
95+

src/Drivers/hiopbbpy/LpNormProblem.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -8,9 +8,9 @@
88
from hiopbbpy.problems.problem import Problem
99

1010
class LpNormProblem(Problem):
11-
def __init__(self, ndim, xlimits, p=2.0):
11+
def __init__(self, ndim, xlimits, p=2.0, constraints=None):
1212
name = "LpNormProblem"
13-
super().__init__(ndim, xlimits, name=name)
13+
super().__init__(ndim, xlimits, name=name, constraints=constraints)
1414
self.p = p
1515

1616
def _evaluate(self, x):

src/hiopbbpy/opt/__init__.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,12 @@
11
from .boalgorithm import (BOAlgorithmBase, BOAlgorithm)
22
from .acquisition import (acquisition, LCBacquisition, EIacquisition)
3+
from .optproblem import (IpoptProb)
34

45
__all__ = [
56
"BOAlgorithmBase"
67
"BOAlgorithm"
78
"acquisition"
89
"LCBacquisition"
910
"EIacquisition"
11+
"IpoptProb"
1012
]

src/hiopbbpy/opt/acquisition.py

Lines changed: 47 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -14,28 +14,41 @@ class acquisition(object):
1414
def __init__(self, gpsurrogate):
1515
assert isinstance(gpsurrogate, GaussianProcess) # add something here
1616
self.gpsurrogate = gpsurrogate
17+
self.has_gradient = False
1718

1819
# Abstract method to evaluate the acquisition function at x.
1920
def evaluate(self, x: np.ndarray) -> np.ndarray:
2021
raise NotImplementedError("Child class of acquisition should implement method evaluate")
2122

23+
# Abstract method to evaluate the gradient of acquisition function at x.
24+
def eval_g(self, x: np.ndarray) -> np.ndarray:
25+
raise NotImplementedError("Child class of acquisition should implement method evaluate")
2226

2327
# A subclass of acquisition, implementing the Lower Confidence Bound (LCB) acquisition function.
2428
class LCBacquisition(acquisition):
2529
def __init__(self, gpsurrogate, beta=3.0):
2630
super().__init__(gpsurrogate)
2731
self.beta = beta
32+
self.has_gradient = True
2833

2934
# Method to evaluate the acquisition function at x.
3035
def evaluate(self, x : np.ndarray) -> np.ndarray:
3136
mu = self.gpsurrogate.mean(x)
32-
sig = self.gpsurrogate.variance(x)
33-
return mu - self.beta * np.sqrt(sig)
37+
sig2 = self.gpsurrogate.variance(x)
38+
return mu - self.beta * np.sqrt(sig2)
39+
40+
def eval_g(self, x: np.ndarray) -> np.ndarray:
41+
mu = self.gpsurrogate.mean(x)
42+
sig2 = self.gpsurrogate.variance(x)
43+
dsig2_dx = self.gpsurrogate.variance_gradient(x)
44+
dmu_dx = self.gpsurrogate.mean_gradient(x)
45+
return dmu_dx - 0.5 * self.beta * dsig2_dx / np.sqrt(sig2)
3446

3547
# A subclass of acquisition, implementing the Expected improvement (EI) acquisition function.
3648
class EIacquisition(acquisition):
3749
def __init__(self, gpsurrogate):
3850
super().__init__(gpsurrogate)
51+
self.has_gradient = True
3952

4053
# Method to evaluate the acquisition function at x.
4154
def evaluate(self, x : np.ndarray) -> np.ndarray:
@@ -47,12 +60,41 @@ def evaluate(self, x : np.ndarray) -> np.ndarray:
4760

4861
retval = []
4962
if sig.size == 1 and np.abs(sig) > 1e-12:
50-
arg0 = (y_min - pred) / sig
51-
retval = (y_min - pred) * norm.cdf(arg0) + sig * norm.pdf(arg0)
63+
z = (y_min - pred) / sig
64+
retval = (y_min - pred) * norm.cdf(z) + sig * norm.pdf(z)
5265
retval *= -1.
5366
elif sig.size == 1 and np.abs(sig) <= 1e-12:
5467
retval = 0.0
5568
elif sig.size > 1:
56-
NotImplementedError("TODO --- Not implemented yet!")
69+
raise NotImplementedError("TODO --- Not implemented yet!")
5770

5871
return retval
72+
73+
def eval_g(self, x: np.ndarray) -> np.ndarray:
74+
y_data = self.gpsurrogate.training_y
75+
y_min = y_data[np.argmin(y_data[:, 0])]
76+
77+
mean = self.gpsurrogate.mean(x)
78+
sig2 = self.gpsurrogate.variance(x)
79+
sig = np.sqrt(sig2)
80+
81+
grad_EI = None
82+
if sig.size == 1 and np.abs(sig) > 1e-12:
83+
dmean_dx = self.gpsurrogate.mean_gradient(x)
84+
dsig2_dx = self.gpsurrogate.variance_gradient(x)
85+
dsig_dx = 0.5 * dsig2_dx / sig
86+
87+
z = (y_min - mean) / sig
88+
ncdf = norm.cdf(z)
89+
npdf = norm.pdf(z)
90+
EI = (y_min - mean) * ncdf + sig * npdf
91+
92+
dz_dx = -dmean_dx / sig - (y_min - mean) * dsig_dx / sig**2
93+
grad_EI = -dmean_dx * ncdf + dsig_dx * npdf
94+
grad_EI *= -1.
95+
elif sig.size == 1 and np.abs(sig) <= 1e-12:
96+
grad_EI = 0.0
97+
elif sig.size > 1:
98+
raise NotImplementedError("TODO --- Not implemented yet!")
99+
100+
return grad_EI

0 commit comments

Comments
 (0)