Skip to content

Commit b6d5011

Browse files
committed
other batch_types added
1 parent 59a7c72 commit b6d5011

File tree

1 file changed

+17
-7
lines changed

1 file changed

+17
-7
lines changed

src/hiopbbpy/opt/boalgorithm.py

Lines changed: 17 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -91,8 +91,6 @@ def __init__(self, prob:Problem, gpsurrogate:GaussianProcess, xtrain, ytrain,
9191
batch_size = options.get('batch_size', 1)
9292
assert isinstance(batch_size, int), f"batch_size {batch_size} not an integer"
9393
assert batch_size > 0, f"batch_size {batch_size} is not strictly positive"
94-
assert ((batch_size == 1 and acquisition_type == "LCB") or (acquisition_type == "EI")), \
95-
f"batched BO only supported for expected-improvement"
9694
self.setAcquisitionType(acquisition_type, batch_size)
9795

9896
self.evaluator = options.get('evaluator', self.evaluator)
@@ -158,11 +156,20 @@ def _find_best_point(self, x_train, y_train, x0 = None):
158156
return best_xopt
159157

160158
def _get_virtual_point(self, x):
    """Return a virtual ("lie") observation at ``x`` for batch Bayesian optimization.

    While a batch of candidate points is being assembled, already-selected but
    not-yet-evaluated points must be assigned a surrogate objective value so the
    GP can be conditioned on them.  The strategy is chosen by ``self.batch_type``:

    - ``"CLmin"``  : constant liar -- the minimum training observation so far.
    - ``"KB"``     : Kriging believer -- the GP posterior mean at ``x``.
    - ``"KBUB"``   : Kriging believer upper bound -- mean + 3 * std.
    - ``"KBLB"``   : Kriging believer lower bound -- mean - 3 * std.
    - ``"KBRand"`` : Kriging believer with a random N(0, 1) multiplier on std.

    Parameters
    ----------
    x : array-like
        Point at which the virtual observation is generated.

    Returns
    -------
    float
        The virtual objective value at ``x``.

    Raises
    ------
    NotImplementedError
        If ``self.batch_type`` is not one of the supported strategies.
    """
    supported = ("CLmin", "KB", "KBUB", "KBLB", "KBRand")
    if self.batch_type not in supported:
        # Fix: original message concatenated with "+" and was missing a space
        # ("...associated toKB"); it also raised TypeError for non-str values.
        raise NotImplementedError(
            f"No implemented batch_type associated to {self.batch_type}")
    # Constant liar: lie with the best (minimum) observation seen so far.
    if self.batch_type == "CLmin":
        return min(self.gpsurrogate.training_y)
    # Kriging-believer family: mean + beta * std, beta depends on the variant.
    if self.batch_type == "KB":
        beta = 0.0
    elif self.batch_type == "KBUB":
        beta = 3.0
    elif self.batch_type == "KBLB":
        beta = -3.0
    else:  # "KBRand": fresh N(0, 1) multiplier on every call
        beta = np.random.randn()
    return self.gpsurrogate.mean(x) + beta * np.sqrt(self.gpsurrogate.variance(x))
166173

167174
# Set the optimization method
168175
def set_method(self, method):
@@ -208,8 +215,11 @@ def optimize(self):
208215
for batch in range(1, self.batch_size+1):
209216
self.x_hist.append(x_train[-batch].flatten())
210217
self.y_hist.append(y_train[-batch].flatten())
218+
if self.batch_size == 1:
219+
print(f"Sample point X: {x_train[-self.batch_size:]}, Observation Y: {y_new}")
220+
else:
221+
print(f"Sample points X: {x_train[-self.batch_size:]}, Observations Y: {y_new}")
211222

212-
print(f"Sampled point X: {x_train[-self.batch_size:]}, Observation Y: {y_new}")
213223

214224
# Save the optimal results and all the training data
215225
self.idx_opt = np.argmin(self.y_hist)

0 commit comments

Comments
 (0)