Skip to content

Commit bdc953c

Browse files
committed
add experimental API CARBOSampler.reevaluate_trials to re-evaluate acquisition function values using the latest GP model
1 parent 4a38523 commit bdc953c

File tree

2 files changed

+120
-2
lines changed

2 files changed

+120
-2
lines changed

package/samplers/carbo/_optim.py

Lines changed: 25 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -99,3 +99,28 @@ def suggest_by_carbo(
9999
assert isinstance(robust_x_local, np.ndarray)
100100
bounds = _create_bounds(robust_x_local, local_radius)
101101
return robust_x_local, *_gradient_descent(lcb_acqf, robust_x_local, bounds, tol=tol)
102+
103+
104+
def evaluate_by_carbo(
    *,
    robust_params: np.ndarray,
    gpr: GPRegressor,
    constraints_gpr_list: list[GPRegressor] | None,
    constraints_threshold_list: list[float] | None,
    rng: np.random.RandomState | None,
    rho: float,
    beta: float,
    n_local_search: int,
    local_radius: float,
    tol: float = 1e-4,
) -> tuple[np.ndarray, float]:
    """Re-evaluate the combined LCB acquisition function around given robust params.

    Runs a single gradient-descent refinement of the (constrained) LCB acquisition
    function, starting from ``robust_params`` and restricted to a local box of
    half-width ``local_radius`` around it.

    Args:
        robust_params: Normalized robust parameters to evaluate (1-D array).
        gpr: Fitted GP regressor for the objective.
        constraints_gpr_list: Fitted GP regressors for each constraint, or ``None``
            when the problem is unconstrained.
        constraints_threshold_list: Standardized feasibility thresholds matching
            ``constraints_gpr_list``, or ``None``.
        rng: Unused; accepted for signature parity with ``suggest_by_carbo``.
        rho: Penalty coefficient combining objective and constraint LCBs.
        beta: Exploration coefficient of the LCB.
        n_local_search: Unused; accepted for signature parity with
            ``suggest_by_carbo`` — evaluation starts one deterministic local
            search from ``robust_params`` instead of many random restarts.
        local_radius: Half-width of the local search box around ``robust_params``.
        tol: Convergence tolerance passed to the gradient descent.

    Returns:
        Tuple of (worst-case robust parameters found, acquisition value there).
    """
    # NOTE(review): ``rng`` and ``n_local_search`` are currently unused here;
    # kept to mirror suggest_by_carbo's interface — confirm before removing.
    lcb_acqf = CombinedLCB(
        gpr=gpr,
        constraints_gpr_list=constraints_gpr_list,
        constraints_threshold_list=constraints_threshold_list,
        rho=rho,
        beta=beta,
    )
    # Restrict the refinement to a small box around the supplied robust params,
    # mirroring the local refinement step of suggest_by_carbo.
    bounds = _create_bounds(robust_params, local_radius)
    return _gradient_descent(lcb_acqf, robust_params, bounds, tol=tol)

package/samplers/carbo/sampler.py

Lines changed: 95 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
from __future__ import annotations
22

3-
from typing import Any
3+
from typing import Any, TypedDict
44
from typing import TYPE_CHECKING
55

66
import numpy as np
@@ -17,7 +17,7 @@
1717
from ._gp import convert_inf
1818
from ._gp import GPRegressor
1919
from ._gp import KernelParamsTensor
20-
from ._optim import suggest_by_carbo
20+
from ._optim import evaluate_by_carbo, suggest_by_carbo
2121

2222

2323
if TYPE_CHECKING:
@@ -34,6 +34,15 @@
3434
_ROBUST_PARAMS_KEY = "robust_params"
3535

3636

37+
class EvaluationResult(TypedDict):
    """Result of re-evaluating one finished trial against the latest GP model."""

    # The finished trial that was re-evaluated.
    trial: FrozenTrial
    # robust parameters can also be obtained from sampler.get_robust_params_from_trial(self["trial"]),
    # but please note that it raises exception when the attribute is not defined.
    robust_params: dict[str, Any]
    # Worst-case robust parameters found by refining the acquisition function
    # locally around ``robust_params``.
    worst_robust_params: dict[str, Any]
    # Acquisition-function value at ``worst_robust_params``.
    worst_robust_acqf_val: float
45+
3746
def _standardize_values(values: np.ndarray) -> tuple[np.ndarray, np.ndarray, np.ndarray]:
3847
clipped_values = convert_inf(values)
3948
means = np.mean(clipped_values, axis=0)
@@ -285,6 +294,90 @@ def after_trial(
285294
_process_constraints_after_trial(self._constraints_func, study, trial, state)
286295
self._independent_sampler.after_trial(study, trial, state, values)
287296

297+
    def reevaluate_trials(
        self,
        study: Study,
    ) -> Sequence[EvaluationResult]:
        """Re-evaluate all completed trials' acquisition values with the latest GP model.

        Refits the objective (and constraint) GP regressors on every completed
        trial, then, for each trial, runs a local worst-case search of the LCB
        acquisition function around that trial's robust parameters.

        Args:
            study: Single-objective study whose completed trials are re-evaluated.

        Returns:
            One ``EvaluationResult`` per completed trial, or an empty list when
            no relative search space can be inferred.

        Raises:
            ValueError: If the study is multi-objective.
        """
        if study._is_multi_objective():
            raise ValueError("CARBOSampler does not support multi-objective optimization.")

        trials = study._get_trials(deepcopy=False, states=(TrialState.COMPLETE,), use_cache=True)
        search_space = self.infer_relative_search_space(study, trials[0])
        if search_space == {}:
            return []

        # Refit the objective GP on all completed trials and refresh the kernel
        # cache so subsequent suggest/evaluate calls warm-start from this fit.
        X_train, y_train = self._preproc(study, trials, search_space)
        gpr = GPRegressor(
            X_train, y_train, kernel_params=self._kernel_params_cache
        ).fit_kernel_params(self._log_prior, self._minimum_noise, self._deterministic)
        self._kernel_params_cache = gpr.kernel_params.clone()
        constraint_vals = (
            None if self._constraints_func is None else _get_constraint_vals(study, trials)
        )
        if constraint_vals is None:
            # Unconstrained problem: skip constraint GPs entirely.
            constraints_gpr_list = None
            constraints_threshold_list = None
        else:
            _cache_list = (
                self._constraints_kernel_params_cache_list
                if self._constraints_kernel_params_cache_list is not None
                else [None] * constraint_vals.shape[-1]  # type: ignore[list-item]
            )
            # Constraint values are negated before standardization; the
            # feasibility threshold is mapped into the standardized space
            # (EPS guards against zero stdev).
            stded_c_vals, means, stdevs = _standardize_values(-constraint_vals)
            constraints_threshold_list = (-means / np.maximum(EPS, stdevs)).tolist()
            C_train = torch.from_numpy(stded_c_vals)
            # One GP per constraint column, each warm-started from its cache.
            constraints_gpr_list = [
                GPRegressor(X_train, c_train, kernel_params=cache).fit_kernel_params(
                    self._log_prior, self._minimum_noise, self._deterministic
                )
                for cache, c_train in zip(_cache_list, C_train.T)
            ]

        lows, highs, is_log = _get_dist_info_as_arrays(search_space)

        # Collect each trial's robust parameters (falling back to the plain
        # trial params when the robust-params system attribute is absent).
        robust_params = np.empty((len(trials), len(search_space)), dtype=float)
        for i, t in enumerate(trials):
            for d, (name, dist) in enumerate(search_space.items()):
                if _ROBUST_PARAMS_KEY in t.system_attrs:
                    robust_params[i, d] = t.system_attrs[_ROBUST_PARAMS_KEY][name]
                else:
                    robust_params[i, d] = t.params[name]
        # Map into the normalized space expected by the acquisition search.
        robust_params = normalize_params(robust_params, is_log, lows, highs)

        results = []

        for i, trial in enumerate(trials):
            # Worst-case local refinement of the acquisition function around
            # this trial's (normalized) robust parameters.
            worst_robust_params, worst_robust_acqf_val = evaluate_by_carbo(
                robust_params=robust_params[i],
                gpr=gpr,
                constraints_gpr_list=constraints_gpr_list,
                constraints_threshold_list=constraints_threshold_list,
                rng=self._rng.rng,
                rho=self._rho,
                beta=self._beta,
                n_local_search=self._n_local_search,
                local_radius=self._local_ratio / 2,
            )

            result: EvaluationResult = {
                "trial": trial,
                "robust_params": trial.system_attrs[_ROBUST_PARAMS_KEY]
                if _ROBUST_PARAMS_KEY in trial.system_attrs
                else trial.params,
                # Map the worst-case point back to the original parameter space.
                "worst_robust_params": {
                    name: float(param_value)
                    for name, param_value in zip(
                        search_space,
                        unnormalize_params(worst_robust_params[None], is_log, lows, highs)[0],
                    )
                },
                "worst_robust_acqf_val": worst_robust_acqf_val,
            }

            results.append(result)

        return results
288381

289382
def _get_constraint_vals(study: Study, trials: list[FrozenTrial]) -> np.ndarray:
290383
_constraint_vals = [

0 commit comments

Comments
 (0)