Skip to content

Commit 37bb5c1

Browse files
authored
Avoid distances computation when using SGP (#497)
1 parent 620ecfb commit 37bb5c1

1 file changed

Lines changed: 21 additions & 14 deletions

File tree

smt/surrogate_models/krg_based.py

Lines changed: 21 additions & 14 deletions
Original file line number | Diff line number | Diff line change
@@ -371,6 +371,7 @@ def _new_train(self):
371371
self.is_acting_train = is_acting
372372
self._corr_params = None
373373
_, self.cat_features = compute_X_cont(self.X_train, self.design_space)
374+
D = None # For SGP, D is not computed at all
374375
if not (self.is_continuous):
375376
D, self.ij, X = gower_componentwise_distances(
376377
X=X,
@@ -451,14 +452,16 @@ def _new_train(self):
451452
self.optimal_noise[i] = np.std(diff, ddof=1) ** 2
452453
self.optimal_noise = self.optimal_noise / nt_reps
453454
self.y_norma = y_norma_unique
454-
if self.is_continuous:
455-
# Calculate matrix of distances D between samples
456-
D, self.ij = cross_distances(self.X_norma)
457455

458-
if np.min(np.sum(np.abs(D), axis=1)) == 0.0:
459-
warnings.warn(
460-
"Warning: multiple x input features have the same value (at least same row twice)."
461-
)
456+
if self.name not in ["SGP"]:
457+
if self.is_continuous:
458+
# Calculate matrix of distances D between samples
459+
D, self.ij = cross_distances(self.X_norma)
460+
461+
if np.min(np.sum(np.abs(D), axis=1)) == 0.0:
462+
warnings.warn(
463+
"Warning: multiple x input features have the same value (at least same row twice)."
464+
)
462465

463466
####
464467
# Regression matrix and parameters
@@ -1692,6 +1695,7 @@ def _optimize_hyperparam(self, D):
16921695
D: np.ndarray [n_obs * (n_obs - 1) / 2, dim]
16931696
- The componentwise cross-spatial-correlation-distance between the
16941697
vectors in X.
1698+
For SGP surrogate, D is not used
16951699
16961700
Returns
16971701
-------
@@ -1799,11 +1803,14 @@ def grad_minus_reduced_likelihood_function(log10t):
17991803
)
18001804
theta0 = np.log10(self.theta0)
18011805

1802-
if not (self.is_continuous):
1803-
self.D = D
1804-
else:
1805-
##from abs distance to kernel distance
1806-
self.D = self._componentwise_distance(D, opt=ii)
1806+
if self.name not in ["SGP"]:
1807+
if not (self.is_continuous):
1808+
self.D = D
1809+
else:
1810+
##from abs distance to kernel distance
1811+
self.D = self._componentwise_distance(D, opt=ii)
1812+
else: # SGP case, D is not used
1813+
pass
18071814

18081815
# Initialization
18091816
k, incr, stop, best_optimal_rlf_value, max_retry = 0, 0, 1, -1e20, 10
@@ -1856,11 +1863,11 @@ def grad_minus_reduced_likelihood_function(log10t):
18561863
for i in range(len(self.noise0)):
18571864
noise_bounds = np.log10(noise_bounds)
18581865
constraints.append(
1859-
lambda log10t: log10t[offset + i + len(self.theta0)]
1866+
lambda log10t, i=i: log10t[offset + i + len(self.theta0)]
18601867
- noise_bounds[0]
18611868
)
18621869
constraints.append(
1863-
lambda log10t: noise_bounds[1]
1870+
lambda log10t, i=i: noise_bounds[1]
18641871
- log10t[offset + i + len(self.theta0)]
18651872
)
18661873
bounds_hyp.append(noise_bounds)

0 commit comments

Comments (0)