Skip to content

Commit abb65bf

Browse files
authored
Add Gradient Enhanced EGO optimization (SMTorg#340)
Add Gradient Enhanced EGO optimization (SMTorg#340)

* Add Gradient Enhanced EGO optimization
* Fix test_ego_gek result assertion
* Clarify superclass object of ego_gek test problem class
* Simplify y_data update in ego.optimize()
* Change location of GEKPLS name declaration for correct printing purposes
* Change ego_gek test optimization function to exp
* Set n_comp=2 for GEKPLS objects to pass option assertions
* Update author list
1 parent 45fb78c commit abb65bf

File tree

7 files changed

+76
-14
lines changed

7 files changed

+76
-14
lines changed

AUTHORS.md

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -26,3 +26,4 @@ SMT has been developed thanks to contributions from:
2626
* Ruben Conde
2727
* Steven Berguin
2828
* Vincent Drouet
29+
* Laurent Wilkens

smt/applications/ego.py

Lines changed: 13 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -22,7 +22,7 @@
2222
)
2323
from smt.utils.misc import compute_rms_error
2424

25-
from smt.surrogate_models import KPLS, KRG, KPLSK, MGP
25+
from smt.surrogate_models import KPLS, KRG, KPLSK, MGP, GEKPLS
2626
from smt.sampling_methods import LHS
2727

2828

@@ -119,7 +119,7 @@ def _initialize(self):
119119
declare(
120120
"surrogate",
121121
KRG(print_global=False),
122-
types=(KRG, KPLS, KPLSK, MGP),
122+
types=(KRG, KPLS, KPLSK, GEKPLS, MGP),
123123
desc="SMT kriging-based surrogate model used internaly",
124124
)
125125
declare(
@@ -180,7 +180,8 @@ def optimize(self, fun):
180180
y_et_k = self._get_virtual_point(np.atleast_2d(x_et_k), y_data)
181181

182182
# Update y_data with predicted value
183-
y_data = np.atleast_2d(np.append(y_data, y_et_k)).T
183+
y_data = y_data.reshape(y_data.shape[0], self.gpr.ny)
184+
y_data = np.vstack((y_data, y_et_k))
184185
x_data = np.atleast_2d(np.append(x_data, x_et_k, axis=0))
185186

186187
# Compute the real values of y_data
@@ -191,7 +192,7 @@ def optimize(self, fun):
191192
y_data[-n_parallel:] = y
192193

193194
# Find the optimal point
194-
ind_best = np.argmin(y_data)
195+
ind_best = np.argmin(y_data if y_data.ndim == 1 else y_data[:, 0])
195196
x_opt = x_data[ind_best]
196197
y_opt = y_data[ind_best]
197198

@@ -335,6 +336,13 @@ def _find_best_point(self, x_data=None, y_data=None, enable_tunneling=False):
335336
336337
"""
337338
self.gpr.set_training_values(x_data, y_data)
339+
if self.gpr.supports["training_derivatives"]:
340+
for kx in range(self.gpr.nx):
341+
self.gpr.set_training_derivatives(
342+
x_data,
343+
y_data[:, 1 + kx].reshape((y_data.shape[0], 1)),
344+
kx
345+
)
338346
self.gpr.train()
339347

340348
criterion = self.options["criterion"]
@@ -364,7 +372,7 @@ def _find_best_point(self, x_data=None, y_data=None, enable_tunneling=False):
364372
try:
365373
opt_all.append(
366374
minimize(
367-
lambda x: float(self.obj_k(x)),
375+
lambda x: float(np.array(self.obj_k(x)).flat[0]),
368376
x_start[ii, :],
369377
method="SLSQP",
370378
bounds=bounds,

smt/applications/tests/test_ego.py

Lines changed: 48 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,7 @@
2323
from smt.problems import Branin, Rosenbrock
2424
from smt.sampling_methods import FullFactorial
2525
from multiprocessing import Pool
26-
from smt.surrogate_models import KRG, QP
26+
from smt.surrogate_models import KRG, QP, GEKPLS
2727
from smt.applications.mixed_integer import (
2828
MixedIntegerContext,
2929
MixedIntegerSamplingMethod,
@@ -464,6 +464,53 @@ def test_find_best_point(self):
464464
x, _ = ego._find_best_point(xdoe, ydoe, enable_tunneling=False)
465465
self.assertAlmostEqual(6.5, float(x), delta=1)
466466

467+
def test_ego_gek(self):
468+
from smt.problems import TensorProduct
469+
470+
class TensorProductIndirect(TensorProduct):
471+
def __init__(self, **kwargs):
472+
super().__init__(**kwargs)
473+
self.super = super()
474+
475+
def _evaluate(self, x, kx):
476+
assert kx is None
477+
response = self.super._evaluate(x, kx)
478+
sens = np.hstack(self.super._evaluate(x, ki) for ki in range(x.shape[1]))
479+
return np.hstack((response, sens))
480+
481+
fun = TensorProductIndirect(ndim=2, func="exp")
482+
483+
# Construction of the DOE
484+
sampling = LHS(xlimits=fun.xlimits, criterion="m")
485+
xdoe = sampling(20)
486+
ydoe = fun(xdoe)
487+
488+
# Build the GEKPLS surrogate model
489+
n_comp = 2
490+
sm = GEKPLS(
491+
theta0=[1e-2] * n_comp,
492+
xlimits=fun.xlimits,
493+
extra_points=1,
494+
print_prediction=False,
495+
n_comp=n_comp,
496+
)
497+
498+
# Build the EGO optimizer and optimize
499+
ego = EGO(
500+
xdoe=xdoe,
501+
ydoe=ydoe,
502+
n_iter=5,
503+
criterion="LCB",
504+
xlimits=fun.xlimits,
505+
surrogate=sm,
506+
n_start=30,
507+
enable_tunneling=False,
508+
)
509+
x_opt, _, _, _, _ = ego.optimize(fun=fun)
510+
511+
self.assertAlmostEqual(-1.0, float(x_opt[0]), delta=1e-4)
512+
self.assertAlmostEqual(-1.0, float(x_opt[1]), delta=1e-4)
513+
467514
def test_qei_criterion_default(self):
468515
fun = TestEGO.function_test_1d
469516
xlimits = np.array([[0.0, 25.0]])

smt/surrogate_models/gekpls.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,6 @@
1010

1111

1212
class GEKPLS(KPLS):
13-
name = "GEKPLS"
1413

1514
def _initialize(self):
1615
super(GEKPLS, self)._initialize()
@@ -37,6 +36,8 @@ def _initialize(self):
3736
)
3837
self.supports["training_derivatives"] = True
3938

39+
self.name = "GEKPLS"
40+
4041
def _compute_pls(self, X, y):
4142
if 0 in self.training_points[None]:
4243
self.coeff_pls, XX, yy = ge_compute_pls(

smt/surrogate_models/tests/test_surrogate_model_examples.py

Lines changed: 6 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -411,8 +411,13 @@ def test_gekpls(self):
411411
yt = np.concatenate((yt, yd), axis=1)
412412

413413
# Build the GEKPLS model
414+
n_comp = 2
414415
sm = GEKPLS(
415-
theta0=[1e-2], xlimits=fun.xlimits, extra_points=1, print_prediction=False
416+
theta0=[1e-2] * n_comp,
417+
xlimits=fun.xlimits,
418+
extra_points=1,
419+
print_prediction=False,
420+
n_comp=n_comp,
416421
)
417422
sm.set_training_values(xt, yt[:, 0])
418423
for i in range(2):

smt/tests/test_all.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -73,7 +73,7 @@ def setUp(self):
7373
sms["KPLS"] = KPLS(theta0=[1e-2] * ncomp, n_comp=ncomp)
7474
sms["KPLSK"] = KPLSK(theta0=[1] * ncomp, n_comp=ncomp)
7575
sms["MGP"] = KPLSK(theta0=[1e-2] * ncomp, n_comp=ncomp)
76-
sms["GEKPLS"] = GEKPLS(theta0=[1e-2] * ncomp, n_comp=ncomp, delta_x=1e-1)
76+
sms["GEKPLS"] = GEKPLS(theta0=[1e-2] * 2, n_comp=2, delta_x=1e-1)
7777
sms["GENN"] = genn()
7878
if compiled_available:
7979
sms["IDW"] = IDW()

smt/utils/kriging_utils.py

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1048,7 +1048,7 @@ def ge_compute_pls(X, y, n_comp, pts, delta_x, xlimits, extra_points):
10481048
X: np.ndarray [n_obs,dim]
10491049
- - The input variables.
10501050
1051-
y: np.ndarray [n_obs,1]
1051+
y: np.ndarray [n_obs,ny]
10521052
- The output variable
10531053
10541054
n_comp: int
@@ -1081,7 +1081,7 @@ def ge_compute_pls(X, y, n_comp, pts, delta_x, xlimits, extra_points):
10811081
"""
10821082
nt, dim = X.shape
10831083
XX = np.empty(shape=(0, dim))
1084-
yy = np.empty(shape=(0, 1))
1084+
yy = np.empty(shape=(0, y.shape[1]))
10851085
_pls = pls(n_comp)
10861086

10871087
coeff_pls = np.zeros((nt, dim, n_comp))
@@ -1168,9 +1168,9 @@ def ge_compute_pls(X, y, n_comp, pts, delta_x, xlimits, extra_points):
11681168
for ii in max_coeff:
11691169
XX = np.vstack((XX, X[i, :]))
11701170
XX[-1, ii] += delta_x * (xlimits[ii, 1] - xlimits[ii, 0])
1171-
yy = np.vstack((yy, y[i, 0]))
1172-
yy[-1, 0] += (
1173-
pts[None][1 + ii][1][i, 0]
1171+
yy = np.vstack((yy, y[i]))
1172+
yy[-1] += (
1173+
pts[None][1 + ii][1][i]
11741174
* delta_x
11751175
* (xlimits[ii, 1] - xlimits[ii, 0])
11761176
)

0 commit comments

Comments (0)