
Commit

multiple changes
m-meliani committed May 30, 2018
1 parent 08e41ba commit 29e58d1
Showing 4 changed files with 117 additions and 21 deletions.
107 changes: 99 additions & 8 deletions smt/extensions/mfk.py
@@ -3,7 +3,6 @@
Created on Fri May 04 10:26:49 2018
@author: Mostafa Meliani <[email protected]>
Multi-Fidelity co-Kriging: recursive formulation with autoregressive model of
order 1 (AR1)
"""
@@ -27,14 +26,13 @@ class MFK(KrgBased):
"""
- MFK
"""

def _initialize(self):
super(MFK, self)._initialize()
declare = self.options.declare

declare('rho_regr', 'constant',types=FunctionType,\
values=('constant', 'linear', 'quadratic'), desc='regr. term')
declare('theta0', None, types=(np.ndarray),\
declare('theta0', None, types=(np.ndarray), \
desc='Initial hyperparameters')
self.name = 'MFK'

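For orientation, a minimal constructor sketch using the two options declared above (assuming, as the test script further down suggests, that declared options are passed as keyword arguments; rho_regr here simply restates its default and is illustrative only):

    from smt.extensions import MFK
    import numpy as np

    # 'rho_regr' picks the polynomial basis for the scaling term rho(x);
    # 'theta0' seeds the hyperparameter optimization, one value per input dimension.
    sm = MFK(theta0=np.array([1.0]), rho_regr='constant')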
@@ -167,6 +165,7 @@ def _new_train(self):
self._optimize_hyperparam(D)

del self.y_norma, self.D


def _componentwise_distance(self,dx,opt=0):
d = componentwise_distance(dx,self.options['corr'].__name__,
@@ -190,7 +189,7 @@ def _predict_values(self, X):

# Initialization X = atleast_2d(X)
nlevel = self.nlvl
n_eval, n_features_X = X.shape
n_eval, _ = X.shape
# if n_features_X != self.n_features:
# raise ValueError("Design must be an array of n_features columns.")

@@ -227,7 +226,6 @@ def _predict_values(self, X):
f = np.vstack((g.T*mu[:,i-1], f0.T))

Ft = solve_triangular(C, F, lower=True)
#TODO: consider different regressions?
yt = solve_triangular(C, self.y[i], lower=True)
r_t = solve_triangular(C,r_.T, lower=True)
beta = self.optimal_par[i]['beta']
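The recursion this hunk implements can be summarized as follows (notation reconstructed from the code rather than stated in the commit): at level l, the mean is the universal-kriging predictor

    \mu_l(x) = f_l(x)^T \beta_l + r_l(x)^T \gamma_l, \qquad f_l(x) = [\, g(x)\,\mu_{l-1}(x);\ f_0(x)\,],

where g(x) is the rho_regr basis, f_0(x) the poly basis, and r_l(x) the vector of correlations between x and the level-l training points. The previous level's prediction enters the current level's regression part, which is exactly what the f = np.vstack((g.T*mu[:,i-1], f0.T)) line builds.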
@@ -308,7 +306,6 @@ def _predict_variances(self, X):
f = np.vstack((g.T*mu[:,i-1], f0.T))

Ft = solve_triangular(C, F, lower=True)
#TODO: consider different regressions?
yt = solve_triangular(C, self.y[i], lower=True)
r_t = solve_triangular(C,r_.T, lower=True)
G = self.optimal_par[i]['G']
@@ -338,6 +335,100 @@ def _predict_variances(self, X):
for i in range(nlevel):  # scale the variances back to the original output space
MSE[:,i] = self.y_std**2 * MSE[:,i]

self.MSE_all = MSE
return MSE[:,-1].reshape((n_eval,1))


def predict_variances_all_levels(self, X):
"""
Evaluates the model at a set of points.
Arguments
---------
x : np.ndarray [n_evals, dim]
Evaluation point input variable values
Returns
-------
y : np.ndarray
Evaluation point output variable values
"""
# Initialization X = atleast_2d(X)
nlevel = self.nlvl
sigma2_rhos =[]
n_eval, n_features_X = X.shape
# if n_features_X != self.n_features:
# raise ValueError("Design must be an array of n_features columns.")

# Calculate kriging mean and variance at level 0
mu = np.zeros((n_eval, nlevel))
# if self.normalize:
# X = (X - self.X_mean) / self.X_std
## X = (X - self.X_mean[0]) / self.X_std[0]
f = self.options['poly'](X)
f0 = self.options['poly'](X)
dx = manhattan_distances(X, Y=self.X[0], sum_over_features=False)
d = self._componentwise_distance(dx)
# Get regression function and correlation
F = self.F_all[0]
C = self.optimal_par[0]['C']

beta = self.optimal_par[0]['beta']
Ft = solve_triangular(C, F, lower=True)
yt = solve_triangular(C, self.y[0], lower=True)
r_ = self.options['corr'](self.optimal_theta[0], d).reshape(n_eval, self.nt_all[0])
gamma = solve_triangular(C.T, yt - np.dot(Ft,beta), lower=False)

# Scaled predictor
mu[:,0]= (np.dot(f, beta) + np.dot(r_,gamma)).ravel()


self.sigma2_rho = nlevel*[None]
MSE = np.zeros((n_eval,nlevel))
r_t = solve_triangular(C, r_.T, lower=True)
G = self.optimal_par[0]['G']

u_ = solve_triangular(G.T, f.T - np.dot(Ft.T, r_t), lower=True)
MSE[:,0] = self.optimal_par[0]['sigma2'] * (1 \
- (r_t**2).sum(axis=0) + (u_**2).sum(axis=0))
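# A reading of the two statements above in matrix form (here R_0 = C C^T with
# C the Cholesky factor, r_t = C^{-1} r_, and G the triangular factor of Ft,
# so that G^T G = F^T R_0^{-1} F):
#   MSE_0(x) = sigma_0^2 * (1 - r^T R_0^{-1} r + u^T u),  u = G^{-T} (f - F^T R_0^{-1} r)
# i.e. the usual universal-kriging variance at the lowest fidelity level.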

# Calculate recursively kriging mean and variance at level i
for i in range(1,nlevel):
F = self.F_all[i]
C = self.optimal_par[i]['C']
g = self.options['rho_regr'](X)
dx = manhattan_distances(X, Y=self.X[i], sum_over_features=False)
d = self._componentwise_distance(dx)
r_ = self.options['corr'](self.optimal_theta[i], d).reshape(n_eval, self.nt_all[i])
f = np.vstack((g.T*mu[:,i-1], f0.T))

Ft = solve_triangular(C, F, lower=True)
yt = solve_triangular(C, self.y[i], lower=True)
r_t = solve_triangular(C,r_.T, lower=True)
G = self.optimal_par[i]['G']
beta = self.optimal_par[i]['beta']

# scaled predictor at level i (as in _predict_values); mu[:,i] is
# needed by the next level's recursion
gamma = solve_triangular(C.T, yt - np.dot(Ft, beta), lower=False)
mu[:,i] = (np.dot(f.T, beta) + np.dot(r_, gamma)).ravel()
sigma2 = self.optimal_par[i]['sigma2']
q = self.q_all[i]
p = self.p_all[i]
Q_ = (np.dot((yt-np.dot(Ft,beta)).T, yt-np.dot(Ft,beta)))[0,0]
u_ = solve_triangular(G.T, f - np.dot(Ft.T, r_t), lower=True)
sigma2_rho = np.dot(g, \
sigma2*linalg.inv(np.dot(G.T,G))[:q,:q] \
+ np.dot(beta[:q], beta[:q].T))
sigma2_rho = (sigma2_rho * g).sum(axis=1)
sigma2_rhos.append(sigma2_rho)
MSE[:,i] = sigma2_rho * MSE[:,i-1] \
+ Q_/(2*(self.nt_all[i]-p-q)) \
* (1 - (r_t**2).sum(axis=0)) \
+ sigma2 * (u_**2).sum(axis=0)
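# Transcribing the update above (same conventions as at level 0, per level l;
# Q_/(2*(nt_all[l]-p-q)) acts as an estimate of the discrepancy variance):
#   MSE_l(x) = sigma2_rho(x) * MSE_{l-1}(x)
#              + Q_l/(2*(n_l - p - q)) * (1 - r_l^T R_l^{-1} r_l)
#              + sigma_l^2 * u_l^T u_l
# with sigma2_rho(x) = g(x)^T ( sigma_l^2 * [(G^T G)^{-1}]_{1:q,1:q}
#                              + beta_{1:q} beta_{1:q}^T ) g(x),
# the posterior mean square of rho(x), which scales the lower level's MSE.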



# scale the variances back to the original output space
for i in range(nlevel):
MSE[:,i] = self.y_std**2 * MSE[:,i]

return MSE, sigma2_rhos
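A short usage sketch for the new per-level variance API (sm is assumed to be an already-trained MFK instance, as in the test script below; x is an arbitrary prediction grid):

    x = np.linspace(0, 1, 101).reshape(-1, 1)
    MSE_all, sigma2_rhos = sm.predict_variances_all_levels(x)
    # MSE_all has shape (n_eval, nlevel); its last column matches
    # sm.predict_variances(x), which now also caches the array on sm.MSE_all.
    MSE_hf = MSE_all[:, -1]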
27 changes: 16 additions & 11 deletions smt/extensions/tests/test_mfk.py
@@ -15,16 +15,20 @@ def cheap(Xc):
def expensive(Xe):
return ((Xe*6-2)**2)*np.sin((Xe*6-2)*2)

Xe = np.array([[0],[0.4],[1]])
Xc = np.vstack((np.array([[0.1],[0.2],[0.3],[0.5],[0.6],[0.7],[0.8],[0.9]]),Xe))
Xe = np.array([[0],[0.5],[1]])
Xc = np.vstack((np.array([[0.2],[0.5],[0.8]]),Xe))

n_doe = Xe.size
n_cheap = Xc.size

ye = expensive(Xe)
yc = cheap(Xc)

Xr = np.linspace(0,1, 100)
Yr = expensive (Xr)
from smt.extensions import MFK

sm = MFK(theta0=np.array(Xe.shape[1]*[1.]))
sm = MFK(theta0=np.array(Xe.shape[1]*[1.]), print_global = False)


sm.set_training_values(Xc, yc, name =0) # low-fidelity dataset names are integers from 0 to level-1
@@ -35,14 +39,15 @@ def expensive(Xe):
y = sm.predict_values(x)
MSE = sm.predict_variances(x)

fig = plt.figure()
ax = fig.add_subplot(111)
ax.fill_between(np.ravel(x), np.ravel(y-10*np.sqrt(MSE)),np.ravel(y+10*np.sqrt(MSE)), facecolor ="grey", edgecolor="g" ,label ='tolerance +/- 10*sigma')
ax.scatter(Xe, ye, label ='expensive')
ax.scatter(Xc, yc, label ='cheap')
ax.plot(x, y, label ='surrogate')
ax.plot(Xr, expensive(Xr), label ='reference')
plt.figure()

ax.legend(fontsize ='x-large')
plt.plot(Xr, expensive(Xr), label ='reference')
plt.plot(x, y, label ='mean_gp')
plt.plot(Xe, ye, 'ko', label ='expensive doe')
plt.plot(Xc, yc, 'g*', label ='cheap doe')

plt.fill_between(np.ravel(x), np.ravel(y-3*np.sqrt(MSE)),np.ravel(y+3*np.sqrt(MSE)), facecolor ="lightgrey", edgecolor="g" ,label ='tolerance +/- 3*sigma')
plt.legend(loc=0)
plt.ylim(-10,17)
plt.xlim(-0.1,1.1)
plt.show()
2 changes: 1 addition & 1 deletion smt/surrogate_models/krg_based.py
@@ -352,7 +352,7 @@ def _predict_variances(self, x):
r = self.options['corr'](self.optimal_theta, d).reshape(n_eval,self.nt)

C = self.optimal_par['C']
rt = linalg.solve_triangular(self.optimal_par['C'], r.T, lower=True)
rt = linalg.solve_triangular(C, r.T, lower=True)

u = linalg.solve_triangular(self.optimal_par['G'].T,np.dot(self.optimal_par['Ft'].T, rt) -
self.options['poly'](x).T)
2 changes: 1 addition & 1 deletion smt/utils/checks.py
@@ -27,4 +27,4 @@ def check_nx(nx, x):
if nx == 1:
raise ValueError('x should have shape [:, 1] or [:]')
else:
raise ValueError('x should have shape [:, {}]'.format(nx))
raise ValueError('x should have shape [:, {}] and not {}'.format(nx, x.shape))
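A quick illustration of the sharpened message (check_nx's signature comes from the hunk above; the offending shape is hypothetical):

    import numpy as np
    from smt.utils.checks import check_nx

    x = np.ones((5, 3))
    check_nx(2, x)
    # before: ValueError: x should have shape [:, 2]
    # after:  ValueError: x should have shape [:, 2] and not (5, 3)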
