diff --git a/smt/applications/mfk.py b/smt/applications/mfk.py
index 0a3847e2c..d113e3ed0 100644
--- a/smt/applications/mfk.py
+++ b/smt/applications/mfk.py
@@ -123,7 +123,10 @@ def _initialize(self):
             desc="Turning this option to True, forces variance to zero at HF samples ",
         )
         declare(
-            "noise0", 1e-6, types=(float, list), desc="Initial noise hyperparameters"
+            "noise0",
+            [1e-6],
+            types=(list, np.ndarray),
+            desc="Initial noise hyperparameters",
         )
 
         self.name = "MFK"
@@ -208,8 +211,8 @@ def _new_train(self):
         self.X_norma_all = [(x - self.X_offset) / self.X_scale for x in X]
         self.y_norma_all = [(f - self.y_mean) / self.y_std for f in y]
 
-        if isinstance(self.options["noise0"], float):
-            self.options["noise0"] = self.nlvl * [self.options["noise0"]]
+        if len(self.options["noise0"]) == 1:
+            self.options["noise0"] = self.nlvl * self.options["noise0"]
         noise0 = self.options["noise0"].copy()
 
         if (
@@ -227,7 +230,7 @@ def _new_train(self):
         theta0 = self.options["theta0"].copy()
 
         for lvl in range(nlevel):
-            self.options["noise0"] = noise0[lvl]
+            self.options["noise0"] = [noise0[lvl]]
             self.options["theta0"] = theta0[lvl, :]
 
             self.X_norma = self.X_norma_all[lvl]
diff --git a/smt/applications/mfkpls.py b/smt/applications/mfkpls.py
index 4870b30b8..66417cfa7 100644
--- a/smt/applications/mfkpls.py
+++ b/smt/applications/mfkpls.py
@@ -57,7 +57,12 @@ def _initialize(self):
             values=(True, False),
             desc="noise evaluation flag",
         )
-        declare("noise0", 1e-6, types=float, desc="Initial noise hyperparameter")
+        declare(
+            "noise0",
+            [1e-6],
+            types=(list, np.ndarray),
+            desc="Initial noise hyperparameter",
+        )
         self.name = "MFKPLS"
 
     def _componentwise_distance(self, dx, opt=0):
diff --git a/smt/surrogate_models/krg_based.py b/smt/surrogate_models/krg_based.py
index ad46cfbd6..737e11ddf 100644
--- a/smt/surrogate_models/krg_based.py
+++ b/smt/surrogate_models/krg_based.py
@@ -180,8 +180,8 @@ def _new_train(self):
             self._specific_train()
         else:
             if self.options["eval_noise"]:
-                self.noise = self.optimal_theta[self.nx :]
-                self.optimal_theta = self.optimal_theta[0 : self.nx]
+                self.noise = self.optimal_theta[self.D.shape[1] :]
+                self.optimal_theta = self.optimal_theta[0 : self.D.shape[1]]
 
         # if self.name != "MGP":
         #     del self.y_norma, self.D
@@ -244,8 +244,8 @@ def _reduced_likelihood_function(self, theta):
         noise = 0
         tmp_var = theta
         if self.options["eval_noise"]:
-            theta = tmp_var[0 : self.nx]
-            noise = tmp_var[self.nx :]
+            theta = tmp_var[0 : self.D.shape[1]]
+            noise = tmp_var[self.D.shape[1] :]
 
         r = self._correlation_types[self.options["corr"]](theta, self.D).reshape(-1, 1)
         if self.options["is_noise_het"]:
@@ -1066,12 +1066,15 @@ def _check_param(self):
 
         # FIXME: _check_param should be overriden in corresponding subclasses
         if self.name in ["KPLS", "KPLSK", "GEKPLS", "MFKPLS", "MFKPLSK"]:
             d = self.options["n_comp"]
         elif self.name in ["MGP"]:
             d = self.options["n_comp"] * self.nx
         else:
             d = self.nx
 
+        # self.n_features_x = d
+        # if self.name in ["KPLS", "MFKPLSK"]:
+        #     self.n_features_x = self.nx
 
         if self.name in ["MGP"]:
             if self.options["corr"] != "act_exp":
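
Usage sketch (not part of the patch): after this change, "noise0" is always a list or ndarray. A single entry is broadcast to all fidelity levels in _new_train, and each level's value is re-wrapped as [noise0[lvl]] before that level is trained. A minimal example follows, assuming the smt MFK training API; the toy data and option values are illustrative only, not taken from this PR:

import numpy as np
from smt.applications.mfk import MFK

# Toy nested 1-D multi-fidelity data (illustrative only).
xt_lf = np.linspace(0.0, 1.0, 11).reshape(-1, 1)  # low-fidelity inputs
yt_lf = 0.5 * np.sin(8.0 * xt_lf) + 0.1           # cheap, biased response
xt_hf = xt_lf[::2]                                # nested high-fidelity subset
yt_hf = np.sin(8.0 * xt_hf)                       # expensive response

# noise0 is now passed as a list: one entry per fidelity level, or a
# single entry that _new_train broadcasts to all levels (both here).
sm = MFK(theta0=[1e-2], noise0=[1e-6], print_global=False)
sm.set_training_values(xt_lf, yt_lf, name=0)  # fidelity level 0 (lowest)
sm.set_training_values(xt_hf, yt_hf)          # highest fidelity level
sm.train()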