Skip to content

Commit

Permalink
Merge branch 'main' of https://github.com/ches-001/MetaTune into main
Browse files Browse the repository at this point in the history
  • Loading branch information
ches-001 committed May 31, 2023
2 parents 71e0b03 + fcd37fb commit 0118d38
Show file tree
Hide file tree
Showing 6 changed files with 94 additions and 16 deletions.
2 changes: 1 addition & 1 deletion baseline/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -71,7 +71,7 @@ def evaluate_sampled_model(

try:
model_class(**params).fit(X, y)
except ValueError as e:
except (ValueError, NotImplementedError) as e:
raise optuna.exceptions.TrialPruned(e)

model = model_class(**params)
Expand Down
9 changes: 9 additions & 0 deletions tests/test_tuners.py
Original file line number Diff line number Diff line change
Expand Up @@ -305,3 +305,12 @@ class TestRANSACRegressor(BaseTest):
model: BaseTuner = tune_regressor.RANSACRegressorTuner()
task: str = "regression"


class TestLinearDiscriminantAnalysis(BaseTest):
    # Runs the shared BaseTest suite against the LDA tuner on a
    # classification task.
    model: BaseTuner = tune_classifier.LDAClassifierTuner()
    task: str = "classification"


class TestQuadraticDiscriminantAnalysis(BaseTest):
    # Runs the shared BaseTest suite against the QDA tuner on a
    # classification task.
    model: BaseTuner = tune_classifier.QDAClassifierTuner()
    task: str = "classification"
6 changes: 5 additions & 1 deletion tune_classifier/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
from .naive_bayes_classifier import *
from .neighbor_classifier import *
from .mlp_classifier import *
from .discriminant_analysis_classifier import *
from typing import Iterable, Dict, Callable


Expand All @@ -17,6 +18,7 @@
"tune_classifier.naive_bayes_classifier",
"tune_classifier.neighbor_classifier",
"tune_classifier.mlp_classifier",
"tune_classifier.discriminant_analysis_classifier"
]

classifier_tuning_entities: Dict[str, object] = {k:v for k, v in sum(list(map(get_tuner_entities, __modules__)), [])}
Expand Down Expand Up @@ -51,5 +53,7 @@
"KNeighborsClassifierTuner",
"MLPClassifierTuner",
"RadiusNeighborsClassifierTuner",
"NearestCentroidClassifierTuner"
"NearestCentroidClassifierTuner",
"LDAClassifierTuner",
"QDAClassifierTuner"
]
66 changes: 66 additions & 0 deletions tune_classifier/discriminant_analysis_classifier.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,66 @@
from baseline import BaseTuner
from optuna.trial import Trial
from dataclasses import dataclass
from typing import Iterable, Optional, Dict, Any, Union, Callable
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis, QuadraticDiscriminantAnalysis


@dataclass
class LDAClassifierTuner(BaseTuner):
    """Optuna search space for sklearn's ``LinearDiscriminantAnalysis``.

    Attributes:
        solver_space: candidate solvers to sample from.
        shrinkage_space: either a categorical set (``None``/``"auto"``) or a
            two-element ``(low, high)`` float range; ``sample_params`` branches
            on which form it is via ``is_valid_categorical_space``.
        tol_space: ``(low, high)`` range for ``tol``, sampled on a log scale.
    """
    solver_space: Iterable[str] = ("svd", "lsqr", "eigen")
    # Holds None/"auto" by default, but users may supply a float range instead.
    shrinkage_space: Iterable[Optional[Union[str, float]]] = (None, "auto")
    tol_space: Iterable[float] = (1e-10, 1e-1)

    def sample_params(self, trial: Optional[Trial] = None) -> Dict[str, Any]:
        """Sample a ``LinearDiscriminantAnalysis`` kwargs dict from ``trial``."""
        super().sample_params(trial)

        params = {}
        params["solver"] = trial.suggest_categorical(f"{self.__class__.__name__}_solver", self.solver_space)
        # shrinkage_space doubles as a categorical set or a numeric range.
        if self.is_valid_categorical_space(self.shrinkage_space):
            params["shrinkage"] = trial.suggest_categorical(f"{self.__class__.__name__}_shrinkage", self.shrinkage_space)
        else:
            params["shrinkage"] = trial.suggest_float(f"{self.__class__.__name__}_shrinkage", *self.shrinkage_space, log=False)

        params["tol"] = trial.suggest_float(f"{self.__class__.__name__}_tol", *self.tol_space, log=True)

        return params

    def sample_model(self, trial: Optional[Trial] = None) -> Any:
        """Sample params, evaluate the resulting model, store and return it.

        NOTE(review): some solver/shrinkage combinations are presumably
        rejected by sklearn (hence the NotImplementedError pruning added in
        baseline.evaluate_sampled_model) — such trials end up pruned.
        """
        super().sample_model(trial)

        params = self.sample_params(trial)
        # Call via self for consistency with the other tuners (e.g.
        # QDAClassifierTuner); super() resolved to the same inherited method.
        model = self.evaluate_sampled_model("classification", LinearDiscriminantAnalysis, params)

        self.model = model

        return model


@dataclass
class QDAClassifierTuner(BaseTuner):
    """Optuna search space for sklearn's ``QuadraticDiscriminantAnalysis``.

    Attributes:
        reg_param_space: ``(low, high)`` range for ``reg_param``.
        tol_space: ``(low, high)`` range for ``tol``, sampled on a log scale.
    """
    reg_param_space: Iterable[float] = (0.0, 1.0)
    tol_space: Iterable[float] = (1e-10, 1e-1)

    def sample_params(self, trial: Optional[Trial] = None) -> Dict[str, Any]:
        """Draw a ``QuadraticDiscriminantAnalysis`` kwargs dict from ``trial``."""
        super().sample_params(trial)

        prefix = self.__class__.__name__
        return {
            "reg_param": trial.suggest_float(f"{prefix}_reg_param", *self.reg_param_space, log=False),
            "tol": trial.suggest_float(f"{prefix}_tol", *self.tol_space, log=True),
        }

    def sample_model(self, trial: Optional[Trial] = None) -> Any:
        """Sample params, evaluate the resulting model, store and return it."""
        super().sample_model(trial)

        sampled = self.sample_params(trial)
        self.model = self.evaluate_sampled_model("classification", QuadraticDiscriminantAnalysis, sampled)
        return self.model


# Maps each tuner class name to the sklearn estimator class it tunes.
tuner_model_class_dict: Dict[str, Callable] = {
    tuner.__name__: estimator
    for tuner, estimator in (
        (LDAClassifierTuner, LinearDiscriminantAnalysis),
        (QDAClassifierTuner, QuadraticDiscriminantAnalysis),
    )
}
19 changes: 9 additions & 10 deletions tune_classifier/neighbor_classifier.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,11 +7,10 @@

@dataclass
class KNeighborsClassifierTuner(BaseTuner):
radius_space: Iterable[int] = (1, 20)
n_neighbors_space: Iterable[int] = (1, 10)
weights_space: Iterable[str] = ("uniform", "distance")
algorithm_space: Iterable[str] = ("ball_tree", "kd_tree", "brute")
leaf_size_space: Iterable[int] = (2, 60)
leaf_size_space: Iterable[int] = (2, 100)
p_space: Iterable[int] = (3, 8)
metric_space: Iterable[str] = ("cityblock", "cosine", "euclidean", "manhattan", "minkowski")

Expand All @@ -20,11 +19,11 @@ def sample_params(self, trial: Optional[Trial] = None) -> Dict[str, Any]:

params = {}
params["n_neighbors"] = trial.suggest_categorical(
"n_neighbors", [i for i in range(*self.n_neighbors_space) if i % 2 != 0 and i != 1])
f"{self.__class__.__name__}_n_neighbors", [i for i in range(*self.n_neighbors_space) if i % 2 != 0 and i != 1])
params["weights"] = trial.suggest_categorical(f"{self.__class__.__name__}_weight", self.weights_space)
params["algorithm"] = trial.suggest_categorical(f"{self.__class__.__name__}_algorithm", self.algorithm_space)
params["leaf_size"] = trial.suggest_int(f"{self.__class__.__name__}_leaf_size", *self.leaf_size_space)
params["p"] = trial.suggest_int(f"{self.__class__.__name__}_p", *self.p_space)
params["leaf_size"] = trial.suggest_int(f"{self.__class__.__name__}_leaf_size", *self.leaf_size_space, log=True)
params["p"] = trial.suggest_int(f"{self.__class__.__name__}_p", *self.p_space, log=False)
params["metric"] = trial.suggest_categorical(f"{self.__class__.__name__}_metric", self.metric_space)

return params
Expand All @@ -46,7 +45,7 @@ class RadiusNeighborsClassifierTuner(BaseTuner):
radius_space: Iterable[int] = (1, 10)
weight_space: Iterable[str] = ("uniform", "distance")
algorithm_space: Iterable[str] = ("ball_tree", "kd_tree", "brute")
leaf_size_space: Iterable[int] = (2, 60)
leaf_size_space: Iterable[int] = (2, 100)
p_space: Iterable[int] = (3, 10)
metric_space: Iterable[str] = ("cityblock", "cosine", "euclidean", "manhattan", "minkowski")
outlier_label_space: Iterable[str] = (None, "most_frequent")
Expand All @@ -55,11 +54,11 @@ def sample_params(self, trial: Optional[Trial]=None) -> Dict[str, Any]:
super().sample_params(trial)

params = {}
params["radius"] = trial.suggest_int(f"{self.__class__.__name__}_radius", *self.radius_space)
params["radius"] = trial.suggest_int(f"{self.__class__.__name__}_radius", *self.radius_space, log=False)
params["weights"] = trial.suggest_categorical(f"{self.__class__.__name__}_weight", self.weight_space)
params["algorithm"] = trial.suggest_categorical(f"{self.__class__.__name__}_algorithm", self.algorithm_space)
params["leaf_size"] = trial.suggest_int(f"{self.__class__.__name__}_leaf_size", *self.leaf_size_space)
params["p"] = trial.suggest_int(f"{self.__class__.__name__}_p", *self.p_space)
params["leaf_size"] = trial.suggest_int(f"{self.__class__.__name__}_leaf_size", *self.leaf_size_space, log=True)
params["p"] = trial.suggest_int(f"{self.__class__.__name__}_p", *self.p_space, log=False)
params["metric"] = trial.suggest_categorical(f"{self.__class__.__name__}_metric", self.metric_space)
params["outlier_label"] = trial.suggest_categorical(f"{self.__class__.__name__}_outlier_label", self.outlier_label_space)

Expand Down Expand Up @@ -87,7 +86,7 @@ def sample_params(self, trial: Optional[Trial] = None) -> Dict[str, Any]:

params = {}
params["metric"] = trial.suggest_categorical(f"{self.__class__.__name__}_metric", self.metric_space)
params["shrink_threshold"] = trial.suggest_float(f"{self.__class__.__name__}_shrink_threshold", *self.shrink_threshold_space)
params["shrink_threshold"] = trial.suggest_float(f"{self.__class__.__name__}_shrink_threshold", *self.shrink_threshold_space, log=False)

return params

Expand Down
8 changes: 4 additions & 4 deletions tune_regressor/neighbor_regressor.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,19 +27,19 @@ class RadiusNeighborsRegressorTuner(BaseTuner):
radius_space: Iterable[int] = (2, 20)
weight_space: Iterable[str] = ("uniform", "distance")
algorithm_space: Iterable[str] = ("ball_tree", "kd_tree", "brute")
leaf_size_space: Iterable[int] = (2, 60)
leaf_size_space: Iterable[int] = (2, 100)
p_space: Iterable[int] = (3, 10)
metric_space: Iterable[str] = ("cityblock", "cosine", "euclidean", "manhattan", "minkowski")

def sample_params(self, trial: Optional[Trial] = None) -> Dict[str, Any]:
super().sample_params(trial)

params = {}
params["radius"] = trial.suggest_int(f"{self.__class__.__name__}_radius", *self.radius_space)
params["radius"] = trial.suggest_int(f"{self.__class__.__name__}_radius", *self.radius_space, log=False)
params["weights"] = trial.suggest_categorical(f"{self.__class__.__name__}_weight", self.weight_space)
params["algorithm"] = trial.suggest_categorical(f"{self.__class__.__name__}_algorithm", self.algorithm_space)
params["leaf_size"] = trial.suggest_int(f"{self.__class__.__name__}_leaf_size", *self.leaf_size_space)
params["p"] = trial.suggest_int(f"{self.__class__.__name__}_p", *self.p_space)
params["leaf_size"] = trial.suggest_int(f"{self.__class__.__name__}_leaf_size", *self.leaf_size_space, log=True)
params["p"] = trial.suggest_int(f"{self.__class__.__name__}_p", *self.p_space, log=False)
params["metric"] = trial.suggest_categorical(f"{self.__class__.__name__}_metric", self.metric_space)

return params
Expand Down

0 comments on commit 0118d38

Please sign in to comment.