
Commit

module import edit
ches-001 committed Jun 1, 2023
1 parent d6fc781 commit 858dfa3
Showing 24 changed files with 198 additions and 225 deletions.
4 changes: 2 additions & 2 deletions .gitignore
@@ -1,6 +1,6 @@
 **/__pycache__/
-**/data/
-nb_test.ipynb
+tests/data/
+tests/nb_test.ipynb
 temp.py
 .gitignore
 .pytest_cache
16 changes: 8 additions & 8 deletions _metatune.py
@@ -1,9 +1,9 @@
 import inspect, copy
-from baseline import BaseTuner, TrialCheckMixin
+from .baseline import BaseTuner, TrialCheckMixin
 from optuna.trial import Trial, FrozenTrial
-from tune_regressor import regressor_tuning_entities, regressor_tuner_model_class_dict
-from tune_classifier import classifier_tuning_entities, classifier_tuner_model_class_dict
-from utils import make_default_tuner_type_mutable
+from .tune_regressor import regressor_search_space, regressor_tuner_model_class_map
+from .tune_classifier import classifier_search_space, classifier_tuner_model_class_map
+from .utils import make_default_tuner_type_mutable
 from typing import Iterable, Tuple, Dict, Union, Optional, Any, Callable


@@ -143,12 +143,12 @@ def __init__(
         self.single_tuner = single_tuner

         if self.task == "regression":
-            self.search_space: Dict[str, BaseTuner] = copy.deepcopy(regressor_tuning_entities)
-            self.tuner_model_class_map: Dict[str, Callable] = copy.deepcopy(regressor_tuner_model_class_dict)
+            self.search_space: Dict[str, BaseTuner] = copy.deepcopy(regressor_search_space)
+            self.tuner_model_class_map: Dict[str, Callable] = copy.deepcopy(regressor_tuner_model_class_map)

         else:
-            self.search_space: Dict[str, BaseTuner] = copy.deepcopy(classifier_tuning_entities)
-            self.tuner_model_class_map: Dict[str, Callable] = copy.deepcopy(classifier_tuner_model_class_dict)
+            self.search_space: Dict[str, BaseTuner] = copy.deepcopy(classifier_search_space)
+            self.tuner_model_class_map: Dict[str, Callable] = copy.deepcopy(classifier_tuner_model_class_map)

         self._exclude_tuners()
         self._prepare_custom_tuners()
9 changes: 9 additions & 0 deletions readme.md
@@ -114,3 +114,12 @@ except Exception as e:
```

This way, instead of terminating the metaheuristic search program, optuna simply skips to the next trial and logs why the trial was pruned. Similarly, if you want to optimize an objective that relies on predicted probability scores for each class rather than the class label, you can call the `predict_proba(...)` method on `X_train` in the `try` block, just after calling the `fit(...)` method.
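
A minimal, self-contained sketch of such a probability-based objective is shown below. The `LogisticRegression` model, the iris data, and the `log_loss` metric are stand-in assumptions for illustration only, not part of this library's API; in practice the model would be the one sampled from the search space as shown above.

```python
import optuna
from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import log_loss

# toy data standing in for the X_train / y_train used in the example above
X_train, y_train = load_iris(return_X_y=True)

def objective(trial: optuna.trial.Trial) -> float:
    # a plain scikit-learn model stands in for a model sampled from the search
    # space; the single hyperparameter suggested here is purely illustrative
    model = LogisticRegression(
        C=trial.suggest_float("C", 1e-3, 10.0, log=True), max_iter=200
    )
    try:
        model.fit(X_train, y_train)
        # score on predicted class probabilities rather than hard class labels
        probas = model.predict_proba(X_train)
        return log_loss(y_train, probas)
    except Exception as e:
        # skip to the next trial instead of terminating the whole search
        raise optuna.TrialPruned(f"Trial pruned due to: {e}")

study = optuna.create_study(direction="minimize")
study.optimize(objective, n_trials=10)
```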

<br>
<br>
<hr>

## Advanced Configurations


<hr>
8 changes: 4 additions & 4 deletions tests/test_tuners.py
@@ -1,7 +1,7 @@
-from baseline.base import BaseTuner
-import tune_classifier
-import tune_regressor
-from tests.utils import BaseTest
+from ..baseline.base import BaseTuner
+from .. import tune_classifier
+from .. import tune_regressor
+from .utils import BaseTest


 # run python -m pytest -v
10 changes: 5 additions & 5 deletions tests/utils.py
@@ -3,9 +3,9 @@
 from sklearn import datasets
 from sklearn.preprocessing import MinMaxScaler
 from optuna.trial import Trial
-from baseline.base import BaseTuner
-from tune_classifier import classifier_tuner_model_class_dict
-from tune_regressor import regressor_tuner_model_class_dict
+from ..baseline.base import BaseTuner
+from ..tune_classifier import classifier_tuner_model_class_map
+from ..tune_regressor import regressor_tuner_model_class_map


 # load sample datasets
@@ -59,10 +59,10 @@ class BaseTest:

     def test_dict_mapping(self):
         if self.task == "classification":
-            assert self.model.__class__.__name__ in classifier_tuner_model_class_dict.keys()
+            assert self.model.__class__.__name__ in classifier_tuner_model_class_map.keys()

         elif self.task == "regression":
-            assert self.model.__class__.__name__ in regressor_tuner_model_class_dict.keys()
+            assert self.model.__class__.__name__ in regressor_tuner_model_class_map.keys()

         else: assert False

82 changes: 62 additions & 20 deletions tune_classifier/__init__.py
@@ -1,4 +1,4 @@
-from utils.module_utils import get_tuner_entities, get_tuner_model_dict
+from ..utils import make_default_tuner_type_mutable
 from .svc import *
 from .tree_classifier import *
 from .linear_model_classifier import *
@@ -7,32 +7,74 @@
 from .neighbor_classifier import *
 from .mlp_classifier import *
 from .discriminant_analysis_classifier import *
-from utils import make_default_tuner_type_mutable
 from typing import Iterable, Dict, Callable


-__modules__: Iterable[str] = [
-    "tune_classifier.svc",
-    "tune_classifier.tree_classifier",
-    "tune_classifier.linear_model_classifier",
-    "tune_classifier.ensemble_classifier",
-    "tune_classifier.naive_bayes_classifier",
-    "tune_classifier.neighbor_classifier",
-    "tune_classifier.mlp_classifier",
-    "tune_classifier.discriminant_analysis_classifier"
-]

-classifier_tuning_entities: Dict[str, object] = {k:v for k, v in sum(list(map(get_tuner_entities, __modules__)), [])}
-classifier_tuning_entities: Dict[str, object] = dict(
-    map(lambda pair : (pair[0], make_default_tuner_type_mutable(pair[1])), classifier_tuning_entities.items()))
+classifier_tuner_model_class_map: Dict[str, Callable] = {
+    SVCTuner.__name__: SVC,
+    LinearSVCTuner.__name__: LinearSVC,
+    NuSVCTuner.__name__: NuSVC,
+    DecisionTreeClassifierTuner.__name__: DecisionTreeClassifier,
+    ExtraTreeClassifierTuner.__name__: ExtraTreeClassifier,
+    LogisticRegressionTuner.__name__: LogisticRegression,
+    PerceptronTuner.__name__: Perceptron,
+    PassiveAggressiveClassifierTuner.__name__: PassiveAggressiveClassifier,
+    SGDClassifierTuner.__name__: SGDClassifier,
+    RandomForestClassifierTuner.__name__: RandomForestClassifier,
+    ExtraTreesClassifierTuner.__name__: ExtraTreesClassifier,
+    AdaBoostClassifierTuner.__name__: AdaBoostClassifier,
+    GradientBoostingClassifierTuner.__name__: GradientBoostingClassifier,
+    BaggingClassifierTuner.__name__: BaggingClassifier,
+    HistGradientBoostingClassifierTuner.__name__: HistGradientBoostingClassifier,
+    GaussianNBTuner.__name__: GaussianNB,
+    BernoulliNBTuner.__name__: BernoulliNB,
+    MultinomialNBTuner.__name__: MultinomialNB,
+    ComplementNBTuner.__name__: ComplementNB,
+    CategoricalNBTuner.__name__: CategoricalNB,
+    KNeighborsClassifierTuner.__name__: KNeighborsClassifier,
+    RadiusNeighborsClassifierTuner.__name__: RadiusNeighborsClassifier,
+    NearestCentroidClassifierTuner.__name__: NearestCentroid,
+    MLPClassifierTuner.__name__: MLPClassifier,
+    LDAClassifierTuner.__name__: LinearDiscriminantAnalysis,
+    QDAClassifierTuner.__name__: QuadraticDiscriminantAnalysis,
+}

-classifier_tuner_model_class_dict: Dict[str, Callable] = {
-    k:v for _dict in map(get_tuner_model_dict, __modules__) for k, v in _dict.items()
+classifier_search_space: Dict[str, BaseTuner] = {
+    SVCTuner.__name__: SVCTuner(),
+    LinearSVCTuner.__name__: LinearSVCTuner(),
+    NuSVCTuner.__name__: NuSVCTuner(),
+    DecisionTreeClassifierTuner.__name__: DecisionTreeClassifierTuner(),
+    ExtraTreeClassifierTuner.__name__: ExtraTreeClassifierTuner(),
+    LogisticRegressionTuner.__name__: LogisticRegressionTuner(),
+    PerceptronTuner.__name__: PerceptronTuner(),
+    PassiveAggressiveClassifierTuner.__name__: PassiveAggressiveClassifierTuner(),
+    SGDClassifierTuner.__name__: SGDClassifierTuner(),
+    RandomForestClassifierTuner.__name__: RandomForestClassifierTuner(),
+    ExtraTreesClassifierTuner.__name__: ExtraTreesClassifierTuner(),
+    AdaBoostClassifierTuner.__name__: AdaBoostClassifierTuner(),
+    GradientBoostingClassifierTuner.__name__: GradientBoostingClassifierTuner(),
+    BaggingClassifierTuner.__name__: BaggingClassifierTuner(),
+    HistGradientBoostingClassifierTuner.__name__: HistGradientBoostingClassifierTuner(),
+    GaussianNBTuner.__name__: GaussianNBTuner(),
+    BernoulliNBTuner.__name__: BernoulliNBTuner(),
+    MultinomialNBTuner.__name__: MultinomialNBTuner(),
+    ComplementNBTuner.__name__: ComplementNBTuner(),
+    CategoricalNBTuner.__name__: CategoricalNBTuner(),
+    KNeighborsClassifierTuner.__name__: KNeighborsClassifierTuner(),
+    RadiusNeighborsClassifierTuner.__name__: RadiusNeighborsClassifierTuner(),
+    NearestCentroidClassifierTuner.__name__: NearestCentroidClassifierTuner(),
+    MLPClassifierTuner.__name__: MLPClassifierTuner(),
+    LDAClassifierTuner.__name__: LDAClassifierTuner(),
+    QDAClassifierTuner.__name__: QDAClassifierTuner(),
 }

+classifier_search_space: Dict[str, BaseTuner] = dict(
+    map(lambda pair : (pair[0], make_default_tuner_type_mutable(pair[1])), classifier_search_space.items())
+)

 __all__: Iterable[str] = [
-    "classifier_tuning_entities",
-    "classifier_tuner_model_class_dict",
+    "classifier_tuner_model_class_map",
+    "classifier_search_space",
     "SVCTuner",
     "LinearSVCTuner",
     "NuSVCTuner",
10 changes: 2 additions & 8 deletions tune_classifier/discriminant_analysis_classifier.py
@@ -1,4 +1,4 @@
-from baseline import BaseTuner
+from ..baseline import BaseTuner
 from optuna.trial import Trial
 from dataclasses import dataclass
 from types import MappingProxyType
@@ -58,10 +58,4 @@ def sample_model(self, trial: Optional[Trial] = None) -> Any:
         model = self.evaluate_sampled_model("classification", QuadraticDiscriminantAnalysis, params)

         self.model = model
-        return model
-
-
-tuner_model_class_dict: Dict[str, Callable] = {
-    LDAClassifierTuner.__name__: LinearDiscriminantAnalysis,
-    QDAClassifierTuner.__name__: QuadraticDiscriminantAnalysis
-}
+        return model
14 changes: 2 additions & 12 deletions tune_classifier/ensemble_classifier.py
@@ -1,4 +1,4 @@
-from baseline import BaseTuner
+from ..baseline import BaseTuner
 from optuna.trial import Trial
 from dataclasses import dataclass
 from typing import Iterable, Optional, Dict, Any, Union, Callable
@@ -333,14 +333,4 @@ def sample_model(self, trial: Optional[Trial]=None) -> Any:
         model = super().evaluate_sampled_model("classification", HistGradientBoostingClassifier, params)
         self.model = model

-        return model
-
-
-tuner_model_class_dict: Dict[str, Callable] = {
-    RandomForestClassifierTuner.__name__: RandomForestClassifier,
-    ExtraTreesClassifierTuner.__name__: ExtraTreesClassifier,
-    AdaBoostClassifierTuner.__name__: AdaBoostClassifier,
-    GradientBoostingClassifierTuner.__name__: GradientBoostingClassifier,
-    BaggingClassifierTuner.__name__: BaggingClassifier,
-    HistGradientBoostingClassifierTuner.__name__: HistGradientBoostingClassifier,
-}
+        return model
12 changes: 2 additions & 10 deletions tune_classifier/linear_model_classifier.py
@@ -1,4 +1,4 @@
-from baseline import BaseTuner
+from ..baseline import BaseTuner
 from optuna.trial import Trial
 from dataclasses import dataclass
 from typing import Iterable, Optional, Dict, Any, Callable
@@ -211,12 +211,4 @@ def sample_model(self, trial: Optional[Trial]=None) -> Any:
         model = super().evaluate_sampled_model("classification", SGDClassifier, params)
         self.model = model

-        return model
-
-
-tuner_model_class_dict: Dict[str, Callable] = {
-    LogisticRegressionTuner.__name__: LogisticRegression,
-    PerceptronTuner.__name__: Perceptron,
-    PassiveAggressiveClassifierTuner.__name__: PassiveAggressiveClassifier,
-    SGDClassifierTuner.__name__: SGDClassifier,
-}
+        return model
9 changes: 2 additions & 7 deletions tune_classifier/mlp_classifier.py
@@ -1,4 +1,4 @@
-from baseline import BaseTuner
+from ..baseline import BaseTuner
 from optuna.trial import Trial
 from dataclasses import dataclass
 from typing import Iterable, Optional, Dict, Any, Callable
@@ -73,9 +73,4 @@ def sample_model(self, trial: Optional[Trial] = None) -> Any:
         params = self.sample_params(trial)
         model = super().evaluate_sampled_model("classification", MLPClassifier, params)
         self.model = model
-        return model
-
-
-tuner_model_class_dict: Dict[str, Callable] = {
-    MLPClassifierTuner.__name__: MLPClassifier
-}
+        return model
12 changes: 2 additions & 10 deletions tune_classifier/naive_bayes_classifier.py
@@ -1,4 +1,4 @@
-from baseline import BaseTuner
+from ..baseline import BaseTuner
 from optuna.trial import Trial
 from dataclasses import dataclass
 from typing import Callable,Iterable, Optional, Dict, Any, Union
@@ -154,12 +154,4 @@ def sample_model(self, trial: Optional[Trial] = None) -> Any:
         model = super().evaluate_sampled_model("classification", CategoricalNB, params)

         self.model = model
-        return model
-
-tuner_model_class_dict: Dict[str, Callable] = {
-    GaussianNBTuner.__name__: GaussianNB,
-    BernoulliNBTuner.__name__: BernoulliNB,
-    MultinomialNBTuner.__name__: MultinomialNB,
-    ComplementNBTuner.__name__: ComplementNB,
-    CategoricalNBTuner.__name__: CategoricalNB,
-}
+        return model
11 changes: 2 additions & 9 deletions tune_classifier/neighbor_classifier.py
@@ -1,4 +1,4 @@
-from baseline import BaseTuner
+from ..baseline import BaseTuner
 from optuna.trial import Trial
 from dataclasses import dataclass
 from typing import Iterable, Optional, Dict, Any, Callable
@@ -99,11 +99,4 @@ def sample_model(self, trial: Optional[Trial] = None) -> Any:

         self.model = model

-        return model
-
-
-tuner_model_class_dict: Dict[str, Callable] = {
-    KNeighborsClassifierTuner.__name__: KNeighborsClassifier,
-    RadiusNeighborsClassifierTuner.__name__: RadiusNeighborsClassifier,
-    NearestCentroidClassifierTuner.__name__: NearestCentroid
-}
+        return model
9 changes: 1 addition & 8 deletions tune_classifier/svc.py
@@ -1,4 +1,4 @@
-from baseline import BaseTuner
+from ..baseline import BaseTuner
 from optuna.trial import Trial
 from dataclasses import dataclass
 from typing import Iterable, Optional, Dict, Any, Callable
@@ -129,10 +129,3 @@ def sample_model(self, trial: Optional[Trial]=None) -> Any:
         params = self.sample_params(trial)
         model = super().evaluate_sampled_model("classification", NuSVC, params)
         return model
-
-
-tuner_model_class_dict: Dict[str, Callable] = {
-    SVCTuner.__name__: SVC,
-    LinearSVCTuner.__name__: LinearSVC,
-    NuSVCTuner.__name__: NuSVC,
-}
10 changes: 2 additions & 8 deletions tune_classifier/tree_classifier.py
@@ -1,4 +1,4 @@
-from baseline import BaseTuner
+from ..baseline import BaseTuner
 from optuna.trial import Trial
 from dataclasses import dataclass
 from typing import Iterable, Optional, Dict, Any, Union, Callable
@@ -73,10 +73,4 @@ def sample_model(self, trial: Optional[Trial]=None) -> Any:
         params = self.sample_params(trial)
         model = super(DecisionTreeClassifierTuner, self).evaluate_sampled_model("classification", ExtraTreeClassifier, params)
         self.model = model
-        return model
-
-
-tuner_model_class_dict: Dict[str, Callable] = {
-    DecisionTreeClassifierTuner.__name__: DecisionTreeClassifier,
-    ExtraTreeClassifierTuner.__name__: ExtraTreeClassifier,
-}
+        return model