From ad0f46cc0f162d5ffa595f1c50e0712a6e964a28 Mon Sep 17 00:00:00 2001
From: Alex Morehead
Date: Wed, 5 Oct 2022 17:30:59 -0500
Subject: [PATCH] Make use of learning rate scheduler optional (#449)

* Made `trainer.configure_optimizers()` robust to unspecified learning rate schedulers
---
 src/models/mnist_module.py | 23 ++++++++++++-----------
 1 file changed, 12 insertions(+), 11 deletions(-)

diff --git a/src/models/mnist_module.py b/src/models/mnist_module.py
index 8873b8586..7795a669b 100644
--- a/src/models/mnist_module.py
+++ b/src/models/mnist_module.py
@@ -124,17 +124,18 @@ def configure_optimizers(self):
             https://pytorch-lightning.readthedocs.io/en/latest/common/lightning_module.html#configure-optimizers
         """
         optimizer = self.hparams.optimizer(params=self.parameters())
-        scheduler = self.hparams.scheduler(optimizer=optimizer)
-
-        return {
-            "optimizer": optimizer,
-            "lr_scheduler": {
-                "scheduler": scheduler,
-                "monitor": "val/loss",
-                "interval": "epoch",
-                "frequency": 1,
-            },
-        }
+        if self.hparams.scheduler is not None:
+            scheduler = self.hparams.scheduler(optimizer=optimizer)
+            return {
+                "optimizer": optimizer,
+                "lr_scheduler": {
+                    "scheduler": scheduler,
+                    "monitor": "val/loss",
+                    "interval": "epoch",
+                    "frequency": 1,
+                },
+            }
+        return {"optimizer": optimizer}
 
 
 if __name__ == "__main__":
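
Note (not part of the patch): a minimal, self-contained sketch of the patched behavior. DummyModule and the functools.partial factories below are illustrative assumptions standing in for MNISTLitModule and the partially-instantiated optimizer/scheduler callables that Hydra-style configs typically inject. With a scheduler supplied, configure_optimizers() returns the full optimizer + lr_scheduler dict; with scheduler=None, it takes the new branch and returns only the optimizer.

    from functools import partial
    from types import SimpleNamespace

    import torch
    from torch import nn


    class DummyModule(nn.Module):
        """Stand-in for MNISTLitModule; only configure_optimizers() matters here."""

        def __init__(self, optimizer, scheduler=None):
            super().__init__()
            self.layer = nn.Linear(4, 2)  # any parameters will do
            # Mimic Lightning's self.hparams with a plain namespace.
            self.hparams = SimpleNamespace(optimizer=optimizer, scheduler=scheduler)

        def configure_optimizers(self):
            # Same logic as the patched method above.
            optimizer = self.hparams.optimizer(params=self.parameters())
            if self.hparams.scheduler is not None:
                scheduler = self.hparams.scheduler(optimizer=optimizer)
                return {
                    "optimizer": optimizer,
                    "lr_scheduler": {
                        "scheduler": scheduler,
                        "monitor": "val/loss",
                        "interval": "epoch",
                        "frequency": 1,
                    },
                }
            return {"optimizer": optimizer}


    # Scheduler specified: the full optimizer + lr_scheduler dict is returned.
    with_sched = DummyModule(
        optimizer=partial(torch.optim.Adam, lr=1e-3),
        scheduler=partial(torch.optim.lr_scheduler.ReduceLROnPlateau, mode="min"),
    )
    print(sorted(with_sched.configure_optimizers()))  # ['lr_scheduler', 'optimizer']

    # Scheduler omitted: the new branch returns only the optimizer.
    without_sched = DummyModule(optimizer=partial(torch.optim.Adam, lr=1e-3))
    print(sorted(without_sched.configure_optimizers()))  # ['optimizer']

In a Hydra-based setup like the one this patch targets, the optimizer-only branch is presumably reached by leaving the scheduler entry null in the model config, so no code change is needed to disable scheduling for a given run.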