
Commit
support more optimizers
xinntao committed Sep 6, 2022
1 parent a7a55af commit b156d25
Showing 1 changed file with 11 additions and 1 deletion.
12 changes: 11 additions & 1 deletion basicsr/models/base_model.py
@@ -105,8 +105,18 @@ def get_optimizer(self, optim_type, params, lr, **kwargs):
             optimizer = torch.optim.Adam(params, lr, **kwargs)
         elif optim_type == 'AdamW':
             optimizer = torch.optim.AdamW(params, lr, **kwargs)
+        elif optim_type == 'Adamax':
+            optimizer = torch.optim.Adamax(params, lr, **kwargs)
+        elif optim_type == 'SGD':
+            optimizer = torch.optim.SGD(params, lr, **kwargs)
+        elif optim_type == 'ASGD':
+            optimizer = torch.optim.ASGD(params, lr, **kwargs)
+        elif optim_type == 'RMSprop':
+            optimizer = torch.optim.RMSprop(params, lr, **kwargs)
+        elif optim_type == 'Rprop':
+            optimizer = torch.optim.Rprop(params, lr, **kwargs)
         else:
-            raise NotImplementedError(f'optimizer {optim_type} is not supperted yet.')
+            raise NotImplementedError(f'optimizer {optim_type} is not supported yet.')
         return optimizer

     def setup_schedulers(self):
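To see the new dispatch in action, here is a minimal, self-contained sketch. The free-standing get_optimizer below mirrors the method's logic from the diff; the toy nn.Linear model and the momentum keyword are illustrative assumptions, not part of the commit.

import torch
import torch.nn as nn

# Free-standing stand-in for BaseModel.get_optimizer with the new branches;
# trimmed to a plain function so the sketch runs on its own.
def get_optimizer(optim_type, params, lr, **kwargs):
    if optim_type == 'Adam':
        optimizer = torch.optim.Adam(params, lr, **kwargs)
    elif optim_type == 'AdamW':
        optimizer = torch.optim.AdamW(params, lr, **kwargs)
    elif optim_type == 'Adamax':
        optimizer = torch.optim.Adamax(params, lr, **kwargs)
    elif optim_type == 'SGD':
        optimizer = torch.optim.SGD(params, lr, **kwargs)
    elif optim_type == 'ASGD':
        optimizer = torch.optim.ASGD(params, lr, **kwargs)
    elif optim_type == 'RMSprop':
        optimizer = torch.optim.RMSprop(params, lr, **kwargs)
    elif optim_type == 'Rprop':
        optimizer = torch.optim.Rprop(params, lr, **kwargs)
    else:
        raise NotImplementedError(f'optimizer {optim_type} is not supported yet.')
    return optimizer

# Toy model purely for demonstration.
model = nn.Linear(10, 2)

# 'momentum' is a torch.optim.SGD keyword argument, forwarded via **kwargs;
# before this commit, 'SGD' would have raised NotImplementedError.
optimizer = get_optimizer('SGD', model.parameters(), lr=1e-2, momentum=0.9)
print(type(optimizer).__name__)  # -> SGD

As a design note, a dict mapping names to torch.optim classes would avoid the growing elif chain, but the explicit branches keep the set of supported optimizers easy to audit.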
