Better error message when there is no gradient defined (keras-team#9170)
* better error message

* better error message

* pep8

* changes according to review
Frédéric Branchaud-Charron authored and fchollet committed Jan 23, 2018
1 parent c7efb4e commit a3dcce7
Showing 2 changed files with 19 additions and 3 deletions.
4 changes: 4 additions & 0 deletions keras/optimizers.py
@@ -76,6 +76,10 @@ def get_updates(self, loss, params):
 
     def get_gradients(self, loss, params):
         grads = K.gradients(loss, params)
+        if None in grads:
+            raise ValueError('An operation has `None` for gradient.',
+                             'Please be sure that all of your ops have a gradient defined \"i.e. are differentiable\".',
+                             'Common ops without gradient : K.argmax, K.round, K.cast, K.eval.')
         if hasattr(self, 'clipnorm') and self.clipnorm > 0:
             norm = K.sqrt(sum([K.sum(K.square(g)) for g in grads]))
             grads = [clip_norm(g, self.clipnorm, norm) for g in grads]
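
For context: K.gradients returns None for any parameter the loss is not differentiable with respect to, and before this guard the None propagated into the optimizer's update rules and only surfaced later as an opaque backend error. A minimal sketch of what the new check catches (hypothetical snippet, not part of the commit; assumes Keras 2.x on the TensorFlow backend):

    from keras import backend as K
    from keras import optimizers

    x = K.placeholder(shape=(None, 3))
    # K.argmax returns an integer index, so the loss below has no
    # gradient with respect to x
    loss = K.sum(K.cast(K.argmax(x), 'float32'))

    print(K.gradients(loss, [x]))  # -> [None]
    optimizers.SGD(lr=0.01).get_gradients(loss, [x])  # now raises the ValueError above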
18 changes: 15 additions & 3 deletions tests/keras/optimizers_test.py
@@ -4,9 +4,9 @@
 from numpy.testing import assert_allclose
 
 from keras.utils import test_utils
-from keras import optimizers
-from keras.models import Sequential
-from keras.layers.core import Dense, Activation
+from keras import optimizers, Input
+from keras.models import Sequential, Model
+from keras.layers.core import Dense, Activation, Lambda
 from keras.utils.test_utils import keras_test
 from keras.utils.np_utils import to_categorical
 from keras import backend as K
@@ -64,10 +64,22 @@ def _test_optimizer(optimizer, target=0.75):
     assert_allclose(bias, 2.)
 
 
+@keras_test
+def _test_no_grad(optimizer):
+    inp = Input([3])
+    x = Dense(10)(inp)
+    x = Lambda(lambda l: 1.0 * K.reshape(K.cast(K.argmax(l), 'float32'), [-1, 1]))(x)
+    mod = Model(inp, x)
+    mod.compile(optimizer, 'mse')
+    with pytest.raises(ValueError):
+        mod.fit(np.zeros([10, 3]), np.zeros([10, 1], np.float32), batch_size=10, epochs=10)
+
+
 @keras_test
 def test_sgd():
     sgd = optimizers.SGD(lr=0.01, momentum=0.9, nesterov=True)
     _test_optimizer(sgd)
+    _test_no_grad(sgd)
 
 
 @keras_test
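
The test model's only path from input to output runs through K.argmax inside the Lambda layer, so every trainable weight of the Dense layer gets a None gradient and fit() must raise. A stricter variant could also assert on the message text (hypothetical alternative, not in the commit; pytest.raises(match=...) requires pytest >= 3.1):

    # inside _test_no_grad, matching part of the new error message
    with pytest.raises(ValueError, match='`None` for gradient'):
        mod.fit(np.zeros([10, 3]), np.zeros([10, 1], np.float32),
                batch_size=10, epochs=10)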
