
Commit

add selu activation
aymericdamien committed Jun 16, 2017
1 parent 1fcef7c commit 03ca500
Showing 2 changed files with 22 additions and 1 deletion.
2 changes: 1 addition & 1 deletion tflearn/__init__.py
@@ -33,7 +33,7 @@
 from .optimizers import SGD, AdaGrad, Adam, RMSProp, Momentum, Ftrl, AdaDelta, \
     ProximalAdaGrad
 from .activations import linear, tanh, sigmoid, softmax, softplus, softsign,\
-    relu, relu6, leaky_relu, prelu, elu
+    relu, relu6, leaky_relu, prelu, elu, crelu, selu
 from .variables import variable, get_all_trainable_variable, \
     get_all_variables, get_layer_variables_by_name, get_layer_variables_by_scope
 from .objectives import categorical_crossentropy, binary_crossentropy, \
21 changes: 21 additions & 0 deletions tflearn/activations.py
@@ -283,3 +283,24 @@ def crelu(x):
     """

     return tf.nn.crelu(x)
+
+
+def selu(x):
+    """ SELU.
+
+    Scaled Exponential Linear Unit.
+
+    Arguments:
+        x: A `Tensor` with type `float`, `double`, `int32`, `int64`, `uint8`,
+            `int16`, or `int8`.
+
+    References:
+        Self-Normalizing Neural Networks, Klambauer et al., 2017.
+
+    Links:
+        [https://arxiv.org/abs/1706.02515](https://arxiv.org/abs/1706.02515)
+
+    """
+    # Fixed-point constants for alpha and lambda (scale) from Klambauer et al., 2017.
+    alpha = 1.6732632423543772848170429916717
+    scale = 1.0507009873554804934193349852946
+    # SELU(x) = scale * x for x >= 0 and scale * alpha * (exp(x) - 1) for x < 0.
+    # tf.nn.elu takes no alpha argument, so alpha is applied explicitly on the negative branch.
+    return scale * tf.where(x >= 0.0, x, alpha * tf.nn.elu(x))
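
For context (not part of the commit), a minimal sketch of how the newly exported activation could be used through TFLearn's layer API once this change is in place; the network shape, unit counts, and the X, Y training data are illustrative assumptions:

import tflearn
from tflearn.activations import selu

# Hypothetical example network; 784 inputs / 10 classes are placeholder values.
net = tflearn.input_data(shape=[None, 784])
net = tflearn.fully_connected(net, 256, activation='selu')  # resolved by name from tflearn.activations
net = tflearn.fully_connected(net, 256, activation=selu)    # or passed as a function object
net = tflearn.fully_connected(net, 10, activation='softmax')
net = tflearn.regression(net, optimizer='adam', loss='categorical_crossentropy')

model = tflearn.DNN(net)
# model.fit(X, Y, n_epoch=10, validation_set=0.1)  # X, Y: your training data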
