# Forked from jorgenkg/python-neural-network
import numpy as np

try:
    # PyPy does not ship with SciPy, so fall back to a pure-NumPy sigmoid
    from scipy.special import expit
except ImportError:
    expit = lambda x: 1.0 / (1 + np.exp(-x))

def softmax_function( signal, derivative=False ):
    # Calculate activation signal
    e_x = np.exp( signal - np.max(signal, axis=1, keepdims = True) )
    signal = e_x / np.sum( e_x, axis = 1, keepdims = True )

    if derivative:
        # The derivative is expected to be combined with a softmax +
        # cross-entropy cost, whose gradient simplifies to (output - target),
        # so a placeholder of ones is returned here.
        return np.ones( signal.shape )
    else:
        # Return the activation signal
        return signal
#end activation function

def sigmoid_function( signal, derivative=False ):
    # Prevent overflow.
    signal = np.clip( signal, -500, 500 )

    # Calculate activation signal
    signal = expit( signal )

    if derivative:
        # Return the partial derivation of the activation function
        return np.multiply(signal, 1 - signal)
    else:
        # Return the activation signal
        return signal
#end activation function

def elliot_function( signal, derivative=False ):
    """ A fast approximation of sigmoid """
    s = 1 # steepness

    abs_signal = (1 + np.abs(signal * s))
    if derivative:
        return 0.5 * s / abs_signal**2
    else:
        # Return the activation signal
        return 0.5*(signal * s) / abs_signal + 0.5
#end activation function

def symmetric_elliot_function( signal, derivative=False ):
    """ A fast approximation of tanh """
    s = 1.0 # steepness

    abs_signal = (1 + np.abs(signal * s))
    if derivative:
        return s / abs_signal**2
    else:
        # Return the activation signal
        return (signal * s) / abs_signal
#end activation function

def ReLU_function( signal, derivative=False ):
    if derivative:
        return (signal > 0).astype(float)
    else:
        # Return the activation signal
        return np.maximum( 0, signal )
#end activation function

def LReLU_function( signal, derivative=False, leakage = 0.01 ):
    """
    Leaky Rectified Linear Unit
    """
    if derivative:
        # Return the partial derivation of the activation function:
        # 1 for positive inputs, `leakage` otherwise
        return np.where( signal > 0, 1.0, leakage )
    else:
        # Return the activation signal
        output = np.copy( signal )
        output[ output < 0 ] *= leakage
        return output
#end activation function

def tanh_function( signal, derivative=False ):
    # Calculate activation signal
    signal = np.tanh( signal )

    if derivative:
        # Return the partial derivation of the activation function
        return 1 - np.power( signal, 2 )
    else:
        # Return the activation signal
        return signal
#end activation function

def linear_function( signal, derivative=False ):
    if derivative:
        # Return the partial derivation of the activation function
        return np.ones( signal.shape )
    else:
        # Return the activation signal
        return signal
#end activation function

def softplus_function( signal, derivative=False ):
    if derivative:
        # Return the partial derivation of the activation function:
        # d/dx log(1 + exp(x)) = exp(x) / (1 + exp(x)) = sigmoid(x),
        # computed via expit to avoid overflow for large inputs
        return expit( signal )
    else:
        # Return the activation signal. np.logaddexp(0, x) evaluates
        # log(1 + exp(x)) without overflowing for large x.
        return np.logaddexp( 0, signal )
#end activation function

def softsign_function( signal, derivative=False ):
    if derivative:
        # Return the partial derivation of the activation function
        return 1. / (1 + np.abs(signal))**2
    else:
        # Return the activation signal
        return signal / (1 + np.abs(signal))
#end activation function