Update back_propagation_neural_network.py (TheAlgorithms#1342)
* Update back_propagation_neural_network.py

Added comments below functions

* Update back_propagation_neural_network.py

Co-authored-by: Christian Clauss <[email protected]>
y4shb and cclauss committed Dec 26, 2019
1 parent 725834b commit 1b39858
Showing 1 changed file with 6 additions and 17 deletions.
23 changes: 6 additions & 17 deletions neural_network/back_propagation_neural_network.py
@@ -31,7 +31,6 @@ class DenseLayer:
"""
Layers of BP neural network
"""

def __init__(
self, units, activation=None, learning_rate=None, is_input_layer=False
):
@@ -58,6 +57,7 @@ def initializer(self, back_units):
self.activation = sigmoid

def cal_gradient(self):
# activation function may be sigmoid or linear
if self.activation == sigmoid:
gradient_mat = np.dot(self.output, (1 - self.output).T)
gradient_activation = np.diag(np.diag(gradient_mat))
@@ -78,7 +78,6 @@ def forward_propagation(self, xdata):
return self.output

def back_propagation(self, gradient):

gradient_activation = self.cal_gradient()  # i * i dimensions
gradient = np.asmatrix(np.dot(gradient.T, gradient_activation))

@@ -89,19 +88,17 @@ def back_propagation(self, gradient):
self.gradient_weight = np.dot(gradient.T, self._gradient_weight.T)
self.gradient_bias = gradient * self._gradient_bias
self.gradient = np.dot(gradient, self._gradient_x).T
# ----------------------upgrade
# -----------the Negative gradient direction --------
# upgrade: the Negative gradient direction
self.weight = self.weight - self.learn_rate * self.gradient_weight
self.bias = self.bias - self.learn_rate * self.gradient_bias.T

# updates the weights and bias according to learning rate (0.3 if undefined)
return self.gradient


class BPNN:
"""
Back Propagation Neural Network model
"""

def __init__(self):
self.layers = []
self.train_mse = []
@@ -144,8 +141,7 @@ def train(self, xdata, ydata, train_round, accuracy):
loss, gradient = self.cal_loss(_ydata, _xdata)
all_loss = all_loss + loss

# back propagation
# the input_layer does not upgrade
# back propagation: the input_layer does not upgrade
for layer in self.layers[:0:-1]:
gradient = layer.back_propagation(gradient)

@@ -176,7 +172,6 @@ def plot_loss(self):


def example():

x = np.random.randn(10, 10)
y = np.asarray(
[
@@ -192,17 +187,11 @@ def example():
[0.1, 0.5],
]
)

model = BPNN()
model.add_layer(DenseLayer(10))
model.add_layer(DenseLayer(20))
model.add_layer(DenseLayer(30))
model.add_layer(DenseLayer(2))

for i in (10, 20, 30, 2):
model.add_layer(DenseLayer(i))
model.build()

model.summary()

model.train(xdata=x, ydata=y, train_round=100, accuracy=0.01)

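Aside from the diff itself: the new comments in back_propagation() describe a plain gradient-descent step in the negative gradient direction, using the default learn_rate of 0.3 when no learning rate is supplied. A minimal numeric sketch of that update (the array shapes and values below are illustrative only, not taken from the file):

import numpy as np

learn_rate = 0.3                                # default used when learning_rate is not supplied
weight = np.asmatrix([[1.0, -0.5]])             # illustrative current weights
gradient_weight = np.asmatrix([[0.5, 0.2]])     # illustrative gradient of the loss w.r.t. the weights
weight = weight - learn_rate * gradient_weight  # step against the gradient -> approximately [[0.85, -0.56]]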

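For context on the example() change above: the four explicit add_layer() calls are replaced by a loop over the layer sizes. A sketch of the resulting usage, assuming DenseLayer and BPNN are importable from neural_network/back_propagation_neural_network.py when run from the repository root (the random targets here are placeholders, not the values hard-coded in example()):

import numpy as np

from neural_network.back_propagation_neural_network import BPNN, DenseLayer

# Hypothetical usage sketch; mirrors the refactored example() in this commit.
x = np.random.randn(10, 10)             # 10 samples with 10 features each
y = np.random.rand(10, 2)               # placeholder targets in [0, 1)

model = BPNN()
for units in (10, 20, 30, 2):           # same layer sizes as example()
    model.add_layer(DenseLayer(units))
model.build()
model.summary()
model.train(xdata=x, ydata=y, train_round=100, accuracy=0.01)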
