Merge pull request mnielsen#20 from CesiumLifeJacket/master
removed numpy.vectorize() wrappers
mnielsen committed Aug 6, 2015
2 parents 84be581 + 8886ab7 commit bce65a6
Showing 2 changed files with 8 additions and 16 deletions.
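Context for the change: `np.exp` is a NumPy ufunc, so the `sigmoid` and `sigmoid_prime` defined in these files already apply elementwise to whole arrays, and the `np.vectorize` wrappers only added a slow Python-level loop on top. A minimal sketch of the point (the sample array is illustrative, not from the repo):

import numpy as np

def sigmoid(z):
    """The sigmoid function; np.exp broadcasts over arrays."""
    return 1.0/(1.0+np.exp(-z))

z = np.array([[-1.0], [0.0], [2.0]])  # a column vector of pre-activations
print(sigmoid(z))                     # elementwise, no wrapper needed
print(np.vectorize(sigmoid)(z))       # same values, via a slower Python loop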
12 changes: 4 additions & 8 deletions src/network.py
@@ -38,7 +38,7 @@ def __init__(self, sizes):
     def feedforward(self, a):
         """Return the output of the network if ``a`` is input."""
         for b, w in zip(self.biases, self.weights):
-            a = sigmoid_vec(np.dot(w, a)+b)
+            a = sigmoid(np.dot(w, a)+b)
         return a
 
     def SGD(self, training_data, epochs, mini_batch_size, eta,
@@ -96,11 +96,11 @@ def backprop(self, x, y):
         for b, w in zip(self.biases, self.weights):
             z = np.dot(w, activation)+b
             zs.append(z)
-            activation = sigmoid_vec(z)
+            activation = sigmoid(z)
             activations.append(activation)
         # backward pass
         delta = self.cost_derivative(activations[-1], y) * \
-            sigmoid_prime_vec(zs[-1])
+            sigmoid_prime(zs[-1])
         nabla_b[-1] = delta
         nabla_w[-1] = np.dot(delta, activations[-2].transpose())
         # Note that the variable l in the loop below is used a little
@@ -111,7 +111,7 @@ def backprop(self, x, y):
         # that Python can use negative indices in lists.
         for l in xrange(2, self.num_layers):
             z = zs[-l]
-            spv = sigmoid_prime_vec(z)
+            spv = sigmoid_prime(z)
             delta = np.dot(self.weights[-l+1].transpose(), delta) * spv
             nabla_b[-l] = delta
             nabla_w[-l] = np.dot(delta, activations[-l-1].transpose())
@@ -136,10 +136,6 @@ def sigmoid(z):
     """The sigmoid function."""
     return 1.0/(1.0+np.exp(-z))
 
-sigmoid_vec = np.vectorize(sigmoid)
-
 def sigmoid_prime(z):
     """Derivative of the sigmoid function."""
     return sigmoid(z)*(1-sigmoid(z))
-
-sigmoid_prime_vec = np.vectorize(sigmoid_prime)
12 changes: 4 additions & 8 deletions src/network2.py
@@ -36,7 +36,7 @@ def fn(a, y):
     @staticmethod
     def delta(z, a, y):
         """Return the error delta from the output layer."""
-        return (a-y) * sigmoid_prime_vec(z)
+        return (a-y) * sigmoid_prime(z)
 
 
 class CrossEntropyCost(object):
@@ -123,7 +123,7 @@ def large_weight_initializer(self):
     def feedforward(self, a):
         """Return the output of the network if ``a`` is input."""
         for b, w in zip(self.biases, self.weights):
-            a = sigmoid_vec(np.dot(w, a)+b)
+            a = sigmoid(np.dot(w, a)+b)
         return a
 
     def SGD(self, training_data, epochs, mini_batch_size, eta,
@@ -220,7 +220,7 @@ def backprop(self, x, y):
         for b, w in zip(self.biases, self.weights):
             z = np.dot(w, activation)+b
             zs.append(z)
-            activation = sigmoid_vec(z)
+            activation = sigmoid(z)
             activations.append(activation)
         # backward pass
         delta = (self.cost).delta(zs[-1], activations[-1], y)
@@ -234,7 +234,7 @@ def backprop(self, x, y):
         # that Python can use negative indices in lists.
         for l in xrange(2, self.num_layers):
             z = zs[-l]
-            spv = sigmoid_prime_vec(z)
+            spv = sigmoid_prime(z)
             delta = np.dot(self.weights[-l+1].transpose(), delta) * spv
             nabla_b[-l] = delta
             nabla_w[-l] = np.dot(delta, activations[-l-1].transpose())
@@ -327,10 +327,6 @@ def sigmoid(z):
     """The sigmoid function."""
     return 1.0/(1.0+np.exp(-z))
 
-sigmoid_vec = np.vectorize(sigmoid)
-
 def sigmoid_prime(z):
     """Derivative of the sigmoid function."""
     return sigmoid(z)*(1-sigmoid(z))
-
-sigmoid_prime_vec = np.vectorize(sigmoid_prime)
