Commit 2ae7c75

Twiddling
mnielsen committed Jan 7, 2015
1 parent 34ec420 commit 2ae7c75
Showing 1 changed file with 10 additions and 23 deletions.
33 changes: 10 additions & 23 deletions src/conv.py
@@ -10,7 +10,7 @@
 """
 
 import network3
-from network3 import Network
+from network3 import sigmoid, tanh, ReLU, Network
 from network3 import ConvPoolLayer, FullyConnectedLayer, SoftmaxLayer
 training_data, validation_data, test_data = network3.load_data_shared()
 mini_batch_size = 10
@@ -44,17 +44,20 @@ def omit_FC():
             SoftmaxLayer(n_in=20*12*12, n_out=10)], mini_batch_size)
         net.SGD(training_data, 60, mini_batch_size, 0.1, validation_data, test_data)
 
-def dbl_conv():
+def dbl_conv(activation_fn=sigmoid):
     for j in range(3):
         print "Conv + Conv + FC architecture"
         net = Network([
             ConvPoolLayer(image_shape=(mini_batch_size, 1, 28, 28),
                           filter_shape=(20, 1, 5, 5),
-                          poolsize=(2, 2)),
+                          poolsize=(2, 2),
+                          activation_fn=activation_fn),
             ConvPoolLayer(image_shape=(mini_batch_size, 20, 12, 12),
                           filter_shape=(40, 20, 5, 5),
-                          poolsize=(2, 2)),
-            FullyConnectedLayer(n_in=40*4*4, n_out=100),
+                          poolsize=(2, 2),
+                          activation_fn=activation_fn),
+            FullyConnectedLayer(
+                n_in=40*4*4, n_out=100, activation_fn=activation_fn),
             SoftmaxLayer(n_in=100, n_out=10)], mini_batch_size)
         net.SGD(training_data, 60, mini_batch_size, 0.1, validation_data, test_data)
 
@@ -73,24 +76,8 @@ def regularized_dbl_conv():
             SoftmaxLayer(n_in=100, n_out=10)], mini_batch_size)
         net.SGD(training_data, 60, mini_batch_size, 0.1, validation_data, test_data, lmbda=lmbda)
 
-def dbl_conv_tanh():
-    for j in range(3):
-        print "Conv + Conv + FC, using tanh, trial %s" % j
-        net = Network([
-            ConvPoolLayer(image_shape=(mini_batch_size, 1, 28, 28),
-                          filter_shape=(20, 1, 5, 5),
-                          poolsize=(2, 2),
-                          activation_fn=tanh),
-            ConvPoolLayer(image_shape=(mini_batch_size, 20, 12, 12),
-                          filter_shape=(40, 20, 5, 5),
-                          poolsize=(2, 2),
-                          activation_fn=tanh),
-            FullyConnectedLayer(n_in=40*4*4, n_out=100, activation_fn=tanh),
-            SoftmaxLayer(n_in=100, n_out=10)], mini_batch_size)
-        net.SGD(training_data, 60, mini_batch_size, 0.1, validation_data, test_data)
-
 def dbl_conv_relu():
-    for lmbda in [0.00001, 0.0001, 0.001, 0.01, 0.1, 1.0, 10.0, 100.0]:
+    for lmbda in [0.0, 0.00001, 0.0001, 0.001, 0.01, 0.1, 1.0, 10.0, 100.0]:
         for j in range(3):
             print "Conv + Conv + FC num %s, relu, with regularization %s" % (j, lmbda)
             net = Network([
@@ -126,5 +113,5 @@ def expanded_data():
             FullyConnectedLayer(n_in=40*4*4, n_out=100, activation_fn=ReLU),
             SoftmaxLayer(n_in=100, n_out=10)], mini_batch_size)
         net.SGD(expanded_training_data, 20, mini_batch_size, 0.03,
-                validation_data, test_data, lmbda=1.0)
+                validation_data, test_data, lmbda=0.1)
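
The activation_fn parameter added to dbl_conv covers what the removed dbl_conv_tanh used to do. A minimal usage sketch (not part of the commit; it assumes the imports at the top of conv.py):

    dbl_conv()                    # sigmoid activations, the new default
    dbl_conv(activation_fn=tanh)  # roughly reproduces the removed dbl_conv_tanh
    dbl_conv(activation_fn=ReLU)  # rectified linear units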
