3.1_convolutional_dropout.py
import argparse

from util import readDatabase, AccuracyHistory, showPerformance, showConfusionMatrix
from keras.models import Sequential
from keras.layers.core import Dense, Dropout, Flatten
from keras.layers.convolutional import Conv2D
from keras.optimizers import Adam
from keras import backend as K
# Neural network structure for this sample:
#
# · · · · · · · · · · (input data, 1-deep) X [batch, 28, 28, 1]
# @ @ @ @ @ @ @ @ @ @ -- conv. layer 5x5x1=>4 stride 1 W1 [5, 5, 1, 4] B1 [4]
# ∶∶∶∶∶∶∶∶∶∶∶∶∶∶∶∶∶∶∶ Y1 [batch, 28, 28, 4]
# @ @ @ @ @ @ @ @ -- conv. layer 5x5x4=>8 stride 2 W2 [5, 5, 4, 8] B2 [8]
# ∶∶∶∶∶∶∶∶∶∶∶∶∶∶∶ Y2 [batch, 14, 14, 8]
# @ @ @ @ @ @ -- conv. layer 5x5x8=>12 stride 2 W3 [5, 5, 8, 12] B3 [12]
# ∶∶∶∶∶∶∶∶∶∶∶ Y3 [batch, 7, 7, 12] => reshaped to YY [batch, 7*7*12]
# \x/x\x\x/ -- fully connected layer (relu) W4 [7*7*12, 200] B4 [200]
# · · · · Y4 [batch, 200]
# \x/x\x/ -- fully connected layer (softmax) W5 [200, 10] B5 [10]
# · · · Y [batch, 10]
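#
# Note: every Conv2D below uses padding='same', so the spatial output size is
# ceil(input_size / stride):
#   28x28 --(stride 1)--> 28x28 --(stride 2)--> 14x14 --(stride 2)--> 7x7
# Flatten therefore produces 7 * 7 * 12 = 588 features feeding the dense layer.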
# Command-line arguments
ap = argparse.ArgumentParser()
ap.add_argument("-v", "--verbose", required=False, help="show images (0 = False, 1 = True)")
args = vars(ap.parse_args())
verbose = args["verbose"] == '1'
# Read the training / testing dataset and labels
xTrain, yTrain, xTest, yTest, yLabels = readDatabase(reshape=True)
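# readDatabase(reshape=True) is assumed to return the MNIST images already
# reshaped to (numSamples, 28, 28, 1) with one-hot encoded labels (see util.py);
# the input_shape and numberOfClasses below rely on that layout.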
# Network parameters
firstConvLayerDepth = 4
secondConvLayerDepth = 8
thirdConvLayerDepth = 12
numberOfNeurons = 200
# Training hyperparameters
learningRate = 0.001
noOfEpochs = 3
batchSize = 32
numberOfClasses = yTrain.shape[1]
featureSize = xTrain.shape[1]
# Program parameters
history = AccuracyHistory()
showPlot = verbose
# Network architecture
model = Sequential()
model.add(Conv2D(firstConvLayerDepth, kernel_size=(5, 5),
                 activation='relu',
                 strides=(1, 1),
                 padding='same',
                 input_shape=(28, 28, 1)))
model.add(Dropout(0.25))
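# Dropout(0.25) randomly zeroes 25% of the activations during training (and
# rescales the rest); it is a no-op at inference time, so evaluate/predict
# below are unaffected.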
# output is 28x28
model.add(Conv2D(secondConvLayerDepth, kernel_size=(5, 5),
                 activation='relu',
                 strides=(2, 2),
                 padding='same'))
model.add(Dropout(0.25))
# output is 14x14
model.add(Conv2D(thirdConvLayerDepth, kernel_size=(5, 5),
                 activation='relu',
                 strides=(2, 2),
                 padding='same'))
model.add(Dropout(0.25))
# output is 7x7
model.add(Flatten())
model.add(Dense(numberOfNeurons, activation='relu'))
model.add(Dropout(0.25))
model.add(Dense(numberOfClasses, activation='softmax'))
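# Rough parameter count for this architecture (should match model.summary()):
#   conv1: 5*5*1*4  + 4   =    104
#   conv2: 5*5*4*8  + 8   =    808
#   conv3: 5*5*8*12 + 12  =  2,412
#   dense: 588*200  + 200 = 117,800
#   out:   200*10   + 10  =  2,010   -> ~123k trainable parameters in total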
adam = Adam(lr=learningRate)
model.compile(optimizer=adam,
              loss='categorical_crossentropy',
              metrics=['accuracy'])
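# categorical_crossentropy expects one-hot targets, matching the yTrain/yTest
# layout assumed above; with integer class labels, sparse_categorical_crossentropy
# would be the equivalent loss.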
model.fit(x=xTrain,
          y=yTrain,
          epochs=noOfEpochs,
          batch_size=batchSize,
          verbose=1,
          callbacks=[history])
(loss, accuracy) = model.evaluate(xTest, yTest)
showPerformance(accuracy, loss, noOfEpochs, history, plot=showPlot)
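# model.predict returns a (numSamples, 10) array of softmax probabilities;
# showConfusionMatrix (from util.py) is assumed to reduce these to class labels,
# e.g. via argmax, before comparing them against yLabels.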
if showPlot:
    predictedValues = model.predict(xTest, batch_size=1)
    showConfusionMatrix(yLabels, predictedValues)
K.clear_session()
# Accuracy 0.9858