Commit

Merge pull request maxpumperla#196 from amw5g/patch-1
Create simple_notebook_multiple_inputs.ipynb
maxpumperla authored Oct 6, 2018
2 parents 0151f59 + b2f46d4 commit 89d5bf6
Showing 1 changed file with 77 additions and 0 deletions.
examples/simple_notebook_multiple_inputs.ipynb (+77 −0)
@@ -0,0 +1,77 @@
from __future__ import print_function
from hyperopt import Trials, STATUS_OK, tpe
from hyperas import optim
from hyperas.distributions import choice, uniform

from keras.models import Model
from keras.layers import Input, Dense, Dropout, concatenate, Activation
from keras.optimizers import RMSprop

from keras.datasets import mnist
from keras.utils import np_utils
import numpy

def data():
    '''
    Data providing function:

    This function is separated from model() so that hyperopt
    won't reload data for each evaluation run.
    '''
    (mnist_X_train, y_train), (mnist_X_test, y_test) = mnist.load_data()
    # Flatten the 28x28 images to 784-dimensional vectors and scale them to [0, 1]
    mnist_X_train = mnist_X_train.reshape(60000, 784)
    mnist_X_test = mnist_X_test.reshape(10000, 784)
    mnist_X_train = mnist_X_train.astype('float32')
    mnist_X_test = mnist_X_test.astype('float32')
    mnist_X_train /= 255
    mnist_X_test /= 255
    nb_classes = 10
    Y_train = np_utils.to_categorical(y_train, nb_classes)
    Y_test = np_utils.to_categorical(y_test, nb_classes)
    # Two named inputs: 'x1' is the raw data, 'x2' is the same data with the flattened
    # pixel order reversed, purely to demonstrate a multi-input model.
    X_train = {'x1': mnist_X_train, 'x2': numpy.flip(mnist_X_train, axis=1)}
    X_test = {'x1': mnist_X_test, 'x2': numpy.flip(mnist_X_test, axis=1)}
    return X_train, Y_train, X_test, Y_test
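
A quick sanity check of the dictionaries data() returns (illustration only, not part of the committed notebook), to make the two named inputs and the one-hot labels easy to verify:

# Illustration only -- not in the commit.
X_train, Y_train, X_test, Y_test = data()
print(X_train['x1'].shape, X_train['x2'].shape, Y_train.shape)  # (60000, 784) (60000, 784) (60000, 10)
print(X_test['x1'].shape, X_test['x2'].shape, Y_test.shape)     # (10000, 784) (10000, 784) (10000, 10)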


def model(X_train, Y_train, X_test, Y_test):
    '''
    Model providing function:

    Create a Keras model with double curly brackets dropped in wherever a
    hyperparameter should be tuned. The return value has to be a valid Python
    dictionary with two customary keys:
        - loss: a numeric evaluation metric to be minimized
        - status: just use STATUS_OK (see the hyperopt documentation if that is not feasible)
    A third key is optional, though recommended:
        - model: the model just created, so that it can be reused later.
    '''
    # Two 784-dimensional inputs, each passed through its own Dense layer
    x1 = Input(shape=(784,), name='x1')
    dense_x1 = Dense(512)(x1)
    x2 = Input(shape=(784,), name='x2')
    dense_x2 = Dense(512)(x2)
    # Merge the two branches, then tune the dropout rates and hidden layer size
    merged = concatenate([dense_x1, dense_x2])
    merged = Activation('relu')(merged)
    merged = Dropout({{uniform(0, 1)}})(merged)
    merged = Dense({{choice([256, 512, 1024])}}, activation='relu')(merged)
    merged = Dropout({{uniform(0, 1)}})(merged)
    output = Dense(10, activation='softmax', name='output')(merged)
    model = Model(inputs=[x1, x2], outputs=[output])

    rms = RMSprop()
    model.compile(loss='categorical_crossentropy', optimizer=rms, metrics=['accuracy'])

    model.fit({'x1': X_train['x1'], 'x2': X_train['x2']}, {'output': Y_train},
              batch_size={{choice([64, 128])}},
              epochs=1,
              verbose=2,
              validation_data=({'x1': X_test['x1'], 'x2': X_test['x2']}, {'output': Y_test}))
    score, acc = model.evaluate({'x1': X_test['x1'], 'x2': X_test['x2']}, {'output': Y_test}, verbose=0)
    print('Test accuracy:', acc)
    return {'loss': -acc, 'status': STATUS_OK, 'model': model}
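
For orientation only (not part of the commit, and the label names below are hypothetical): each {{...}} template in model() stands in for a hyperopt search-space expression, roughly equivalent to something like the following.

# Rough conceptual equivalent of the templates above; hyperas generates the actual
# search space and labels automatically, so these names are made up.
from hyperopt import hp
space = {
    'dropout_1': hp.uniform('dropout_1', 0, 1),
    'dense_units': hp.choice('dense_units', [256, 512, 1024]),
    'dropout_2': hp.uniform('dropout_2', 0, 1),
    'batch_size': hp.choice('batch_size', [64, 128]),
}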


X_train, Y_train, X_test, Y_test = data()

best_run, best_model = optim.minimize(model=model,
                                      data=data,
                                      algo=tpe.suggest,
                                      max_evals=5,
                                      trials=Trials(),
                                      # notebook_name must match this notebook's file name
                                      # (without the .ipynb extension) so hyperas can parse it
                                      notebook_name='simple_notebook_multiple_inputs')
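
As a follow-up (not part of this commit; a sketch in the style of the other hyperas examples), the objects returned by optim.minimize() could be inspected like this:

# Illustration only -- inspect the best hyperparameters and re-evaluate the best model.
print("Best performing model chosen hyper-parameters:")
print(best_run)
print("Evaluation of best performing model:")
print(best_model.evaluate({'x1': X_test['x1'], 'x2': X_test['x2']}, {'output': Y_test}, verbose=0))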
