Skip to content

Commit

Permalink
Rename objects to follow reference literature
Browse files — browse the repository at this point in the history
  • Loading branch information
carlthome authored Mar 29, 2017
1 parent ab9e7d7 commit 6a32e3d
Showing 1 changed file with 16 additions and 16 deletions.
32 changes: 16 additions & 16 deletions cell.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,33 +25,33 @@ def state_size(self):
def output_size(self):
return self._size

def __call__(self, input, state, scope=None):
def __call__(self, x, h, scope=None):
with tf.variable_scope(scope or self.__class__.__name__):
previous_memory, previous_output = state
previous_memory, previous_output = h

with tf.variable_scope('Expand'):
samples = input.get_shape()[0].value
samples = x.get_shape()[0].value
shape = [samples, self._height, self._width, -1]
input = tf.reshape(input, shape)
x = tf.reshape(x, shape)
previous_memory = tf.reshape(previous_memory, shape)
previous_output = tf.reshape(previous_output, shape)

with tf.variable_scope('Convolve'):
channels = input.get_shape()[-1].value
channels = x.get_shape()[-1].value
filters = self._filters
gates = 4 * filters if filters > 1 else 4
x = tf.concat([input, previous_output], axis=3)
x = tf.concat([x, previous_output], axis=3)
n = channels + filters
m = gates
W = tf.get_variable('kernel', self._kernel + [n, m], initializer=self._initializer)
y = tf.nn.convolution(x, W, 'SAME')
y += tf.get_variable('bias', [m], initializer=tf.constant_initializer(0.0))
input, input_gate, forget_gate, output_gate = tf.split(y, 4, axis=3)
input_contribution, input_gate, forget_gate, output_gate = tf.split(y, 4, axis=3)

with tf.variable_scope('LSTM'):
memory = (previous_memory
* tf.sigmoid(forget_gate + self._forget_bias)
+ tf.sigmoid(input_gate) * self._activation(input))
+ tf.sigmoid(input_gate) * self._activation(input_contribution))
output = self._activation(memory) * tf.sigmoid(output_gate)

with tf.variable_scope('Flatten'):
Expand Down Expand Up @@ -82,32 +82,32 @@ def state_size(self):
def output_size(self):
return self._size

def __call__(self, input, state, scope=None):
def __call__(self, x, h, scope=None):
with tf.variable_scope(scope or self.__class__.__name__):

with tf.variable_scope('Expand'):
samples = input.get_shape()[0].value
samples = x.get_shape()[0].value
shape = [samples, self._height, self._width, -1]
input = tf.reshape(input, shape)
state = tf.reshape(state, shape)
x = tf.reshape(x, shape)
h = tf.reshape(h, shape)

with tf.variable_scope('Gates'):
channels = input.get_shape()[-1].value
x = tf.concat([input, state], axis=3)
inputs = tf.concat([x, h], axis=3)
n = channels + self._filters
m = 2 * self._filters if self._filters > 1 else 2
W = tf.get_variable('kernel', self._kernel + [n, m], initializer=self._initializer)
y = tf.nn.convolution(x, W, 'SAME')
y = tf.nn.convolution(inputs, W, 'SAME')
y += tf.get_variable('bias', [m], initializer=tf.constant_initializer(1.0))
reset_gate, update_gate = tf.split(y, 2, axis=3)
reset_gate, update_gate = tf.sigmoid(reset_gate), tf.sigmoid(update_gate)

with tf.variable_scope('Output'):
x = tf.concat([input, reset_gate * state], axis=3)
inputs = tf.concat([x, reset_gate * h], axis=3)
n = channels + self._filters
m = self._filters
W = tf.get_variable('kernel', self._kernel + [n, m], initializer=self._initializer)
y = tf.nn.convolution(x, W, 'SAME')
y = tf.nn.convolution(inputs, W, 'SAME')
y += tf.get_variable('bias', [m], initializer=tf.constant_initializer(0.0))
y = self._activation(y)
output = update_gate * state + (1 - update_gate) * y
Expand Down

0 comments on commit 6a32e3d

Please sign in to comment.