Skip to content

Commit

Permalink
Updating some deprecated method parameters
Browse files Browse the repository at this point in the history
  • Loading branch information
ludwigschubert committed May 21, 2018
1 parent 24c0b92 commit d80a1ce
Show file tree
Hide file tree
Showing 2 changed files with 4 additions and 4 deletions.
6 changes: 3 additions & 3 deletions lucid/optvis/objectives.py
Original file line number Diff line number Diff line change
Expand Up @@ -153,7 +153,7 @@ def inner(T):
shape = tf.shape(layer)
x_ = shape[1] // 2 if x is None else x
y_ = shape[2] // 2 if y is None else y

if batch is None:
return layer[:, x_, y_, channel_n]
else:
Expand Down Expand Up @@ -199,7 +199,7 @@ def inner(T):
def direction_cossim(layer, vec, batch=None):
"""Visualize a direction (cossine similarity)"""
def inner(T):
act_mags = tf.sqrt(tf.reduce_sum(T(layer)**2, -1, keep_dims=True))
act_mags = tf.sqrt(tf.reduce_sum(T(layer)**2, -1, keepdims=True))
vec_mag = tf.sqrt(tf.reduce_sum(vec**2))
mags = act_mags * vec_mag
if batch is None:
Expand Down Expand Up @@ -411,7 +411,7 @@ def inner(T):

flattened = tf.reshape(layer_t, [batch_n, -1, channels])
grams = tf.matmul(flattened, flattened, transpose_a=True)
grams = tf.nn.l2_normalize(grams, dim=[1,2], epsilon=1e-10)
grams = tf.nn.l2_normalize(grams, axis=[1,2], epsilon=1e-10)

return sum([ sum([ tf.reduce_sum(grams[i]*grams[j])
for j in range(batch_n) if j != i])
Expand Down
2 changes: 1 addition & 1 deletion lucid/optvis/transform.py
Original file line number Diff line number Diff line change
Expand Up @@ -92,7 +92,7 @@ def normalize_gradient(grad_scales=None):
# Register a uniquely-named gradient override that L2-normalizes the
# incoming gradient per example (the uuid suffix avoids name collisions
# when normalize_gradient() is called more than once in a process).
op_name = "NormalizeGrad_" + str(uuid.uuid4())

@tf.RegisterGradient(op_name)
def _NormalizeGrad(op, grad):
    """Gradient override: rescale each example's gradient to unit L2 norm.

    Args:
      op: the forward op this gradient is registered for (unused).
      grad: incoming gradient tensor; assumed NHWC-like with the batch on
        axis 0 — the norm is reduced over axes [1, 2, 3]. TODO confirm
        rank-4 input against callers.

    Returns:
      `grad` optionally scaled per-example by the enclosing function's
      `grad_scales`, then divided by its per-example L2 norm.
    """
    # `keepdims` (not the deprecated, since-removed `keep_dims`) so the
    # norm broadcasts back over the spatial/channel axes.
    grad_norm = tf.sqrt(tf.reduce_sum(grad ** 2, [1, 2, 3], keepdims=True))
    if grad_scales is not None:
        # Broadcast the per-example scale over H, W, C.
        grad *= grad_scales[:, None, None, None]
    return grad / grad_norm
Expand Down

0 comments on commit d80a1ce

Please sign in to comment.