
Commit

Updated BetaTCVAE results
AntixK committed Feb 28, 2020
1 parent a36d705 commit 4510762
Showing 4 changed files with 6 additions and 6 deletions.
Binary file added assets/BetaTCVAE_49.png
Binary file added assets/recons_BetaTCVAE_49.png
4 changes: 2 additions & 2 deletions configs/dip_vae.yaml

@@ -2,8 +2,8 @@ model_params:
   name: 'DIPVAE'
   in_channels: 3
   latent_dim: 128
-  lambda_diag: 10.
-  lambda_offdiag: 5.
+  lambda_diag: 4.
+  lambda_offdiag: 2.


 exp_params:
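
For context, these two weights scale the diagonal and off-diagonal terms of the DIP-VAE covariance penalty (see the loss change below). A minimal sketch of reading the new values out of this config, assuming a plain PyYAML load; the loading step itself is not part of this diff:

```python
import yaml

# Hypothetical loading step; the repo's experiment runner is not shown in this commit.
with open('configs/dip_vae.yaml') as f:
    config = yaml.safe_load(f)

params = config['model_params']
# After this commit: lambda_diag == 4.0 and lambda_offdiag == 2.0.
# lambda_diag pushes the latent covariance diagonal toward 1;
# lambda_offdiag pushes the off-diagonal entries toward 0.
print(params['lambda_diag'], params['lambda_offdiag'])
```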
8 changes: 4 additions & 4 deletions models/dip_vae.py

@@ -137,8 +137,8 @@ def loss_function(self,
         mu = args[2]
         log_var = args[3]
 
-        kld_weight = 1. #kwargs['M_N'] # Account for the minibatch samples from the dataset
-        recons_loss = F.mse_loss(recons, input)
+        kld_weight = 1 #* kwargs['M_N'] # Account for the minibatch samples from the dataset
+        recons_loss = F.mse_loss(recons, input, reduction='sum')
 
 
         kld_loss = torch.mean(-0.5 * torch.sum(1 + log_var - mu ** 2 - log_var.exp(), dim = 1), dim = 0)
@@ -150,8 +150,8 @@ def loss_function(self,
 
         cov_diag = torch.diag(cov_z)  # [D]
         cov_offdiag = cov_z - torch.diag(cov_diag)  # [D x D]
-        dip_loss = self.lambda_offdiag * torch.mean(cov_offdiag ** 2) + \
-                   self.lambda_diag * torch.mean((cov_diag - 1) ** 2)
+        dip_loss = self.lambda_offdiag * torch.sum(cov_offdiag ** 2) + \
+                   self.lambda_diag * torch.sum((cov_diag - 1) ** 2)
 
         loss = recons_loss + kld_weight * kld_loss + dip_loss
         return {'loss': loss,
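
Putting the two hunks together, here is a self-contained sketch of the updated loss. It assumes the DIP-VAE-I form, where the covariance is taken over the encoder means; the centering step and tensor shapes are inferred from context, since the covariance computation sits between the two hunks and is not shown in this diff:

```python
import torch
import torch.nn.functional as F

def dip_vae_loss(recons, input, mu, log_var,
                 lambda_diag=4.0, lambda_offdiag=2.0, kld_weight=1):
    """Sketch of the updated DIP-VAE loss.
    Shapes: recons/input [B, C, H, W], mu/log_var [B, D]."""
    # Sum-reduced reconstruction error (the reduction changed in this commit).
    recons_loss = F.mse_loss(recons, input, reduction='sum')

    # KL divergence between q(z|x) and the unit Gaussian prior.
    kld_loss = torch.mean(
        -0.5 * torch.sum(1 + log_var - mu ** 2 - log_var.exp(), dim=1), dim=0)

    # Covariance of the encoder means over the batch (DIP-VAE-I assumption).
    centered_mu = mu - mu.mean(dim=0, keepdim=True)           # [B x D]
    cov_z = centered_mu.t().matmul(centered_mu) / mu.size(0)  # [D x D]

    cov_diag = torch.diag(cov_z)                # [D]
    cov_offdiag = cov_z - torch.diag(cov_diag)  # [D x D]
    # Sum (not mean) over entries, matching the change in this commit.
    dip_loss = lambda_offdiag * torch.sum(cov_offdiag ** 2) + \
               lambda_diag * torch.sum((cov_diag - 1) ** 2)

    return recons_loss + kld_weight * kld_loss + dip_loss
```

Note that switching from torch.mean to torch.sum makes the penalty grow with the latent dimension, which is presumably why the lambda weights were reduced in the config change above.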
