Commit af3f9a5: Added support for latest pytorch lightning version

AntixK committed Dec 22, 2021 (1 parent: 8700d24)

Showing 31 changed files with 543 additions and 305 deletions.
`.gitignore` (2 changes: 1 addition & 1 deletion)

```diff
@@ -1,5 +1,5 @@
 
-data/
+Data/
 logs/
 
 VanillaVAE/version_0/
```
`README.md` (20 changes: 14 additions & 6 deletions)

````diff
@@ -39,6 +39,7 @@ $ cd PyTorch-VAE
 $ python run.py -c configs/<config-file-name.yaml>
 ```
 **Config file template**
+
 ```yaml
 model_params:
   name: "<name of VAE model>"
@@ -48,10 +49,15 @@ model_params:
   .
   .
 
-exp_params:
+data_params:
   data_path: "<path to the celebA dataset>"
-  img_size: 64   # Models are designed to work for this size
-  batch_size: 64 # Better to have a square number
+  train_batch_size: 64 # Better to have a square number
+  val_batch_size: 64
+  patch_size: 64       # Models are designed to work for this size
+  num_workers: 4
+
+exp_params:
+  manual_seed: 1265
   LR: 0.005
   weight_decay:
   .  # Other arguments required for training, like scheduler etc.
@@ -60,7 +66,7 @@ exp_params:
 
 trainer_params:
   gpus: 1
-  max_nb_epochs: 50
+  max_epochs: 100
   gradient_clip_val: 1.5
   .
   .
@@ -69,15 +75,17 @@ trainer_params:
 logging_params:
   save_dir: "logs/"
   name: "<experiment name>"
-  manual_seed:
 ```
 **View TensorBoard Logs**
 ```
 $ cd logs/<experiment name>/version_<the version you want>
-$ tensorboard --logdir tf
+$ tensorboard --logdir .
 ```
 
+**Note:** The default dataset is CelebA. However, there have been many issues with downloading the dataset from Google Drive (owing to some file-structure changes). So the recommendation is to download the [file](https://drive.google.com/file/d/1m8-EBPgi5MRubrm6iQjafK2QMHDBMSfJ/view?usp=sharing) from Google Drive directly and extract it to the path of your choice. The default path assumed in the config files is `Data/celeba/img_align_celeba`, but you can change it according to your preference.
+
+
 ----
 <h2 align="center">
 <b>Results</b><br>
````
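The template reshuffle above tracks the newer PyTorch Lightning workflow: data loading moves out of `exp_params` into a standalone `data_params` block (the natural fit for a `LightningDataModule`), and `trainer_params` is forwarded directly to `Trainer`. Below is a minimal sketch of how `run.py` could wire such a config together; the names `vae_models`, `VAEXperiment`, and `VAEDataset` are assumptions for illustration, not confirmed by this diff:

```python
import yaml
import pytorch_lightning as pl
from pytorch_lightning.loggers import TensorBoardLogger

# Assumed project modules; names are illustrative, not part of this commit.
from models import vae_models        # dict: model name -> model class
from experiment import VAEXperiment  # LightningModule wrapping the VAE
from dataset import VAEDataset       # LightningDataModule for CelebA

with open("configs/bbvae.yaml") as f:
    config = yaml.safe_load(f)

# manual_seed now lives under exp_params rather than logging_params
pl.seed_everything(config["exp_params"]["manual_seed"], workers=True)

model_params = dict(config["model_params"])
model = vae_models[model_params.pop("name")](**model_params)
experiment = VAEXperiment(model, config["exp_params"])
data = VAEDataset(**config["data_params"])  # data_params block maps 1:1

logger = TensorBoardLogger(save_dir=config["logging_params"]["save_dir"],
                           name=config["logging_params"]["name"])

# trainer_params (gpus: [1], max_epochs: 10, ...) are plain Trainer kwargs
runner = pl.Trainer(logger=logger, **config["trainer_params"])
runner.fit(experiment, datamodule=data)
```

With this wiring, the TensorBoard logs land under `logs/<name>/version_<n>`, which matches the updated `tensorboard --logdir .` instruction above.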
`configs/bbvae.yaml` (22 changes: 13 additions & 9 deletions)

```diff
@@ -7,21 +7,25 @@ model_params:
   max_capacity: 25
   Capacity_max_iter: 10000
 
+data_params:
+  data_path: "Data/"
+  train_batch_size: 64
+  val_batch_size: 64
+  patch_size: 64
+  num_workers: 4
+
 exp_params:
-  dataset: celeba
-  data_path: "../../shared/Data/"
-  img_size: 64
-  batch_size: 144 # Better to have a square number
-  LR: 0.0005
+  LR: 0.005
   weight_decay: 0.0
   scheduler_gamma: 0.95
+  kld_weight: 0.00025
+  manual_seed: 1265
 
 trainer_params:
-  gpus: 1
-  max_nb_epochs: 50
-  max_epochs: 50
+  gpus: [1]
+  max_epochs: 10
 
 logging_params:
   save_dir: "logs/"
-  name: "BetaVAE_B"
-  manual_seed: 1265
+  name: 'BetaVAE'
```
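This same split recurs in every config file below: a new top-level `data_params` block (`data_path`, train/val batch sizes, `patch_size`, `num_workers`) replaces the data-related keys that previously sat in `exp_params`. A minimal sketch of a `LightningDataModule` that such a block could feed follows; the class name and the CelebA/`CenterCrop` specifics are assumptions, not part of this diff:

```python
from pytorch_lightning import LightningDataModule
from torch.utils.data import DataLoader
from torchvision import transforms
from torchvision.datasets import CelebA

class VAEDataset(LightningDataModule):
    """Sketch: keyword arguments mirror the new data_params block."""

    def __init__(self, data_path, train_batch_size=64, val_batch_size=64,
                 patch_size=64, num_workers=4, **kwargs):
        super().__init__()
        self.data_path = data_path  # e.g. "Data/", containing celeba/
        self.train_batch_size = train_batch_size
        self.val_batch_size = val_batch_size
        self.num_workers = num_workers
        self.transform = transforms.Compose([
            transforms.CenterCrop(148),     # face crop; an assumed choice
            transforms.Resize(patch_size),  # patch_size from data_params
            transforms.ToTensor(),
        ])

    def setup(self, stage=None):
        self.train_set = CelebA(self.data_path, split="train",
                                transform=self.transform, download=False)
        self.val_set = CelebA(self.data_path, split="valid",
                              transform=self.transform, download=False)

    def train_dataloader(self):
        return DataLoader(self.train_set, batch_size=self.train_batch_size,
                          shuffle=True, num_workers=self.num_workers)

    def val_dataloader(self):
        return DataLoader(self.val_set, batch_size=self.val_batch_size,
                          shuffle=False, num_workers=self.num_workers)
```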
`configs/betatc_vae.yaml` (26 changes: 15 additions & 11 deletions)

```diff
@@ -7,21 +7,25 @@ model_params:
   beta: 6.
   gamma: 1.
 
+data_params:
+  data_path: "Data/"
+  train_batch_size: 64
+  val_batch_size: 64
+  patch_size: 64
+  num_workers: 4
+
+
 exp_params:
-  dataset: celeba
-  data_path: "../../shared/momo/Data/"
-  img_size: 64
-  batch_size: 144 # Better to have a square number
-  LR: 0.001
+  LR: 0.005
   weight_decay: 0.0
-  # scheduler_gamma: 0.99
+  scheduler_gamma: 0.95
+  kld_weight: 0.00025
+  manual_seed: 1265
 
 trainer_params:
-  gpus: 1
-  max_nb_epochs: 50
-  max_epochs: 50
+  gpus: [1]
+  max_epochs: 10
 
 logging_params:
   save_dir: "logs/"
-  name: "BetaTCVAE"
-  manual_seed: 1265
+  name: 'BetaTCVAE'
```
`configs/bhvae.yaml` (24 changes: 14 additions & 10 deletions)

```diff
@@ -5,21 +5,25 @@ model_params:
   loss_type: 'H'
   beta: 10.
 
+data_params:
+  data_path: "Data/"
+  train_batch_size: 64
+  val_batch_size: 64
+  patch_size: 64
+  num_workers: 4
+
+
 exp_params:
-  dataset: celeba
-  data_path: "../../shared/Data/"
-  img_size: 64
-  batch_size: 144 # Better to have a square number
-  LR: 0.0005
+  LR: 0.005
   weight_decay: 0.0
   scheduler_gamma: 0.95
+  kld_weight: 0.00025
+  manual_seed: 1265
 
 trainer_params:
-  gpus: 1
-  max_nb_epochs: 50
-  max_epochs: 50
+  gpus: [1]
+  max_epochs: 10
 
 logging_params:
   save_dir: "logs/"
-  name: "BetaVAE_H"
-  manual_seed: 1265
+  name: 'BetaVAE'
```
`configs/cat_vae.yaml` (18 changes: 11 additions & 7 deletions)

```diff
@@ -8,21 +8,25 @@ model_params:
   anneal_interval: 100
   alpha: 1.0
 
+data_params:
+  data_path: "Data/"
+  train_batch_size: 64
+  val_batch_size: 64
+  patch_size: 64
+  num_workers: 4
+
+
 exp_params:
-  dataset: celeba
-  data_path: "../../shared/Data/"
-  img_size: 64
-  batch_size: 144 # Better to have a square number
   LR: 0.005
   weight_decay: 0.0
   scheduler_gamma: 0.95
+  kld_weight: 0.00025
+  manual_seed: 1265
 
 trainer_params:
   gpus: [1]
-  max_nb_epochs: 50
-  max_epochs: 50
+  max_epochs: 10
 
 logging_params:
   save_dir: "logs/"
   name: "CategoricalVAE"
-  manual_seed: 1265
```
`configs/cvae.yaml` (22 changes: 13 additions & 9 deletions)

```diff
@@ -4,21 +4,25 @@ model_params:
   num_classes: 40
   latent_dim: 128
 
+data_params:
+  data_path: "Data/"
+  train_batch_size: 64
+  val_batch_size: 64
+  patch_size: 64
+  num_workers: 4
+
+
 exp_params:
-  dataset: celeba
-  data_path: "../../shared/Data/"
-  img_size: 64
-  batch_size: 144 # Better to have a square number
   LR: 0.005
   weight_decay: 0.0
   scheduler_gamma: 0.95
+  kld_weight: 0.00025
+  manual_seed: 1265
 
 trainer_params:
-  gpus: 1
-  max_nb_epochs: 50
-  max_epochs: 50
+  gpus: [1]
+  max_epochs: 10
 
 logging_params:
   save_dir: "logs/"
-  name: "ConditionalVAE"
-  manual_seed: 1265
+  name: "ConditionalVAE"
```
`configs/dfc_vae.yaml` (20 changes: 12 additions & 8 deletions)

```diff
@@ -3,21 +3,25 @@ model_params:
   in_channels: 3
   latent_dim: 128
 
+data_params:
+  data_path: "Data/"
+  train_batch_size: 64
+  val_batch_size: 64
+  patch_size: 64
+  num_workers: 4
+
+
 exp_params:
-  dataset: celeba
-  data_path: "../../shared/Data/"
-  img_size: 64
-  batch_size: 144 # Better to have a square number
   LR: 0.005
   weight_decay: 0.0
   scheduler_gamma: 0.95
+  kld_weight: 0.00025
+  manual_seed: 1265
 
 trainer_params:
-  gpus: 1
-  max_nb_epochs: 50
-  max_epochs: 50
+  gpus: [1]
+  max_epochs: 10
 
 logging_params:
   save_dir: "logs/"
   name: "DFCVAE"
-  manual_seed: 1265
```
`configs/dip_vae.yaml` (19 changes: 12 additions & 7 deletions)

```diff
@@ -6,19 +6,24 @@ model_params:
   lambda_offdiag: 0.1
 
 
+data_params:
+  data_path: "Data/"
+  train_batch_size: 64
+  val_batch_size: 64
+  patch_size: 64
+  num_workers: 4
+
+
 exp_params:
-  dataset: celeba
-  data_path: "../../shared/momo/Data/"
-  img_size: 64
-  batch_size: 144 # Better to have a square number
   LR: 0.001
   weight_decay: 0.0
   scheduler_gamma: 0.97
   kld_weight: 1
+  manual_seed: 1265
 
 trainer_params:
-  gpus: 1
-  max_nb_epochs: 50
-  max_epochs: 50
+  gpus: [1]
+  max_epochs: 10
 
 logging_params:
   save_dir: "logs/"
```
`configs/factorvae.yaml` (26 changes: 16 additions & 10 deletions)

```diff
@@ -4,25 +4,31 @@ model_params:
   latent_dim: 128
   gamma: 6.4
 
+data_params:
+  data_path: "Data/"
+  train_batch_size: 64
+  val_batch_size: 64
+  patch_size: 64
+  num_workers: 4
+
+
 exp_params:
-  dataset: celeba
-  data_path: "../../shared/Data/"
+  submodel: 'discriminator'
+  retain_first_backpass: True
-  img_size: 64
-  batch_size: 144 # Better to have a square number
   LR: 0.005
   weight_decay: 0.0
-  scheduler_gamma: 0.95
   LR_2: 0.005
   scheduler_gamma_2: 0.95
+  scheduler_gamma: 0.95
+  kld_weight: 0.00025
+  manual_seed: 1265
 
 trainer_params:
-  gpus: [3]
-  max_nb_epochs: 30
-  max_epochs: 50
+  gpus: [1]
+  max_epochs: 10
 
 logging_params:
   save_dir: "logs/"
-  name: "FactorVAE"
-  manual_seed: 1265
+  name: "FactorVAE"
```


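FactorVAE is the one config with extra keys: it trains a discriminator alongside the VAE, which is what `submodel`, `retain_first_backpass`, and the second learning-rate/scheduler pair (`LR_2`, `scheduler_gamma_2`) configure. In Lightning this pattern is usually expressed as two optimizers returned from `configure_optimizers`; a minimal sketch under that assumption (not the repository's actual experiment code):

```python
import torch
import pytorch_lightning as pl

class FactorVAEExperiment(pl.LightningModule):
    """Sketch: one optimizer for the VAE, one for the discriminator,
    each with its own ExponentialLR scheduler."""

    def __init__(self, model, params):
        super().__init__()
        self.model = model    # assumed to expose a .discriminator submodule
        self.params = params  # the exp_params dict from the YAML config

    def configure_optimizers(self):
        # Split parameters so the VAE optimizer excludes the discriminator
        vae_params = [p for n, p in self.model.named_parameters()
                      if not n.startswith("discriminator")]
        opt_vae = torch.optim.Adam(vae_params,
                                   lr=self.params["LR"],
                                   weight_decay=self.params["weight_decay"])
        opt_disc = torch.optim.Adam(self.model.discriminator.parameters(),
                                    lr=self.params["LR_2"])
        sched_vae = torch.optim.lr_scheduler.ExponentialLR(
            opt_vae, gamma=self.params["scheduler_gamma"])
        sched_disc = torch.optim.lr_scheduler.ExponentialLR(
            opt_disc, gamma=self.params["scheduler_gamma_2"])
        return [opt_vae, opt_disc], [sched_vae, sched_disc]
```

The `retain_first_backpass: True` flag would plausibly correspond to calling the first `backward()` with `retain_graph=True`, since the discriminator loss reuses part of the VAE's computation graph.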
`configs/gammavae.yaml` (25 changes: 15 additions & 10 deletions)

```diff
@@ -6,22 +6,27 @@ model_params:
   prior_shape: 2.
   prior_rate: 1.
 
 
+data_params:
+  data_path: "Data/"
+  train_batch_size: 64
+  val_batch_size: 64
+  patch_size: 64
+  num_workers: 4
+
+
 exp_params:
-  dataset: celeba
-  data_path: "../../shared/Data/"
-  img_size: 64
-  batch_size: 144 # Better to have a square number
   LR: 0.003
-  weight_decay: 0.0005
-
+  weight_decay: 0.00005
+  scheduler_gamma: 0.95
+  kld_weight: 0.00025
+  manual_seed: 1265
 
 trainer_params:
-  gpus: 1
-  max_nb_epochs: 50
+  gpus: [1]
+  max_epochs: 10
   gradient_clip_val: 0.8
-  max_epochs: 50
 
 logging_params:
   save_dir: "logs/"
   name: "GammaVAE"
-  manual_seed: 1265
```
… (the remaining 20 changed files are not shown here)