Merge branch 'main' of https://github.com/openvpi/SOME
yqzhishen committed Sep 17, 2023
2 parents 23ac56a + 8f1527e commit fc142ec
Showing 1 changed file with 109 additions and 0 deletions.
109 changes: 109 additions & 0 deletions configs/quant_tow_head_model.yaml
@@ -0,0 +1,109 @@
# preprocessing
binarizer_cls: preprocessing.MIDIExtractionBinarizer
raw_data_dir: []
binary_data_dir: data/some_dataset_mel/binary
binarization_args:
  num_workers: 0
  shuffle: true
valid_set_name: valid
train_set_name: train
use_buond_loss: true
use_midi_loss: true
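# presumably the two heads of the model: one loss for note boundaries, one for quantized MIDI pitch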


hop_size: 160 # 320
win_size: 1024
audio_sample_rate: 16000
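# at 16 kHz, hop_size 160 corresponds to 10 ms per frame (100 frames/s); the commented 320 would give 20 ms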
test_prefixes:
units_encoder: mel # contentvec768l12
units_encoder_ckpt: pretrained/contentvec/checkpoint_best_legacy_500.pt
pe: rmvpe
pe_ckpt: pretrained/rmvpe/model.pt
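# the commented contentvec768l12 alternative presumably pairs with hop_size 320 and units_dim 768 below;
# pe/pe_ckpt select RMVPE as the pitch extractor and point to its checkpoint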

# global constants
midi_min: 0
midi_max: 128
midi_prob_deviation: 0.5
midi_shift_proportion: 0.0
midi_shift_range: [-6, 6]
rest_threshold: 0.1
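# the 0..128 MIDI range covers 129 pitch values, matching midi_num_bins: 129 below;
# rest_threshold is presumably the probability below which a frame is treated as a rest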

# neural networks
sort_by_len: true
units_dim: 80 # 768
midi_num_bins: 129
model_cls: modules.model.Gmidi_conform.midi_conforms
midi_extractor_args:
  lay: 3
  dim: 512

  use_lay_skip: true
  kernel_size: 31
  conv_drop: 0.1
  ffn_latent_drop: 0.1
  ffn_out_drop: 0.1
  attention_drop: 0.1
  attention_heads: 8
  attention_heads_dim: 64
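  # attention_heads (8) x attention_heads_dim (64) = 512, equal to dim above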

# training
task_cls: training.QuantizedMIDIExtractionTask
optimizer_args:
  optimizer_cls: torch.optim.AdamW
  lr: 0.0001
  beta1: 0.9
  beta2: 0.98
  weight_decay: 0
#lr_scheduler_args:
# scheduler_cls: torch.optim.lr_scheduler.StepLR
# step_size: 50000
# gamma: 0.5

lr_scheduler_args:
  scheduler_cls: lr_scheduler.scheduler.WarmupLR
  warmup_steps: 10000
  min_lr: 0.00001
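  # presumably ramps the learning rate from min_lr up to the optimizer lr over the first 10000 warmup steps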

clip_grad_norm: 1
accumulate_grad_batches: 1
sampler_frame_count_grid: 6
ds_workers: 4
dataloader_prefetch_factor: 2

max_batch_size: 8
max_batch_frames: 80000
max_val_batch_size: 1
max_val_batch_frames: 10000
num_valid_plots: 10
log_interval: 100
num_sanity_val_steps: 1 # validation steps to run before training starts
val_check_interval: 1000
num_ckpt_keep: 5
max_updates: 100000
permanent_ckpt_start: 200000
permanent_ckpt_interval: 40000
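# with max_updates 100000, permanent checkpoints (starting at step 200000) would only be written if max_updates is raised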

###########
# pytorch lightning
# Read https://lightning.ai/docs/pytorch/stable/common/trainer.html#trainer-class-api for possible values
###########
#pl_trainer_accelerator: 'cpu'
pl_trainer_accelerator: 'auto'
pl_trainer_devices: 'auto'
pl_trainer_precision: '32-true'
pl_trainer_num_nodes: 1
pl_trainer_strategy: 'auto'
ddp_backend: 'nccl' # choose from 'gloo', 'nccl', 'nccl_no_p2p'
seed: 114514

###########
# finetune
###########

finetune_enabled: false
finetune_ckpt_path: null
finetune_ignored_params: []
finetune_strict_shapes: true

freezing_enabled: false
frozen_params: []
