Merge pull request bmaltais#630 from ddPn08/sdxl
make tracker init_kwargs configurable
kohya-ss authored Jul 20, 2023
2 parents c45d2f2 + b841dd7 commit 73a08c0
Showing 8 changed files with 40 additions and 7 deletions.
6 changes: 5 additions & 1 deletion fine_tune.py
@@ -6,6 +6,7 @@
 import math
 import os
 from multiprocessing import Value
+import toml
 
 from tqdm import tqdm
 import torch
@@ -275,7 +276,10 @@ def fn_recursive_set_mem_eff(module: torch.nn.Module):
         custom_train_functions.fix_noise_scheduler_betas_for_zero_terminal_snr(noise_scheduler)
 
     if accelerator.is_main_process:
-        accelerator.init_trackers("finetuning" if args.log_tracker_name is None else args.log_tracker_name)
+        init_kwargs = {}
+        if args.log_tracker_config is not None:
+            init_kwargs = toml.load(args.log_tracker_config)
+        accelerator.init_trackers("finetuning" if args.log_tracker_name is None else args.log_tracker_name, init_kwargs=init_kwargs)
 
     for epoch in range(num_train_epochs):
         accelerator.print(f"\nepoch {epoch+1}/{num_train_epochs}")
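
The pattern above is repeated in every training script: the TOML file named by --log_tracker_config is loaded verbatim and handed to accelerator.init_trackers, whose init_kwargs parameter takes a nested mapping of tracker name to that tracker's init arguments. A minimal sketch of what such a file could contain and what the scripts do with it; the file contents and values are hypothetical, shown here for the wandb tracker:

    # Hypothetical tracker config: top-level TOML tables name the tracker
    # (wandb here) and their keys become that tracker's init kwargs.
    import toml

    sample_config = """
    [wandb]
    name = "finetune-run-01"
    tags = ["sdxl", "tracker-config"]
    """

    init_kwargs = toml.loads(sample_config)
    print(init_kwargs)
    # {'wandb': {'name': 'finetune-run-01', 'tags': ['sdxl', 'tracker-config']}}

    # The training scripts do the equivalent with toml.load(path) and then:
    # accelerator.init_trackers("finetuning", init_kwargs=init_kwargs)
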
6 changes: 6 additions & 0 deletions library/train_util.py
@@ -2716,6 +2716,12 @@ def add_training_arguments(parser: argparse.ArgumentParser, support_dreambooth:
         default=None,
         help="name of tracker to use for logging, default is script-specific default name / ログ出力に使用するtrackerの名前、省略時はスクリプトごとのデフォルト名",
     )
+    parser.add_argument(
+        "--log_tracker_config",
+        type=str,
+        default=None,
+        help="path to tracker config file to use for logging / ログ出力に使用するtrackerの設定ファイルのパス",
+    )
     parser.add_argument(
         "--wandb_api_key",
         type=str,
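
Usage sketch: the new option rides along with the rest of the training arguments, e.g. --log_tracker_config=/path/to/tracker_config.toml on the training command line (path hypothetical). A standalone snippet replicating just the two tracker flags from add_training_arguments:

    # Standalone argparse sketch of the two tracker-logging options
    # (the real parser in library/train_util.py defines many more).
    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument("--log_tracker_name", type=str, default=None)
    parser.add_argument("--log_tracker_config", type=str, default=None)

    args = parser.parse_args(["--log_tracker_config", "tracker_config.toml"])
    print(args.log_tracker_name)    # None (the script's default name is used)
    print(args.log_tracker_config)  # tracker_config.toml
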
6 changes: 5 additions & 1 deletion sdxl_train.py
@@ -5,6 +5,7 @@
 import math
 import os
 from multiprocessing import Value
+import toml
 
 from tqdm import tqdm
 import torch
@@ -355,7 +356,10 @@ def fn_recursive_set_mem_eff(module: torch.nn.Module):
         custom_train_functions.fix_noise_scheduler_betas_for_zero_terminal_snr(noise_scheduler)
 
     if accelerator.is_main_process:
-        accelerator.init_trackers("finetuning" if args.log_tracker_name is None else args.log_tracker_name)
+        init_kwargs = {}
+        if args.log_tracker_config is not None:
+            init_kwargs = toml.load(args.log_tracker_config)
+        accelerator.init_trackers("finetuning" if args.log_tracker_name is None else args.log_tracker_name, init_kwargs=init_kwargs)
 
     for epoch in range(num_train_epochs):
         accelerator.print(f"\nepoch {epoch+1}/{num_train_epochs}")
6 changes: 5 additions & 1 deletion train_controlnet.py
@@ -7,6 +7,7 @@
 import time
 from multiprocessing import Value
 from types import SimpleNamespace
+import toml
 
 from tqdm import tqdm
 import torch
@@ -324,7 +325,10 @@ def train(args):
         clip_sample=False,
     )
     if accelerator.is_main_process:
-        accelerator.init_trackers("controlnet_train" if args.log_tracker_name is None else args.log_tracker_name)
+        init_kwargs = {}
+        if args.log_tracker_config is not None:
+            init_kwargs = toml.load(args.log_tracker_config)
+        accelerator.init_trackers("controlnet_train" if args.log_tracker_name is None else args.log_tracker_name, init_kwargs=init_kwargs)
 
     loss_list = []
     loss_total = 0.0
6 changes: 5 additions & 1 deletion train_db.py
@@ -7,6 +7,7 @@
 import math
 import os
 from multiprocessing import Value
+import toml
 
 from tqdm import tqdm
 import torch
@@ -250,7 +251,10 @@ def train(args):
         custom_train_functions.fix_noise_scheduler_betas_for_zero_terminal_snr(noise_scheduler)
 
     if accelerator.is_main_process:
-        accelerator.init_trackers("dreambooth" if args.log_tracker_name is None else args.log_tracker_name)
+        init_kwargs = {}
+        if args.log_tracker_config is not None:
+            init_kwargs = toml.load(args.log_tracker_config)
+        accelerator.init_trackers("dreambooth" if args.log_tracker_name is None else args.log_tracker_name, init_kwargs=init_kwargs)
 
     loss_list = []
     loss_total = 0.0
6 changes: 5 additions & 1 deletion train_network.py
@@ -8,6 +8,7 @@
 import time
 import json
 from multiprocessing import Value
+import toml
 
 from tqdm import tqdm
 import torch
@@ -682,7 +683,10 @@ def train(self, args):
             custom_train_functions.fix_noise_scheduler_betas_for_zero_terminal_snr(noise_scheduler)
 
         if accelerator.is_main_process:
-            accelerator.init_trackers("network_train" if args.log_tracker_name is None else args.log_tracker_name)
+            init_kwargs = {}
+            if args.log_tracker_config is not None:
+                init_kwargs = toml.load(args.log_tracker_config)
+            accelerator.init_trackers("network_train" if args.log_tracker_name is None else args.log_tracker_name, init_kwargs=init_kwargs)
 
         loss_list = []
        loss_total = 0.0
6 changes: 5 additions & 1 deletion train_textual_inversion.py
@@ -3,6 +3,7 @@
 import math
 import os
 from multiprocessing import Value
+import toml
 
 from tqdm import tqdm
 import torch
@@ -492,7 +493,10 @@ def train(self, args):
             custom_train_functions.fix_noise_scheduler_betas_for_zero_terminal_snr(noise_scheduler)
 
         if accelerator.is_main_process:
-            accelerator.init_trackers("textual_inversion" if args.log_tracker_name is None else args.log_tracker_name)
+            init_kwargs = {}
+            if args.log_tracker_config is not None:
+                init_kwargs = toml.load(args.log_tracker_config)
+            accelerator.init_trackers("textual_inversion" if args.log_tracker_name is None else args.log_tracker_name, init_kwargs=init_kwargs)
 
         # function for saving/removing
         def save_model(ckpt_name, embs_list, steps, epoch_no, force_sync_upload=False):
5 changes: 4 additions & 1 deletion train_textual_inversion_XTI.py
@@ -388,7 +388,10 @@ def train(args):
         custom_train_functions.fix_noise_scheduler_betas_for_zero_terminal_snr(noise_scheduler)
 
     if accelerator.is_main_process:
-        accelerator.init_trackers("textual_inversion" if args.log_tracker_name is None else args.log_tracker_name)
+        init_kwargs = {}
+        if args.log_tracker_config is not None:
+            init_kwargs = toml.load(args.log_tracker_config)
+        accelerator.init_trackers("textual_inversion" if args.log_tracker_name is None else args.log_tracker_name, init_kwargs=init_kwargs)
 
     # function for saving/removing
     def save_model(ckpt_name, embs, steps, epoch_no, force_sync_upload=False):
