diff --git a/training_scripts/train_lora_dreambooth.py b/training_scripts/train_lora_dreambooth.py
index a0c0544..bc437c2 100644
--- a/training_scripts/train_lora_dreambooth.py
+++ b/training_scripts/train_lora_dreambooth.py
@@ -490,7 +490,7 @@ def main(args):
         gradient_accumulation_steps=args.gradient_accumulation_steps,
         mixed_precision=args.mixed_precision,
         log_with="tensorboard",
-        logging_dir=logging_dir,
+        project_dir=logging_dir,
     )
 
     # Currently, it's not possible to do gradient accumulation when training two models with accelerate.accumulate
diff --git a/training_scripts/train_lora_pt_caption.py b/training_scripts/train_lora_pt_caption.py
index ae7aafc..d8034de 100644
--- a/training_scripts/train_lora_pt_caption.py
+++ b/training_scripts/train_lora_pt_caption.py
@@ -527,7 +527,7 @@ def main(args):
         gradient_accumulation_steps=args.gradient_accumulation_steps,
         mixed_precision=args.mixed_precision,
         log_with="tensorboard",
-        logging_dir=logging_dir,
+        project_dir=logging_dir,
     )
 
     # Currently, it's not possible to do gradient accumulation when training two models with accelerate.accumulate
diff --git a/training_scripts/train_lora_w_ti.py b/training_scripts/train_lora_w_ti.py
index 8f63280..356021a 100644
--- a/training_scripts/train_lora_w_ti.py
+++ b/training_scripts/train_lora_w_ti.py
@@ -643,7 +643,7 @@ def main(args):
         gradient_accumulation_steps=args.gradient_accumulation_steps,
         mixed_precision=args.mixed_precision,
         log_with="tensorboard",
-        logging_dir=logging_dir,
+        project_dir=logging_dir,
     )
 
     # Currently, it's not possible to do gradient accumulation when training two models with accelerate.accumulate
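
Context for the change above: in newer releases of Hugging Face accelerate, the `logging_dir` keyword on `Accelerator` was deprecated in favor of `project_dir`, so the old kwarg stops working once it is removed from the library. A minimal sketch of the updated call, assuming a recent accelerate version; the literal values here are placeholders standing in for the scripts' `args.*` settings:

```python
# Minimal sketch, assuming a recent accelerate release where the deprecated
# `logging_dir` kwarg on Accelerator was replaced by `project_dir`.
from accelerate import Accelerator

accelerator = Accelerator(
    gradient_accumulation_steps=1,  # placeholder for args.gradient_accumulation_steps
    mixed_precision="fp16",         # placeholder for args.mixed_precision
    log_with="tensorboard",
    project_dir="./logs",           # formerly logging_dir="./logs"
)
```

The rename is mechanical: the directory still ends up where the tensorboard tracker writes its logs, so no other call sites in the three scripts need to change.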