From 77308a10aec9f7d70ca8ad11f218e6e1125ee53e Mon Sep 17 00:00:00 2001
From: nafiturgut
Date: Sun, 25 Jun 2023 00:45:35 +0300
Subject: [PATCH] Newer versions of accelerate use project_dir instead of
 logging_dir as an init param; the old name causes an error in training

---
 training_scripts/train_lora_dreambooth.py | 2 +-
 training_scripts/train_lora_pt_caption.py | 2 +-
 training_scripts/train_lora_w_ti.py       | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/training_scripts/train_lora_dreambooth.py b/training_scripts/train_lora_dreambooth.py
index a0c0544..bc437c2 100644
--- a/training_scripts/train_lora_dreambooth.py
+++ b/training_scripts/train_lora_dreambooth.py
@@ -490,7 +490,7 @@ def main(args):
         gradient_accumulation_steps=args.gradient_accumulation_steps,
         mixed_precision=args.mixed_precision,
         log_with="tensorboard",
-        logging_dir=logging_dir,
+        project_dir=logging_dir,
     )
 
     # Currently, it's not possible to do gradient accumulation when training two models with accelerate.accumulate
diff --git a/training_scripts/train_lora_pt_caption.py b/training_scripts/train_lora_pt_caption.py
index ae7aafc..d8034de 100644
--- a/training_scripts/train_lora_pt_caption.py
+++ b/training_scripts/train_lora_pt_caption.py
@@ -527,7 +527,7 @@ def main(args):
         gradient_accumulation_steps=args.gradient_accumulation_steps,
         mixed_precision=args.mixed_precision,
         log_with="tensorboard",
-        logging_dir=logging_dir,
+        project_dir=logging_dir,
     )
 
     # Currently, it's not possible to do gradient accumulation when training two models with accelerate.accumulate
diff --git a/training_scripts/train_lora_w_ti.py b/training_scripts/train_lora_w_ti.py
index 8f63280..356021a 100644
--- a/training_scripts/train_lora_w_ti.py
+++ b/training_scripts/train_lora_w_ti.py
@@ -643,7 +643,7 @@ def main(args):
         gradient_accumulation_steps=args.gradient_accumulation_steps,
         mixed_precision=args.mixed_precision,
         log_with="tensorboard",
-        logging_dir=logging_dir,
+        project_dir=logging_dir,
     )
 
     # Currently, it's not possible to do gradient accumulation when training two models with accelerate.accumulate
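
A note for reviewers (not part of the patch itself): passing project_dir=logging_dir is correct on accelerate releases that dropped the logging_dir keyword, but it will raise a TypeError on older releases that only accept logging_dir. If the scripts need to run against both, one option is to choose the keyword at runtime by inspecting Accelerator's signature. This is a minimal sketch using only the public accelerate.Accelerator API and the standard library; make_accelerator is a hypothetical helper, not something the patched scripts define:

    import inspect

    from accelerate import Accelerator

    def make_accelerator(args, logging_dir):
        # Hypothetical helper: pick whichever keyword the installed
        # accelerate version expects. Older releases take `logging_dir`;
        # newer ones renamed the parameter to `project_dir`.
        kwargs = dict(
            gradient_accumulation_steps=args.gradient_accumulation_steps,
            mixed_precision=args.mixed_precision,
            log_with="tensorboard",
        )
        params = inspect.signature(Accelerator.__init__).parameters
        if "project_dir" in params:
            kwargs["project_dir"] = logging_dir
        else:
            kwargs["logging_dir"] = logging_dir
        return Accelerator(**kwargs)

Signature inspection is used here instead of comparing version strings, so the helper keeps working regardless of exactly which release renamed the parameter. Newer accelerate also provides accelerate.utils.ProjectConfiguration, which can be passed as project_config= to set project_dir and logging_dir separately, should checkpoints and logs need different directories.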