
Commit e03b546

committed
update settings
1 parent 244de8b commit e03b546

2 files changed: 4 additions and 3 deletions

config/config_template.yaml
2 additions, 1 deletion

@@ -21,7 +21,7 @@ gradient_accumulation_steps: 4
 gradient_checkpointing: true
 id_token: afkx
 layerwise_upcasting_modules: [none, transformer]
-layerwise_upcasting_granularity: [pytorch_layer, diffusers_layer]
+layerwise_upcasting_skip_modules_pattern: 'patch_embed pos_embed x_embedder context_embedder ^proj_in$ ^proj_out$ norm'
 layerwise_upcasting_storage_dtype: [float8_e4m3fn, float8_e5m2]
 image_resolution_buckets: 512x768
 lora_alpha: 128
@@ -47,6 +47,7 @@ text_encoder_dtype: [bf16, fp16, fp32, fp8]
 text_encoder_2_dtype: [bf16, fp16, fp32, fp8]
 text_encoder_3_dtype: [bf16, fp16, fp32, fp8]
 tracker_name: finetrainers
+transformer_dtype: [bf16, fp16, fp32, fp8]
 train_steps: 3000
 training_type: lora
 use_8bit_bnb: false
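
For reference, a concrete config derived from this template might contain an excerpt like the one below. The values are hypothetical; the bracketed lists in the template enumerate the accepted options, and a real config picks one of them.

# Hypothetical filled-in excerpt; each value is chosen from the options listed in the template above.
layerwise_upcasting_modules: transformer
layerwise_upcasting_skip_modules_pattern: 'patch_embed pos_embed x_embedder context_embedder ^proj_in$ ^proj_out$ norm'
layerwise_upcasting_storage_dtype: float8_e4m3fn
transformer_dtype: bf16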

run_trainer.py
2 additions, 2 deletions

@@ -29,7 +29,7 @@ def run(self, config: Config, finetrainers_path: str, log_file: str):
         if config.get('layerwise_upcasting_modules') != 'none':
             model_cmd +=["--layerwise_upcasting_modules", config.get('layerwise_upcasting_modules'),
                     "--layerwise_upcasting_storage_dtype", config.get('layerwise_upcasting_storage_dtype'),
-                    "--layerwise_upcasting_granularity", config.get('layerwise_upcasting_granularity')]
+                    "--layerwise_upcasting_skip_modules_pattern", config.get('layerwise_upcasting_skip_modules_pattern')]
 
         dataset_cmd = ["--data_root", config.get('data_root'),
                     "--video_column", config.get('video_column'),
@@ -45,6 +45,7 @@ def run(self, config: Config, finetrainers_path: str, log_file: str):
                     "--text_encoder_2_dtype", config.get('text_encoder_2_dtype'),
                     "--text_encoder_3_dtype", config.get('text_encoder_3_dtype'),
                     "--vae_dtype", config.get('vae_dtype'),
+                    "--transformer_dtype", config.get('transformer_dtype'),
                     '--precompute_conditions' if config.get('precompute_conditions') else '']
         if config.get('dataset_file'):
             dataset_cmd += ["--dataset_file", config.get('dataset_file')]
@@ -56,7 +57,6 @@ def run(self, config: Config, finetrainers_path: str, log_file: str):
 
         training_cmd = ["--training_type", config.get('training_type'),
                     "--seed", config.get('seed'),
-                    "--mixed_precision", config.get('mixed_precision'),
                     "--batch_size", config.get('batch_size'),
                     "--train_steps", config.get('train_steps'),
                     "--rank", config.get('rank'),
