Skip to content

Commit 70b156d

Browse files
authored
Parallel flux on diffusers version 0.32 (#413)
1 parent 4d6a038 commit 70b156d

File tree

1 file changed

+5
-1
lines changed

1 file changed

+5
-1
lines changed

xfuser/core/distributed/runtime_state.py

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,7 @@
44

55
import numpy as np
66
import torch
7+
import diffusers
78
from diffusers import DiffusionPipeline
89
import torch.distributed
910

@@ -121,8 +122,11 @@ def __init__(self, pipeline: DiffusionPipeline, config: EngineConfig):
121122
* pipeline.transformer.config.attention_head_dim,
122123
)
123124
else:
125+
vae_scale_factor = pipeline.vae_scale_factor
126+
if pipeline.__class__.__name__.startswith("Flux") and diffusers.__version__ >= '0.32':
127+
vae_scale_factor *= 2
124128
self._set_model_parameters(
125-
vae_scale_factor=pipeline.vae_scale_factor,
129+
vae_scale_factor=vae_scale_factor,
126130
backbone_patch_size=pipeline.transformer.config.patch_size,
127131
backbone_in_channel=pipeline.transformer.config.in_channels,
128132
backbone_inner_dim=pipeline.transformer.config.num_attention_heads

0 commit comments

Comments (0)