Skip to content

Commit f430a0c

Browse files
AlanPonnachan and hlky authored
Add torch_xla support to pipeline_aura_flow.py (#10365)
* Add torch_xla support to pipeline_aura_flow.py * make style --------- Co-authored-by: hlky <[email protected]>
1 parent 1b202c5 commit f430a0c

File tree

1 file changed

+11
-1
lines changed

1 file changed

+11
-1
lines changed

src/diffusers/pipelines/aura_flow/pipeline_aura_flow.py

Lines changed: 11 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -21,11 +21,18 @@
2121
from ...models import AuraFlowTransformer2DModel, AutoencoderKL
2222
from ...models.attention_processor import AttnProcessor2_0, FusedAttnProcessor2_0, XFormersAttnProcessor
2323
from ...schedulers import FlowMatchEulerDiscreteScheduler
24-
from ...utils import logging, replace_example_docstring
24+
from ...utils import is_torch_xla_available, logging, replace_example_docstring
2525
from ...utils.torch_utils import randn_tensor
2626
from ..pipeline_utils import DiffusionPipeline, ImagePipelineOutput
2727

2828

29+
if is_torch_xla_available():
30+
import torch_xla.core.xla_model as xm
31+
32+
XLA_AVAILABLE = True
33+
else:
34+
XLA_AVAILABLE = False
35+
2936
logger = logging.get_logger(__name__) # pylint: disable=invalid-name
3037

3138

@@ -564,6 +571,9 @@ def __call__(
564571
if i == len(timesteps) - 1 or ((i + 1) > num_warmup_steps and (i + 1) % self.scheduler.order == 0):
565572
progress_bar.update()
566573

574+
if XLA_AVAILABLE:
575+
xm.mark_step()
576+
567577
if output_type == "latent":
568578
image = latents
569579
else:

0 commit comments

Comments (0)