@@ -16,10 +16,11 @@
import logging
import os
import sys
import tempfile

import safetensors

from diffusers.utils.testing_utils import TemporaryDirectory


sys.path.append("..")
from test_examples_utils import ExamplesTestsAccelerate, run_command # noqa: E402
@@ -39,7 +40,7 @@ class DreamBoothLoRAFluxAdvanced(ExamplesTestsAccelerate):
script_path = "examples/advanced_diffusion_training/train_dreambooth_lora_flux_advanced.py"

def test_dreambooth_lora_flux(self):
with tempfile.TemporaryDirectory() as tmpdir:
with TemporaryDirectory(ignore_cleanup_errors=True) as tmpdir:
test_args = f"""
{self.script_path}
--pretrained_model_name_or_path {self.pretrained_model_name_or_path}
@@ -71,7 +72,7 @@ def test_dreambooth_lora_flux(self):
self.assertTrue(starts_with_transformer)

def test_dreambooth_lora_text_encoder_flux(self):
with tempfile.TemporaryDirectory() as tmpdir:
with TemporaryDirectory(ignore_cleanup_errors=True) as tmpdir:
test_args = f"""
{self.script_path}
--pretrained_model_name_or_path {self.pretrained_model_name_or_path}
@@ -104,7 +105,7 @@ def test_dreambooth_lora_text_encoder_flux(self):
self.assertTrue(starts_with_expected_prefix)

def test_dreambooth_lora_pivotal_tuning_flux_clip(self):
with tempfile.TemporaryDirectory() as tmpdir:
with TemporaryDirectory(ignore_cleanup_errors=True) as tmpdir:
test_args = f"""
{self.script_path}
--pretrained_model_name_or_path {self.pretrained_model_name_or_path}
@@ -146,7 +147,7 @@ def test_dreambooth_lora_pivotal_tuning_flux_clip(self):
self.assertTrue(starts_with_transformer)

def test_dreambooth_lora_pivotal_tuning_flux_clip_t5(self):
with tempfile.TemporaryDirectory() as tmpdir:
with TemporaryDirectory(ignore_cleanup_errors=True) as tmpdir:
test_args = f"""
{self.script_path}
--pretrained_model_name_or_path {self.pretrained_model_name_or_path}
@@ -189,7 +190,7 @@ def test_dreambooth_lora_pivotal_tuning_flux_clip_t5(self):
self.assertTrue(starts_with_transformer)

def test_dreambooth_lora_latent_caching(self):
with tempfile.TemporaryDirectory() as tmpdir:
with TemporaryDirectory(ignore_cleanup_errors=True) as tmpdir:
test_args = f"""
{self.script_path}
--pretrained_model_name_or_path {self.pretrained_model_name_or_path}
@@ -222,7 +223,7 @@ def test_dreambooth_lora_latent_caching(self):
self.assertTrue(starts_with_transformer)

def test_dreambooth_lora_flux_checkpointing_checkpoints_total_limit(self):
with tempfile.TemporaryDirectory() as tmpdir:
with TemporaryDirectory(ignore_cleanup_errors=True) as tmpdir:
test_args = f"""
{self.script_path}
--pretrained_model_name_or_path={self.pretrained_model_name_or_path}
@@ -245,7 +246,7 @@ def test_dreambooth_lora_flux_checkpointing_checkpoints_total_limit(self):
)

def test_dreambooth_lora_flux_checkpointing_checkpoints_total_limit_removes_multiple_checkpoints(self):
with tempfile.TemporaryDirectory() as tmpdir:
with TemporaryDirectory(ignore_cleanup_errors=True) as tmpdir:
test_args = f"""
{self.script_path}
--pretrained_model_name_or_path={self.pretrained_model_name_or_path}
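Note on the change repeated throughout this PR: the standard-library tempfile.TemporaryDirectory is swapped everywhere for a TemporaryDirectory helper imported from diffusers.utils.testing_utils and called with ignore_cleanup_errors=True. The helper's own implementation is not shown in this diff; the sketch below is a guess at what it presumably does, assuming it forwards the flag to the standard library on Python 3.10+ (where ignore_cleanup_errors was introduced) and swallows cleanup errors itself on older interpreters. The fallback class is illustrative, not the actual diffusers code.

# Hypothetical sketch of diffusers.utils.testing_utils.TemporaryDirectory;
# the real helper may differ.
import sys
import tempfile

if sys.version_info >= (3, 10):
    # The stdlib context manager accepts ignore_cleanup_errors natively.
    TemporaryDirectory = tempfile.TemporaryDirectory
else:

    class TemporaryDirectory(tempfile.TemporaryDirectory):
        # Accept the flag on older Pythons and tolerate cleanup failures
        # (e.g. Windows file locks) on exit when it is set.
        def __init__(self, *args, ignore_cleanup_errors=False, **kwargs):
            self._ignore_cleanup_errors = ignore_cleanup_errors
            super().__init__(*args, **kwargs)

        def __exit__(self, exc_type, exc_value, traceback):
            try:
                super().__exit__(exc_type, exc_value, traceback)
            except OSError:
                if not self._ignore_cleanup_errors:
                    raise

Under that assumption, the call sites in these diffs behave like the plain stdlib context manager, except that a failed directory removal on exit (for example, a file still held open on Windows) no longer raises and no longer fails an otherwise passing test.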
4 changes: 2 additions & 2 deletions examples/community/sde_drag.py
@@ -1,5 +1,4 @@
import math
import tempfile
from typing import List, Optional

import numpy as np
@@ -21,6 +20,7 @@
SlicedAttnAddedKVProcessor,
)
from diffusers.optimization import get_scheduler
from diffusers.utils.testing_utils import TemporaryDirectory


class SdeDragPipeline(DiffusionPipeline):
@@ -320,7 +320,7 @@ def train_lora(self, prompt, image, lora_step=100, lora_rank=16, generator=None)
lr_scheduler.step()
optimizer.zero_grad()

with tempfile.TemporaryDirectory() as save_lora_dir:
with TemporaryDirectory(ignore_cleanup_errors=True) as save_lora_dir:
StableDiffusionLoraLoaderMixin.save_lora_weights(
save_directory=save_lora_dir,
unet_lora_layers=unet_lora_layers,
7 changes: 4 additions & 3 deletions examples/consistency_distillation/test_lcm_lora.py
@@ -16,10 +16,11 @@
import logging
import os
import sys
import tempfile

import safetensors

from diffusers.utils.testing_utils import TemporaryDirectory


sys.path.append("..")
from test_examples_utils import ExamplesTestsAccelerate, run_command # noqa: E402
@@ -34,7 +35,7 @@

class TextToImageLCM(ExamplesTestsAccelerate):
def test_text_to_image_lcm_lora_sdxl(self):
with tempfile.TemporaryDirectory() as tmpdir:
with TemporaryDirectory(ignore_cleanup_errors=True) as tmpdir:
test_args = f"""
examples/consistency_distillation/train_lcm_distill_lora_sdxl.py
--pretrained_teacher_model hf-internal-testing/tiny-stable-diffusion-xl-pipe
@@ -61,7 +62,7 @@ def test_text_to_image_lcm_lora_sdxl(self):
self.assertTrue(is_lora)

def test_text_to_image_lcm_lora_sdxl_checkpointing(self):
with tempfile.TemporaryDirectory() as tmpdir:
with TemporaryDirectory(ignore_cleanup_errors=True) as tmpdir:
test_args = f"""
examples/consistency_distillation/train_lcm_distill_lora_sdxl.py
--pretrained_teacher_model hf-internal-testing/tiny-stable-diffusion-xl-pipe
15 changes: 8 additions & 7 deletions examples/controlnet/test_controlnet.py
@@ -16,7 +16,8 @@
import logging
import os
import sys
import tempfile

from diffusers.utils.testing_utils import TemporaryDirectory


sys.path.append("..")
@@ -32,7 +33,7 @@

class ControlNet(ExamplesTestsAccelerate):
def test_controlnet_checkpointing_checkpoints_total_limit(self):
with tempfile.TemporaryDirectory() as tmpdir:
with TemporaryDirectory(ignore_cleanup_errors=True) as tmpdir:
test_args = f"""
examples/controlnet/train_controlnet.py
--pretrained_model_name_or_path=hf-internal-testing/tiny-stable-diffusion-pipe
@@ -55,7 +56,7 @@ def test_controlnet_checkpointing_checkpoints_total_limit(self):
)

def test_controlnet_checkpointing_checkpoints_total_limit_removes_multiple_checkpoints(self):
with tempfile.TemporaryDirectory() as tmpdir:
with TemporaryDirectory(ignore_cleanup_errors=True) as tmpdir:
test_args = f"""
examples/controlnet/train_controlnet.py
--pretrained_model_name_or_path=hf-internal-testing/tiny-stable-diffusion-pipe
@@ -98,7 +99,7 @@ def test_controlnet_checkpointing_checkpoints_total_limit_removes_multiple_check

class ControlNetSDXL(ExamplesTestsAccelerate):
def test_controlnet_sdxl(self):
with tempfile.TemporaryDirectory() as tmpdir:
with TemporaryDirectory(ignore_cleanup_errors=True) as tmpdir:
test_args = f"""
examples/controlnet/train_controlnet_sdxl.py
--pretrained_model_name_or_path=hf-internal-testing/tiny-stable-diffusion-xl-pipe
@@ -119,7 +120,7 @@ def test_controlnet_sdxl(self):

class ControlNetSD3(ExamplesTestsAccelerate):
def test_controlnet_sd3(self):
with tempfile.TemporaryDirectory() as tmpdir:
with TemporaryDirectory(ignore_cleanup_errors=True) as tmpdir:
test_args = f"""
examples/controlnet/train_controlnet_sd3.py
--pretrained_model_name_or_path=DavyMorgan/tiny-sd3-pipe
@@ -140,7 +141,7 @@ def test_controlnet_sd3(self):

class ControlNetSD35(ExamplesTestsAccelerate):
def test_controlnet_sd3(self):
with tempfile.TemporaryDirectory() as tmpdir:
with TemporaryDirectory(ignore_cleanup_errors=True) as tmpdir:
test_args = f"""
examples/controlnet/train_controlnet_sd3.py
--pretrained_model_name_or_path=hf-internal-testing/tiny-sd35-pipe
@@ -161,7 +162,7 @@ def test_controlnet_sd3(self):

class ControlNetflux(ExamplesTestsAccelerate):
def test_controlnet_flux(self):
with tempfile.TemporaryDirectory() as tmpdir:
with TemporaryDirectory(ignore_cleanup_errors=True) as tmpdir:
test_args = f"""
examples/controlnet/train_controlnet_flux.py
--pretrained_model_name_or_path=hf-internal-testing/tiny-flux-pipe
9 changes: 5 additions & 4 deletions examples/custom_diffusion/test_custom_diffusion.py
@@ -16,7 +16,8 @@
import logging
import os
import sys
import tempfile

from diffusers.utils.testing_utils import TemporaryDirectory


sys.path.append("..")
@@ -32,7 +33,7 @@

class CustomDiffusion(ExamplesTestsAccelerate):
def test_custom_diffusion(self):
with tempfile.TemporaryDirectory() as tmpdir:
with TemporaryDirectory(ignore_cleanup_errors=True) as tmpdir:
test_args = f"""
examples/custom_diffusion/train_custom_diffusion.py
--pretrained_model_name_or_path hf-internal-testing/tiny-stable-diffusion-pipe
@@ -57,7 +58,7 @@ def test_custom_diffusion(self):
self.assertTrue(os.path.isfile(os.path.join(tmpdir, "<new1>.bin")))

def test_custom_diffusion_checkpointing_checkpoints_total_limit(self):
with tempfile.TemporaryDirectory() as tmpdir:
with TemporaryDirectory(ignore_cleanup_errors=True) as tmpdir:
test_args = f"""
examples/custom_diffusion/train_custom_diffusion.py
--pretrained_model_name_or_path=hf-internal-testing/tiny-stable-diffusion-pipe
@@ -79,7 +80,7 @@ def test_custom_diffusion_checkpointing_checkpoints_total_limit(self):
self.assertEqual({x for x in os.listdir(tmpdir) if "checkpoint" in x}, {"checkpoint-4", "checkpoint-6"})

def test_custom_diffusion_checkpointing_checkpoints_total_limit_removes_multiple_checkpoints(self):
with tempfile.TemporaryDirectory() as tmpdir:
with TemporaryDirectory(ignore_cleanup_errors=True) as tmpdir:
test_args = f"""
examples/custom_diffusion/train_custom_diffusion.py
--pretrained_model_name_or_path=hf-internal-testing/tiny-stable-diffusion-pipe
12 changes: 6 additions & 6 deletions examples/dreambooth/test_dreambooth.py
@@ -17,9 +17,9 @@
import os
import shutil
import sys
import tempfile

from diffusers import DiffusionPipeline, UNet2DConditionModel
from diffusers.utils.testing_utils import TemporaryDirectory


sys.path.append("..")
@@ -35,7 +35,7 @@

class DreamBooth(ExamplesTestsAccelerate):
def test_dreambooth(self):
with tempfile.TemporaryDirectory() as tmpdir:
with TemporaryDirectory(ignore_cleanup_errors=True) as tmpdir:
test_args = f"""
examples/dreambooth/train_dreambooth.py
--pretrained_model_name_or_path hf-internal-testing/tiny-stable-diffusion-pipe
@@ -58,7 +58,7 @@ def test_dreambooth(self):
self.assertTrue(os.path.isfile(os.path.join(tmpdir, "scheduler", "scheduler_config.json")))

def test_dreambooth_if(self):
with tempfile.TemporaryDirectory() as tmpdir:
with TemporaryDirectory(ignore_cleanup_errors=True) as tmpdir:
test_args = f"""
examples/dreambooth/train_dreambooth.py
--pretrained_model_name_or_path hf-internal-testing/tiny-if-pipe
@@ -87,7 +87,7 @@ def test_dreambooth_checkpointing(self):
instance_prompt = "photo"
pretrained_model_name_or_path = "hf-internal-testing/tiny-stable-diffusion-pipe"

with tempfile.TemporaryDirectory() as tmpdir:
with TemporaryDirectory(ignore_cleanup_errors=True) as tmpdir:
# Run training script with checkpointing
# max_train_steps == 4, checkpointing_steps == 2
# Should create checkpoints at steps 2, 4
@@ -163,7 +163,7 @@ def test_dreambooth_checkpointing(self):
self.assertTrue(os.path.isdir(os.path.join(tmpdir, "checkpoint-6")))

def test_dreambooth_checkpointing_checkpoints_total_limit(self):
with tempfile.TemporaryDirectory() as tmpdir:
with TemporaryDirectory(ignore_cleanup_errors=True) as tmpdir:
test_args = f"""
examples/dreambooth/train_dreambooth.py
--pretrained_model_name_or_path=hf-internal-testing/tiny-stable-diffusion-pipe
@@ -186,7 +186,7 @@ def test_dreambooth_checkpointing_checkpoints_total_limit(self):
)

def test_dreambooth_checkpointing_checkpoints_total_limit_removes_multiple_checkpoints(self):
with tempfile.TemporaryDirectory() as tmpdir:
with TemporaryDirectory(ignore_cleanup_errors=True) as tmpdir:
test_args = f"""
examples/dreambooth/train_dreambooth.py
--pretrained_model_name_or_path=hf-internal-testing/tiny-stable-diffusion-pipe
10 changes: 5 additions & 5 deletions examples/dreambooth/test_dreambooth_flux.py
@@ -17,9 +17,9 @@
import os
import shutil
import sys
import tempfile

from diffusers import DiffusionPipeline, FluxTransformer2DModel
from diffusers.utils.testing_utils import TemporaryDirectory


sys.path.append("..")
@@ -40,7 +40,7 @@ class DreamBoothFlux(ExamplesTestsAccelerate):
script_path = "examples/dreambooth/train_dreambooth_flux.py"

def test_dreambooth(self):
with tempfile.TemporaryDirectory() as tmpdir:
with TemporaryDirectory(ignore_cleanup_errors=True) as tmpdir:
test_args = f"""
{self.script_path}
--pretrained_model_name_or_path {self.pretrained_model_name_or_path}
@@ -63,7 +63,7 @@ def test_dreambooth(self):
self.assertTrue(os.path.isfile(os.path.join(tmpdir, "scheduler", "scheduler_config.json")))

def test_dreambooth_checkpointing(self):
with tempfile.TemporaryDirectory() as tmpdir:
with TemporaryDirectory(ignore_cleanup_errors=True) as tmpdir:
# Run training script with checkpointing
# max_train_steps == 4, checkpointing_steps == 2
# Should create checkpoints at steps 2, 4
@@ -139,7 +139,7 @@ def test_dreambooth_checkpointing(self):
self.assertTrue(os.path.isdir(os.path.join(tmpdir, "checkpoint-6")))

def test_dreambooth_checkpointing_checkpoints_total_limit(self):
with tempfile.TemporaryDirectory() as tmpdir:
with TemporaryDirectory(ignore_cleanup_errors=True) as tmpdir:
test_args = f"""
{self.script_path}
--pretrained_model_name_or_path={self.pretrained_model_name_or_path}
@@ -162,7 +162,7 @@ def test_dreambooth_checkpointing_checkpoints_total_limit(self):
)

def test_dreambooth_checkpointing_checkpoints_total_limit_removes_multiple_checkpoints(self):
with tempfile.TemporaryDirectory() as tmpdir:
with TemporaryDirectory(ignore_cleanup_errors=True) as tmpdir:
test_args = f"""
{self.script_path}
--pretrained_model_name_or_path={self.pretrained_model_name_or_path}