
Commit 7c2f7b4

Commit message: up
1 parent: 628306b
File tree

1 file changed: 2 additions, 1 deletion


tests/models/transformers/test_models_transformer_flux.py

Lines changed: 2 additions & 1 deletion
@@ -20,7 +20,7 @@
 from diffusers import FluxTransformer2DModel
 from diffusers.models.attention_processor import FluxIPAdapterJointAttnProcessor2_0
 from diffusers.models.embeddings import ImageProjection
-from diffusers.utils.testing_utils import enable_full_determinism, torch_device
+from diffusers.utils.testing_utils import enable_full_determinism, is_peft_available, torch_device


 from ..test_modeling_common import LoraHotSwappingForModelTesterMixin, ModelTesterMixin, TorchCompileTesterMixin

@@ -174,6 +174,7 @@ def test_gradient_checkpointing_is_applied(self):

     # The test exists for cases like
     # https://github.com/huggingface/diffusers/issues/11874
+    @unittest.skipIf(not is_peft_available(), "Only with PEFT")
     def test_lora_exclude_modules(self):
         from peft import LoraConfig, get_peft_model_state_dict, inject_adapter_in_model, set_peft_model_state_dict

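For reference, the guard this commit adds follows the standard `unittest.skipIf` pattern: the availability check runs once, when the test class is defined, and on installs without PEFT the test is reported as skipped rather than failing with an ImportError. Below is a minimal, self-contained sketch of that pattern; the `FluxLoRATests` class name and the local `is_peft_available` stand-in are illustrative assumptions (in diffusers, the real helper is imported from `diffusers.utils.testing_utils`).

import importlib.util
import unittest


def is_peft_available():
    # Assumed stand-in for diffusers.utils.testing_utils.is_peft_available:
    # report whether the optional `peft` package can be imported.
    return importlib.util.find_spec("peft") is not None


class FluxLoRATests(unittest.TestCase):  # hypothetical class name
    # skipIf is evaluated once, when the class body executes; without peft
    # the method is marked "skipped" instead of erroring at import time.
    @unittest.skipIf(not is_peft_available(), "Only with PEFT")
    def test_lora_exclude_modules(self):
        # Importing peft inside the test body is safe because the body
        # only runs when the decorator's condition lets it through.
        from peft import LoraConfig

        self.assertEqual(LoraConfig(r=4).r, 4)


if __name__ == "__main__":
    unittest.main()

Run without peft installed, unittest reports the test as skipped ("OK (skipped=1)"); with peft present, the body executes normally.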