
Commit f0955a1

move lora integration tests to nightly.
1 parent 24c7d57 commit f0955a1

File tree

3 files changed: +6 -1 lines

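For context on what the `nightly` marker does, here is a minimal, hedged sketch of how `slow` and `nightly` in `diffusers.utils.testing_utils` are typically implemented: thin wrappers around `unittest.skipUnless`, gated by the `RUN_SLOW` and `RUN_NIGHTLY` environment variables. The helper `_env_flag` below is illustrative, not the library's actual code.

import os
import unittest


def _env_flag(name: str) -> bool:
    # Illustrative helper: treat "1", "true", "yes", "on" as enabled.
    return os.getenv(name, "").lower() in {"1", "true", "yes", "on"}


def slow(test_case):
    # Skip unless slow tests were explicitly requested (RUN_SLOW=1).
    return unittest.skipUnless(_env_flag("RUN_SLOW"), "test is slow")(test_case)


def nightly(test_case):
    # Skip unless the nightly CI job requested these tests (RUN_NIGHTLY=1).
    return unittest.skipUnless(_env_flag("RUN_NIGHTLY"), "test is nightly")(test_case)

Under that assumption, the decorated integration classes below only run in CI jobs that export RUN_NIGHTLY=1 (and RUN_SLOW=1), which is what "move to nightly" amounts to.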

tests/lora/test_lora_layers_flux.py

Lines changed: 3 additions & 1 deletion
@@ -27,6 +27,7 @@
 from diffusers.utils.testing_utils import (
     floats_tensor,
     is_peft_available,
+    nightly,
     numpy_cosine_similarity_distance,
     require_peft_backend,
     require_torch_gpu,
@@ -165,9 +166,10 @@ def test_modify_padding_mode(self):


 @slow
+@nightly
 @require_torch_gpu
 @require_peft_backend
-# @unittest.skip("We cannot run inference on this model with the current CI hardware")
+@unittest.skip("We cannot run inference on this model with the current CI hardware")
 # TODO (DN6, sayakpaul): move these tests to a beefier GPU
 class FluxLoRAIntegrationTests(unittest.TestCase):
     """internal note: The integration slices were obtained on audace.

tests/lora/test_lora_layers_sd.py

Lines changed: 2 additions & 0 deletions
@@ -34,6 +34,7 @@
 from diffusers.utils.import_utils import is_accelerate_available
 from diffusers.utils.testing_utils import (
     load_image,
+    nightly,
     numpy_cosine_similarity_distance,
     require_peft_backend,
     require_torch_gpu,
@@ -207,6 +208,7 @@ def test_integration_move_lora_dora_cpu(self):


 @slow
+@nightly
 @require_torch_gpu
 @require_peft_backend
 class LoraIntegrationTests(unittest.TestCase):

tests/lora/test_lora_layers_sdxl.py

Lines changed: 1 addition & 0 deletions
@@ -113,6 +113,7 @@ def tearDown(self):


 @slow
+@nightly
 @require_torch_gpu
 @require_peft_backend
 class LoraSDXLIntegrationTests(unittest.TestCase):
