1 parent ab5959e commit 3b9b343
tests/lora/test_lora_layers_flux.py
@@ -158,6 +158,8 @@ def test_with_alpha_in_state_dict(self):
 @slow
 @require_torch_gpu
 @require_peft_backend
+@unittest.skip("We cannot run inference on this model with the current CI hardware")
+# TODO (DN6, sayakpaul): move these tests to a beefier GPU
 class FluxLoRAIntegrationTests(unittest.TestCase):
     """internal note: The integration slices were obtained on audace."""
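For context, a class-level unittest.skip decorator marks every test method in the decorated class as skipped, so the suite stays green on CI hardware that cannot run the model. A minimal sketch of that behavior, where the test method name and body are purely illustrative and not taken from the diff above:

import unittest


@unittest.skip("We cannot run inference on this model with the current CI hardware")
class FluxLoRAIntegrationTests(unittest.TestCase):
    # The class-level skip short-circuits every test in this class:
    # the runner reports them as skipped and never executes their bodies.
    def test_flux_lora_inference(self):
        self.fail("never runs because of the class-level skip")


if __name__ == "__main__":
    unittest.main()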