1 parent 185a78f commit 679c18c
tests/pipelines/test_pipelines.py
@@ -1802,6 +1802,17 @@ def test_pipe_same_device_id_offload(self):
         sd.maybe_free_model_hooks()
         assert sd._offload_gpu_id == 5
 
+    def test_wrong_model(self):
+        tokenizer = CLIPTokenizer.from_pretrained("hf-internal-testing/tiny-random-clip")
+        with self.assertRaises(ValueError) as error_context:
+            _ = StableDiffusionPipeline.from_pretrained(
+                "hf-internal-testing/diffusers-stable-diffusion-tiny-all", text_encoder=tokenizer
+            )
+
+        assert "Expected" in str(error_context.exception)
+        assert "text_encoder" in str(error_context.exception)
+        assert "CLIPTokenizer" in str(error_context.exception)
+
 
 
 @slow
 @require_torch_gpu
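
The new test checks that passing a model of the wrong type for a pipeline component (here, a CLIPTokenizer as text_encoder) makes StableDiffusionPipeline.from_pretrained raise a ValueError whose message names the component and the offending class. Below is a minimal standalone sketch of that user-facing behavior, assuming diffusers and transformers are installed; the checkpoint names are taken directly from the test above.

# Minimal sketch of the behavior exercised by the new test
# (assumes `diffusers` and `transformers` are installed).
from transformers import CLIPTokenizer
from diffusers import StableDiffusionPipeline

tokenizer = CLIPTokenizer.from_pretrained("hf-internal-testing/tiny-random-clip")

try:
    # Passing a tokenizer where a text encoder is expected should be rejected.
    StableDiffusionPipeline.from_pretrained(
        "hf-internal-testing/diffusers-stable-diffusion-tiny-all", text_encoder=tokenizer
    )
except ValueError as err:
    # Per the assertions in the test, the message is expected to mention
    # "Expected", the component name "text_encoder", and "CLIPTokenizer".
    print(err)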