From d9c4f1e0e62097c43d3a428c66bda52b3e1ff566 Mon Sep 17 00:00:00 2001
From: Aryan
Date: Mon, 30 Jun 2025 18:33:28 +0200
Subject: [PATCH 1/2] update

---
 tests/pipelines/test_pipelines_common.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/tests/pipelines/test_pipelines_common.py b/tests/pipelines/test_pipelines_common.py
index 69dd79bb5627..b4f07057a489 100644
--- a/tests/pipelines/test_pipelines_common.py
+++ b/tests/pipelines/test_pipelines_common.py
@@ -1387,14 +1387,14 @@ def test_float16_inference(self, expected_max_diff=5e-2):
         if "generator" in inputs:
             inputs["generator"] = self.get_generator(0)
 
-        output = pipe(**inputs)[0]
+        output = pipe(**inputs)[0].cpu()
 
         fp16_inputs = self.get_dummy_inputs(torch_device)
         # Reset generator in case it is used inside dummy inputs
         if "generator" in fp16_inputs:
             fp16_inputs["generator"] = self.get_generator(0)
 
-        output_fp16 = pipe_fp16(**fp16_inputs)[0]
+        output_fp16 = pipe_fp16(**fp16_inputs)[0].cpu()
         max_diff = numpy_cosine_similarity_distance(output.flatten(), output_fp16.flatten())
         assert max_diff < 1e-2
 

From b1f2fd18c0d2172fc542e5b420981b657846133c Mon Sep 17 00:00:00 2001
From: Aryan
Date: Mon, 30 Jun 2025 18:39:36 +0200
Subject: [PATCH 2/2] update

---
 tests/pipelines/test_pipelines_common.py | 12 +++++++-----
 1 file changed, 7 insertions(+), 5 deletions(-)

diff --git a/tests/pipelines/test_pipelines_common.py b/tests/pipelines/test_pipelines_common.py
index b4f07057a489..f87778b260c9 100644
--- a/tests/pipelines/test_pipelines_common.py
+++ b/tests/pipelines/test_pipelines_common.py
@@ -1378,7 +1378,6 @@ def test_float16_inference(self, expected_max_diff=5e-2):
         for component in pipe_fp16.components.values():
             if hasattr(component, "set_default_attn_processor"):
                 component.set_default_attn_processor()
-
         pipe_fp16.to(torch_device, torch.float16)
         pipe_fp16.set_progress_bar_config(disable=None)
 
@@ -1386,17 +1385,20 @@ def test_float16_inference(self, expected_max_diff=5e-2):
         # Reset generator in case it is used inside dummy inputs
         if "generator" in inputs:
             inputs["generator"] = self.get_generator(0)
-
-        output = pipe(**inputs)[0].cpu()
+        output = pipe(**inputs)[0]
 
         fp16_inputs = self.get_dummy_inputs(torch_device)
         # Reset generator in case it is used inside dummy inputs
         if "generator" in fp16_inputs:
             fp16_inputs["generator"] = self.get_generator(0)
+        output_fp16 = pipe_fp16(**fp16_inputs)[0]
+
+        if isinstance(output, torch.Tensor):
+            output = output.cpu()
+            output_fp16 = output_fp16.cpu()
 
-        output_fp16 = pipe_fp16(**fp16_inputs)[0].cpu()
         max_diff = numpy_cosine_similarity_distance(output.flatten(), output_fp16.flatten())
-        assert max_diff < 1e-2
+        assert max_diff < expected_max_diff
 
     @unittest.skipIf(torch_device not in ["cuda", "xpu"], reason="float16 requires CUDA or XPU")
     @require_accelerator
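
Note (not part of the patches): the sketch below illustrates the comparison pattern the updated test relies on, namely moving outputs to CPU only when the pipeline actually returns torch tensors and then comparing fp32 vs fp16 results via a cosine-similarity distance against expected_max_diff. The helpers cosine_similarity_distance and to_comparable are hypothetical stand-ins for illustration only; the exact implementation of diffusers' numpy_cosine_similarity_distance is assumed, not quoted.

    # Minimal, self-contained sketch (not the diffusers implementation) of the
    # fp32-vs-fp16 comparison pattern used by the patched test.
    import numpy as np
    import torch


    def cosine_similarity_distance(a: np.ndarray, b: np.ndarray) -> float:
        # 1 - cosine similarity of the two flattened arrays; 0.0 means the
        # outputs point in the same direction, larger values mean divergence.
        a = a.astype(np.float64).ravel()
        b = b.astype(np.float64).ravel()
        similarity = np.dot(a, b) / (np.linalg.norm(a) * np.linalg.norm(b))
        return float(1.0 - similarity)


    def to_comparable(output):
        # Pipelines may return torch tensors (possibly on an accelerator) or
        # numpy arrays; only tensors need the .cpu() hop before comparison,
        # which is why the patch guards the conversion with isinstance().
        if isinstance(output, torch.Tensor):
            return output.detach().float().cpu().numpy()
        return np.asarray(output, dtype=np.float32)


    if __name__ == "__main__":
        expected_max_diff = 5e-2
        # Stand-ins for pipe(**inputs)[0] and pipe_fp16(**fp16_inputs)[0].
        output = torch.randn(1, 3, 8, 8, dtype=torch.float32)
        output_fp16 = output.to(torch.float16)

        max_diff = cosine_similarity_distance(to_comparable(output), to_comparable(output_fp16))
        assert max_diff < expected_max_diff, f"{max_diff=} exceeds {expected_max_diff=}"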