Commit 683569d

Only enable fp16 on ZImage on newer pytorch. (#11344)
1 parent ea2c117 commit 683569d

File tree

1 file changed: +8 -1 lines changed

comfy/supported_models.py

Lines changed: 8 additions & 1 deletion
@@ -28,6 +28,7 @@
 from . import latent_formats

 from . import diffusers_convert
+import comfy.model_management

 class SD15(supported_models_base.BASE):
     unet_config = {
@@ -1028,7 +1029,13 @@ class ZImage(Lumina2):

     memory_usage_factor = 2.0

-    supported_inference_dtypes = [torch.bfloat16, torch.float16, torch.float32]
+    supported_inference_dtypes = [torch.bfloat16, torch.float32]
+
+    def __init__(self, unet_config):
+        super().__init__(unet_config)
+        if comfy.model_management.extended_fp16_support():
+            self.supported_inference_dtypes = self.supported_inference_dtypes.copy()
+            self.supported_inference_dtypes.insert(1, torch.float16)

     def clip_target(self, state_dict={}):
         pref = self.text_encoder_key_prefix[0]
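
For context, the added __init__ copies the class-level supported_inference_dtypes list before inserting torch.float16, so fp16 is enabled only for ZImage instances, and only when the runtime check passes, without mutating the list shared with other model classes. Below is a minimal, self-contained sketch of the same pattern. has_extended_fp16() is a hypothetical stand-in for comfy.model_management.extended_fp16_support(), whose implementation is not part of this diff; per the commit message it is assumed to gate fp16 on a newer PyTorch.

import torch


def has_extended_fp16():
    # Hypothetical stand-in for comfy.model_management.extended_fp16_support().
    # Assumed (per the commit message) to check for a newer PyTorch; the real
    # check lives in comfy/model_management.py and may differ.
    return int(torch.__version__.split(".")[0]) >= 2


class Base:
    # Class-level default, shared by every model class that does not override it.
    supported_inference_dtypes = [torch.bfloat16, torch.float32]


class ZImageLike(Base):
    def __init__(self):
        super().__init__()
        if has_extended_fp16():
            # Copy first: calling insert() on the class attribute itself would
            # also enable fp16 for every other class sharing that list.
            self.supported_inference_dtypes = self.supported_inference_dtypes.copy()
            self.supported_inference_dtypes.insert(1, torch.float16)


m = ZImageLike()
print(m.supported_inference_dtypes)     # bfloat16, float16, float32 when the check passes
print(Base.supported_inference_dtypes)  # unchanged: bfloat16, float32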
