Commit b486885

Disable bfloat16 on older mac.
1 parent 0229228

File tree

1 file changed: +13, -6 lines

comfy/model_management.py

Lines changed: 13 additions & 6 deletions
@@ -886,14 +886,19 @@ def pytorch_attention_flash_attention():
         return True
     return False

-def force_upcast_attention_dtype():
-    upcast = args.force_upcast_attention
+def mac_version():
     try:
-        macos_version = tuple(int(n) for n in platform.mac_ver()[0].split("."))
-        if (14, 5) <= macos_version <= (15, 2): # black image bug on recent versions of macOS
-            upcast = True
+        return tuple(int(n) for n in platform.mac_ver()[0].split("."))
     except:
-        pass
+        return None
+
+def force_upcast_attention_dtype():
+    upcast = args.force_upcast_attention
+
+    macos_version = mac_version()
+    if macos_version is not None and ((14, 5) <= macos_version <= (15, 2)): # black image bug on recent versions of macOS
+        upcast = True
+
     if upcast:
         return torch.float32
     else:
@@ -1034,6 +1039,8 @@ def should_use_bf16(device=None, model_params=0, prioritize_performance=True, ma
         return False

     if mps_mode():
+        if mac_version() < (14,):
+            return False
         return True

     if cpu_mode():

Comments (0)