File tree Expand file tree Collapse file tree 2 files changed +6
-1
lines changed
ldm/modules/diffusionmodules Expand file tree Collapse file tree 2 files changed +6
-1
lines changed Original file line number Diff line number Diff line change @@ -297,7 +297,7 @@ def vae_attention():
297297 if model_management .xformers_enabled_vae ():
298298 logging .info ("Using xformers attention in VAE" )
299299 return xformers_attention
300- elif model_management .pytorch_attention_enabled ():
300+ elif model_management .pytorch_attention_enabled_vae ():
301301 logging .info ("Using pytorch attention in VAE" )
302302 return pytorch_attention
303303 else :
Original file line number Diff line number Diff line change @@ -912,6 +912,11 @@ def pytorch_attention_enabled():
912912 global ENABLE_PYTORCH_ATTENTION
913913 return ENABLE_PYTORCH_ATTENTION
914914
def pytorch_attention_enabled_vae():
    """Return True when pytorch attention should be used for the VAE.

    Mirrors pytorch_attention_enabled(), except that AMD GPUs are always
    reported as disabled: enabling pytorch attention on AMD currently
    causes a crash when doing high res.
    """
    return False if is_amd() else pytorch_attention_enabled()
919+
915920def pytorch_attention_flash_attention ():
916921 global ENABLE_PYTORCH_ATTENTION
917922 if ENABLE_PYTORCH_ATTENTION :
You can’t perform that action at this time.
0 commit comments