mirror of https://github.com/comfyanonymous/ComfyUI.git, synced 2025-07-02 05:17:10 +08:00
Apple most likely is never fixing the fp16 attention bug. (#8485)
This commit is contained in:
parent c7b25784b1
commit 6e28a46454
@@ -1052,7 +1052,7 @@ def pytorch_attention_flash_attention():
     global ENABLE_PYTORCH_ATTENTION
     if ENABLE_PYTORCH_ATTENTION:
         #TODO: more reliable way of checking for flash attention?
-        if is_nvidia(): #pytorch flash attention only works on Nvidia
+        if is_nvidia():
             return True
         if is_intel_xpu():
             return True
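The context lines still carry the #TODO about finding a more reliable way to detect flash attention than checking the GPU vendor. A minimal sketch of what such a probe could look like, assuming PyTorch >= 2.3 (sdpa_kernel and SDPBackend are real PyTorch APIs; the probe itself is an illustration, not ComfyUI code):

import torch
from torch.nn.attention import SDPBackend, sdpa_kernel

def flash_attention_probe(device="cuda", dtype=torch.float16):
    # Illustrative only: restrict dispatch to the flash kernel and check
    # whether a tiny scaled_dot_product_attention call succeeds.
    if not torch.cuda.is_available():
        return False
    q = torch.randn(1, 1, 8, 64, device=device, dtype=dtype)
    try:
        with sdpa_kernel(SDPBackend.FLASH_ATTENTION):
            torch.nn.functional.scaled_dot_product_attention(q, q, q)
        return True
    except RuntimeError:
        # PyTorch raises when no enabled backend can serve the call.
        return False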
@@ -1068,7 +1068,7 @@ def force_upcast_attention_dtype():
     upcast = args.force_upcast_attention
 
     macos_version = mac_version()
-    if macos_version is not None and ((14, 5) <= macos_version < (16,)): # black image bug on recent versions of macOS
+    if macos_version is not None and ((14, 5) <= macos_version): # black image bug on recent versions of macOS, I don't think it's ever getting fixed
         upcast = True
 
     if upcast: