mirror of
https://github.com/comfyanonymous/ComfyUI.git
synced 2025-03-14 13:17:32 +00:00
Work around black image bug on Mac 14.5 by forcing attention upcasting.
This commit is contained in:
parent
83d969e397
commit
8508df2569
comfy
@ -19,12 +19,13 @@ from comfy.cli_args import args
|
|||||||
import comfy.ops
|
import comfy.ops
|
||||||
ops = comfy.ops.disable_weight_init
|
ops = comfy.ops.disable_weight_init
|
||||||
|
|
||||||
|
FORCE_UPCAST_ATTENTION_DTYPE = model_management.force_upcast_attention_dtype()
|
||||||
|
|
||||||
def get_attn_precision(attn_precision):
    """Resolve the dtype attention math should be upcast to.

    Returns None when the user explicitly disabled upcasting, the
    platform-forced dtype (see model_management.force_upcast_attention_dtype)
    when one is set, and otherwise the caller-supplied precision unchanged.
    """
    # User override wins over everything, including the platform workaround.
    if args.dont_upcast_attention:
        return None
    # Platform-level forced dtype (e.g. macOS 14.5 black-image workaround).
    forced = FORCE_UPCAST_ATTENTION_DTYPE
    return forced if forced is not None else attn_precision
|
||||||
|
|
||||||
def exists(val):
|
def exists(val):
|
||||||
|
@ -5,6 +5,7 @@ from comfy.cli_args import args
|
|||||||
import comfy.utils
|
import comfy.utils
|
||||||
import torch
|
import torch
|
||||||
import sys
|
import sys
|
||||||
|
import platform
|
||||||
|
|
||||||
class VRAMState(Enum):
|
class VRAMState(Enum):
|
||||||
DISABLED = 0 #No vram present: no need to move models to vram
|
DISABLED = 0 #No vram present: no need to move models to vram
|
||||||
@ -685,6 +686,18 @@ def pytorch_attention_flash_attention():
|
|||||||
return True
|
return True
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
def force_upcast_attention_dtype():
    """Return a dtype attention should be forced to upcast to, or None.

    Upcasting is forced when the user passed --force-upcast-attention, or on
    macOS Sonoma 14.5, where fp16 attention produces black images.

    Returns:
        torch.float32 when upcasting must be forced, otherwise None.
    """
    upcast = args.force_upcast_attention
    try:
        # platform.mac_ver() returns ('', ...) on non-Mac systems, so this
        # check is a no-op everywhere except the affected macOS release.
        if platform.mac_ver()[0] in ['14.5']:  # black image bug on OSX Sonoma 14.5
            upcast = True
    except Exception:
        # Best-effort platform probe: never let version detection break startup.
        # (Was a bare `except:`, which also swallowed KeyboardInterrupt/SystemExit.)
        pass
    if upcast:
        return torch.float32
    else:
        return None
|
||||||
|
|
||||||
def get_free_memory(dev=None, torch_free_too=False):
|
def get_free_memory(dev=None, torch_free_too=False):
|
||||||
global directml_enabled
|
global directml_enabled
|
||||||
if dev is None:
|
if dev is None:
|
||||||
|
Loading…
Reference in New Issue
Block a user