Enable pytorch attention by default on xpu.

This commit is contained in:
comfyanonymous 2023-09-17 04:09:19 -04:00
parent 0665749b1a
commit 321c5fa295

View File

@@ -165,6 +165,9 @@ try:
ENABLE_PYTORCH_ATTENTION = True
if torch.cuda.is_bf16_supported():
VAE_DTYPE = torch.bfloat16
if is_intel_xpu():
if args.use_split_cross_attention == False and args.use_quad_cross_attention == False:
ENABLE_PYTORCH_ATTENTION = True
except:
pass