Add a command line option to disable upcasting in some cross attention ops.

This commit is contained in:
comfyanonymous 2023-01-29 13:12:22 -05:00
parent 50db297cf6
commit 3661e10648

View File

@@ -6,6 +6,10 @@ import threading
import queue
import traceback
# Opt-in CLI flag: when the user passes --dont-upcast-attention, tell the
# cross-attention ops (which read ATTN_PRECISION) to stay in fp16 instead
# of upcasting to fp32. Must run before the modules that read the env var
# are imported.
upcast_disabled = '--dont-upcast-attention' in sys.argv
if upcast_disabled:
    print("disabling upcasting of attention")
    os.environ['ATTN_PRECISION'] = "fp16"
import torch
import nodes