Make --force-fp16 actually force the diffusion model to be fp16.

This commit is contained in:
comfyanonymous 2025-02-11 08:31:46 -05:00
parent e57d2282d1
commit af4b7c91be
2 changed files with 4 additions and 6 deletions

View File

@@ -191,3 +191,6 @@ if args.windows_standalone_build:
if args.disable_auto_launch:
args.auto_launch = False
if args.force_fp16:
args.fp16_unet = True

View File

@@ -262,15 +262,10 @@ elif args.highvram or args.gpu_only:
vram_state = VRAMState.HIGH_VRAM
FORCE_FP32 = False
FORCE_FP16 = False
if args.force_fp32:
logging.info("Forcing FP32, if this improves things please report it.")
FORCE_FP32 = True
if args.force_fp16:
logging.info("Forcing FP16.")
FORCE_FP16 = True
if lowvram_available:
if set_vram_to in (VRAMState.LOW_VRAM, VRAMState.NO_VRAM):
vram_state = set_vram_to
@@ -1003,7 +998,7 @@ def should_use_fp16(device=None, model_params=0, prioritize_performance=True, ma
if is_device_cpu(device):
return False
if FORCE_FP16:
if args.force_fp16:
return True
if FORCE_FP32: