Mirror of https://github.com/comfyanonymous/ComfyUI.git (synced 2025-01-25 15:55:18 +00:00)
Empty cache on mps.

Commit 94680732d3, parent d200fa1314.
@@ -389,7 +389,10 @@ def should_use_fp16():

 def soft_empty_cache():
     global xpu_available
-    if xpu_available:
+    global vram_state
+    if vram_state == VRAMState.MPS:
+        torch.mps.empty_cache()
+    elif xpu_available:
         torch.xpu.empty_cache()
     elif torch.cuda.is_available():
         if torch.version.cuda: #This seems to make things worse on ROCm so I only do it for cuda
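For context, a minimal sketch of how the patched helper reads as a whole. VRAMState, vram_state, and xpu_available are defined elsewhere in model_management.py, so the stand-ins below are assumptions for the sake of a self-contained example; the hunk is also cut off after the torch.version.cuda check, so the CUDA branch body shown here is assumed rather than taken from the diff.

import enum
import torch

# Stand-ins for names model_management.py defines elsewhere (assumed values).
class VRAMState(enum.Enum):
    NORMAL_VRAM = enum.auto()
    MPS = enum.auto()

vram_state = VRAMState.NORMAL_VRAM
xpu_available = False

def soft_empty_cache():
    global vram_state
    if vram_state == VRAMState.MPS:
        # New in this commit: release cached allocations held by Apple's MPS backend.
        torch.mps.empty_cache()
    elif xpu_available:
        # Intel XPU path.
        torch.xpu.empty_cache()
    elif torch.cuda.is_available():
        if torch.version.cuda:  # skipped on ROCm, where it seems to make things worse
            # Hunk is truncated here; emptying the CUDA cache is the assumed body.
            torch.cuda.empty_cache()

The effect of the change is that callers of soft_empty_cache() now return cached device memory on Apple Silicon as well, instead of only on XPU and CUDA devices.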