From 31b6852f1963d8c85bbf5a9ad0228217faf9753c Mon Sep 17 00:00:00 2001
From: rickard
Date: Mon, 23 Dec 2024 08:19:34 +0100
Subject: [PATCH] load ltx loras trained with finetrainers

---
 comfy/lora.py | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/comfy/lora.py b/comfy/lora.py
index ec3da6f4..ccd52a66 100644
--- a/comfy/lora.py
+++ b/comfy/lora.py
@@ -64,6 +64,7 @@ def load_lora(lora, to_load, log_missing=True):
         diffusers3_lora = "{}.lora.up.weight".format(x)
         mochi_lora = "{}.lora_B".format(x)
         transformers_lora = "{}.lora_linear_layer.up.weight".format(x)
+        ltx_lora = "transformer.{}.lora_B.weight".format(x)
         A_name = None
 
         if regular_lora in lora.keys():
@@ -90,6 +91,10 @@ def load_lora(lora, to_load, log_missing=True):
             A_name = transformers_lora
             B_name ="{}.lora_linear_layer.down.weight".format(x)
             mid_name = None
+        elif ltx_lora in lora.keys():
+            A_name = ltx_lora
+            B_name = "transformer.{}.lora_A.weight".format(x)
+            mid_name = None
 
         if A_name is not None:
             mid = None
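
Note (not part of the commit): judging by the key pattern this patch matches, finetrainers saves LTX-Video LoRAs with PEFT-style names, prefixing each module path with "transformer." and storing the down-projection as lora_A.weight and the up-projection as lora_B.weight. ComfyUI's load_lora() uses the opposite letters internally: A_name holds the up weight and B_name the down weight (compare the transformers_lora branch above, where the ".up.weight" key goes to A_name), which is why the new branch assigns lora_B to A_name. A minimal sketch of the lookup, with a made-up module path used purely for illustration:

    # Hypothetical state-dict entries as finetrainers might save them;
    # the module path "blocks.0.attn.to_q" is invented for this example.
    lora = {
        "transformer.blocks.0.attn.to_q.lora_A.weight": "down-projection tensor",
        "transformer.blocks.0.attn.to_q.lora_B.weight": "up-projection tensor",
    }
    x = "blocks.0.attn.to_q"  # model key load_lora() is currently matching

    ltx_lora = "transformer.{}.lora_B.weight".format(x)
    if ltx_lora in lora.keys():
        A_name = ltx_lora                                  # PEFT lora_B -> ComfyUI "A" (up)
        B_name = "transformer.{}.lora_A.weight".format(x)  # PEFT lora_A -> ComfyUI "B" (down)
        mid_name = None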