load ltx loras trained with finetrainers

rickard 2024-12-23 08:19:34 +01:00
parent e44d0ac7f7
commit 31b6852f19


@@ -64,6 +64,7 @@ def load_lora(lora, to_load, log_missing=True):
         diffusers3_lora = "{}.lora.up.weight".format(x)
         mochi_lora = "{}.lora_B".format(x)
         transformers_lora = "{}.lora_linear_layer.up.weight".format(x)
+        ltx_lora = "transformer.{}.lora_B.weight".format(x)
         A_name = None

         if regular_lora in lora.keys():
@@ -90,6 +91,10 @@ def load_lora(lora, to_load, log_missing=True):
             A_name = transformers_lora
             B_name = "{}.lora_linear_layer.down.weight".format(x)
             mid_name = None
+        elif ltx_lora in lora.keys():
+            A_name = ltx_lora
+            B_name = "transformer.{}.lora_A.weight".format(x)
+            mid_name = None

         if A_name is not None:
             mid = None
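
For context on the key mapping: finetrainers exports LTX-Video LoRAs in the PEFT layout, where every key is prefixed with "transformer." and lora_A / lora_B hold the down- and up-projection weights. ComfyUI's A_name / B_name refer to the up and down weights respectively, so binding A_name to lora_B and B_name to lora_A is intentional, not a swap. A minimal sketch of the detection logic the commit adds, using a hypothetical module path and dummy tensor shapes (neither is taken from the commit or a real checkpoint):

import torch

# finetrainers/PEFT layout: lora_A = down-projection, lora_B = up-projection,
# all keys prefixed with "transformer.". The module path and shapes below
# are illustrative placeholders.
x = "transformer_blocks.0.attn1.to_q"
lora = {
    "transformer.{}.lora_A.weight".format(x): torch.zeros(16, 2048),  # down
    "transformer.{}.lora_B.weight".format(x): torch.zeros(2048, 16),  # up
}

# Same detection the commit adds inside load_lora():
ltx_lora = "transformer.{}.lora_B.weight".format(x)
if ltx_lora in lora.keys():
    A_name = ltx_lora                                    # up weight
    B_name = "transformer.{}.lora_A.weight".format(x)    # down weight
    print("up:  ", A_name)
    print("down:", B_name)

Running the sketch prints the resolved up/down key names that load_lora would go on to patch.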