From 15c39ea75774b5d36065eb8caa5ecbbfcd168b71 Mon Sep 17 00:00:00 2001
From: comfyanonymous
Date: Tue, 26 Nov 2024 03:34:36 -0500
Subject: [PATCH] Support for the official mochi lora format.

---
 comfy/lora.py | 11 +++++++++++
 1 file changed, 11 insertions(+)

diff --git a/comfy/lora.py b/comfy/lora.py
index 18602f24..1080169b 100644
--- a/comfy/lora.py
+++ b/comfy/lora.py
@@ -62,6 +62,7 @@ def load_lora(lora, to_load):
         diffusers_lora = "{}_lora.up.weight".format(x)
         diffusers2_lora = "{}.lora_B.weight".format(x)
         diffusers3_lora = "{}.lora.up.weight".format(x)
+        mochi_lora = "{}.lora_B".format(x)
         transformers_lora = "{}.lora_linear_layer.up.weight".format(x)
         A_name = None
 
@@ -81,6 +82,10 @@ def load_lora(lora, to_load):
             A_name = diffusers3_lora
             B_name = "{}.lora.down.weight".format(x)
             mid_name = None
+        elif mochi_lora in lora.keys():
+            A_name = mochi_lora
+            B_name = "{}.lora_A".format(x)
+            mid_name = None
         elif transformers_lora in lora.keys():
             A_name = transformers_lora
             B_name ="{}.lora_linear_layer.down.weight".format(x)
@@ -362,6 +367,12 @@ def model_lora_keys_unet(model, key_map={}):
                 key_map["lycoris_{}".format(k[:-len(".weight")].replace(".", "_"))] = to #simpletrainer lycoris
                 key_map["lora_transformer_{}".format(k[:-len(".weight")].replace(".", "_"))] = to #onetrainer
 
+    if isinstance(model, comfy.model_base.GenmoMochi):
+        for k in sdk:
+            if k.startswith("diffusion_model.") and k.endswith(".weight"): #Official Mochi lora format
+                key_lora = k[len("diffusion_model."):-len(".weight")]
+                key_map["{}".format(key_lora)] = k
+
     return key_map
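
Note (illustration, not part of the patch): the sketch below mimics the two changes in isolation, under the assumption that the official Mochi lora format stores weights under bare module paths with ".lora_A"/".lora_B" suffixes and no ".weight" suffix. The new load_lora branch treats "{x}.lora_B" as the up projection and "{x}.lora_A" as the down projection, and the new GenmoMochi block in model_lora_keys_unet maps the stripped model key to the full state-dict key. The module name "blocks.0.attn.qkv_x" is a hypothetical example; real Mochi module names may differ.

def mochi_key_map(model_state_dict_keys):
    # Mirrors the new GenmoMochi block in model_lora_keys_unet: strip the
    # "diffusion_model." prefix and the ".weight" suffix so the bare module
    # path becomes the lora key.
    key_map = {}
    for k in model_state_dict_keys:
        if k.startswith("diffusion_model.") and k.endswith(".weight"):
            key_lora = k[len("diffusion_model."):-len(".weight")]
            key_map[key_lora] = k
    return key_map

def pick_mochi_names(lora_keys, x):
    # Mirrors the new elif branch in load_lora: the up projection is stored
    # as "{x}.lora_B" and the down projection as "{x}.lora_A".
    mochi_lora = "{}.lora_B".format(x)
    if mochi_lora in lora_keys:
        return mochi_lora, "{}.lora_A".format(x)  # (A_name, B_name)
    return None, None

# Hypothetical module name, for illustration only.
x = "blocks.0.attn.qkv_x"
print(mochi_key_map(["diffusion_model.{}.weight".format(x)]))
# {'blocks.0.attn.qkv_x': 'diffusion_model.blocks.0.attn.qkv_x.weight'}
print(pick_mochi_names({"{}.lora_A".format(x), "{}.lora_B".format(x)}, x))
# ('blocks.0.attn.qkv_x.lora_B', 'blocks.0.attn.qkv_x.lora_A')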