From a2a720267eebe1ede7a1c8fed3ae1832a7056d23 Mon Sep 17 00:00:00 2001
From: Artiprocher
Date: Wed, 2 Apr 2025 12:47:52 +0800
Subject: [PATCH] wan lora converter

---
 diffsynth/models/lora.py | 15 +++++++++++++++
 1 file changed, 15 insertions(+)

diff --git a/diffsynth/models/lora.py b/diffsynth/models/lora.py
index 7d4f52d..0a12b2d 100644
--- a/diffsynth/models/lora.py
+++ b/diffsynth/models/lora.py
@@ -366,6 +366,21 @@ class FluxLoRAConverter:
             state_dict_[name] = param
         return state_dict_
 
+
+class WanLoRAConverter:
+    def __init__(self):
+        pass
+
+    @staticmethod
+    def align_to_opensource_format(state_dict, **kwargs):
+        state_dict = {"diffusion_model." + name.replace(".default.", "."): param for name, param in state_dict.items()}
+        return state_dict
+
+    @staticmethod
+    def align_to_diffsynth_format(state_dict, **kwargs):
+        state_dict = {name.replace("diffusion_model.", "").replace(".lora_A.weight", ".lora_A.default.weight").replace(".lora_B.weight", ".lora_B.default.weight"): param for name, param in state_dict.items()}
+        return state_dict
+
 
 def get_lora_loaders():
     return [SDLoRAFromCivitai(), SDXLLoRAFromCivitai(), FluxLoRAFromCivitai(), HunyuanVideoLoRAFromCivitai(), GeneralLoRAFromPeft()]