From f0106cd48c1cf6ff7964b9c6ff06b109f906e66e Mon Sep 17 00:00:00 2001 From: "lzw478614@alibaba-inc.com" Date: Wed, 9 Jul 2025 14:01:49 +0800 Subject: [PATCH] support other lora format --- diffsynth/lora/flux_lora.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/diffsynth/lora/flux_lora.py b/diffsynth/lora/flux_lora.py index c6e5115..8d0372d 100644 --- a/diffsynth/lora/flux_lora.py +++ b/diffsynth/lora/flux_lora.py @@ -6,10 +6,6 @@ from diffsynth.models.lora import FluxLoRAFromCivitai class FluxLoRALoader(GeneralLoRALoader): def __init__(self, device="cpu", torch_dtype=torch.float32): super().__init__(device=device, torch_dtype=torch_dtype) - - def load(self, model: torch.nn.Module, state_dict_lora, alpha=1.0): - super().load(model, state_dict_lora, alpha) - self.diffusers_rename_dict = { "transformer.single_transformer_blocks.blockid.attn.to_k.lora_A.weight":"single_blocks.blockid.a_to_k.lora_A.default.weight",