From 76ebb175ca996e93c063e7109c9f478a268952b6 Mon Sep 17 00:00:00 2001
From: AUTOMATIC1111 <16777216c@gmail.com>
Date: Thu, 13 Jul 2023 12:59:31 +0300
Subject: lora support

---
 extensions-builtin/Lora/lora.py | 5 +++++
 1 file changed, 5 insertions(+)

(limited to 'extensions-builtin/Lora/lora.py')

diff --git a/extensions-builtin/Lora/lora.py b/extensions-builtin/Lora/lora.py
index cd46e6c7..03f1ef85 100644
--- a/extensions-builtin/Lora/lora.py
+++ b/extensions-builtin/Lora/lora.py
@@ -179,6 +179,11 @@ def load_lora(name, lora_on_disk):
         if m:
             sd_module = shared.sd_model.lora_layer_mapping.get(m.group(1), None)

+        # SDXL loras seem to already have correct compvis keys, so only need to replace "lora_unet" with "diffusion_model"
+        if sd_module is None and "lora_unet" in key_diffusers_without_lora_parts:
+            key = key_diffusers_without_lora_parts.replace("lora_unet", "diffusion_model")
+            sd_module = shared.sd_model.lora_layer_mapping.get(key, None)
+
         if sd_module is None:
             keys_failed_to_match[key_diffusers] = key
             continue
--
cgit v1.2.1

From 6c5f83b19b331d51bde28c5033d13d0d64c11e54 Mon Sep 17 00:00:00 2001
From: AUTOMATIC1111 <16777216c@gmail.com>
Date: Thu, 13 Jul 2023 21:17:50 +0300
Subject: add support for SDXL loras with te1/te2 modules

---
 extensions-builtin/Lora/lora.py | 41 +++++++++++++++++++++++++++++++----------
 1 file changed, 31 insertions(+), 10 deletions(-)

(limited to 'extensions-builtin/Lora/lora.py')

diff --git a/extensions-builtin/Lora/lora.py b/extensions-builtin/Lora/lora.py
index 03f1ef85..4b5da7b5 100644
--- a/extensions-builtin/Lora/lora.py
+++ b/extensions-builtin/Lora/lora.py
@@ -68,6 +68,14 @@ def convert_diffusers_name_to_compvis(key, is_sd2):

         return f"transformer_text_model_encoder_layers_{m[0]}_{m[1]}"

+    if match(m, r"lora_te2_text_model_encoder_layers_(\d+)_(.+)"):
+        if 'mlp_fc1' in m[1]:
+            return f"1_model_transformer_resblocks_{m[0]}_{m[1].replace('mlp_fc1', 'mlp_c_fc')}"
+        elif 'mlp_fc2' in m[1]:
+            return f"1_model_transformer_resblocks_{m[0]}_{m[1].replace('mlp_fc2', 'mlp_c_proj')}"
+        else:
+            return f"1_model_transformer_resblocks_{m[0]}_{m[1].replace('self_attn', 'attn')}"
+
     return key


@@ -142,10 +150,20 @@ class LoraUpDownModule:
 def assign_lora_names_to_compvis_modules(sd_model):
     lora_layer_mapping = {}

-    for name, module in shared.sd_model.cond_stage_model.wrapped.named_modules():
-        lora_name = name.replace(".", "_")
-        lora_layer_mapping[lora_name] = module
-        module.lora_layer_name = lora_name
+    if shared.sd_model.is_sdxl:
+        for i, embedder in enumerate(shared.sd_model.conditioner.embedders):
+            if not hasattr(embedder, 'wrapped'):
+                continue
+
+            for name, module in embedder.wrapped.named_modules():
+                lora_name = f'{i}_{name.replace(".", "_")}'
+                lora_layer_mapping[lora_name] = module
+                module.lora_layer_name = lora_name
+    else:
+        for name, module in shared.sd_model.cond_stage_model.wrapped.named_modules():
+            lora_name = name.replace(".", "_")
+            lora_layer_mapping[lora_name] = module
+            module.lora_layer_name = lora_name

     for name, module in shared.sd_model.model.named_modules():
         lora_name = name.replace(".", "_")
@@ -168,10 +186,10 @@ def load_lora(name, lora_on_disk):

     keys_failed_to_match = {}
     is_sd2 = 'model_transformer_resblocks' in shared.sd_model.lora_layer_mapping

-    for key_diffusers, weight in sd.items():
-        key_diffusers_without_lora_parts, lora_key = key_diffusers.split(".", 1)
-        key = convert_diffusers_name_to_compvis(key_diffusers_without_lora_parts, is_sd2)
+    for key_lora, weight in sd.items():
+        key_lora_without_lora_parts, lora_key = key_lora.split(".", 1)
+        key = convert_diffusers_name_to_compvis(key_lora_without_lora_parts, is_sd2)

         sd_module = shared.sd_model.lora_layer_mapping.get(key, None)
         if sd_module is None:
@@ -180,12 +198,15 @@ def load_lora(name, lora_on_disk):
             sd_module = shared.sd_model.lora_layer_mapping.get(m.group(1), None)

         # SDXL loras seem to already have correct compvis keys, so only need to replace "lora_unet" with "diffusion_model"
-        if sd_module is None and "lora_unet" in key_diffusers_without_lora_parts:
-            key = key_diffusers_without_lora_parts.replace("lora_unet", "diffusion_model")
+        if sd_module is None and "lora_unet" in key_lora_without_lora_parts:
+            key = key_lora_without_lora_parts.replace("lora_unet", "diffusion_model")
+            sd_module = shared.sd_model.lora_layer_mapping.get(key, None)
+        elif sd_module is None and "lora_te1_text_model" in key_lora_without_lora_parts:
+            key = key_lora_without_lora_parts.replace("lora_te1_text_model", "0_transformer_text_model")
             sd_module = shared.sd_model.lora_layer_mapping.get(key, None)

         if sd_module is None:
-            keys_failed_to_match[key_diffusers] = key
+            keys_failed_to_match[key_lora] = key
             continue

         lora_module = lora.modules.get(key, None)
--
cgit v1.2.1

From dc3906185656dae75fcefe96625b1dcd0d31579c Mon Sep 17 00:00:00 2001
From: AUTOMATIC1111 <16777216c@gmail.com>
Date: Thu, 13 Jul 2023 21:19:41 +0300
Subject: thank you linter

---
 extensions-builtin/Lora/lora.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

(limited to 'extensions-builtin/Lora/lora.py')

diff --git a/extensions-builtin/Lora/lora.py b/extensions-builtin/Lora/lora.py
index 4b5da7b5..302490fb 100644
--- a/extensions-builtin/Lora/lora.py
+++ b/extensions-builtin/Lora/lora.py
@@ -229,9 +229,9 @@ def load_lora(name, lora_on_disk):
         elif type(sd_module) == torch.nn.Conv2d and weight.shape[2:] == (3, 3):
             module = torch.nn.Conv2d(weight.shape[1], weight.shape[0], (3, 3), bias=False)
         else:
-            print(f'Lora layer {key_diffusers} matched a layer with unsupported type: {type(sd_module).__name__}')
+            print(f'Lora layer {key_lora} matched a layer with unsupported type: {type(sd_module).__name__}')
             continue
-            raise AssertionError(f"Lora layer {key_diffusers} matched a layer with unsupported type: {type(sd_module).__name__}")
+            raise AssertionError(f"Lora layer {key_lora} matched a layer with unsupported type: {type(sd_module).__name__}")

         with torch.no_grad():
             module.weight.copy_(weight)
@@ -243,7 +243,7 @@ def load_lora(name, lora_on_disk):
         elif lora_key == "lora_down.weight":
             lora_module.down = module
         else:
-            raise AssertionError(f"Bad Lora layer name: {key_diffusers} - must end in lora_up.weight, lora_down.weight or alpha")
+            raise AssertionError(f"Bad Lora layer name: {key_lora} - must end in lora_up.weight, lora_down.weight or alpha")

     if keys_failed_to_match:
         print(f"Failed to match keys when loading Lora {lora_on_disk.filename}: {keys_failed_to_match}")
--
cgit v1.2.1
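The three commits above only change key-name plumbing: SDXL LoRA state-dict keys are translated into the names that assign_lora_names_to_compvis_modules records in lora_layer_mapping. As an illustration (not part of the patches), the sketch below condenses that translation into one standalone function; the helper name map_sdxl_lora_key and the sample keys are hypothetical, and the authoritative logic is convert_diffusers_name_to_compvis plus the fallback branches in load_lora shown in the diffs.

# Illustrative sketch only -- a simplified, standalone rendering of the key mapping
# added by these commits; map_sdxl_lora_key and the sample keys are not part of lora.py.
import re


def map_sdxl_lora_key(key_lora: str) -> str:
    """Translate an SDXL LoRA key (without the ".lora_up.weight"/".lora_down.weight" suffix)."""
    # second text encoder (te2): mirrors the new branch in convert_diffusers_name_to_compvis;
    # the "1_" prefix matches the enumerated embedder index used when building lora_layer_mapping
    m = re.match(r"lora_te2_text_model_encoder_layers_(\d+)_(.+)", key_lora)
    if m:
        suffix = (m.group(2)
                  .replace('mlp_fc1', 'mlp_c_fc')
                  .replace('mlp_fc2', 'mlp_c_proj')
                  .replace('self_attn', 'attn'))
        return f"1_model_transformer_resblocks_{m.group(1)}_{suffix}"

    # first text encoder (te1): the elif branch added in load_lora, embedder index "0_"
    if "lora_te1_text_model" in key_lora:
        return key_lora.replace("lora_te1_text_model", "0_transformer_text_model")

    # unet keys already use compvis naming, only the prefix differs
    if "lora_unet" in key_lora:
        return key_lora.replace("lora_unet", "diffusion_model")

    return key_lora


if __name__ == "__main__":
    print(map_sdxl_lora_key("lora_unet_output_blocks_0_1_transformer_blocks_0_attn1_to_q"))
    # diffusion_model_output_blocks_0_1_transformer_blocks_0_attn1_to_q
    print(map_sdxl_lora_key("lora_te2_text_model_encoder_layers_0_self_attn_q_proj"))
    # 1_model_transformer_resblocks_0_attn_q_proj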