author     AUTOMATIC <16777216c@gmail.com>    2023-01-25 08:29:46 +0000
committer  AUTOMATIC <16777216c@gmail.com>    2023-01-25 08:29:46 +0000
commit     1bfec873fa13d803f3d4ac2a12bf6983838233fe (patch)
tree       a51fc3389fd2165397ef408d1402322e57331a65
parent     48a15821de768fea76e66f26df83df3fddf18f4b (diff)
add an experimental option to apply loras to outputs rather than inputs
 extensions-builtin/Lora/lora.py                |  5 ++++-
 extensions-builtin/Lora/scripts/lora_script.py |  7 ++++++-
 2 files changed, 10 insertions(+), 2 deletions(-)
diff --git a/extensions-builtin/Lora/lora.py b/extensions-builtin/Lora/lora.py
index 137e58f7..cb8f1d36 100644
--- a/extensions-builtin/Lora/lora.py
+++ b/extensions-builtin/Lora/lora.py
@@ -166,7 +166,10 @@ def lora_forward(module, input, res):
     for lora in loaded_loras:
         module = lora.modules.get(lora_layer_name, None)
         if module is not None:
-            res = res + module.up(module.down(input)) * lora.multiplier * (module.alpha / module.up.weight.shape[1] if module.alpha else 1.0)
+            if shared.opts.lora_apply_to_outputs and res.shape == input.shape:
+                res = res + module.up(module.down(res)) * lora.multiplier * (module.alpha / module.up.weight.shape[1] if module.alpha else 1.0)
+            else:
+                res = res + module.up(module.down(input)) * lora.multiplier * (module.alpha / module.up.weight.shape[1] if module.alpha else 1.0)

     return res
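Note that the new branch can only fire when the layer's output has the same shape as its input (otherwise feeding res through module.down would fail), which is why the option is described as applying "when possible". Below is a minimal, self-contained sketch of the resulting logic, assuming torch; the LoraUpDownModule stand-in and the 320/4 dimensions are hypothetical, and only the lora_forward arithmetic mirrors the patch:

import torch
import torch.nn as nn

class LoraUpDownModule(nn.Module):
    # Simplified stand-in for the repository's per-layer LoRA module.
    def __init__(self, dim=320, rank=4, alpha=None):
        super().__init__()
        self.down = nn.Linear(dim, rank, bias=False)  # project to low rank
        self.up = nn.Linear(rank, dim, bias=False)    # project back up
        self.alpha = alpha

def lora_forward(module, input, res, multiplier=1.0, apply_to_outputs=False):
    # scale = alpha / rank, matching (module.alpha / module.up.weight.shape[1])
    scale = (module.alpha / module.up.weight.shape[1]) if module.alpha else 1.0
    if apply_to_outputs and res.shape == input.shape:
        # experimental path: the adapter sees the layer's *output*
        return res + module.up(module.down(res)) * multiplier * scale
    # default path: the adapter sees the layer's *input*
    return res + module.up(module.down(input)) * multiplier * scale

x = torch.randn(2, 320)
layer = nn.Linear(320, 320)  # square layer, so both paths are valid
lora = LoraUpDownModule()
y = lora_forward(lora, x, layer(x), apply_to_outputs=True)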
diff --git a/extensions-builtin/Lora/scripts/lora_script.py b/extensions-builtin/Lora/scripts/lora_script.py
index 60b9eb64..544b228d 100644
--- a/extensions-builtin/Lora/scripts/lora_script.py
+++ b/extensions-builtin/Lora/scripts/lora_script.py
@@ -3,7 +3,7 @@ import torch
 import lora
 import extra_networks_lora
 import ui_extra_networks_lora
-from modules import script_callbacks, ui_extra_networks, extra_networks
+from modules import script_callbacks, ui_extra_networks, extra_networks, shared


 def unload():
@@ -28,3 +28,8 @@ torch.nn.Conv2d.forward = lora.lora_Conv2d_forward
 script_callbacks.on_model_loaded(lora.assign_lora_names_to_compvis_modules)
 script_callbacks.on_script_unloaded(unload)
 script_callbacks.on_before_ui(before_ui)
+
+
+shared.options_templates.update(shared.options_section(('extra_networks', "Extra Networks"), {
+    "lora_apply_to_outputs": shared.OptionInfo(False, "Apply Lora to outputs rather than inputs when possible (experimental)"),
+}))
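For reference, the flag registered here surfaces in the settings UI under "Extra Networks" and is read back in lora.py as shared.opts.lora_apply_to_outputs (first hunk above). A rough stand-in for the template mechanics, assuming only what this call site shows; the real modules.shared implementation differs:

# Hypothetical stand-in for modules.shared, to illustrate the
# options_templates / options_section / OptionInfo pattern used above.
class OptionInfo:
    def __init__(self, default, label):
        self.default, self.label = default, label

options_templates = {}

def options_section(section_identifier, options_dict):
    # tag each option with its (id, title) section so the UI can group it
    for opt in options_dict.values():
        opt.section = section_identifier
    return options_dict

options_templates.update(options_section(('extra_networks', "Extra Networks"), {
    "lora_apply_to_outputs": OptionInfo(False, "Apply Lora to outputs rather than inputs when possible (experimental)"),
}))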