diff options
author | unknown <mcgpapu@gmail.com> | 2023-01-28 09:40:51 +0000 |
---|---|---|
committer | unknown <mcgpapu@gmail.com> | 2023-01-28 09:40:51 +0000 |
commit | e79b7db4b47a33889551b9266ee3277879d4f560 (patch) | |
tree | 1c1944204e58e254bfea22ae44edccdbb54e6b3c /extensions-builtin/Lora/scripts/lora_script.py | |
parent | b921a52071cf2a5e551c31a6073af6eaebbf7847 (diff) | |
parent | e8a41df49fadd2cf9f23b1f02d75a4947bec5646 (diff) | |
download | stable-diffusion-webui-gfx803-e79b7db4b47a33889551b9266ee3277879d4f560.tar.gz stable-diffusion-webui-gfx803-e79b7db4b47a33889551b9266ee3277879d4f560.tar.bz2 stable-diffusion-webui-gfx803-e79b7db4b47a33889551b9266ee3277879d4f560.zip |
Merge branch 'master' of github.com:AUTOMATIC1111/stable-diffusion-webui into gamepad
Diffstat (limited to 'extensions-builtin/Lora/scripts/lora_script.py')
-rw-r--r-- | extensions-builtin/Lora/scripts/lora_script.py | 35 |
1 files changed, 35 insertions, 0 deletions
diff --git a/extensions-builtin/Lora/scripts/lora_script.py b/extensions-builtin/Lora/scripts/lora_script.py new file mode 100644 index 00000000..544b228d --- /dev/null +++ b/extensions-builtin/Lora/scripts/lora_script.py @@ -0,0 +1,35 @@ +import torch
+
+import lora
+import extra_networks_lora
+import ui_extra_networks_lora
+from modules import script_callbacks, ui_extra_networks, extra_networks, shared
+
+
+def unload():
+    # Undo the module-level monkey-patching done at import time further down
+    # in this file: restore the original torch.nn.Linear / torch.nn.Conv2d
+    # forward methods that were stashed on torch.nn before Lora hooked them.
+    torch.nn.Linear.forward = torch.nn.Linear_forward_before_lora
+    torch.nn.Conv2d.forward = torch.nn.Conv2d_forward_before_lora
+
+
+def before_ui():
+    # Registered via script_callbacks.on_before_ui below; runs before the UI
+    # is built. Adds the Lora tab to the extra-networks UI and registers the
+    # extra-network handler that applies <lora:...> prompt syntax.
+    ui_extra_networks.register_page(ui_extra_networks_lora.ExtraNetworksPageLora())
+    extra_networks.register_extra_network(extra_networks_lora.ExtraNetworkLora())
+
+
+# Stash the original forward methods on torch.nn before patching them.  The
+# hasattr guards make this idempotent: if this module is re-imported (e.g. on
+# script reload) the already-saved originals are kept, so the backup never
+# points at an already-patched forward.
+if not hasattr(torch.nn, 'Linear_forward_before_lora'):
+    torch.nn.Linear_forward_before_lora = torch.nn.Linear.forward
+
+if not hasattr(torch.nn, 'Conv2d_forward_before_lora'):
+    torch.nn.Conv2d_forward_before_lora = torch.nn.Conv2d.forward
+
+# Monkey-patch Linear/Conv2d so every forward pass goes through the Lora
+# wrappers; unload() above reverses this.
+torch.nn.Linear.forward = lora.lora_Linear_forward
+torch.nn.Conv2d.forward = lora.lora_Conv2d_forward
+
+# Wire up lifecycle callbacks: name Lora-targetable modules after each model
+# load, restore the patched forwards on script unload, and register the UI
+# pieces before the interface is built.
+script_callbacks.on_model_loaded(lora.assign_lora_names_to_compvis_modules)
+script_callbacks.on_script_unloaded(unload)
+script_callbacks.on_before_ui(before_ui)
+
+
+# Expose a settings toggle in the "Extra Networks" options section.
+shared.options_templates.update(shared.options_section(('extra_networks', "Extra Networks"), {
+    "lora_apply_to_outputs": shared.OptionInfo(False, "Apply Lora to outputs rather than inputs when possible (experimental)"),
+}))
|