about summary refs log tree commit diff stats
path: root/extensions-builtin/Lora/scripts
diff options
context:
space:
mode:
author AUTOMATIC <16777216c@gmail.com> 2023-01-28 17:04:35 +0000
committer AUTOMATIC <16777216c@gmail.com> 2023-01-29 07:45:46 +0000
commit 7cb31a278e8f27367792b66cdd3bcfba41093b32 (patch)
tree fa4f239d970ca0080c4cea790a5f372aa3b5ea72 /extensions-builtin/Lora/scripts
parent 91c8d0dcfc9a5d46aec47fd3eb34c633c104b5bb (diff)
parent 2abd89acc66419abf2eee9b03fd093f2737670de (diff)
download stable-diffusion-webui-gfx803-7cb31a278e8f27367792b66cdd3bcfba41093b32.tar.gz
stable-diffusion-webui-gfx803-7cb31a278e8f27367792b66cdd3bcfba41093b32.tar.bz2
stable-diffusion-webui-gfx803-7cb31a278e8f27367792b66cdd3bcfba41093b32.zip
initial work on SD2 Lora support
Diffstat (limited to 'extensions-builtin/Lora/scripts')
-rw-r--r-- extensions-builtin/Lora/scripts/lora_script.py 5
1 file changed, 5 insertions, 0 deletions
diff --git a/extensions-builtin/Lora/scripts/lora_script.py b/extensions-builtin/Lora/scripts/lora_script.py
index 2e860160..a385ae94 100644
--- a/extensions-builtin/Lora/scripts/lora_script.py
+++ b/extensions-builtin/Lora/scripts/lora_script.py
@@ -10,6 +10,7 @@ from modules import script_callbacks, ui_extra_networks, extra_networks, shared
def unload():
torch.nn.Linear.forward = torch.nn.Linear_forward_before_lora
torch.nn.Conv2d.forward = torch.nn.Conv2d_forward_before_lora
+ torch.nn.modules.linear.NonDynamicallyQuantizableLinear.forward = torch.nn.NonDynamicallyQuantizableLinear_forward_before_lora
def before_ui():
@@ -23,8 +24,12 @@ if not hasattr(torch.nn, 'Linear_forward_before_lora'):
if not hasattr(torch.nn, 'Conv2d_forward_before_lora'):
torch.nn.Conv2d_forward_before_lora = torch.nn.Conv2d.forward
+if not hasattr(torch.nn, 'NonDynamicallyQuantizableLinear_forward_before_lora'):
+ torch.nn.NonDynamicallyQuantizableLinear_forward_before_lora = torch.nn.modules.linear.NonDynamicallyQuantizableLinear.forward
+
torch.nn.Linear.forward = lora.lora_Linear_forward
torch.nn.Conv2d.forward = lora.lora_Conv2d_forward
+torch.nn.modules.linear.NonDynamicallyQuantizableLinear.forward = lora.lora_NonDynamicallyQuantizableLinear_forward
script_callbacks.on_model_loaded(lora.assign_lora_names_to_compvis_modules)
script_callbacks.on_script_unloaded(unload)