diff options
author | dongwenpu <dongwenpu@echoing.tech> | 2023-09-10 09:53:42 +0000 |
---|---|---|
committer | dongwenpu <dongwenpu@echoing.tech> | 2023-09-10 09:53:42 +0000 |
commit | 7d4d871d4679b5b78ff67b501da5367413542984 (patch) | |
tree | b8dffc034d6dae599fec0ff750ea61d240df0543 | |
parent | 924642331b13b73adbada3270a52cc5c7ae85ace (diff) | |
download | stable-diffusion-webui-gfx803-7d4d871d4679b5b78ff67b501da5367413542984.tar.gz stable-diffusion-webui-gfx803-7d4d871d4679b5b78ff67b501da5367413542984.tar.bz2 stable-diffusion-webui-gfx803-7d4d871d4679b5b78ff67b501da5367413542984.zip |
fix: lora-bias-backup doesn't reset cache
-rw-r--r-- | extensions-builtin/Lora/networks.py | 1 |
1 file changed, 1 insertion, 0 deletions
diff --git a/extensions-builtin/Lora/networks.py b/extensions-builtin/Lora/networks.py index 96f935b2..315682b3 100644 --- a/extensions-builtin/Lora/networks.py +++ b/extensions-builtin/Lora/networks.py @@ -418,6 +418,7 @@ def network_forward(module, input, original_forward): def network_reset_cached_weight(self: Union[torch.nn.Conv2d, torch.nn.Linear]):
self.network_current_names = ()
self.network_weights_backup = None
+ self.network_bias_backup = None
def network_Linear_forward(self, input):
|