diff options
author | AUTOMATIC1111 <16777216c@gmail.com> | 2023-08-13 05:28:48 +0000 |
---|---|---|
committer | GitHub <noreply@github.com> | 2023-08-13 05:28:48 +0000 |
commit | da80d649fd6a6083be02aca5695367bd25abf0d5 (patch) | |
tree | cfb85edce888c9d3092a72ae62340c03afb915ce /extensions-builtin/Lora/network.py | |
parent | 61673451ff2b6ea39c8b9591b4a14d7f19a32e63 (diff) | |
parent | 5881dcb8873b3f87b9c6545e9cb8d1d77023f4fe (diff) | |
download | stable-diffusion-webui-gfx803-da80d649fd6a6083be02aca5695367bd25abf0d5.tar.gz stable-diffusion-webui-gfx803-da80d649fd6a6083be02aca5695367bd25abf0d5.tar.bz2 stable-diffusion-webui-gfx803-da80d649fd6a6083be02aca5695367bd25abf0d5.zip |
Merge pull request #12503 from AUTOMATIC1111/extra-norm-module
Add Norm Module to lora ext and add "bias" support
Diffstat (limited to 'extensions-builtin/Lora/network.py')
-rw-r--r-- | extensions-builtin/Lora/network.py | 7 |
1 file changed, 5 insertions, 2 deletions
diff --git a/extensions-builtin/Lora/network.py b/extensions-builtin/Lora/network.py index 0a18d69e..d8e8dfb7 100644 --- a/extensions-builtin/Lora/network.py +++ b/extensions-builtin/Lora/network.py @@ -133,7 +133,7 @@ class NetworkModule: return 1.0
- def finalize_updown(self, updown, orig_weight, output_shape):
+ def finalize_updown(self, updown, orig_weight, output_shape, ex_bias=None):
if self.bias is not None:
updown = updown.reshape(self.bias.shape)
updown += self.bias.to(orig_weight.device, dtype=orig_weight.dtype)
@@ -145,7 +145,10 @@ class NetworkModule: if orig_weight.size().numel() == updown.size().numel():
updown = updown.reshape(orig_weight.shape)
- return updown * self.calc_scale() * self.multiplier()
+ if ex_bias is not None:
+ ex_bias = ex_bias * self.multiplier()
+
+ return updown * self.calc_scale() * self.multiplier(), ex_bias
def calc_updown(self, target):
raise NotImplementedError()
|