path: root/extensions-builtin/Lora/network.py
author     Kohaku-Blueleaf <59680068+KohakuBlueleaf@users.noreply.github.com>  2023-08-12 18:27:39 +0000
committer  Kohaku-Blueleaf <59680068+KohakuBlueleaf@users.noreply.github.com>  2023-08-12 18:27:39 +0000
commit     bd4da4474bef5c9c1f690c62b971704ee73d2860 (patch)
tree       f3624d0521366fb23c4e7861ea1d0a04b43483e6 /extensions-builtin/Lora/network.py
parent     b2080756fcdc328292fc38998c06ccf23e53bd7e (diff)
Add extra norm module into built-in lora ext
Refer to LyCORIS 1.9.0.dev6: add a new option and module for training norm layers (which is reported to be good for style).
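For illustration, a norm-layer module in this extension's style could feed its bias delta through the new ex_bias path of finalize_updown. This is a minimal sketch only, assuming it lives alongside network.py inside the extension; the class name NetworkModuleNorm and the weight keys "w_norm"/"b_norm" are assumptions, not confirmed names from the commit.

import network  # assumes this file sits next to network.py in the extension

class NetworkModuleNorm(network.NetworkModule):  # hypothetical name, for illustration
    def __init__(self, net, weights):
        super().__init__(net, weights)
        # assumed keys for the trained norm-layer deltas
        self.w_norm = weights.w.get("w_norm")
        self.b_norm = weights.w.get("b_norm")

    def calc_updown(self, target):
        output_shape = self.w_norm.shape
        updown = self.w_norm.to(target.device, dtype=target.dtype)
        ex_bias = self.b_norm.to(target.device, dtype=target.dtype) if self.b_norm is not None else None
        # hand the bias delta to the extended finalize_updown so it gets scaled by the multiplier
        return self.finalize_updown(updown, target, output_shape, ex_bias)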
Diffstat (limited to 'extensions-builtin/Lora/network.py')
-rw-r--r--  extensions-builtin/Lora/network.py | 7 +++++--
1 file changed, 5 insertions(+), 2 deletions(-)
diff --git a/extensions-builtin/Lora/network.py b/extensions-builtin/Lora/network.py
index 0a18d69e..b7b89061 100644
--- a/extensions-builtin/Lora/network.py
+++ b/extensions-builtin/Lora/network.py
@@ -133,7 +133,7 @@ class NetworkModule:
         return 1.0
 
-    def finalize_updown(self, updown, orig_weight, output_shape):
+    def finalize_updown(self, updown, orig_weight, output_shape, ex_bias=None):
         if self.bias is not None:
             updown = updown.reshape(self.bias.shape)
             updown += self.bias.to(orig_weight.device, dtype=orig_weight.dtype)
@@ -145,7 +145,10 @@ class NetworkModule:
         if orig_weight.size().numel() == updown.size().numel():
             updown = updown.reshape(orig_weight.shape)
 
-        return updown * self.calc_scale() * self.multiplier()
+        if ex_bias is None:
+            ex_bias = 0
+
+        return updown * self.calc_scale() * self.multiplier(), ex_bias * self.multiplier()
 
     def calc_updown(self, target):
         raise NotImplementedError()
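On the caller side, the merge code now receives a pair instead of a single tensor. A minimal sketch of how such a caller might apply both parts to a layer, assuming calc_updown() returns the (updown, ex_bias) pair produced by the extended finalize_updown(); net_module and layer are placeholder names, not identifiers from the commit.

import torch

with torch.no_grad():
    updown, ex_bias = net_module.calc_updown(layer.weight)
    layer.weight += updown.to(layer.weight.device, dtype=layer.weight.dtype)
    # ex_bias is 0 when no bias delta was provided, a tensor otherwise
    if torch.is_tensor(ex_bias) and layer.bias is not None:
        layer.bias += ex_bias.to(layer.bias.device, dtype=layer.bias.dtype)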