Diffstat (limited to 'extensions-builtin/Lora/network.py')
-rw-r--r-- | extensions-builtin/Lora/network.py | 7 |
1 files changed, 5 insertions, 2 deletions
diff --git a/extensions-builtin/Lora/network.py b/extensions-builtin/Lora/network.py
index 0a18d69e..b7b89061 100644
--- a/extensions-builtin/Lora/network.py
+++ b/extensions-builtin/Lora/network.py
@@ -133,7 +133,7 @@ class NetworkModule:
 
         return 1.0
 
-    def finalize_updown(self, updown, orig_weight, output_shape):
+    def finalize_updown(self, updown, orig_weight, output_shape, ex_bias=None):
         if self.bias is not None:
             updown = updown.reshape(self.bias.shape)
             updown += self.bias.to(orig_weight.device, dtype=orig_weight.dtype)
@@ -145,7 +145,10 @@ class NetworkModule:
         if orig_weight.size().numel() == updown.size().numel():
             updown = updown.reshape(orig_weight.shape)
 
-        return updown * self.calc_scale() * self.multiplier()
+        if ex_bias is None:
+            ex_bias = 0
+
+        return updown * self.calc_scale() * self.multiplier(), ex_bias * self.multiplier()
 
     def calc_updown(self, target):
         raise NotImplementedError()
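For context, a minimal sketch of how a NetworkModule subclass might feed the new ex_bias parameter; ExampleModule, w_delta, and b_delta are illustrative names, not code from the repository. After this change finalize_updown returns a (updown, ex_bias) pair with the network multiplier applied to both, so call sites that previously expected a single tensor have to unpack two values.

import network  # the extension's local module defining NetworkModule


class ExampleModule(network.NetworkModule):
    def calc_updown(self, target):
        # Hypothetical precomputed deltas; real subclasses build updown from
        # their own up/down (or equivalent) weights.
        updown = self.w_delta.to(target.device, dtype=target.dtype)
        ex_bias = None
        if getattr(self, "b_delta", None) is not None:
            ex_bias = self.b_delta.to(target.device, dtype=target.dtype)

        output_shape = target.shape
        # finalize_updown now scales and returns both the weight delta and the
        # optional extra bias (0 when ex_bias is None).
        return self.finalize_updown(updown, target, output_shape, ex_bias)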