author     CodeHatchling <steve@codehatch.com>    2023-12-03 04:14:02 +0000
committer  CodeHatchling <steve@codehatch.com>    2023-12-03 04:14:02 +0000
commit     3bd3a091604a332de6ff249870dabd2a91215499 (patch)
tree       0323625627748ee44fc192bb2496585a4db56b5a /extensions-builtin/Lora/network_glora.py
parent     bb04d400c95df01d191ef6c1a43e66b95425fa33 (diff)
parent     f0f100e67b78f686dc73cf3c8cad422e45cc9b8a (diff)
Merge remote-tracking branch 'origin/dev' into soft-inpainting
# Conflicts:
# modules/processing.py
Diffstat (limited to 'extensions-builtin/Lora/network_glora.py')
-rw-r--r--   extensions-builtin/Lora/network_glora.py   33
1 file changed, 33 insertions, 0 deletions
diff --git a/extensions-builtin/Lora/network_glora.py b/extensions-builtin/Lora/network_glora.py
new file mode 100644
index 00000000..492d4870
--- /dev/null
+++ b/extensions-builtin/Lora/network_glora.py
@@ -0,0 +1,33 @@
+
+import network
+
+class ModuleTypeGLora(network.ModuleType):
+    def create_module(self, net: network.Network, weights: network.NetworkWeights):
+        if all(x in weights.w for x in ["a1.weight", "a2.weight", "alpha", "b1.weight", "b2.weight"]):
+            return NetworkModuleGLora(net, weights)
+
+        return None
+
+# adapted from https://github.com/KohakuBlueleaf/LyCORIS
+class NetworkModuleGLora(network.NetworkModule):
+    def __init__(self, net: network.Network, weights: network.NetworkWeights):
+        super().__init__(net, weights)
+
+        if hasattr(self.sd_module, 'weight'):
+            self.shape = self.sd_module.weight.shape
+
+        self.w1a = weights.w["a1.weight"]
+        self.w1b = weights.w["b1.weight"]
+        self.w2a = weights.w["a2.weight"]
+        self.w2b = weights.w["b2.weight"]
+
+    def calc_updown(self, orig_weight):
+        w1a = self.w1a.to(orig_weight.device, dtype=orig_weight.dtype)
+        w1b = self.w1b.to(orig_weight.device, dtype=orig_weight.dtype)
+        w2a = self.w2a.to(orig_weight.device, dtype=orig_weight.dtype)
+        w2b = self.w2b.to(orig_weight.device, dtype=orig_weight.dtype)
+
+        output_shape = [w1a.size(0), w1b.size(1)]
+        updown = ((w2b @ w1b) + ((orig_weight @ w2a) @ w1a))
+
+        return self.finalize_updown(updown, orig_weight, output_shape)
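For context, the weight delta that calc_updown returns can be exercised on its own with plain PyTorch. The following is a minimal illustrative sketch, not part of the commit: the sizes (dim, rank) and the square-layer assumption are hypothetical and chosen only so the matrix products line up; in the extension the real shapes come from the loaded network weights and the patched module.

import torch

# Minimal sketch of the GLoRA update expression used in calc_updown above.
# All sizes here are hypothetical (square layer, rank 4) and exist only to
# make the matrix products well-formed.
dim, rank = 320, 4

orig_weight = torch.randn(dim, dim)   # original module weight W
w1a = torch.randn(rank, dim)          # corresponds to "a1.weight"
w1b = torch.randn(rank, dim)          # corresponds to "b1.weight"
w2a = torch.randn(dim, rank)          # corresponds to "a2.weight"
w2b = torch.randn(dim, rank)          # corresponds to "b2.weight"

# Same expression as calc_updown: a low-rank additive term (w2b @ w1b) plus a
# low-rank term derived from the original weight ((W @ w2a) @ w1a).
updown = (w2b @ w1b) + ((orig_weight @ w2a) @ w1a)

patched = orig_weight + updown        # weight after the network is applied
print(updown.shape, patched.shape)    # both torch.Size([320, 320])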