 extensions-builtin/Lora/networks.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/extensions-builtin/Lora/networks.py b/extensions-builtin/Lora/networks.py
index 96d14344..22fdff4a 100644
--- a/extensions-builtin/Lora/networks.py
+++ b/extensions-builtin/Lora/networks.py
@@ -312,7 +312,7 @@ def network_apply_weights(self: Union[torch.nn.Conv2d, torch.nn.Linear, torch.nn
         self.network_weights_backup = weights_backup
 
     bias_backup = getattr(self, "network_bias_backup", None)
-    if bias_backup is None and getattr(self, 'bias', None) is not None:
+    if bias_backup is None:
         if isinstance(self, torch.nn.MultiheadAttention) and self.out_proj.bias is not None:
             bias_backup = self.out_proj.bias.to(devices.cpu, copy=True)
         elif getattr(self, 'bias', None) is not None:
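
The guard is narrowed because torch.nn.MultiheadAttention stores its output-projection bias on out_proj rather than on a top-level bias attribute, so the old "and getattr(self, 'bias', None) is not None" check short-circuited before the MultiheadAttention branch could back that bias up. The sketch below is not part of the patch; it only demonstrates the attribute layout, assuming a current PyTorch install.

# Minimal sketch (not from the patch): shows why the old guard skipped
# torch.nn.MultiheadAttention -- its bias lives on out_proj, so the module
# exposes no top-level .bias for getattr(self, 'bias', None) to find.
import torch

mha = torch.nn.MultiheadAttention(embed_dim=8, num_heads=2)
linear = torch.nn.Linear(8, 8)

print(getattr(mha, 'bias', None))                 # None -> old condition bailed out, no bias backup was taken
print(mha.out_proj.bias is not None)              # True -> the bias the MultiheadAttention branch backs up
print(getattr(linear, 'bias', None) is not None)  # True -> Linear/Conv2d were already handled by the elif branch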