author    | pieresimakp <69743585+pieresimakp@users.noreply.github.com> | 2023-03-27 03:53:15 +0000
committer | pieresimakp <69743585+pieresimakp@users.noreply.github.com> | 2023-03-27 03:53:15 +0000
commit    | 68a5604cac50c27dbacf9775c9466600dc8bf789 (patch)
tree      | e3cd120bb5321d2856ef0b51b244c3aa083e82c1 /extensions-builtin/Lora/lora.py
parent    | 774c691df8b99f5e24d3f41e451fe65f5b447952 (diff)
parent    | 3b5a3fab91ff797b28239cda5b449aac1f85ad00 (diff)
Merge remote-tracking branch 'upstream/master' into pr-dev
Diffstat (limited to 'extensions-builtin/Lora/lora.py')
-rw-r--r-- | extensions-builtin/Lora/lora.py | 5
1 file changed, 3 insertions(+), 2 deletions(-)
diff --git a/extensions-builtin/Lora/lora.py b/extensions-builtin/Lora/lora.py
index edd95f78..696be8ea 100644
--- a/extensions-builtin/Lora/lora.py
+++ b/extensions-builtin/Lora/lora.py
@@ -2,6 +2,7 @@ import glob
 import os
 import re
 import torch
+from typing import Union
 
 from modules import shared, devices, sd_models, errors
 
@@ -235,7 +236,7 @@ def lora_calc_updown(lora, module, target):
     return updown
 
 
-def lora_apply_weights(self: torch.nn.Conv2d | torch.nn.Linear | torch.nn.MultiheadAttention):
+def lora_apply_weights(self: Union[torch.nn.Conv2d, torch.nn.Linear, torch.nn.MultiheadAttention]):
     """
     Applies the currently selected set of Loras to the weights of torch layer self.
     If weights already have this particular set of loras applied, does nothing.
@@ -295,7 +296,7 @@ def lora_apply_weights(self: torch.nn.Conv2d | torch.nn.Linear | torch.nn.Multih
     setattr(self, "lora_current_names", wanted_names)
 
 
-def lora_reset_cached_weight(self: torch.nn.Conv2d | torch.nn.Linear):
+def lora_reset_cached_weight(self: Union[torch.nn.Conv2d, torch.nn.Linear]):
     setattr(self, "lora_current_names", ())
     setattr(self, "lora_weights_backup", None)
 
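Note on the change carried by this merge: in a module without "from __future__ import annotations", an annotation written with the PEP 604 union syntax (torch.nn.Conv2d | torch.nn.Linear) is evaluated when the function is defined and raises a TypeError on Python versions older than 3.10, while typing.Union works on older interpreters as well, which is presumably why upstream switched to it. Below is a minimal illustrative sketch of the difference; apply_to_layer is a hypothetical helper, not code from this repository.

# Illustrative sketch only; apply_to_layer is a hypothetical example helper.
from typing import Union

import torch


def apply_to_layer(layer: Union[torch.nn.Conv2d, torch.nn.Linear]) -> str:
    # Accepts either layer type. Works on Python 3.7+; writing the annotation
    # as torch.nn.Conv2d | torch.nn.Linear would instead raise
    # "TypeError: unsupported operand type(s) for |" before Python 3.10,
    # because the | operator on classes was only added by PEP 604.
    return type(layer).__name__


print(apply_to_layer(torch.nn.Linear(4, 4)))     # prints "Linear"
print(apply_to_layer(torch.nn.Conv2d(3, 8, 3)))  # prints "Conv2d"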