author      camenduru <54370274+camenduru@users.noreply.github.com>    2023-03-27 01:28:40 +0000
committer   GitHub <noreply@github.com>                                2023-03-27 01:28:40 +0000
commit      9d7390d2d19a8baf04ee4ebe598b96ac6ba7f97e (patch)
tree        fcd25090fbe6261389404c85b67a390f18072ff6
parent      4c1ad743e3baf1246db0711aa0107debf036a12b (diff)
download    stable-diffusion-webui-gfx803-9d7390d2d19a8baf04ee4ebe598b96ac6ba7f97e.tar.gz
            stable-diffusion-webui-gfx803-9d7390d2d19a8baf04ee4ebe598b96ac6ba7f97e.tar.bz2
            stable-diffusion-webui-gfx803-9d7390d2d19a8baf04ee4ebe598b96ac6ba7f97e.zip
convert to python v3.9
-rw-r--r--   extensions-builtin/Lora/lora.py   3
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/extensions-builtin/Lora/lora.py b/extensions-builtin/Lora/lora.py
index edd95f78..79d11e0e 100644
--- a/extensions-builtin/Lora/lora.py
+++ b/extensions-builtin/Lora/lora.py
@@ -2,6 +2,7 @@ import glob
 import os
 import re
 import torch
+from typing import Union
 
 from modules import shared, devices, sd_models, errors
 
@@ -235,7 +236,7 @@ def lora_calc_updown(lora, module, target):
     return updown
 
 
-def lora_apply_weights(self: torch.nn.Conv2d | torch.nn.Linear | torch.nn.MultiheadAttention):
+def lora_apply_weights(self: Union[torch.nn.Conv2d, torch.nn.Linear, torch.nn.MultiheadAttention]):
     """
     Applies the currently selected set of Loras to the weights of torch layer self.
     If weights already have this particular set of loras applied, does nothing.
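
Background note (not part of the original commit): the pipe union syntax in annotations, e.g. torch.nn.Conv2d | torch.nn.Linear, comes from PEP 604 and only evaluates at runtime on Python 3.10+, so the commit rewrites the hint with typing.Union, which also works on Python 3.9. A minimal sketch of the two spellings; the function name apply_lora is hypothetical and used only for illustration:

from typing import Union

import torch


# Python 3.10+ only (PEP 604). On 3.9 this raises
# "TypeError: unsupported operand type(s) for |" as soon as the module is imported,
# unless `from __future__ import annotations` defers evaluation of annotations.
# def apply_lora(layer: torch.nn.Conv2d | torch.nn.Linear): ...

# Python 3.9-compatible equivalent using typing.Union:
def apply_lora(layer: Union[torch.nn.Conv2d, torch.nn.Linear, torch.nn.MultiheadAttention]) -> None:
    # Placeholder body; the real lora_apply_weights applies the selected Loras
    # to the layer's weights in place.
    pass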