author    AUTOMATIC <16777216c@gmail.com>  2023-05-10 08:05:02 +0000
committer AUTOMATIC <16777216c@gmail.com>  2023-05-10 08:05:02 +0000
commit    028d3f6425d85f122027c127fba8bcbf4f66ee75
tree      05e714f118aaa4d25920918807e0b83344133629 /extensions-builtin/Lora/lora.py
parent    e42de4b8a2356c6d286adb07292442d75e5595d3
ruff auto fixes
Diffstat (limited to 'extensions-builtin/Lora/lora.py')
 extensions-builtin/Lora/lora.py | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)
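Note: ruff is a Python linter whose autofix mode rewrites certain violations in place, which is all this commit does. Three patterns are touched in the hunks below, each followed here by a short illustrative sketch: "assert False, msg" becomes "raise AssertionError(msg)", a redundant list comprehension inside any() becomes a generator expression, and setattr() calls with constant attribute names become plain attribute assignments.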
diff --git a/extensions-builtin/Lora/lora.py b/extensions-builtin/Lora/lora.py
index 0ab43229..9795540f 100644
--- a/extensions-builtin/Lora/lora.py
+++ b/extensions-builtin/Lora/lora.py
@@ -172,7 +172,7 @@ def load_lora(name, filename):
         else:
             print(f'Lora layer {key_diffusers} matched a layer with unsupported type: {type(sd_module).__name__}')
             continue
-            assert False, f'Lora layer {key_diffusers} matched a layer with unsupported type: {type(sd_module).__name__}'
+            raise AssertionError(f"Lora layer {key_diffusers} matched a layer with unsupported type: {type(sd_module).__name__}")
 
         with torch.no_grad():
             module.weight.copy_(weight)
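Note: this hunk and the one below at line 184 are the same autofix, replacing "assert False" with an explicit raise (most likely the flake8-bugbear rule B011; the commit message only says "ruff auto fixes"). assert statements are stripped when Python runs with -O, so an "assert False" guard can silently vanish at runtime, while raise AssertionError(...) always fires. In this particular spot the statement is dead code either way, since it sits after continue; the autofix rewrites it mechanically. A minimal runnable sketch of the difference, with hypothetical function names:

# Sketch only: B011 as the rule code is an assumption.
# Under `python -O`, asserts are stripped, so check_assert() stops guarding;
# check_raise() raises regardless of optimization flags.
def check_assert(supported: bool):
    if not supported:
        assert False, "unsupported layer type"          # disappears under -O

def check_raise(supported: bool):
    if not supported:
        raise AssertionError("unsupported layer type")  # always raises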
@@ -184,7 +184,7 @@ def load_lora(name, filename):
         elif lora_key == "lora_down.weight":
             lora_module.down = module
         else:
-            assert False, f'Bad Lora layer name: {key_diffusers} - must end in lora_up.weight, lora_down.weight or alpha'
+            raise AssertionError(f"Bad Lora layer name: {key_diffusers} - must end in lora_up.weight, lora_down.weight or alpha")
 
     if len(keys_failed_to_match) > 0:
         print(f"Failed to match keys when loading Lora {filename}: {keys_failed_to_match}")
@@ -202,7 +202,7 @@ def load_loras(names, multipliers=None):
     loaded_loras.clear()
 
     loras_on_disk = [available_lora_aliases.get(name, None) for name in names]
-    if any([x is None for x in loras_on_disk]):
+    if any(x is None for x in loras_on_disk):
         list_available_loras()
 
         loras_on_disk = [available_lora_aliases.get(name, None) for name in names]
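Note: here the autofix drops a redundant list comprehension inside any(), plausibly ruff's flake8-comprehensions rule C419 (an inference, not stated in the commit). any() accepts any iterable, so the generator form avoids building an intermediate list and short-circuits at the first match. A self-contained sketch with made-up data:

# Sketch only: C419 as the rule code is an assumption.
loras_on_disk = ["a.safetensors", None, "b.safetensors"]

assert any([x is None for x in loras_on_disk])  # builds the full 3-item list first
assert any(x is None for x in loras_on_disk)    # lazily stops at the first None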
@@ -309,7 +309,7 @@ def lora_apply_weights(self: Union[torch.nn.Conv2d, torch.nn.Linear, torch.nn.MultiheadAttention]):
             print(f'failed to calculate lora weights for layer {lora_layer_name}')
 
-    setattr(self, "lora_current_names", wanted_names)
+    self.lora_current_names = wanted_names
 
 
 def lora_forward(module, input, original_forward):
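Note: the remaining hunks (this one and the last, in lora_reset_cached_weight) replace setattr() calls that use a string-literal attribute name with direct attribute assignment, matching flake8-bugbear's B010 (again an inference from the diff). setattr() is only needed when the attribute name is computed at runtime; with a constant name it is a slower, less readable spelling of obj.name = value. A sketch, with a stand-in class in place of the torch modules:

# Sketch only: B010 as the rule code is an assumption; FakeModule stands in
# for torch.nn.Linear / torch.nn.Conv2d.
class FakeModule:
    pass

m = FakeModule()
setattr(m, "lora_current_names", ("lora_a",))  # flagged: constant attribute name
m.lora_current_names = ("lora_a",)             # equivalent direct assignment

attr = "lora_weights_backup"                   # dynamic name: setattr still apt
setattr(m, attr, None)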
@@ -343,8 +343,8 @@
 
 
 def lora_reset_cached_weight(self: Union[torch.nn.Conv2d, torch.nn.Linear]):
-    setattr(self, "lora_current_names", ())
-    setattr(self, "lora_weights_backup", None)
+    self.lora_current_names = ()
+    self.lora_weights_backup = None
 
 
 def lora_Linear_forward(self, input):