Diffstat (limited to 'extensions-builtin')
-rw-r--r--  extensions-builtin/Lora/lora.py                                                    92
-rw-r--r--  extensions-builtin/Lora/scripts/lora_script.py                                     15
-rw-r--r--  extensions-builtin/Lora/ui_extra_networks_lora.py                                  15
-rw-r--r--  extensions-builtin/prompt-bracket-checker/javascript/prompt-bracket-checker.js     15
4 files changed, 94 insertions, 43 deletions
diff --git a/extensions-builtin/Lora/lora.py b/extensions-builtin/Lora/lora.py
index b7e775ae..d4345ada 100644
--- a/extensions-builtin/Lora/lora.py
+++ b/extensions-builtin/Lora/lora.py
@@ -3,7 +3,9 @@ import os
 import re
 import torch

-from modules import shared, devices, sd_models
+from modules import shared, devices, sd_models, errors
+
+metadata_tags_order = {"ss_sd_model_name": 1, "ss_resolution": 2, "ss_clip_skip": 3, "ss_num_train_images": 10, "ss_tag_frequency": 20}

 re_digits = re.compile(r"\d+")
 re_unet_down_blocks = re.compile(r"lora_unet_down_blocks_(\d+)_attentions_(\d+)_(.+)")
@@ -51,6 +53,23 @@ class LoraOnDisk:
     def __init__(self, name, filename):
         self.name = name
         self.filename = filename
+        self.metadata = {}
+
+        _, ext = os.path.splitext(filename)
+        if ext.lower() == ".safetensors":
+            try:
+                self.metadata = sd_models.read_metadata_from_safetensors(filename)
+            except Exception as e:
+                errors.display(e, f"reading lora {filename}")
+
+        if self.metadata:
+            m = {}
+            for k, v in sorted(self.metadata.items(), key=lambda x: metadata_tags_order.get(x[0], 999)):
+                m[k] = v
+
+            self.metadata = m
+
+        self.ssmd_cover_images = self.metadata.pop('ssmd_cover_images', None)  # those are cover images and they are too big to display in UI as text


 class LoraModule:
@@ -125,7 +144,7 @@ def load_lora(name, filename):
         with torch.no_grad():
             module.weight.copy_(weight)

-        module.to(device=devices.device, dtype=devices.dtype)
+        module.to(device=devices.cpu, dtype=devices.dtype)

         if lora_key == "lora_up.weight":
             lora_module.up = module
@@ -171,28 +190,69 @@ def load_loras(names, multipliers=None):
         loaded_loras.append(lora)


-def lora_forward(module, input, res):
-    if len(loaded_loras) == 0:
-        return res
+def lora_apply_weights(self: torch.nn.Conv2d | torch.nn.Linear):
+    """
+    Applies the currently selected set of Loras to the weight of torch layer self.
+    If weights already have this particular set of loras applied, does nothing.
+    If not, restores original weights from backup and alters weights according to loras.
+    """
-    lora_layer_name = getattr(module, 'lora_layer_name', None)
-    for lora in loaded_loras:
-        module = lora.modules.get(lora_layer_name, None)
-        if module is not None:
-            if shared.opts.lora_apply_to_outputs and res.shape == input.shape:
-                res = res + module.up(module.down(res)) * lora.multiplier * (module.alpha / module.up.weight.shape[1] if module.alpha else 1.0)
-            else:
-                res = res + module.up(module.down(input)) * lora.multiplier * (module.alpha / module.up.weight.shape[1] if module.alpha else 1.0)
+    current_names = getattr(self, "lora_current_names", ())
+    wanted_names = tuple((x.name, x.multiplier) for x in loaded_loras)
+
+    weights_backup = getattr(self, "lora_weights_backup", None)
+    if weights_backup is None:
+        weights_backup = self.weight.to(devices.cpu, copy=True)
+        self.lora_weights_backup = weights_backup
+
+    if current_names != wanted_names:
+        if weights_backup is not None:
+            self.weight.copy_(weights_backup)
+
+        lora_layer_name = getattr(self, 'lora_layer_name', None)
+        for lora in loaded_loras:
+            module = lora.modules.get(lora_layer_name, None)
+            if module is None:
+                continue
+
+            with torch.no_grad():
+                up = module.up.weight.to(self.weight.device, dtype=self.weight.dtype)
+                down = module.down.weight.to(self.weight.device, dtype=self.weight.dtype)
-    return res
+                if up.shape[2:] == (1, 1) and down.shape[2:] == (1, 1):
+                    updown = (up.squeeze(2).squeeze(2) @ down.squeeze(2).squeeze(2)).unsqueeze(2).unsqueeze(3)
+                else:
+                    updown = up @ down
+
+                self.weight += updown * lora.multiplier * (module.alpha / module.up.weight.shape[1] if module.alpha else 1.0)
+
+    setattr(self, "lora_current_names", wanted_names)
 def lora_Linear_forward(self, input):
-    return lora_forward(self, input, torch.nn.Linear_forward_before_lora(self, input))
+    lora_apply_weights(self)
+
+    return torch.nn.Linear_forward_before_lora(self, input)
+
+
+def lora_Linear_load_state_dict(self: torch.nn.Linear, *args, **kwargs):
+    setattr(self, "lora_current_names", ())
+    setattr(self, "lora_weights_backup", None)
+
+    return torch.nn.Linear_load_state_dict_before_lora(self, *args, **kwargs)


 def lora_Conv2d_forward(self, input):
-    return lora_forward(self, input, torch.nn.Conv2d_forward_before_lora(self, input))
+    lora_apply_weights(self)
+
+    return torch.nn.Conv2d_forward_before_lora(self, input)
+
+
+def lora_Conv2d_load_state_dict(self: torch.nn.Conv2d, *args, **kwargs):
+    setattr(self, "lora_current_names", ())
+    setattr(self, "lora_weights_backup", None)
+
+    return torch.nn.Conv2d_load_state_dict_before_lora(self, *args, **kwargs)


 def lora_NonDynamicallyQuantizableLinear_forward(self, input):
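
For orientation, here is a minimal standalone sketch of the weight-merging idea that lora_apply_weights implements in the hunk above: the layer's original weight is backed up once, and the LoRA delta up @ down, scaled by the multiplier and alpha/rank, is added in place. The layer, tensor shapes and helper name below are illustrative assumptions, not the extension's API.

# Illustrative sketch only: W <- W0 + (up @ down) * multiplier * (alpha / rank)
import torch

def merge_lora_delta(weight, up, down, multiplier=1.0, alpha=None):
    # up: (out_features, rank), down: (rank, in_features); scale mirrors alpha / up.shape[1] in the diff
    scale = (alpha / up.shape[1]) if alpha else 1.0
    return weight + (up @ down) * multiplier * scale

layer = torch.nn.Linear(16, 32, bias=False)
backup = layer.weight.detach().to("cpu", copy=True)   # back up once so the merge can be undone

rank = 4
up = torch.randn(32, rank) * 0.01     # stand-ins for module.up.weight / module.down.weight
down = torch.randn(rank, 16) * 0.01

with torch.no_grad():
    layer.weight.copy_(merge_lora_delta(layer.weight, up, down, multiplier=0.8, alpha=rank))

# switching to a different Lora selection: restore the backup, then merge the new set
with torch.no_grad():
    layer.weight.copy_(backup.to(layer.weight.device))

Because the merge happens once per weight rather than on every forward pass, the per-step overhead of the old lora_forward hook disappears; the trade-off is that weights must be restored from backup whenever the selected Loras change.
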
diff --git a/extensions-builtin/Lora/scripts/lora_script.py b/extensions-builtin/Lora/scripts/lora_script.py
index a385ae94..dc329e81 100644
--- a/extensions-builtin/Lora/scripts/lora_script.py
+++ b/extensions-builtin/Lora/scripts/lora_script.py
@@ -9,8 +9,9 @@ from modules import script_callbacks, ui_extra_networks, extra_networks, shared
 def unload():
     torch.nn.Linear.forward = torch.nn.Linear_forward_before_lora
+    torch.nn.Linear._load_from_state_dict = torch.nn.Linear_load_state_dict_before_lora
     torch.nn.Conv2d.forward = torch.nn.Conv2d_forward_before_lora
-    torch.nn.modules.linear.NonDynamicallyQuantizableLinear.forward = torch.nn.NonDynamicallyQuantizableLinear_forward_before_lora
+    torch.nn.Conv2d._load_from_state_dict = torch.nn.Conv2d_load_state_dict_before_lora


 def before_ui():
@@ -21,15 +22,19 @@ def before_ui():
 if not hasattr(torch.nn, 'Linear_forward_before_lora'):
     torch.nn.Linear_forward_before_lora = torch.nn.Linear.forward

+if not hasattr(torch.nn, 'Linear_load_state_dict_before_lora'):
+    torch.nn.Linear_load_state_dict_before_lora = torch.nn.Linear._load_from_state_dict
+
 if not hasattr(torch.nn, 'Conv2d_forward_before_lora'):
     torch.nn.Conv2d_forward_before_lora = torch.nn.Conv2d.forward

-if not hasattr(torch.nn, 'NonDynamicallyQuantizableLinear_forward_before_lora'):
-    torch.nn.NonDynamicallyQuantizableLinear_forward_before_lora = torch.nn.modules.linear.NonDynamicallyQuantizableLinear.forward
+if not hasattr(torch.nn, 'Conv2d_load_state_dict_before_lora'):
+    torch.nn.Conv2d_load_state_dict_before_lora = torch.nn.Conv2d._load_from_state_dict

 torch.nn.Linear.forward = lora.lora_Linear_forward
+torch.nn.Linear._load_from_state_dict = lora.lora_Linear_load_state_dict
 torch.nn.Conv2d.forward = lora.lora_Conv2d_forward
-torch.nn.modules.linear.NonDynamicallyQuantizableLinear.forward = lora.lora_NonDynamicallyQuantizableLinear_forward
+torch.nn.Conv2d._load_from_state_dict = lora.lora_Conv2d_load_state_dict

 script_callbacks.on_model_loaded(lora.assign_lora_names_to_compvis_modules)
 script_callbacks.on_script_unloaded(unload)
@@ -38,6 +43,4 @@ script_callbacks.on_before_ui(before_ui)
 shared.options_templates.update(shared.options_section(('extra_networks', "Extra Networks"), {
     "sd_lora": shared.OptionInfo("None", "Add Lora to prompt", gr.Dropdown, lambda: {"choices": [""] + [x for x in lora.available_loras]}, refresh=lora.list_available_loras),
-    "lora_apply_to_outputs": shared.OptionInfo(False, "Apply Lora to outputs rather than inputs when possible (experimental)"),
-
 }))
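
The lora_script.py changes above follow a save-once / patch / restore-on-unload pattern for torch.nn methods: the original attribute is stashed under a sentinel name, replaced with the Lora-aware version, and put back in unload(). A small sketch of that pattern, using illustrative attribute names rather than the webui's own:

# Illustrative patch/restore sketch; attribute names are stand-ins, not the webui's.
import torch

def patched_linear_forward(self, input):
    # a hook could adjust self.weight here before delegating to the saved original
    return torch.nn.Linear_forward_original(self, input)

if not hasattr(torch.nn, 'Linear_forward_original'):      # save the original only once
    torch.nn.Linear_forward_original = torch.nn.Linear.forward

torch.nn.Linear.forward = patched_linear_forward

def unpatch():
    torch.nn.Linear.forward = torch.nn.Linear_forward_original   # what unload() does above

layer = torch.nn.Linear(4, 2)
print(layer(torch.randn(1, 4)).shape)   # torch.Size([1, 2]) -- patched forward still works
unpatch()

The hasattr guard matters because the script can be re-executed on UI reload; without it, a second run would save the already-patched function as the "original" and restoring would no longer undo the patch.
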
diff --git a/extensions-builtin/Lora/ui_extra_networks_lora.py b/extensions-builtin/Lora/ui_extra_networks_lora.py
index 54a80d36..68b11332 100644
--- a/extensions-builtin/Lora/ui_extra_networks_lora.py
+++ b/extensions-builtin/Lora/ui_extra_networks_lora.py
@@ -15,20 +15,15 @@ class ExtraNetworksPageLora(ui_extra_networks.ExtraNetworksPage):
     def list_items(self):
         for name, lora_on_disk in lora.available_loras.items():
             path, ext = os.path.splitext(lora_on_disk.filename)
-            previews = [path + ".png", path + ".preview.png"]
-
-            preview = None
-            for file in previews:
-                if os.path.isfile(file):
-                    preview = "./file=" + file.replace('\\', '/') + "?mtime=" + str(os.path.getmtime(file))
-                    break
-
             yield {
                 "name": name,
                 "filename": path,
-                "preview": preview,
+                "preview": self.find_preview(path),
+                "description": self.find_description(path),
+                "search_term": self.search_terms_from_path(lora_on_disk.filename),
                 "prompt": json.dumps(f"<lora:{name}:") + " + opts.extra_networks_default_multiplier + " + json.dumps(">"),
-                "local_preview": path + ".png",
+                "local_preview": f"{path}.{shared.opts.samples_format}",
+                "metadata": json.dumps(lora_on_disk.metadata, indent=4) if lora_on_disk.metadata else None,
             }

     def allowed_directories_for_previews(self):
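
The new "metadata" field surfaced to the UI above comes from LoraOnDisk, which re-orders the safetensors metadata by metadata_tags_order and drops the oversized embedded cover images before serialization. A small sketch of that ordering logic; the metadata dict below is made up, real values come from sd_models.read_metadata_from_safetensors():

# Illustrative sketch of the metadata ordering used for the Lora UI cards.
import json

metadata_tags_order = {"ss_sd_model_name": 1, "ss_resolution": 2, "ss_clip_skip": 3, "ss_num_train_images": 10, "ss_tag_frequency": 20}

metadata = {
    "ss_tag_frequency": {"concept": {"portrait": 42}},
    "ssmd_cover_images": "<large base64 blob, dropped before display>",
    "ss_sd_model_name": "v1-5-pruned.ckpt",
    "ss_resolution": "512x512",
}

ordered = dict(sorted(metadata.items(), key=lambda x: metadata_tags_order.get(x[0], 999)))  # known tags first, unknown ones last
ordered.pop('ssmd_cover_images', None)  # cover images are too big to show as text

print(json.dumps(ordered, indent=4))
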
diff --git a/extensions-builtin/prompt-bracket-checker/javascript/prompt-bracket-checker.js b/extensions-builtin/prompt-bracket-checker/javascript/prompt-bracket-checker.js
index 4a85c8eb..f0918e26 100644
--- a/extensions-builtin/prompt-bracket-checker/javascript/prompt-bracket-checker.js
+++ b/extensions-builtin/prompt-bracket-checker/javascript/prompt-bracket-checker.js
@@ -89,22 +89,15 @@ function checkBrackets(evt, textArea, counterElt) {
 function setupBracketChecking(id_prompt, id_counter){
     var textarea = gradioApp().querySelector("#" + id_prompt + " > label > textarea");
     var counter = gradioApp().getElementById(id_counter)
+
     textarea.addEventListener("input", function(evt){
         checkBrackets(evt, textarea, counter)
     });
 }

-var shadowRootLoaded = setInterval(function() {
-    var shadowRoot = document.querySelector('gradio-app').shadowRoot;
-    if(! shadowRoot) return false;
-
-    var shadowTextArea = shadowRoot.querySelectorAll('#txt2img_prompt > label > textarea');
-    if(shadowTextArea.length < 1) return false;
-
-    clearInterval(shadowRootLoaded);
-
+onUiLoaded(function(){
     setupBracketChecking('txt2img_prompt', 'txt2img_token_counter')
     setupBracketChecking('txt2img_neg_prompt', 'txt2img_negative_token_counter')
-    setupBracketChecking('img2img_prompt', 'imgimg_token_counter')
+    setupBracketChecking('img2img_prompt', 'img2img_token_counter')
     setupBracketChecking('img2img_neg_prompt', 'img2img_negative_token_counter')
-}, 1000);
+})
\ No newline at end of file