From f4b332f0419e09cec6983edcd07aae2ee0c14c24 Mon Sep 17 00:00:00 2001
From: missionfloyd
Date: Tue, 18 Apr 2023 17:01:46 -0600
Subject: Add "None" option to extra networks dropdowns

---
 extensions-builtin/Lora/extra_networks_lora.py | 2 +-
 extensions-builtin/Lora/scripts/lora_script.py | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

(limited to 'extensions-builtin/Lora')

diff --git a/extensions-builtin/Lora/extra_networks_lora.py b/extensions-builtin/Lora/extra_networks_lora.py
index 6be6ef73..45f899fc 100644
--- a/extensions-builtin/Lora/extra_networks_lora.py
+++ b/extensions-builtin/Lora/extra_networks_lora.py
@@ -8,7 +8,7 @@ class ExtraNetworkLora(extra_networks.ExtraNetwork):
 
     def activate(self, p, params_list):
         additional = shared.opts.sd_lora
 
-        if additional != "" and additional in lora.available_loras and len([x for x in params_list if x.items[0] == additional]) == 0:
+        if additional != "None" and additional in lora.available_loras and len([x for x in params_list if x.items[0] == additional]) == 0:
             p.all_prompts = [x + f"<lora:{additional}:{shared.opts.extra_networks_default_multiplier}>" for x in p.all_prompts]
             params_list.append(extra_networks.ExtraNetworkParams(items=[additional, shared.opts.extra_networks_default_multiplier]))
diff --git a/extensions-builtin/Lora/scripts/lora_script.py b/extensions-builtin/Lora/scripts/lora_script.py
index 0adab225..3fc38ab9 100644
--- a/extensions-builtin/Lora/scripts/lora_script.py
+++ b/extensions-builtin/Lora/scripts/lora_script.py
@@ -52,5 +52,5 @@ script_callbacks.on_before_ui(before_ui)
 
 
 shared.options_templates.update(shared.options_section(('extra_networks', "Extra Networks"), {
-    "sd_lora": shared.OptionInfo("None", "Add Lora to prompt", gr.Dropdown, lambda: {"choices": [""] + [x for x in lora.available_loras]}, refresh=lora.list_available_loras),
+    "sd_lora": shared.OptionInfo("None", "Add Lora to prompt", gr.Dropdown, lambda: {"choices": ["None"] + [x for x in lora.available_loras]}, refresh=lora.list_available_loras),
 }))
--
cgit v1.2.3


From f9253cee660a0c14b8e9e429158ef84fba3968a2 Mon Sep 17 00:00:00 2001
From: AUTOMATIC <16777216c@gmail.com>
Date: Sat, 29 Apr 2023 20:10:10 +0300
Subject: do not fail all Loras if some have failed to load when making a picture

---
 extensions-builtin/Lora/lora.py | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

(limited to 'extensions-builtin/Lora')

diff --git a/extensions-builtin/Lora/lora.py b/extensions-builtin/Lora/lora.py
index d3eb0d3b..6f246921 100644
--- a/extensions-builtin/Lora/lora.py
+++ b/extensions-builtin/Lora/lora.py
@@ -211,7 +211,11 @@ def load_loras(names, multipliers=None):
         lora_on_disk = loras_on_disk[i]
         if lora_on_disk is not None:
             if lora is None or os.path.getmtime(lora_on_disk.filename) > lora.mtime:
-                lora = load_lora(name, lora_on_disk.filename)
+                try:
+                    lora = load_lora(name, lora_on_disk.filename)
+                except Exception as e:
+                    errors.display(e, f"loading Lora {lora_on_disk.filename}")
+                    continue
 
         if lora is None:
             print(f"Couldn't find Lora with name {name}")
--
cgit v1.2.3


From c3eced22fc7b9da4fbb2f55f2d53a7e5e511cfbd Mon Sep 17 00:00:00 2001
From: Leo Mozoloa
Date: Thu, 4 May 2023 16:14:33 +0200
Subject: Fix some Lora's not working

---
 extensions-builtin/Lora/lora.py | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

(limited to 'extensions-builtin/Lora')

diff --git a/extensions-builtin/Lora/lora.py b/extensions-builtin/Lora/lora.py
index 6f246921..bcf36d77 100644
--- a/extensions-builtin/Lora/lora.py
+++ b/extensions-builtin/Lora/lora.py
@@ -165,8 +165,10 @@ def load_lora(name, filename):
             module = torch.nn.Linear(weight.shape[1], weight.shape[0], bias=False)
         elif type(sd_module) == torch.nn.MultiheadAttention:
             module = torch.nn.Linear(weight.shape[1], weight.shape[0], bias=False)
-        elif type(sd_module) == torch.nn.Conv2d:
+        elif type(sd_module) == torch.nn.Conv2d and weight.shape[2:] == (1, 1):
             module = torch.nn.Conv2d(weight.shape[1], weight.shape[0], (1, 1), bias=False)
+        elif type(sd_module) == torch.nn.Conv2d and weight.shape[2:] == (3, 3):
+            module = torch.nn.Conv2d(weight.shape[1], weight.shape[0], (3, 3), bias=False)
         else:
             print(f'Lora layer {key_diffusers} matched a layer with unsupported type: {type(sd_module).__name__}')
             continue
@@ -232,6 +234,8 @@ def lora_calc_updown(lora, module, target):
 
         if up.shape[2:] == (1, 1) and down.shape[2:] == (1, 1):
             updown = (up.squeeze(2).squeeze(2) @ down.squeeze(2).squeeze(2)).unsqueeze(2).unsqueeze(3)
+        elif up.shape[2:] == (3, 3) or down.shape[2:] == (3, 3):
+            updown = torch.nn.functional.conv2d(down.permute(1, 0, 2, 3), up).permute(1, 0, 2, 3)
         else:
             updown = up @ down
 
--
cgit v1.2.3
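
Illustrative sketch (not part of the patch series above): how LoRA up/down weights recombine into a weight delta for Linear, 1x1 Conv2d and 3x3 Conv2d layers, mirroring the lora_calc_updown branches added in the commit above. The function name, variable names and toy shapes below are made up for the example; only the tensor math follows the patch, and it assumes PyTorch is installed.

import torch

def calc_updown_sketch(up, down):
    # 1x1 convolutions behave like linear layers: squeeze the kernel dims and matmul.
    if up.dim() == 4 and up.shape[2:] == (1, 1) and down.shape[2:] == (1, 1):
        return (up.squeeze(2).squeeze(2) @ down.squeeze(2).squeeze(2)).unsqueeze(2).unsqueeze(3)
    # 3x3 convolutions: compose the two kernels by convolving one with the other.
    if up.dim() == 4 and (up.shape[2:] == (3, 3) or down.shape[2:] == (3, 3)):
        return torch.nn.functional.conv2d(down.permute(1, 0, 2, 3), up).permute(1, 0, 2, 3)
    # plain Linear weights: a simple low-rank product.
    return up @ down

# toy rank-4 LoRA over a Conv2d(8, 16, (3, 3)) layer
down = torch.randn(4, 8, 3, 3)   # (rank, in_channels, kH, kW)
up = torch.randn(16, 4, 1, 1)    # (out_channels, rank, 1, 1)
print(calc_updown_sketch(up, down).shape)  # torch.Size([16, 8, 3, 3])
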
From 2cb3b0be1def43e0d225b45a640592a7999a0d69 Mon Sep 17 00:00:00 2001
From: AUTOMATIC <16777216c@gmail.com>
Date: Sun, 7 May 2023 08:25:34 +0300
Subject: if present, use Lora's "ss_output_name" field to refer to it in prompt

---
 extensions-builtin/Lora/extra_networks_lora.py    |  1 +
 extensions-builtin/Lora/lora.py                   | 13 ++++++++++---
 extensions-builtin/Lora/ui_extra_networks_lora.py |  2 +-
 3 files changed, 12 insertions(+), 4 deletions(-)

(limited to 'extensions-builtin/Lora')

diff --git a/extensions-builtin/Lora/extra_networks_lora.py b/extensions-builtin/Lora/extra_networks_lora.py
index 45f899fc..ccb249ac 100644
--- a/extensions-builtin/Lora/extra_networks_lora.py
+++ b/extensions-builtin/Lora/extra_networks_lora.py
@@ -1,6 +1,7 @@
 from modules import extra_networks, shared
 import lora
 
+
 class ExtraNetworkLora(extra_networks.ExtraNetwork):
     def __init__(self):
         super().__init__('lora')
diff --git a/extensions-builtin/Lora/lora.py b/extensions-builtin/Lora/lora.py
index 6f246921..e3ca7fa2 100644
--- a/extensions-builtin/Lora/lora.py
+++ b/extensions-builtin/Lora/lora.py
@@ -93,6 +93,7 @@ class LoraOnDisk:
         self.metadata = m
 
         self.ssmd_cover_images = self.metadata.pop('ssmd_cover_images', None)  # those are cover images and they are too big to display in UI as text
+        self.alias = self.metadata.get('ss_output_name', self.name)
 
 
 class LoraModule:
@@ -199,11 +200,11 @@ def load_loras(names, multipliers=None):
 
     loaded_loras.clear()
 
-    loras_on_disk = [available_loras.get(name, None) for name in names]
+    loras_on_disk = [available_lora_aliases.get(name, None) for name in names]
     if any([x is None for x in loras_on_disk]):
         list_available_loras()
 
-        loras_on_disk = [available_loras.get(name, None) for name in names]
+        loras_on_disk = [available_lora_aliases.get(name, None) for name in names]
 
     for i, name in enumerate(names):
         lora = already_loaded.get(name, None)
@@ -343,6 +344,7 @@ def lora_MultiheadAttention_load_state_dict(self, *args, **kwargs):
 
 def list_available_loras():
     available_loras.clear()
+    available_lora_aliases.clear()
 
     os.makedirs(shared.cmd_opts.lora_dir, exist_ok=True)
 
@@ -356,11 +358,16 @@ def list_available_loras():
             continue
 
         name = os.path.splitext(os.path.basename(filename))[0]
+        entry = LoraOnDisk(name, filename)
 
-        available_loras[name] = LoraOnDisk(name, filename)
+        available_loras[name] = entry
+
+        available_lora_aliases[name] = entry
+        available_lora_aliases[entry.alias] = entry
 
 
 available_loras = {}
+available_lora_aliases = {}
 loaded_loras = []
 
 list_available_loras()
diff --git a/extensions-builtin/Lora/ui_extra_networks_lora.py b/extensions-builtin/Lora/ui_extra_networks_lora.py
index 68b11332..a0edbc1e 100644
--- a/extensions-builtin/Lora/ui_extra_networks_lora.py
+++ b/extensions-builtin/Lora/ui_extra_networks_lora.py
@@ -21,7 +21,7 @@ class ExtraNetworksPageLora(ui_extra_networks.ExtraNetworksPage):
                 "preview": self.find_preview(path),
                 "description": self.find_description(path),
                 "search_term": self.search_terms_from_path(lora_on_disk.filename),
-                "prompt": json.dumps(f"<lora:{name}:") + " + opts.extra_networks_default_multiplier + " + json.dumps(">"),
+                "prompt": json.dumps(f"<lora:{lora_on_disk.alias}:") + " + opts.extra_networks_default_multiplier + " + json.dumps(">"),
                 "local_preview": f"{path}.{shared.opts.samples_format}",
                 "metadata": json.dumps(lora_on_disk.metadata, indent=4) if lora_on_disk.metadata else None,
             }
--
cgit v1.2.3
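
Illustrative sketch (not part of the patches): the alias table introduced above simply maps both the file-derived name and the ss_output_name metadata value to the same on-disk record, so either string resolves the Lora. The LoraOnDiskStub class and the sample names below are hypothetical stand-ins for the real LoraOnDisk entries.

class LoraOnDiskStub:
    def __init__(self, name, metadata=None):
        self.name = name
        self.metadata = metadata or {}
        # fall back to the file-derived name when the training metadata has no ss_output_name
        self.alias = self.metadata.get('ss_output_name', self.name)

available_loras = {}
available_lora_aliases = {}

for name, metadata in [("myLora-v1-000008", {"ss_output_name": "myLora"}), ("plainLora", {})]:
    entry = LoraOnDiskStub(name, metadata)
    available_loras[name] = entry
    available_lora_aliases[name] = entry          # lookup by filename-derived name
    available_lora_aliases[entry.alias] = entry   # lookup by metadata alias

print(available_lora_aliases["myLora"] is available_lora_aliases["myLora-v1-000008"])  # True
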
From 2473bafa67b2dd0077f752bf23e4bf8f89990a8c Mon Sep 17 00:00:00 2001
From: AUTOMATIC <16777216c@gmail.com>
Date: Mon, 8 May 2023 07:28:30 +0300
Subject: read infotext params from the other extension for Lora if it's not active

---
 extensions-builtin/Lora/lora.py                | 36 +++++++++++++++++++++++++-
 extensions-builtin/Lora/scripts/lora_script.py |  1 +
 2 files changed, 36 insertions(+), 1 deletion(-)

(limited to 'extensions-builtin/Lora')

diff --git a/extensions-builtin/Lora/lora.py b/extensions-builtin/Lora/lora.py
index e3ca7fa2..94ec021b 100644
--- a/extensions-builtin/Lora/lora.py
+++ b/extensions-builtin/Lora/lora.py
@@ -4,7 +4,7 @@ import re
 import torch
 from typing import Union
 
-from modules import shared, devices, sd_models, errors
+from modules import shared, devices, sd_models, errors, scripts
 
 metadata_tags_order = {"ss_sd_model_name": 1, "ss_resolution": 2, "ss_clip_skip": 3, "ss_num_train_images": 10, "ss_tag_frequency": 20}
 
@@ -366,6 +366,40 @@ def list_available_loras():
         available_lora_aliases[entry.alias] = entry
 
 
+re_lora_name = re.compile(r"(.*)\s*\([0-9a-fA-F]+\)")
+
+
+def infotext_pasted(infotext, params):
+    if "AddNet Module 1" in [x[1] for x in scripts.scripts_txt2img.infotext_fields]:
+        return  # if the other extension is active, it will handle those fields, no need to do anything
+
+    added = []
+
+    for k, v in params.items():
+        if not k.startswith("AddNet Model "):
+            continue
+
+        num = k[13:]
+
+        if params.get("AddNet Module " + num) != "LoRA":
+            continue
+
+        name = params.get("AddNet Model " + num)
+        if name is None:
+            continue
+
+        m = re_lora_name.match(name)
+        if m:
+            name = m.group(1)
+
+        multiplier = params.get("AddNet Weight A " + num, "1.0")
+
+        added.append(f"<lora:{name}:{multiplier}>")
+
+    if added:
+        params["Prompt"] += "\n" + "".join(added)
+
+
 available_loras = {}
 available_lora_aliases = {}
 loaded_loras = []
diff --git a/extensions-builtin/Lora/scripts/lora_script.py b/extensions-builtin/Lora/scripts/lora_script.py
index 3fc38ab9..2f2267a2 100644
--- a/extensions-builtin/Lora/scripts/lora_script.py
+++ b/extensions-builtin/Lora/scripts/lora_script.py
@@ -49,6 +49,7 @@ torch.nn.MultiheadAttention._load_from_state_dict = lora.lora_MultiheadAttention
 script_callbacks.on_model_loaded(lora.assign_lora_names_to_compvis_modules)
 script_callbacks.on_script_unloaded(unload)
 script_callbacks.on_before_ui(before_ui)
+script_callbacks.on_infotext_pasted(lora.infotext_pasted)
 
 
 shared.options_templates.update(shared.options_section(('extra_networks', "Extra Networks"), {
--
cgit v1.2.3
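
Illustrative sketch (not part of the patches): how the infotext_pasted callback registered above converts additional-networks ("AddNet") infotext fields into <lora:name:multiplier> prompt tags. The helper name and the sample params dict are made up for the example; the parsing logic mirrors the patch.

import re

re_lora_name = re.compile(r"(.*)\s*\([0-9a-fA-F]+\)")

def addnet_params_to_lora_tags(params):
    added = []
    for k in params:
        if not k.startswith("AddNet Model "):
            continue
        num = k[len("AddNet Model "):]
        # only convert entries the other extension marked as LoRA modules
        if params.get("AddNet Module " + num) != "LoRA":
            continue
        name = params[k]
        m = re_lora_name.match(name)
        if m:
            name = m.group(1)  # drop the trailing "(hash)" suffix
        multiplier = params.get("AddNet Weight A " + num, "1.0")
        added.append(f"<lora:{name}:{multiplier}>")
    return added

params = {"AddNet Module 1": "LoRA", "AddNet Model 1": "myLora(a1b2c3d4)", "AddNet Weight A 1": "0.8"}
print(addnet_params_to_lora_tags(params))  # ['<lora:myLora:0.8>']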