From 904121fecc0a1f11db76a73ca8649fb21e05ac5b Mon Sep 17 00:00:00 2001
From: Nandaka
Date: Thu, 24 Nov 2022 02:39:09 +0000
Subject: Support NAI exif for PNG Info

---
 modules/extras.py | 14 ++++++++++++++
 1 file changed, 14 insertions(+)
(limited to 'modules/extras.py')

diff --git a/modules/extras.py b/modules/extras.py
index 71b93a06..af4cd97d 100644
--- a/modules/extras.py
+++ b/modules/extras.py
@@ -233,6 +233,20 @@ def run_pnginfo(image):
 
     geninfo = items.get('parameters', geninfo)
 
+    # nai prompt
+    if "Software" in items.keys() and items["Software"] == "NovelAI":
+        import json
+        json_info = json.loads(items["Comment"])
+        geninfo = f'{items["Description"]}\r\nNegative prompt: {json_info["uc"]}\r\n'
+        sampler = "Euler a"
+        if json_info["sampler"] == "k_euler_ancestral":
+            sampler = "Euler a"
+        elif json_info["sampler"] == "k_euler":
+            sampler = "Euler"
+        model_hash = '925997e9'  # assuming this is the correct model hash
+        # not sure with noise and strength parameter
+        geninfo += f'Steps: {json_info["steps"]}, Sampler: {sampler}, CFG scale: {json_info["scale"]}, Seed: {json_info["seed"]}, Size: {image.width}x{image.height}, Model hash: {model_hash}'  # , Denoising strength: {json_info["noise"]}'
+
     info = ''
     for key, text in items.items():
         info += f"""
--
cgit v1.2.3
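For reference, NovelAI images carry their settings in PNG text chunks rather than in the webui's 'parameters' chunk, which is why the branch above keys off Software == "NovelAI". A minimal standalone sketch of the same parsing, assuming only the Software/Description/Comment keys used in the patch; the function name and the returned field names are illustrative, not part of the webui:

    import json
    from PIL import Image

    def parse_nai_pnginfo(path):
        # PNG tEXt chunks appear as plain strings in Image.info
        items = Image.open(path).info
        if items.get("Software") != "NovelAI":
            return None
        comment = json.loads(items["Comment"])  # JSON payload: uc, sampler, steps, scale, seed, ...
        return {
            "prompt": items["Description"],
            "negative_prompt": comment["uc"],
            "sampler": comment["sampler"],
            "steps": comment["steps"],
            "cfg_scale": comment["scale"],
            "seed": comment["seed"],
        }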
From dac9b6f15de5e675053d9490a20e0457dcd1a23e Mon Sep 17 00:00:00 2001
From: AUTOMATIC <16777216c@gmail.com>
Date: Sun, 27 Nov 2022 15:51:29 +0300
Subject: add safetensors support for model merging #4869

---
 modules/extras.py    | 26 ++++++++++++++------------
 modules/sd_models.py | 26 +++++++++++++++-----------
 modules/ui.py        |  7 ++++++-
 3 files changed, 35 insertions(+), 24 deletions(-)
(limited to 'modules/extras.py')

diff --git a/modules/extras.py b/modules/extras.py
index 71b93a06..3d65d90a 100644
--- a/modules/extras.py
+++ b/modules/extras.py
@@ -20,6 +20,7 @@ import modules.codeformer_model
 import piexif
 import piexif.helper
 import gradio as gr
+import safetensors.torch
 
 
 class LruCache(OrderedDict):
@@ -249,7 +250,7 @@ def run_pnginfo(image):
     return '', geninfo, info
 
 
-def run_modelmerger(primary_model_name, secondary_model_name, teritary_model_name, interp_method, multiplier, save_as_half, custom_name):
+def run_modelmerger(primary_model_name, secondary_model_name, teritary_model_name, interp_method, multiplier, save_as_half, custom_name, checkpoint_format):
     def weighted_sum(theta0, theta1, alpha):
         return ((1 - alpha) * theta0) + (alpha * theta1)
 
@@ -264,19 +265,15 @@ def run_modelmerger(primary_model_name, secondary_model_name, teritary_model_nam
     teritary_model_info = sd_models.checkpoints_list.get(teritary_model_name, None)
 
     print(f"Loading {primary_model_info.filename}...")
-    primary_model = torch.load(primary_model_info.filename, map_location='cpu')
-    theta_0 = sd_models.get_state_dict_from_checkpoint(primary_model)
+    theta_0 = sd_models.read_state_dict(primary_model_info.filename, map_location='cpu')
 
     print(f"Loading {secondary_model_info.filename}...")
-    secondary_model = torch.load(secondary_model_info.filename, map_location='cpu')
-    theta_1 = sd_models.get_state_dict_from_checkpoint(secondary_model)
+    theta_1 = sd_models.read_state_dict(secondary_model_info.filename, map_location='cpu')
 
     if teritary_model_info is not None:
         print(f"Loading {teritary_model_info.filename}...")
-        teritary_model = torch.load(teritary_model_info.filename, map_location='cpu')
-        theta_2 = sd_models.get_state_dict_from_checkpoint(teritary_model)
+        theta_2 = sd_models.read_state_dict(teritary_model_info.filename, map_location='cpu')
     else:
-        teritary_model = None
         theta_2 = None
 
     theta_funcs = {
@@ -295,7 +292,7 @@ def run_modelmerger(primary_model_name, secondary_model_name, teritary_model_nam
                     theta_1[key] = theta_func1(theta_1[key], t2)
                 else:
                     theta_1[key] = torch.zeros_like(theta_1[key])
-        del theta_2, teritary_model
+        del theta_2
 
     for key in tqdm.tqdm(theta_0.keys()):
         if 'model' in key and key in theta_1:
@@ -314,12 +311,17 @@ def run_modelmerger(primary_model_name, secondary_model_name, teritary_model_nam
 
     ckpt_dir = shared.cmd_opts.ckpt_dir or sd_models.model_path
 
-    filename = primary_model_info.model_name + '_' + str(round(1-multiplier, 2)) + '-' + secondary_model_info.model_name + '_' + str(round(multiplier, 2)) + '-' + interp_method.replace(" ", "_") + '-merged.ckpt'
-    filename = filename if custom_name == '' else (custom_name + '.ckpt')
+    filename = primary_model_info.model_name + '_' + str(round(1-multiplier, 2)) + '-' + secondary_model_info.model_name + '_' + str(round(multiplier, 2)) + '-' + interp_method.replace(" ", "_") + '-merged.' + checkpoint_format
+    filename = filename if custom_name == '' else (custom_name + '.' + checkpoint_format)
+
     output_modelname = os.path.join(ckpt_dir, filename)
 
     print(f"Saving to {output_modelname}...")
-    torch.save(primary_model, output_modelname)
+
+    _, extension = os.path.splitext(output_modelname)
+    if extension.lower() == ".safetensors":
+        safetensors.torch.save_file(theta_0, output_modelname, metadata={"format": "pt"})
+    else:
+        torch.save(theta_0, output_modelname)
 
     sd_models.list_models()
diff --git a/modules/sd_models.py b/modules/sd_models.py
index 77236480..a1ea5611 100644
--- a/modules/sd_models.py
+++ b/modules/sd_models.py
@@ -160,6 +160,20 @@ def get_state_dict_from_checkpoint(pl_sd):
     return pl_sd
 
 
+def read_state_dict(checkpoint_file, print_global_state=False, map_location=None):
+    _, extension = os.path.splitext(checkpoint_file)
+    if extension.lower() == ".safetensors":
+        pl_sd = safetensors.torch.load_file(checkpoint_file, device=map_location or shared.weight_load_location)
+    else:
+        pl_sd = torch.load(checkpoint_file, map_location=map_location or shared.weight_load_location)
+
+    if print_global_state and "global_step" in pl_sd:
+        print(f"Global Step: {pl_sd['global_step']}")
+
+    sd = get_state_dict_from_checkpoint(pl_sd)
+    return sd
+
+
 def load_model_weights(model, checkpoint_info, vae_file="auto"):
     checkpoint_file = checkpoint_info.filename
     sd_model_hash = checkpoint_info.hash
@@ -174,17 +188,7 @@ def load_model_weights(model, checkpoint_info, vae_file="auto"):
         # load from file
         print(f"Loading weights [{sd_model_hash}] from {checkpoint_file}")
 
-        _, extension = os.path.splitext(checkpoint_file)
-        if extension.lower() == ".safetensors":
-            pl_sd = safetensors.torch.load_file(checkpoint_file, device=shared.weight_load_location)
-        else:
-            pl_sd = torch.load(checkpoint_file, map_location=shared.weight_load_location)
-
-        if "global_step" in pl_sd:
-            print(f"Global Step: {pl_sd['global_step']}")
-
-        sd = get_state_dict_from_checkpoint(pl_sd)
-        del pl_sd
+        sd = read_state_dict(checkpoint_file)
 
         model.load_state_dict(sd, strict=False)
         del sd
diff --git a/modules/ui.py b/modules/ui.py
index de2b5544..aa13978d 100644
--- a/modules/ui.py
+++ b/modules/ui.py
@@ -1164,7 +1164,11 @@ def create_ui(wrap_gradio_gpu_call):
                 custom_name = gr.Textbox(label="Custom Name (Optional)")
                 interp_amount = gr.Slider(minimum=0.0, maximum=1.0, step=0.05, label='Multiplier (M) - set to 0 to get model A', value=0.3)
                 interp_method = gr.Radio(choices=["Weighted sum", "Add difference"], value="Weighted sum", label="Interpolation Method")
-                save_as_half = gr.Checkbox(value=False, label="Save as float16")
+
+                with gr.Row():
+                    checkpoint_format = gr.Radio(choices=["ckpt", "safetensors"], value="ckpt", label="Checkpoint format")
+                    save_as_half = gr.Checkbox(value=False, label="Save as float16")
+
                 modelmerger_merge = gr.Button(elem_id="modelmerger_merge", label="Merge", variant='primary')
 
         with gr.Column(variant='panel'):
@@ -1692,6 +1696,7 @@ def create_ui(wrap_gradio_gpu_call):
                 interp_amount,
                 save_as_half,
                 custom_name,
+                checkpoint_format,
             ],
             outputs=[
                 submit_result,
--
cgit v1.2.3
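The dispatch above is purely extension-based, and the switch from saving primary_model to saving theta_0 is forced by the format: safetensors serializes only a flat dict of named tensors, not an arbitrary pickled checkpoint object. A condensed sketch of the same load/save pair under that assumption (the function names here are illustrative, not part of the webui API):

    import os
    import torch
    import safetensors.torch

    def save_state_dict(sd, path):
        # safetensors stores a flat {name: tensor} mapping; metadata is free-form strings
        if os.path.splitext(path)[1].lower() == ".safetensors":
            safetensors.torch.save_file(sd, path, metadata={"format": "pt"})
        else:
            torch.save(sd, path)

    def load_state_dict(path, device="cpu"):
        if os.path.splitext(path)[1].lower() == ".safetensors":
            return safetensors.torch.load_file(path, device=device)
        return torch.load(path, map_location=device)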
label="Interpolation Method") - save_as_half = gr.Checkbox(value=False, label="Save as float16") + + with gr.Row(): + checkpoint_format = gr.Radio(choices=["ckpt", "safetensors"], value="ckpt", label="Checkpoint format") + save_as_half = gr.Checkbox(value=False, label="Save as float16") + modelmerger_merge = gr.Button(elem_id="modelmerger_merge", label="Merge", variant='primary') with gr.Column(variant='panel'): @@ -1692,6 +1696,7 @@ def create_ui(wrap_gradio_gpu_call): interp_amount, save_as_half, custom_name, + checkpoint_format, ], outputs=[ submit_result, -- cgit v1.2.3 From 506d529d19f135f57e142371271f84d4971b456f Mon Sep 17 00:00:00 2001 From: AUTOMATIC <16777216c@gmail.com> Date: Sun, 27 Nov 2022 16:28:32 +0300 Subject: rework #5012 to also work for pictures dragged into the prompt and also add Clip skip + ENSD to parameters --- modules/extras.py | 40 ++++-------------------------- modules/generation_parameters_copypaste.py | 1 + modules/images.py | 38 +++++++++++++++++++++++++++- modules/sd_samplers.py | 2 +- 4 files changed, 44 insertions(+), 37 deletions(-) (limited to 'modules/extras.py') diff --git a/modules/extras.py b/modules/extras.py index 0057bf9c..6021a024 100644 --- a/modules/extras.py +++ b/modules/extras.py @@ -1,6 +1,8 @@ from __future__ import annotations import math import os +import sys +import traceback import numpy as np from PIL import Image @@ -12,7 +14,7 @@ from typing import Callable, List, OrderedDict, Tuple from functools import partial from dataclasses import dataclass -from modules import processing, shared, images, devices, sd_models +from modules import processing, shared, images, devices, sd_models, sd_samplers from modules.shared import opts import modules.gfpgan_model from modules.ui import plaintext_to_html @@ -22,7 +24,6 @@ import piexif.helper import gradio as gr import safetensors.torch - class LruCache(OrderedDict): @dataclass(frozen=True) class Key: @@ -214,39 +215,8 @@ def run_pnginfo(image): if image is None: return '', '', '' - items = image.info - geninfo = '' - - if "exif" in image.info: - exif = piexif.load(image.info["exif"]) - exif_comment = (exif or {}).get("Exif", {}).get(piexif.ExifIFD.UserComment, b'') - try: - exif_comment = piexif.helper.UserComment.load(exif_comment) - except ValueError: - exif_comment = exif_comment.decode('utf8', errors="ignore") - - items['exif comment'] = exif_comment - geninfo = exif_comment - - for field in ['jfif', 'jfif_version', 'jfif_unit', 'jfif_density', 'dpi', 'exif', - 'loop', 'background', 'timestamp', 'duration']: - items.pop(field, None) - - geninfo = items.get('parameters', geninfo) - - # nai prompt - if "Software" in items.keys() and items["Software"] == "NovelAI": - import json - json_info = json.loads(items["Comment"]) - geninfo = f'{items["Description"]}\r\nNegative prompt: {json_info["uc"]}\r\n' - sampler = "Euler a" - if json_info["sampler"] == "k_euler_ancestral": - sampler = "Euler a" - elif json_info["sampler"] == "k_euler": - sampler = "Euler" - model_hash = '925997e9' # assuming this is the correct model hash - # not sure with noise and strength parameter - geninfo += f'Steps: {json_info["steps"]}, Sampler: {sampler}, CFG scale: {json_info["scale"]}, Seed: {json_info["seed"]}, Size: {image.width}x{image.height}, Model hash: {model_hash}' # , Denoising strength: {json_info["noise"]}' + geninfo, items = images.read_info_from_image(image) + items = {**{'parameters': geninfo}, **items} info = '' for key, text in items.items(): diff --git a/modules/generation_parameters_copypaste.py 
From 44c46f0ed395967cd3830dd481a2db759fda5b3b Mon Sep 17 00:00:00 2001
From: AUTOMATIC <16777216c@gmail.com>
Date: Sun, 4 Dec 2022 12:30:44 +0300
Subject: make it possible to merge inpainting model with non-inpainting one

---
 modules/extras.py | 27 +++++++++++++++++++++++++--
 1 file changed, 25 insertions(+), 2 deletions(-)
(limited to 'modules/extras.py')

diff --git a/modules/extras.py b/modules/extras.py
index 6021a024..bc349d5e 100644
--- a/modules/extras.py
+++ b/modules/extras.py
@@ -247,6 +247,7 @@ def run_modelmerger(primary_model_name, secondary_model_name, teritary_model_nam
     primary_model_info = sd_models.checkpoints_list[primary_model_name]
     secondary_model_info = sd_models.checkpoints_list[secondary_model_name]
     teritary_model_info = sd_models.checkpoints_list.get(teritary_model_name, None)
+    result_is_inpainting_model = False
 
     print(f"Loading {primary_model_info.filename}...")
     theta_0 = sd_models.read_state_dict(primary_model_info.filename, map_location='cpu')
@@ -280,8 +281,22 @@ def run_modelmerger(primary_model_name, secondary_model_name, teritary_model_nam
 
     for key in tqdm.tqdm(theta_0.keys()):
         if 'model' in key and key in theta_1:
+            a = theta_0[key]
+            b = theta_1[key]
 
-            theta_0[key] = theta_func2(theta_0[key], theta_1[key], multiplier)
+            # this enables merging an inpainting model (A) with another one (B);
+            # where a normal model would have 4 channels, for latent space, an inpainting model would
+            # have another 4 channels for the unmasked picture's latent space, plus one channel for mask, for a total of 9
+            if a.shape != b.shape and a.shape[0:1] + a.shape[2:] == b.shape[0:1] + b.shape[2:]:
+                if a.shape[1] == 4 and b.shape[1] == 9:
+                    raise RuntimeError("When merging inpainting model with a normal one, A must be the inpainting model.")
+
+                assert a.shape[1] == 9 and b.shape[1] == 4, f"Bad dimensions for merged layer {key}: A={a.shape}, B={b.shape}"
+
+                theta_0[key][:, 0:4, :, :] = theta_func2(a[:, 0:4, :, :], b, multiplier)
+                result_is_inpainting_model = True
+            else:
+                theta_0[key] = theta_func2(a, b, multiplier)
 
             if save_as_half:
                 theta_0[key] = theta_0[key].half()
@@ -295,8 +310,16 @@ def run_modelmerger(primary_model_name, secondary_model_name, teritary_model_nam
 
     ckpt_dir = shared.cmd_opts.ckpt_dir or sd_models.model_path
 
-    filename = primary_model_info.model_name + '_' + str(round(1-multiplier, 2)) + '-' + secondary_model_info.model_name + '_' + str(round(multiplier, 2)) + '-' + interp_method.replace(" ", "_") + '-merged.' + checkpoint_format
+    filename = \
+        primary_model_info.model_name + '_' + str(round(1-multiplier, 2)) + '-' + \
+        secondary_model_info.model_name + '_' + str(round(multiplier, 2)) + '-' + \
+        interp_method.replace(" ", "_") + \
+        '-merged.' + \
+        ("inpainting." if result_is_inpainting_model else "") + \
+        checkpoint_format
+
     filename = filename if custom_name == '' else (custom_name + '.' + checkpoint_format)
+
     output_modelname = os.path.join(ckpt_dir, filename)
 
     print(f"Saving to {output_modelname}...")
--
cgit v1.2.3
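The shape test above compares every dimension except dim 1 (input channels): an inpainting UNet's first convolution takes 9 channels (4 for the latent, 4 for the masked picture's latent, 1 for the mask) while a standard model's takes 4, so only the shared first 4 channels are interpolated and the rest are kept from model A. A toy demonstration on dummy tensors; the 320-filter, 3x3-kernel sizes are illustrative, not checked against the real UNet:

    import torch

    def weighted_sum(theta0, theta1, alpha):
        return ((1 - alpha) * theta0) + (alpha * theta1)

    a = torch.randn(320, 9, 3, 3)  # stand-in for an inpainting model's input conv weight
    b = torch.randn(320, 4, 3, 3)  # stand-in for a standard model's input conv weight

    # shapes agree everywhere except dim 1, so merge only the shared latent channels
    if a.shape != b.shape and a.shape[0:1] + a.shape[2:] == b.shape[0:1] + b.shape[2:]:
        a[:, 0:4, :, :] = weighted_sum(a[:, 0:4, :, :], b, 0.3)
        # channels 4..8 (masked-image latent and mask) stay as they were in model A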
From 681c450ecd8f0999cbaf562c5e734c7105320ad9 Mon Sep 17 00:00:00 2001
From: Mackerel
Date: Sun, 4 Dec 2022 01:13:36 -0500
Subject: extras.py: use as little RAM as possible, misc fixes

maximum of 2 models loaded at once. delete unneeded model before next
step. fix 'teritary' -> 'tertiary'. gracefully fail when "add
difference" is selected without a tertiary model
---
 modules/extras.py | 37 +++++++++++++++++++------------------
 1 file changed, 19 insertions(+), 18 deletions(-)
(limited to 'modules/extras.py')

diff --git a/modules/extras.py b/modules/extras.py
index bc349d5e..0ad8deec 100644
--- a/modules/extras.py
+++ b/modules/extras.py
@@ -62,7 +62,7 @@ def run_extras(extras_mode, resize_mode, image, image_folder, input_dir, output_
     # Also keep track of original file names
     imageNameArr = []
     outputs = []
-    
+
     if extras_mode == 1:
         #convert file to pillow image
         for img in image_folder:
@@ -234,7 +234,7 @@ def run_pnginfo(image):
     return '', geninfo, info
 
 
-def run_modelmerger(primary_model_name, secondary_model_name, teritary_model_name, interp_method, multiplier, save_as_half, custom_name, checkpoint_format):
+def run_modelmerger(primary_model_name, secondary_model_name, tertiary_model_name, interp_method, multiplier, save_as_half, custom_name, checkpoint_format):
     def weighted_sum(theta0, theta1, alpha):
         return ((1 - alpha) * theta0) + (alpha * theta1)
 
@@ -246,30 +246,25 @@ def run_modelmerger(primary_model_name, secondary_model_name, teritary_model_nam
 
     primary_model_info = sd_models.checkpoints_list[primary_model_name]
     secondary_model_info = sd_models.checkpoints_list[secondary_model_name]
-    teritary_model_info = sd_models.checkpoints_list.get(teritary_model_name, None)
+    tertiary_model_info = sd_models.checkpoints_list.get(tertiary_model_name, None)
     result_is_inpainting_model = False
 
-    print(f"Loading {primary_model_info.filename}...")
-    theta_0 = sd_models.read_state_dict(primary_model_info.filename, map_location='cpu')
-
-    print(f"Loading {secondary_model_info.filename}...")
-    theta_1 = sd_models.read_state_dict(secondary_model_info.filename, map_location='cpu')
-
-    if teritary_model_info is not None:
-        print(f"Loading {teritary_model_info.filename}...")
-        theta_2 = sd_models.read_state_dict(teritary_model_info.filename, map_location='cpu')
-    else:
-        theta_2 = None
-
     theta_funcs = {
         "Weighted sum": (None, weighted_sum),
        "Add difference": (get_difference, add_difference),
     }
     theta_func1, theta_func2 = theta_funcs[interp_method]
 
-    print(f"Merging...")
+    if theta_func1 and not tertiary_model_info:
+        return ["Failed: Interpolation method requires a tertiary model."] + [gr.Dropdown.update(choices=sd_models.checkpoint_tiles()) for _ in range(4)]
+
+    print(f"Loading {secondary_model_info.filename}...")
+    theta_1 = sd_models.read_state_dict(secondary_model_info.filename, map_location='cpu')
 
     if theta_func1:
+        print(f"Loading {tertiary_model_info.filename}...")
+        theta_2 = sd_models.read_state_dict(tertiary_model_info.filename, map_location='cpu')
+
         for key in tqdm.tqdm(theta_1.keys()):
             if 'model' in key:
                 if key in theta_2:
@@ -277,7 +272,12 @@ def run_modelmerger(primary_model_name, secondary_model_name, teritary_model_nam
                     theta_1[key] = theta_func1(theta_1[key], t2)
                 else:
                     theta_1[key] = torch.zeros_like(theta_1[key])
-        del theta_2
+        del theta_2
+
+    print(f"Loading {primary_model_info.filename}...")
+    theta_0 = sd_models.read_state_dict(primary_model_info.filename, map_location='cpu')
+
+    print("Merging...")
 
     for key in tqdm.tqdm(theta_0.keys()):
         if 'model' in key and key in theta_1:
@@ -307,6 +307,7 @@ def run_modelmerger(primary_model_name, secondary_model_name, teritary_model_nam
             theta_0[key] = theta_1[key]
         if save_as_half:
             theta_0[key] = theta_0[key].half()
+    del theta_1
 
     ckpt_dir = shared.cmd_opts.ckpt_dir or sd_models.model_path
 
@@ -332,5 +333,5 @@ def run_modelmerger(primary_model_name, secondary_model_name, teritary_model_nam
 
     sd_models.list_models()
 
-    print(f"Checkpoint saved.")
+    print("Checkpoint saved.")
     return ["Checkpoint saved to " + output_modelname] + [gr.Dropdown.update(choices=sd_models.checkpoint_tiles()) for _ in range(4)]
--
cgit v1.2.3
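The reordering in this last commit is what caps residency at two state dicts: B is loaded first, C joins it only for the "Add difference" pre-pass and is deleted again, and only then is A loaded for the merge proper. A condensed, self-contained sketch of that ordering, where load() is a stub standing in for sd_models.read_state_dict:

    import torch

    def load(name):
        return {"model.weight": torch.randn(4)}  # stub checkpoint

    def merge(primary, secondary, tertiary, theta_func1, theta_func2, multiplier=0.3):
        theta_1 = load(secondary)                  # B loaded first
        if theta_func1 is not None:                # "Add difference" pre-pass
            theta_2 = load(tertiary)               # C joins B: two dicts resident
            for key in theta_1:
                theta_1[key] = theta_func1(theta_1[key], theta_2.get(key, torch.zeros_like(theta_1[key])))
            del theta_2                            # drop C before A is loaded
        theta_0 = load(primary)                    # A joins B: again only two resident
        for key in theta_0:
            if key in theta_1:
                theta_0[key] = theta_func2(theta_0[key], theta_1[key], multiplier)
        del theta_1                                # only the merged result survives
        return theta_0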