From 40a18d38a8fcb88d1c2947a2653b52cd2085536f Mon Sep 17 00:00:00 2001
From: lambertae
Date: Tue, 18 Jul 2023 00:32:01 -0400
Subject: add restart sampler

---
 modules/sd_samplers_kdiffusion.py | 70 +++++++++++++++++++++++++++++++++++++--
 1 file changed, 68 insertions(+), 2 deletions(-)

(limited to 'modules/sd_samplers_kdiffusion.py')

diff --git a/modules/sd_samplers_kdiffusion.py b/modules/sd_samplers_kdiffusion.py
index 71581b76..c63b677c 100644
--- a/modules/sd_samplers_kdiffusion.py
+++ b/modules/sd_samplers_kdiffusion.py
@@ -1,3 +1,5 @@
+# export PIP_CACHE_DIR=/scratch/dengm/cache
+# export XDG_CACHE_HOME=/scratch/dengm/cache
 from collections import deque
 import torch
 import inspect
@@ -30,12 +32,76 @@ samplers_k_diffusion = [
     ('DPM++ 2M Karras', 'sample_dpmpp_2m', ['k_dpmpp_2m_ka'], {'scheduler': 'karras'}),
     ('DPM++ SDE Karras', 'sample_dpmpp_sde', ['k_dpmpp_sde_ka'], {'scheduler': 'karras', "second_order": True, "brownian_noise": True}),
     ('DPM++ 2M SDE Karras', 'sample_dpmpp_2m_sde', ['k_dpmpp_2m_sde_ka'], {'scheduler': 'karras', "brownian_noise": True}),
+    ('Restart (new)', 'restart_sampler', ['restart'], {'scheduler': 'karras', "second_order": True}),
 ]
 
+
+@torch.no_grad()
+def restart_sampler(model, x, sigmas, extra_args=None, callback=None, disable=None, s_noise=1., restart_list = {0.1: [10, 2, 2]}):
+    """Implements restart sampling in Restart Sampling for Improving Generative Processes (2023)"""
+    '''Restart_list format: {min_sigma: [ restart_steps, restart_times, max_sigma]}'''
+
+    from tqdm.auto import trange, tqdm
+    extra_args = {} if extra_args is None else extra_args
+    s_in = x.new_ones([x.shape[0]])
+    step_id = 0
+
+    from k_diffusion.sampling import to_d, append_zero
+
+    def heun_step(x, old_sigma, new_sigma):
+        nonlocal step_id
+        denoised = model(x, old_sigma * s_in, **extra_args)
+        d = to_d(x, old_sigma, denoised)
+        if callback is not None:
+            callback({'x': x, 'i': step_id, 'sigma': new_sigma, 'sigma_hat': old_sigma, 'denoised': denoised})
+        dt = new_sigma - old_sigma
+        if new_sigma == 0:
+            # Euler method
+            x = x + d * dt
+        else:
+            # Heun's method
+            x_2 = x + d * dt
+            denoised_2 = model(x_2, new_sigma * s_in, **extra_args)
+            d_2 = to_d(x_2, new_sigma, denoised_2)
+            d_prime = (d + d_2) / 2
+            x = x + d_prime * dt
+        step_id += 1
+        return x
+    # print(sigmas)
+    temp_list = dict()
+    for key, value in restart_list.items():
+        temp_list[int(torch.argmin(abs(sigmas - key), dim=0))] = value
+    restart_list = temp_list
+
+
+    def get_sigmas_karras(n, sigma_min, sigma_max, rho=7., device='cpu'):
+        ramp = torch.linspace(0, 1, n).to(device)
+        min_inv_rho = (sigma_min ** (1 / rho))
+        max_inv_rho = (sigma_max ** (1 / rho))
+        if isinstance(min_inv_rho, torch.Tensor):
+            min_inv_rho = min_inv_rho.to(device)
+        if isinstance(max_inv_rho, torch.Tensor):
+            max_inv_rho = max_inv_rho.to(device)
+        sigmas = (max_inv_rho + ramp * (min_inv_rho - max_inv_rho)) ** rho
+        return append_zero(sigmas).to(device)
+
+    for i in trange(len(sigmas) - 1, disable=disable):
+        x = heun_step(x, sigmas[i], sigmas[i+1])
+        if i + 1 in restart_list:
+            restart_steps, restart_times, restart_max = restart_list[i + 1]
+            min_idx = i + 1
+            max_idx = int(torch.argmin(abs(sigmas - restart_max), dim=0))
+            sigma_restart = get_sigmas_karras(restart_steps, sigmas[min_idx], sigmas[max_idx], device=sigmas.device)[:-1] # remove the zero at the end
+            for times in range(restart_times):
+                x = x + torch.randn_like(x) * s_noise * (sigmas[max_idx] ** 2 - sigmas[min_idx] ** 2) ** 0.5
+                for (old_sigma, new_sigma) in zip(sigma_restart[:-1], sigma_restart[1:]):
+                    x = heun_step(x, old_sigma, new_sigma)
+    return x
+
 samplers_data_k_diffusion = [
     sd_samplers_common.SamplerData(label, lambda model, funcname=funcname: KDiffusionSampler(funcname, model), aliases, options)
     for label, funcname, aliases, options in samplers_k_diffusion
-    if hasattr(k_diffusion.sampling, funcname)
+    if (hasattr(k_diffusion.sampling, funcname) or funcname == 'restart_sampler')
 ]
 
 sampler_extra_params = {
@@ -245,7 +311,7 @@ class KDiffusionSampler:
         self.model_wrap = denoiser(sd_model, quantize=shared.opts.enable_quantization)
 
         self.funcname = funcname
-        self.func = getattr(k_diffusion.sampling, self.funcname)
+        self.func = getattr(k_diffusion.sampling, self.funcname) if funcname != "restart_sampler" else restart_sampler
         self.extra_params = sampler_extra_params.get(funcname, [])
         self.model_wrap_cfg = CFGDenoiser(self.model_wrap)
         self.sampler_noises = None
--
cgit v1.2.3
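
A note on the core mechanism the patch above adds: whenever a restart segment begins, the sampler jumps back up the noise schedule by adding fresh Gaussian noise with standard deviation sqrt(sigma_max^2 - sigma_min^2), the variance gap between the two levels. The following is a minimal standalone sketch of just that re-noising step, not code from the patch; the tensor shape and sigma values are made up for illustration.

    import torch

    def renoise(x, sigma_min, sigma_max, s_noise=1.0):
        # A sample at noise level sigma_min becomes a valid sample at the higher
        # level sigma_max once the missing variance is added back.
        added_std = (sigma_max ** 2 - sigma_min ** 2) ** 0.5
        return x + torch.randn_like(x) * s_noise * added_std

    x = torch.zeros(1, 4, 8, 8)                 # stand-in latent at sigma_min
    x_up = renoise(x, sigma_min=0.1, sigma_max=2.0)
    print(x_up.std())                           # close to sqrt(2.0**2 - 0.1**2) ~= 2.0
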
From 15a94d6cf7fa075c09362e73c1239692d021c559 Mon Sep 17 00:00:00 2001
From: lambertae
Date: Tue, 18 Jul 2023 00:39:26 -0400
Subject: remove useless header

---
 modules/sd_samplers_kdiffusion.py | 2 --
 1 file changed, 2 deletions(-)

(limited to 'modules/sd_samplers_kdiffusion.py')

diff --git a/modules/sd_samplers_kdiffusion.py b/modules/sd_samplers_kdiffusion.py
index c63b677c..7888d864 100644
--- a/modules/sd_samplers_kdiffusion.py
+++ b/modules/sd_samplers_kdiffusion.py
@@ -1,5 +1,3 @@
-# export PIP_CACHE_DIR=/scratch/dengm/cache
-# export XDG_CACHE_HOME=/scratch/dengm/cache
 from collections import deque
 import torch
 import inspect
--
cgit v1.2.3

From 37e048a7e2356f4caebfd976351112f03856f082 Mon Sep 17 00:00:00 2001
From: lambertae
Date: Tue, 18 Jul 2023 00:55:02 -0400
Subject: fix floating error

---
 modules/sd_samplers_kdiffusion.py | 11 ++++++-----
 1 file changed, 6 insertions(+), 5 deletions(-)

(limited to 'modules/sd_samplers_kdiffusion.py')

diff --git a/modules/sd_samplers_kdiffusion.py b/modules/sd_samplers_kdiffusion.py
index 7888d864..1bb25adf 100644
--- a/modules/sd_samplers_kdiffusion.py
+++ b/modules/sd_samplers_kdiffusion.py
@@ -89,11 +89,12 @@ def restart_sampler(model, x, sigmas, extra_args=None, callback=None, disable=No
             restart_steps, restart_times, restart_max = restart_list[i + 1]
             min_idx = i + 1
             max_idx = int(torch.argmin(abs(sigmas - restart_max), dim=0))
-            sigma_restart = get_sigmas_karras(restart_steps, sigmas[min_idx], sigmas[max_idx], device=sigmas.device)[:-1] # remove the zero at the end
-            for times in range(restart_times):
-                x = x + torch.randn_like(x) * s_noise * (sigmas[max_idx] ** 2 - sigmas[min_idx] ** 2) ** 0.5
-                for (old_sigma, new_sigma) in zip(sigma_restart[:-1], sigma_restart[1:]):
-                    x = heun_step(x, old_sigma, new_sigma)
+            if max_idx < min_idx:
+                sigma_restart = get_sigmas_karras(restart_steps, sigmas[min_idx], sigmas[max_idx], device=sigmas.device)[:-1] # remove the zero at the end
+                for times in range(restart_times):
+                    x = x + torch.randn_like(x) * s_noise * (sigmas[max_idx] ** 2 - sigmas[min_idx] ** 2) ** 0.5
+                    for (old_sigma, new_sigma) in zip(sigma_restart[:-1], sigma_restart[1:]):
+                        x = heun_step(x, old_sigma, new_sigma)
     return x
 
 samplers_data_k_diffusion = [
--
cgit v1.2.3
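
The guard introduced by the "fix floating error" commit is easier to see outside the sampler. Restart bounds are given as sigma values and mapped onto the descending schedule with a nearest-index lookup; the restart only runs when the high-sigma index actually precedes the low-sigma index, otherwise sigma_max**2 - sigma_min**2 would be zero or negative and the injected noise would be degenerate. A small self-contained illustration of that lookup (the sigma values here are invented, not taken from the patch):

    import torch

    def restart_bounds(sigmas, min_sigma=0.1, max_sigma=2.0):
        # Map the requested sigma band onto indices of a descending schedule.
        min_idx = int(torch.argmin(abs(sigmas - min_sigma), dim=0))
        max_idx = int(torch.argmin(abs(sigmas - max_sigma), dim=0))
        if max_idx < min_idx:  # the guard added in the commit above
            noise_std = (sigmas[max_idx] ** 2 - sigmas[min_idx] ** 2) ** 0.5
            return min_idx, max_idx, noise_std
        return None            # band not usable on this schedule: skip the restart

    sigmas = torch.tensor([14.6, 7.9, 4.0, 2.1, 1.0, 0.4, 0.1, 0.0])
    print(restart_bounds(sigmas))   # (6, 3, tensor(2.0976...))
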
From 7bb0fbed136c6a345b211e09102659fd89362576 Mon Sep 17 00:00:00 2001
From: lambertae
Date: Tue, 18 Jul 2023 01:02:04 -0400
Subject: code styling

---
 modules/sd_samplers_kdiffusion.py | 14 +++++---------
 1 file changed, 5 insertions(+), 9 deletions(-)

(limited to 'modules/sd_samplers_kdiffusion.py')

diff --git a/modules/sd_samplers_kdiffusion.py b/modules/sd_samplers_kdiffusion.py
index 1bb25adf..db7013f2 100644
--- a/modules/sd_samplers_kdiffusion.py
+++ b/modules/sd_samplers_kdiffusion.py
@@ -35,17 +35,15 @@ samplers_k_diffusion = [
 
 
 @torch.no_grad()
-def restart_sampler(model, x, sigmas, extra_args=None, callback=None, disable=None, s_noise=1., restart_list = {0.1: [10, 2, 2]}):
+def restart_sampler(model, x, sigmas, extra_args=None, callback=None, disable=None, s_noise=1.):
     """Implements restart sampling in Restart Sampling for Improving Generative Processes (2023)"""
     '''Restart_list format: {min_sigma: [ restart_steps, restart_times, max_sigma]}'''
-
-    from tqdm.auto import trange, tqdm
+    restart_list = {0.1: [10, 2, 2]}
+    from tqdm.auto import trange
     extra_args = {} if extra_args is None else extra_args
     s_in = x.new_ones([x.shape[0]])
     step_id = 0
-
     from k_diffusion.sampling import to_d, append_zero
-
     def heun_step(x, old_sigma, new_sigma):
         nonlocal step_id
         denoised = model(x, old_sigma * s_in, **extra_args)
@@ -70,8 +68,6 @@ def restart_sampler(model, x, sigmas, extra_args=None, callback=None, disable=No
     for key, value in restart_list.items():
         temp_list[int(torch.argmin(abs(sigmas - key), dim=0))] = value
     restart_list = temp_list
-
-
     def get_sigmas_karras(n, sigma_min, sigma_max, rho=7., device='cpu'):
         ramp = torch.linspace(0, 1, n).to(device)
         min_inv_rho = (sigma_min ** (1 / rho))
@@ -82,7 +78,6 @@ def restart_sampler(model, x, sigmas, extra_args=None, callback=None, disable=No
         max_inv_rho = max_inv_rho.to(device)
         sigmas = (max_inv_rho + ramp * (min_inv_rho - max_inv_rho)) ** rho
         return append_zero(sigmas).to(device)
-
     for i in trange(len(sigmas) - 1, disable=disable):
         x = heun_step(x, sigmas[i], sigmas[i+1])
         if i + 1 in restart_list:
@@ -91,7 +86,8 @@ def restart_sampler(model, x, sigmas, extra_args=None, callback=None, disable=No
             max_idx = int(torch.argmin(abs(sigmas - restart_max), dim=0))
             if max_idx < min_idx:
                 sigma_restart = get_sigmas_karras(restart_steps, sigmas[min_idx], sigmas[max_idx], device=sigmas.device)[:-1] # remove the zero at the end
-                for times in range(restart_times):
+                while restart_times > 0:
+                    restart_times -= 1
                     x = x + torch.randn_like(x) * s_noise * (sigmas[max_idx] ** 2 - sigmas[min_idx] ** 2) ** 0.5
                     for (old_sigma, new_sigma) in zip(sigma_restart[:-1], sigma_restart[1:]):
                         x = heun_step(x, old_sigma, new_sigma)
--
cgit v1.2.3

From ddbf4a73f5c0cfe63ca0988b8e642d3b977a3fa9 Mon Sep 17 00:00:00 2001
From: lambertae
Date: Thu, 20 Jul 2023 02:24:18 -0400
Subject: restart-sampler with correct steps

---
 modules/sd_samplers_kdiffusion.py | 22 ++++++++++++++--------
 1 file changed, 14 insertions(+), 8 deletions(-)

(limited to 'modules/sd_samplers_kdiffusion.py')

diff --git a/modules/sd_samplers_kdiffusion.py b/modules/sd_samplers_kdiffusion.py
index db7013f2..ed5e6c79 100644
--- a/modules/sd_samplers_kdiffusion.py
+++ b/modules/sd_samplers_kdiffusion.py
@@ -38,20 +38,19 @@ samplers_k_diffusion = [
 def restart_sampler(model, x, sigmas, extra_args=None, callback=None, disable=None, s_noise=1.):
     """Implements restart sampling in Restart Sampling for Improving Generative Processes (2023)"""
     '''Restart_list format: {min_sigma: [ restart_steps, restart_times, max_sigma]}'''
-    restart_list = {0.1: [10, 2, 2]}
     from tqdm.auto import trange
     extra_args = {} if extra_args is None else extra_args
     s_in = x.new_ones([x.shape[0]])
     step_id = 0
     from k_diffusion.sampling import to_d, append_zero
-    def heun_step(x, old_sigma, new_sigma):
+    def heun_step(x, old_sigma, new_sigma, second_order = True):
         nonlocal step_id
         denoised = model(x, old_sigma * s_in, **extra_args)
         d = to_d(x, old_sigma, denoised)
         if callback is not None:
             callback({'x': x, 'i': step_id, 'sigma': new_sigma, 'sigma_hat': old_sigma, 'denoised': denoised})
         dt = new_sigma - old_sigma
-        if new_sigma == 0:
+        if new_sigma == 0 or not second_order:
             # Euler method
             x = x + d * dt
         else:
@@ -63,11 +62,6 @@ def restart_sampler(model, x, sigmas, extra_args=None, callback=None, disable=No
         x = x + d_prime * dt
         step_id += 1
         return x
-    # print(sigmas)
-    temp_list = dict()
-    for key, value in restart_list.items():
-        temp_list[int(torch.argmin(abs(sigmas - key), dim=0))] = value
-    restart_list = temp_list
     def get_sigmas_karras(n, sigma_min, sigma_max, rho=7., device='cpu'):
         ramp = torch.linspace(0, 1, n).to(device)
         min_inv_rho = (sigma_min ** (1 / rho))
@@ -78,6 +72,18 @@ def restart_sampler(model, x, sigmas, extra_args=None, callback=None, disable=No
         max_inv_rho = max_inv_rho.to(device)
         sigmas = (max_inv_rho + ramp * (min_inv_rho - max_inv_rho)) ** rho
         return append_zero(sigmas).to(device)
+    steps = sigmas.shape[0] - 1
+    if steps >= 20:
+        restart_steps = 9
+        restart_times = 2 if steps >= 36 else 1
+        sigmas = get_sigmas_karras(steps - restart_steps * restart_times, sigmas[-2], sigmas[0], device=sigmas.device)
+        restart_list = {0.1: [restart_steps + 1, restart_times, 2]}
+    else:
+        restart_list = dict()
+    temp_list = dict()
+    for key, value in restart_list.items():
+        temp_list[int(torch.argmin(abs(sigmas - key), dim=0))] = value
+    restart_list = temp_list
     for i in trange(len(sigmas) - 1, disable=disable):
         x = heun_step(x, sigmas[i], sigmas[i+1])
         if i + 1 in restart_list:
--
cgit v1.2.3
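
The step accounting behind the "restart-sampler with correct steps" commit, as I read it: the user asks for a number of sampler steps, the restart segments will consume restart_steps * restart_times of them, so the base Karras schedule is rebuilt from the remainder, and the restart entry stores restart_steps + 1 because get_sigmas_karras appends a trailing 0.0 that the sampler slices off again. A rough budget check under those assumptions (hypothetical helper, not part of the patch):

    def heun_step_budget(steps):
        # Mirrors the heuristic in the commit above: schedules under 20 steps skip restarts.
        if steps < 20:
            return steps, 0, 0
        restart_steps = 9
        restart_times = 2 if steps >= 36 else 1
        base_steps = steps - restart_steps * restart_times
        return base_steps, restart_steps, restart_times

    for steps in (15, 20, 36, 50):
        base, seg, times = heun_step_budget(steps)
        print(steps, "requested ->", base, "base +", times, "x", seg, "restart =", base + seg * times)
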
From 2f57a559ac3381c1ef2516655c3a3d1088191c54 Mon Sep 17 00:00:00 2001
From: lambertae
Date: Thu, 20 Jul 2023 20:34:41 -0400
Subject: allow choise of restart_list & use karras from kdiffusion

---
 modules/sd_samplers_kdiffusion.py | 32 ++++++++++++--------------------
 1 file changed, 12 insertions(+), 20 deletions(-)

(limited to 'modules/sd_samplers_kdiffusion.py')

diff --git a/modules/sd_samplers_kdiffusion.py b/modules/sd_samplers_kdiffusion.py
index ed5e6c79..c72d01c8 100644
--- a/modules/sd_samplers_kdiffusion.py
+++ b/modules/sd_samplers_kdiffusion.py
@@ -35,14 +35,15 @@ samplers_k_diffusion = [
 
 
 @torch.no_grad()
-def restart_sampler(model, x, sigmas, extra_args=None, callback=None, disable=None, s_noise=1.):
+def restart_sampler(model, x, sigmas, extra_args=None, callback=None, disable=None, s_noise=1., restart_list = None):
     """Implements restart sampling in Restart Sampling for Improving Generative Processes (2023)"""
     '''Restart_list format: {min_sigma: [ restart_steps, restart_times, max_sigma]}'''
+    '''If restart_list is None: will choose restart_list automatically, otherwise will use the given restart_list'''
     from tqdm.auto import trange
     extra_args = {} if extra_args is None else extra_args
     s_in = x.new_ones([x.shape[0]])
     step_id = 0
-    from k_diffusion.sampling import to_d, append_zero
+    from k_diffusion.sampling import to_d, append_zero, get_sigmas_karras
     def heun_step(x, old_sigma, new_sigma, second_order = True):
         nonlocal step_id
         denoised = model(x, old_sigma * s_in, **extra_args)
@@ -62,24 +63,15 @@ def restart_sampler(model, x, sigmas, extra_args=None, callback=None, disable=No
             x = x + d_prime * dt
         step_id += 1
         return x
-    def get_sigmas_karras(n, sigma_min, sigma_max, rho=7., device='cpu'):
-        ramp = torch.linspace(0, 1, n).to(device)
-        min_inv_rho = (sigma_min ** (1 / rho))
-        max_inv_rho = (sigma_max ** (1 / rho))
-        if isinstance(min_inv_rho, torch.Tensor):
-            min_inv_rho = min_inv_rho.to(device)
-        if isinstance(max_inv_rho, torch.Tensor):
-            max_inv_rho = max_inv_rho.to(device)
-        sigmas = (max_inv_rho + ramp * (min_inv_rho - max_inv_rho)) ** rho
-        return append_zero(sigmas).to(device)
     steps = sigmas.shape[0] - 1
-    if steps >= 20:
-        restart_steps = 9
-        restart_times = 2 if steps >= 36 else 1
-        sigmas = get_sigmas_karras(steps - restart_steps * restart_times, sigmas[-2], sigmas[0], device=sigmas.device)
-        restart_list = {0.1: [restart_steps + 1, restart_times, 2]}
-    else:
-        restart_list = dict()
+    if restart_list is None:
+        if steps >= 20:
+            restart_steps = 9
+            restart_times = 2 if steps >= 36 else 1
+            sigmas = get_sigmas_karras(steps - restart_steps * restart_times, sigmas[-2].item(), sigmas[0].item(), device=sigmas.device)
+            restart_list = {0.1: [restart_steps + 1, restart_times, 2]}
+        else:
+            restart_list = dict()
     temp_list = dict()
     for key, value in restart_list.items():
         temp_list[int(torch.argmin(abs(sigmas - key), dim=0))] = value
@@ -91,7 +83,7 @@ def restart_sampler(model, x, sigmas, extra_args=None, callback=None, disable=No
             min_idx = i + 1
             max_idx = int(torch.argmin(abs(sigmas - restart_max), dim=0))
             if max_idx < min_idx:
-                sigma_restart = get_sigmas_karras(restart_steps, sigmas[min_idx], sigmas[max_idx], device=sigmas.device)[:-1] # remove the zero at the end
+                sigma_restart = get_sigmas_karras(restart_steps, sigmas[min_idx].item(), sigmas[max_idx].item(), device=sigmas.device)[:-1] # remove the zero at the end
                 while restart_times > 0:
                     restart_times -= 1
                     x = x + torch.randn_like(x) * s_noise * (sigmas[max_idx] ** 2 - sigmas[min_idx] ** 2) ** 0.5
--
cgit v1.2.3
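
From this commit on the sampler relies on k_diffusion.sampling.get_sigmas_karras instead of a local copy, and it passes the sigma endpoints as Python floats via .item(), since the library helper expects plain scalars for sigma_min and sigma_max. A short usage sketch, assuming the k-diffusion package is installed; the step counts and sigma range below are illustrative only.

    import torch
    from k_diffusion.sampling import get_sigmas_karras

    full = get_sigmas_karras(20, 0.03, 14.6, device='cpu')   # 21 values, trailing 0.0
    print(float(full[0]), float(full[-2]), float(full[-1]))  # sigma_max ... sigma_min, 0.0

    # A restart segment between two entries of an existing schedule: pass plain floats,
    # just as the patch does with sigmas[min_idx].item() / sigmas[max_idx].item().
    segment = get_sigmas_karras(10, full[-2].item(), full[5].item(), device=full.device)[:-1]
    print(segment.shape)                                     # torch.Size([10])
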
From 128d59c9ccfbc9c7fccd6f1b2fe58bbbb18459f9 Mon Sep 17 00:00:00 2001
From: lambertae
Date: Thu, 20 Jul 2023 20:36:40 -0400
Subject: fix ruff

---
 modules/sd_samplers_kdiffusion.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

(limited to 'modules/sd_samplers_kdiffusion.py')

diff --git a/modules/sd_samplers_kdiffusion.py b/modules/sd_samplers_kdiffusion.py
index c72d01c8..21b347ed 100644
--- a/modules/sd_samplers_kdiffusion.py
+++ b/modules/sd_samplers_kdiffusion.py
@@ -43,7 +43,7 @@ def restart_sampler(model, x, sigmas, extra_args=None, callback=None, disable=No
     extra_args = {} if extra_args is None else extra_args
     s_in = x.new_ones([x.shape[0]])
     step_id = 0
-    from k_diffusion.sampling import to_d, append_zero, get_sigmas_karras
+    from k_diffusion.sampling import to_d, get_sigmas_karras
     def heun_step(x, old_sigma, new_sigma, second_order = True):
         nonlocal step_id
         denoised = model(x, old_sigma * s_in, **extra_args)
--
cgit v1.2.3

From f87389029839a27464a18846815339e81787b882 Mon Sep 17 00:00:00 2001
From: lambertae
Date: Thu, 20 Jul 2023 21:27:43 -0400
Subject: new restart scheme

---
 modules/sd_samplers_kdiffusion.py | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

(limited to 'modules/sd_samplers_kdiffusion.py')

diff --git a/modules/sd_samplers_kdiffusion.py b/modules/sd_samplers_kdiffusion.py
index 21b347ed..ed60670c 100644
--- a/modules/sd_samplers_kdiffusion.py
+++ b/modules/sd_samplers_kdiffusion.py
@@ -67,7 +67,10 @@ def restart_sampler(model, x, sigmas, extra_args=None, callback=None, disable=No
     if restart_list is None:
         if steps >= 20:
             restart_steps = 9
-            restart_times = 2 if steps >= 36 else 1
+            restart_times = 1
+            if steps >= 36:
+                restart_steps = steps // 4
+                restart_times = 2
             sigmas = get_sigmas_karras(steps - restart_steps * restart_times, sigmas[-2].item(), sigmas[0].item(), device=sigmas.device)
             restart_list = {0.1: [restart_steps + 1, restart_times, 2]}
         else:
--
cgit v1.2.3
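
The "new restart scheme" commit replaces the fixed nine-step restart with one that scales with the budget: below 20 steps nothing changes, from 20 to 35 steps a single 9-step restart runs once, and from 36 steps upward each of two restarts gets a quarter of the requested steps. Restated as a standalone helper for sanity-checking how many base steps remain (my paraphrase of the patch logic, not code from it):

    def restart_plan(steps):
        """Return (base_steps, restart_steps, restart_times) under the new scheme."""
        if steps < 20:
            return steps, 0, 0
        restart_steps, restart_times = 9, 1
        if steps >= 36:
            restart_steps, restart_times = steps // 4, 2
        return steps - restart_steps * restart_times, restart_steps, restart_times

    for steps in (19, 20, 35, 36, 60, 100):
        print(steps, restart_plan(steps))
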
From 8de6d3ff77e841a5fd9d5f1b16bdd22737c8d657 Mon Sep 17 00:00:00 2001
From: lambertae
Date: Tue, 25 Jul 2023 22:35:43 -0400
Subject: fix progress bar & torchHijack

---
 modules/sd_samplers_kdiffusion.py | 19 +++++++++++++------
 1 file changed, 13 insertions(+), 6 deletions(-)

(limited to 'modules/sd_samplers_kdiffusion.py')

diff --git a/modules/sd_samplers_kdiffusion.py b/modules/sd_samplers_kdiffusion.py
index ed60670c..7a2427b5 100644
--- a/modules/sd_samplers_kdiffusion.py
+++ b/modules/sd_samplers_kdiffusion.py
@@ -79,19 +79,26 @@ def restart_sampler(model, x, sigmas, extra_args=None, callback=None, disable=No
     for key, value in restart_list.items():
         temp_list[int(torch.argmin(abs(sigmas - key), dim=0))] = value
     restart_list = temp_list
-    for i in trange(len(sigmas) - 1, disable=disable):
-        x = heun_step(x, sigmas[i], sigmas[i+1])
+    step_list = []
+    for i in range(len(sigmas) - 1):
+        step_list.append((sigmas[i], sigmas[i + 1]))
         if i + 1 in restart_list:
             restart_steps, restart_times, restart_max = restart_list[i + 1]
             min_idx = i + 1
             max_idx = int(torch.argmin(abs(sigmas - restart_max), dim=0))
             if max_idx < min_idx:
-                sigma_restart = get_sigmas_karras(restart_steps, sigmas[min_idx].item(), sigmas[max_idx].item(), device=sigmas.device)[:-1] # remove the zero at the end
+                sigma_restart = get_sigmas_karras(restart_steps, sigmas[min_idx].item(), sigmas[max_idx].item(), device=sigmas.device)[:-1]
                 while restart_times > 0:
                     restart_times -= 1
-                    x = x + torch.randn_like(x) * s_noise * (sigmas[max_idx] ** 2 - sigmas[min_idx] ** 2) ** 0.5
-                    for (old_sigma, new_sigma) in zip(sigma_restart[:-1], sigma_restart[1:]):
-                        x = heun_step(x, old_sigma, new_sigma)
+                    step_list.extend([(old_sigma, new_sigma) for (old_sigma, new_sigma) in zip(sigma_restart[:-1], sigma_restart[1:])])
+    last_sigma = None
+    for i in trange(len(step_list), disable=disable):
+        if last_sigma is None:
+            last_sigma = step_list[i][0]
+        elif last_sigma < step_list[i][0]:
+            x = x + k_diffusion.sampling.torch.randn_like(x) * s_noise * (step_list[i][0] ** 2 - last_sigma ** 2) ** 0.5
+        x = heun_step(x, step_list[i][0], step_list[i][1])
+        last_sigma = step_list[i][1]
     return x
 
 samplers_data_k_diffusion = [
--
cgit v1.2.3
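
The final commit restructures the loop so that every (old_sigma, new_sigma) pair - base schedule and restart segments alike - is collected into step_list before sampling starts; trange over that list then reports one tick per actual Heun step, and noise is injected lazily whenever the next pair starts at a higher sigma than the previous one ended. It also draws that noise through k_diffusion.sampling.torch.randn_like, which I read as routing it through the web UI's hijacked, seed-controlled RNG (the "torchHijack" in the commit message). Below is a reduced sketch of the same pattern with a fake denoising step and plain torch.randn_like; it is an illustration, not the web UI code.

    import torch
    from tqdm.auto import trange

    main = torch.tensor([4.0, 2.5, 1.2, 0.4, 0.0])     # base schedule
    restart = torch.tensor([2.0, 1.1, 0.4])            # jump back up after reaching 0.4

    # Build every (old_sigma, new_sigma) pair up front so the bar length is exact.
    step_list = list(zip(main[:-2], main[1:-1]))       # 4.0 -> 2.5 -> 1.2 -> 0.4
    step_list += list(zip(restart[:-1], restart[1:]))  # 2.0 -> 1.1 -> 0.4 (restart)
    step_list.append((main[-2], main[-1]))             # 0.4 -> 0.0

    x = torch.zeros(1, 4, 8, 8)
    last_sigma = None
    for i in trange(len(step_list)):
        old_sigma, new_sigma = step_list[i]
        if last_sigma is not None and last_sigma < old_sigma:
            # Sigma went up: re-noise to the restart's starting level first.
            x = x + torch.randn_like(x) * (old_sigma ** 2 - last_sigma ** 2) ** 0.5
        x = x * 0.9          # stand-in for one Heun step of the real denoiser
        last_sigma = new_sigma
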