From 3366e494a1147e570d8527eea19da88edb3a1e0c Mon Sep 17 00:00:00 2001
From: AUTOMATIC <16777216c@gmail.com>
Date: Mon, 22 May 2023 00:13:53 +0300
Subject: option to pad prompt/neg prompt to be same length

---
 modules/sd_samplers_kdiffusion.py | 10 ++++++++++
 1 file changed, 10 insertions(+)

(limited to 'modules/sd_samplers_kdiffusion.py')

diff --git a/modules/sd_samplers_kdiffusion.py b/modules/sd_samplers_kdiffusion.py
index 59982fc9..638e0ac9 100644
--- a/modules/sd_samplers_kdiffusion.py
+++ b/modules/sd_samplers_kdiffusion.py
@@ -125,6 +125,16 @@ class CFGDenoiser(torch.nn.Module):
             x_in = x_in[:-batch_size]
             sigma_in = sigma_in[:-batch_size]
 
+        # TODO add infotext entry
+        if shared.opts.pad_cond_uncond and tensor.shape[1] != uncond.shape[1]:
+            empty = shared.sd_model.cond_stage_model_empty_prompt
+            num_repeats = (tensor.shape[1] - uncond.shape[1]) // empty.shape[1]
+
+            if num_repeats < 0:
+                tensor = torch.cat([tensor, empty.repeat((tensor.shape[0], -num_repeats, 1))], axis=1)
+            elif num_repeats > 0:
+                uncond = torch.cat([uncond, empty.repeat((uncond.shape[0], num_repeats, 1))], axis=1)
+
         if tensor.shape[1] == uncond.shape[1] or skip_uncond:
             if is_edit_model:
                 cond_in = torch.cat([tensor, uncond, uncond])
--
cgit v1.2.3


From a10487986925ca8fd07ee7ae7fc5034752298551 Mon Sep 17 00:00:00 2001
From: Kohaku-Blueleaf <59680068+KohakuBlueleaf@users.noreply.github.com>
Date: Mon, 22 May 2023 21:52:46 +0800
Subject: Add custom karras scheduler

---
 modules/sd_samplers_kdiffusion.py | 6 ++++++
 1 file changed, 6 insertions(+)

(limited to 'modules/sd_samplers_kdiffusion.py')

diff --git a/modules/sd_samplers_kdiffusion.py b/modules/sd_samplers_kdiffusion.py
index 638e0ac9..eb6c760c 100644
--- a/modules/sd_samplers_kdiffusion.py
+++ b/modules/sd_samplers_kdiffusion.py
@@ -304,6 +304,12 @@ class KDiffusionSampler:
         if p.sampler_noise_scheduler_override:
             sigmas = p.sampler_noise_scheduler_override(steps)
+        elif p.enable_karras:
+            sigma_max = p.sigma_max
+            sigma_min = p.sigma_min
+            rho = p.rho
+            print(f"\nsigma_min: {sigma_min}, sigma_max: {sigma_max}, rho: {rho}")
+            sigmas = k_diffusion.sampling.get_sigmas_karras(n=steps, sigma_min=sigma_min, sigma_max=sigma_max, rho=rho, device=shared.device)
         elif self.config is not None and self.config.options.get('scheduler', None) == 'karras':
             sigma_min, sigma_max = (0.1, 10) if opts.use_old_karras_scheduler_sigmas else (self.model_wrap.sigmas[0].item(), self.model_wrap.sigmas[-1].item())
--
cgit v1.2.3


From 90ec557d60289a89b4ea6cd9b311658fbe682dc3 Mon Sep 17 00:00:00 2001
From: Kohaku-Blueleaf <59680068+KohakuBlueleaf@users.noreply.github.com>
Date: Mon, 22 May 2023 22:06:13 +0800
Subject: remove debug print

---
 modules/sd_samplers_kdiffusion.py | 1 -
 1 file changed, 1 deletion(-)

(limited to 'modules/sd_samplers_kdiffusion.py')

diff --git a/modules/sd_samplers_kdiffusion.py b/modules/sd_samplers_kdiffusion.py
index eb6c760c..d428551d 100644
--- a/modules/sd_samplers_kdiffusion.py
+++ b/modules/sd_samplers_kdiffusion.py
@@ -308,7 +308,6 @@ class KDiffusionSampler:
             sigma_max = p.sigma_max
             sigma_min = p.sigma_min
             rho = p.rho
-            print(f"\nsigma_min: {sigma_min}, sigma_max: {sigma_max}, rho: {rho}")
             sigmas = k_diffusion.sampling.get_sigmas_karras(n=steps, sigma_min=sigma_min, sigma_max=sigma_max, rho=rho, device=shared.device)
         elif self.config is not None and self.config.options.get('scheduler', None) == 'karras':
             sigma_min, sigma_max = (0.1, 10) if opts.use_old_karras_scheduler_sigmas else (self.model_wrap.sigmas[0].item(), self.model_wrap.sigmas[-1].item())
--
cgit v1.2.3
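[Editor's note] The first patch above pads whichever of the two prompt embeddings is shorter by repeating the embedding of an empty prompt until their token dimensions match, so cond and uncond can be denoised in a single batch. A minimal stand-alone sketch of the same idea in plain PyTorch (the tensor shapes are made up for illustration; this is not the webui code itself):

    import torch

    cond = torch.randn(2, 77, 768)     # batch of 2, one 77-token chunk (short prompt)
    uncond = torch.randn(2, 154, 768)  # batch of 2, two chunks (longer negative prompt)
    empty = torch.randn(1, 77, 768)    # embedding of the empty prompt ""

    # How many empty-prompt chunks cond is longer than uncond (negative: cond is shorter).
    num_repeats = (cond.shape[1] - uncond.shape[1]) // empty.shape[1]

    if num_repeats < 0:
        # cond is shorter: append repeated empty-prompt chunks to it
        cond = torch.cat([cond, empty.repeat(cond.shape[0], -num_repeats, 1)], dim=1)
    elif num_repeats > 0:
        # uncond is shorter: pad it instead
        uncond = torch.cat([uncond, empty.repeat(uncond.shape[0], num_repeats, 1)], dim=1)

    assert cond.shape[1] == uncond.shape[1]  # both are 154 tokens long now

Padding with the empty-prompt embedding rather than zeros keeps the appended chunk close to what the text encoder would produce for an unfilled chunk.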
From e6269cba7fd84a76b2bd0012cb954f947a79b6a5 Mon Sep 17 00:00:00 2001
From: Kohaku-Blueleaf <59680068+KohakuBlueleaf@users.noreply.github.com>
Date: Mon, 22 May 2023 23:02:05 +0800
Subject: Add dropdown for scheduler type

---
 modules/sd_samplers_kdiffusion.py | 19 +++++++++++++----
 1 file changed, 15 insertions(+), 4 deletions(-)

(limited to 'modules/sd_samplers_kdiffusion.py')

diff --git a/modules/sd_samplers_kdiffusion.py b/modules/sd_samplers_kdiffusion.py
index d428551d..441c040e 100644
--- a/modules/sd_samplers_kdiffusion.py
+++ b/modules/sd_samplers_kdiffusion.py
@@ -44,6 +44,12 @@ sampler_extra_params = {
     'sample_dpm_2': ['s_churn', 's_tmin', 's_tmax', 's_noise'],
 }
 
+k_diffusion_scheduler = {
+    'karras': k_diffusion.sampling.get_sigmas_karras,
+    'exponential': k_diffusion.sampling.get_sigmas_exponential,
+    'polyexponential': k_diffusion.sampling.get_sigmas_polyexponential
+}
+
 
 class CFGDenoiser(torch.nn.Module):
     """
@@ -305,10 +311,15 @@ class KDiffusionSampler:
 
         if p.sampler_noise_scheduler_override:
             sigmas = p.sampler_noise_scheduler_override(steps)
         elif p.enable_karras:
-            sigma_max = p.sigma_max
-            sigma_min = p.sigma_min
-            rho = p.rho
-            sigmas = k_diffusion.sampling.get_sigmas_karras(n=steps, sigma_min=sigma_min, sigma_max=sigma_max, rho=rho, device=shared.device)
+            print(p.k_sched_type, p.sigma_min, p.sigma_max, p.rho)
+            sigmas_func = k_diffusion_scheduler[p.k_sched_type]
+            sigmas_kwargs = {
+                'sigma_min': p.sigma_min,
+                'sigma_max': p.sigma_max
+            }
+            if p.k_sched_type != 'exponential':
+                sigmas_kwargs['rho'] = p.rho
+            sigmas = sigmas_func(n=steps, **sigmas_kwargs, device=shared.device)
         elif self.config is not None and self.config.options.get('scheduler', None) == 'karras':
             sigma_min, sigma_max = (0.1, 10) if opts.use_old_karras_scheduler_sigmas else (self.model_wrap.sigmas[0].item(), self.model_wrap.sigmas[-1].item())
--
cgit v1.2.3


From 7882f76da45de7279c7db0dd17b6aca82b7ddf46 Mon Sep 17 00:00:00 2001
From: Kohaku-Blueleaf <59680068+KohakuBlueleaf@users.noreply.github.com>
Date: Mon, 22 May 2023 23:26:28 +0800
Subject: Replace karras by k_diffusion, fix gen info

---
 modules/sd_samplers_kdiffusion.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

(limited to 'modules/sd_samplers_kdiffusion.py')

diff --git a/modules/sd_samplers_kdiffusion.py b/modules/sd_samplers_kdiffusion.py
index 441c040e..4d8f57a7 100644
--- a/modules/sd_samplers_kdiffusion.py
+++ b/modules/sd_samplers_kdiffusion.py
@@ -44,6 +44,7 @@ sampler_extra_params = {
     'sample_dpm_2': ['s_churn', 's_tmin', 's_tmax', 's_noise'],
 }
 
+k_diffusion_samplers_map = {x.name: x for x in samplers_data_k_diffusion}
 k_diffusion_scheduler = {
     'karras': k_diffusion.sampling.get_sigmas_karras,
     'exponential': k_diffusion.sampling.get_sigmas_exponential,
     'polyexponential': k_diffusion.sampling.get_sigmas_polyexponential
 }
@@ -310,7 +311,7 @@ class KDiffusionSampler:
 
         if p.sampler_noise_scheduler_override:
             sigmas = p.sampler_noise_scheduler_override(steps)
-        elif p.enable_karras:
+        elif p.enable_custom_k_sched:
             print(p.k_sched_type, p.sigma_min, p.sigma_max, p.rho)
             sigmas_func = k_diffusion_scheduler[p.k_sched_type]
             sigmas_kwargs = {
--
cgit v1.2.3
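[Editor's note] The two patches above turn the single hard-coded Karras schedule into a name-to-function dispatch over k-diffusion's sigma-schedule builders. A rough sketch of that dispatch, runnable wherever the k-diffusion package is installed (the function names are k-diffusion's real API; the numeric values below are only illustrative):

    import k_diffusion.sampling as K

    schedulers = {
        'karras': K.get_sigmas_karras,
        'exponential': K.get_sigmas_exponential,
        'polyexponential': K.get_sigmas_polyexponential,
    }

    def build_sigmas(name, steps, sigma_min, sigma_max, rho=7.0, device='cpu'):
        kwargs = {'sigma_min': sigma_min, 'sigma_max': sigma_max}
        if name != 'exponential':          # get_sigmas_exponential has no rho parameter
            kwargs['rho'] = rho
        return schedulers[name](n=steps, device=device, **kwargs)

    sigmas = build_sigmas('karras', steps=20, sigma_min=0.0292, sigma_max=14.6146)
    print(sigmas.shape)                    # 21 values: the requested steps plus a trailing zero

The special-casing of 'exponential' exists only because that builder takes no rho argument; passing one anyway would raise a TypeError.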
From 5dfb1f597b47b1028ee010df2ed8642e2beb6c1c Mon Sep 17 00:00:00 2001
From: Kohaku-Blueleaf <59680068+KohakuBlueleaf@users.noreply.github.com>
Date: Mon, 22 May 2023 23:36:16 +0800
Subject: remove not related code

---
 modules/sd_samplers_kdiffusion.py | 1 -
 1 file changed, 1 deletion(-)

(limited to 'modules/sd_samplers_kdiffusion.py')

diff --git a/modules/sd_samplers_kdiffusion.py b/modules/sd_samplers_kdiffusion.py
index 4d8f57a7..ba0cf08e 100644
--- a/modules/sd_samplers_kdiffusion.py
+++ b/modules/sd_samplers_kdiffusion.py
@@ -312,7 +312,6 @@ class KDiffusionSampler:
         if p.sampler_noise_scheduler_override:
             sigmas = p.sampler_noise_scheduler_override(steps)
         elif p.enable_custom_k_sched:
-            print(p.k_sched_type, p.sigma_min, p.sigma_max, p.rho)
             sigmas_func = k_diffusion_scheduler[p.k_sched_type]
             sigmas_kwargs = {
                 'sigma_min': p.sigma_min,
--
cgit v1.2.3


From 65a87ccc9bf92a0fd24a453e2837dd2d19bbf5ce Mon Sep 17 00:00:00 2001
From: Kohaku-Blueleaf <59680068+KohakuBlueleaf@users.noreply.github.com>
Date: Tue, 23 May 2023 00:09:49 +0800
Subject: Add error information for recursion error

---
 modules/sd_samplers_kdiffusion.py | 6 ++++++
 1 file changed, 6 insertions(+)

(limited to 'modules/sd_samplers_kdiffusion.py')

diff --git a/modules/sd_samplers_kdiffusion.py b/modules/sd_samplers_kdiffusion.py
index ba0cf08e..e2f18b54 100644
--- a/modules/sd_samplers_kdiffusion.py
+++ b/modules/sd_samplers_kdiffusion.py
@@ -272,6 +272,12 @@ class KDiffusionSampler:
 
         try:
             return func()
+        except RecursionError:
+            print(
+                'rho>5 with polyexponential scheduler may cause this error.'
+                'You should try to use smaller rho instead.'
+            )
+            return self.last_latent
         except sd_samplers_common.InterruptedException:
             return self.last_latent
--
cgit v1.2.3


From 403b304162b670597f20b01f147bb042eb78ee5c Mon Sep 17 00:00:00 2001
From: Kohaku-Blueleaf <59680068+KohakuBlueleaf@users.noreply.github.com>
Date: Tue, 23 May 2023 00:29:38 +0800
Subject: use sigma_max/min in model if sigma_max/min is 0

---
 modules/sd_samplers_kdiffusion.py | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

(limited to 'modules/sd_samplers_kdiffusion.py')

diff --git a/modules/sd_samplers_kdiffusion.py b/modules/sd_samplers_kdiffusion.py
index e2f18b54..7364ed44 100644
--- a/modules/sd_samplers_kdiffusion.py
+++ b/modules/sd_samplers_kdiffusion.py
@@ -318,10 +318,11 @@ class KDiffusionSampler:
         if p.sampler_noise_scheduler_override:
             sigmas = p.sampler_noise_scheduler_override(steps)
         elif p.enable_custom_k_sched:
+            sigma_min, sigma_max = (0.1, 10) if opts.use_old_karras_scheduler_sigmas else (self.model_wrap.sigmas[0].item(), self.model_wrap.sigmas[-1].item())
             sigmas_func = k_diffusion_scheduler[p.k_sched_type]
             sigmas_kwargs = {
-                'sigma_min': p.sigma_min,
-                'sigma_max': p.sigma_max
+                'sigma_min': p.sigma_min or sigma_min,
+                'sigma_max': p.sigma_max or sigma_max
             }
             if p.k_sched_type != 'exponential':
                 sigmas_kwargs['rho'] = p.rho
--
cgit v1.2.3
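[Editor's note] The last patch above lets a sigma_min or sigma_max of 0 mean "use the model's own bound", relying on 0 being falsy in Python's "or" expression. A tiny hedged sketch with made-up numbers:

    # Hypothetical values: the user_* figures stand in for the settings,
    # the model_* figures for what model_wrap.sigmas would report.
    model_sigma_min, model_sigma_max = 0.0292, 14.6146
    user_sigma_min, user_sigma_max = 0.0, 10.0      # 0.0 means "not overridden"

    sigma_min = user_sigma_min or model_sigma_min   # 0.0 is falsy -> falls back to 0.0292
    sigma_max = user_sigma_max or model_sigma_max   # 10.0 is truthy -> stays 10.0

    print(sigma_min, sigma_max)                     # 0.0292 10.0

The downside of the "or" idiom is that a user who literally wants 0 cannot be distinguished from "unset", which is why later patches in this series switch to explicit comparisons against the defaults.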
From 51d672890d168fe46dd152305d2fde3f1bb9b531 Mon Sep 17 00:00:00 2001
From: catboxanon <122327233+catboxanon@users.noreply.github.com>
Date: Mon, 22 May 2023 13:06:57 -0400
Subject: Revert #10586

---
 modules/sd_samplers_kdiffusion.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

(limited to 'modules/sd_samplers_kdiffusion.py')

diff --git a/modules/sd_samplers_kdiffusion.py b/modules/sd_samplers_kdiffusion.py
index 638e0ac9..dcec9e0e 100644
--- a/modules/sd_samplers_kdiffusion.py
+++ b/modules/sd_samplers_kdiffusion.py
@@ -20,7 +20,7 @@ samplers_k_diffusion = [
     ('DPM++ 2S a', 'sample_dpmpp_2s_ancestral', ['k_dpmpp_2s_a'], {"uses_ensd": True, "second_order": True}),
     ('DPM++ 2M', 'sample_dpmpp_2m', ['k_dpmpp_2m'], {}),
     ('DPM++ SDE', 'sample_dpmpp_sde', ['k_dpmpp_sde'], {"second_order": True, "brownian_noise": True}),
-    ('DPM++ 2M SDE', 'sample_dpmpp_2m_sde', ['k_dpmpp_2m_sde_ka'], {"brownian_noise": True, 'discard_next_to_last_sigma': True}),
+    ('DPM++ 2M SDE', 'sample_dpmpp_2m_sde', ['k_dpmpp_2m_sde_ka'], {"brownian_noise": True}),
     ('DPM fast', 'sample_dpm_fast', ['k_dpm_fast'], {"uses_ensd": True}),
     ('DPM adaptive', 'sample_dpm_adaptive', ['k_dpm_ad'], {"uses_ensd": True}),
     ('LMS Karras', 'sample_lms', ['k_lms_ka'], {'scheduler': 'karras'}),
@@ -29,7 +29,7 @@ samplers_k_diffusion = [
     ('DPM++ 2S a Karras', 'sample_dpmpp_2s_ancestral', ['k_dpmpp_2s_a_ka'], {'scheduler': 'karras', "uses_ensd": True, "second_order": True}),
     ('DPM++ 2M Karras', 'sample_dpmpp_2m', ['k_dpmpp_2m_ka'], {'scheduler': 'karras'}),
     ('DPM++ SDE Karras', 'sample_dpmpp_sde', ['k_dpmpp_sde_ka'], {'scheduler': 'karras', "second_order": True, "brownian_noise": True}),
-    ('DPM++ 2M SDE Karras', 'sample_dpmpp_2m_sde', ['k_dpmpp_2m_sde_ka'], {'scheduler': 'karras', "brownian_noise": True, 'discard_next_to_last_sigma': True}),
+    ('DPM++ 2M SDE Karras', 'sample_dpmpp_2m_sde', ['k_dpmpp_2m_sde_ka'], {'scheduler': 'karras', "brownian_noise": True}),
 ]
 
 samplers_data_k_diffusion = [
--
cgit v1.2.3


From 38aaad654bec640e99beb42964d09357878179bd Mon Sep 17 00:00:00 2001
From: Kohaku-Blueleaf <59680068+KohakuBlueleaf@users.noreply.github.com>
Date: Tue, 23 May 2023 09:38:30 +0800
Subject: Better hint for user

Co-authored-by: catboxanon <122327233+catboxanon@users.noreply.github.com>
---
 modules/sd_samplers_kdiffusion.py | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

(limited to 'modules/sd_samplers_kdiffusion.py')

diff --git a/modules/sd_samplers_kdiffusion.py b/modules/sd_samplers_kdiffusion.py
index 7364ed44..969ef02b 100644
--- a/modules/sd_samplers_kdiffusion.py
+++ b/modules/sd_samplers_kdiffusion.py
@@ -274,8 +274,9 @@ class KDiffusionSampler:
             return func()
         except RecursionError:
             print(
-                'rho>5 with polyexponential scheduler may cause this error.'
-                'You should try to use smaller rho instead.'
+                'Encountered RecursionError during sampling, returning last latent. '
+                'rho >5 with a polyexponential scheduler may cause this error. '
+                'You should try to use a smaller rho value instead.'
             )
             return self.last_latent
         except sd_samplers_common.InterruptedException:
--
cgit v1.2.3
From 70650f87a42615a62568a896403156d0065621b4 Mon Sep 17 00:00:00 2001
From: Kohaku-Blueleaf <59680068+KohakuBlueleaf@users.noreply.github.com>
Date: Tue, 23 May 2023 11:34:51 +0800
Subject: Use better way to impl

---
 modules/sd_samplers_kdiffusion.py | 19 +++++++++++++------
 1 file changed, 13 insertions(+), 6 deletions(-)

(limited to 'modules/sd_samplers_kdiffusion.py')

diff --git a/modules/sd_samplers_kdiffusion.py b/modules/sd_samplers_kdiffusion.py
index 969ef02b..5fea08b0 100644
--- a/modules/sd_samplers_kdiffusion.py
+++ b/modules/sd_samplers_kdiffusion.py
@@ -295,6 +295,13 @@ class KDiffusionSampler:
 
         k_diffusion.sampling.torch = TorchHijack(self.sampler_noises if self.sampler_noises is not None else [])
 
+        if opts.custom_k_sched:
+            p.extra_generation_params["Enable Custom KDiffusion Schedule"] = True
+            p.extra_generation_params["KDiffusion Scheduler Type"] = opts.k_sched_type
+            p.extra_generation_params["KDiffusion Scheduler sigma_max"] = opts.sigma_max
+            p.extra_generation_params["KDiffusion Scheduler sigma_min"] = opts.sigma_min
+            p.extra_generation_params["KDiffusion Scheduler rho"] = opts.rho
+
         extra_params_kwargs = {}
         for param_name in self.extra_params:
             if hasattr(p, param_name) and param_name in inspect.signature(self.func).parameters:
@@ -318,15 +325,15 @@ class KDiffusionSampler:
 
         if p.sampler_noise_scheduler_override:
             sigmas = p.sampler_noise_scheduler_override(steps)
-        elif p.enable_custom_k_sched:
+        elif opts.custom_k_sched:
             sigma_min, sigma_max = (0.1, 10) if opts.use_old_karras_scheduler_sigmas else (self.model_wrap.sigmas[0].item(), self.model_wrap.sigmas[-1].item())
-            sigmas_func = k_diffusion_scheduler[p.k_sched_type]
+            sigmas_func = k_diffusion_scheduler[opts.k_sched_type]
             sigmas_kwargs = {
-                'sigma_min': p.sigma_min or sigma_min,
-                'sigma_max': p.sigma_max or sigma_max
+                'sigma_min': opts.sigma_min or sigma_min,
+                'sigma_max': opts.sigma_max or sigma_max
             }
-            if p.k_sched_type != 'exponential':
-                sigmas_kwargs['rho'] = p.rho
+            if opts.k_sched_type != 'exponential':
+                sigmas_kwargs['rho'] = opts.rho
             sigmas = sigmas_func(n=steps, **sigmas_kwargs, device=shared.device)
         elif self.config is not None and self.config.options.get('scheduler', None) == 'karras':
             sigma_min, sigma_max = (0.1, 10) if opts.use_old_karras_scheduler_sigmas else (self.model_wrap.sigmas[0].item(), self.model_wrap.sigmas[-1].item())
--
cgit v1.2.3
From 72377b02518f96051a01a7e0ea30a6a14d8ec1de Mon Sep 17 00:00:00 2001
From: Kohaku-Blueleaf <59680068+KohakuBlueleaf@users.noreply.github.com>
Date: Tue, 23 May 2023 23:48:23 +0800
Subject: Use type to determine if it is enable

---
 modules/sd_samplers_kdiffusion.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

(limited to 'modules/sd_samplers_kdiffusion.py')

diff --git a/modules/sd_samplers_kdiffusion.py b/modules/sd_samplers_kdiffusion.py
index 5fea08b0..eff2e32d 100644
--- a/modules/sd_samplers_kdiffusion.py
+++ b/modules/sd_samplers_kdiffusion.py
@@ -46,6 +46,7 @@ sampler_extra_params = {
 
 k_diffusion_samplers_map = {x.name: x for x in samplers_data_k_diffusion}
 k_diffusion_scheduler = {
+    'None': None,
     'karras': k_diffusion.sampling.get_sigmas_karras,
     'exponential': k_diffusion.sampling.get_sigmas_exponential,
     'polyexponential': k_diffusion.sampling.get_sigmas_polyexponential
@@ -295,8 +296,7 @@ class KDiffusionSampler:
 
         k_diffusion.sampling.torch = TorchHijack(self.sampler_noises if self.sampler_noises is not None else [])
 
-        if opts.custom_k_sched:
-            p.extra_generation_params["Enable Custom KDiffusion Schedule"] = True
+        if opts.k_sched_type != "None":
             p.extra_generation_params["KDiffusion Scheduler Type"] = opts.k_sched_type
             p.extra_generation_params["KDiffusion Scheduler sigma_max"] = opts.sigma_max
             p.extra_generation_params["KDiffusion Scheduler sigma_min"] = opts.sigma_min
@@ -325,7 +325,7 @@ class KDiffusionSampler:
 
         if p.sampler_noise_scheduler_override:
             sigmas = p.sampler_noise_scheduler_override(steps)
-        elif opts.custom_k_sched:
+        elif opts.k_sched_type != "None":
            sigma_min, sigma_max = (0.1, 10) if opts.use_old_karras_scheduler_sigmas else (self.model_wrap.sigmas[0].item(), self.model_wrap.sigmas[-1].item())
            sigmas_func = k_diffusion_scheduler[opts.k_sched_type]
            sigmas_kwargs = {
--
cgit v1.2.3


From 1601fccebca2dc5a806a0d2f0d33aa2da81a28fb Mon Sep 17 00:00:00 2001
From: Kohaku-Blueleaf <59680068+KohakuBlueleaf@users.noreply.github.com>
Date: Wed, 24 May 2023 00:18:09 +0800
Subject: Use automatic instead of None/default

---
 modules/sd_samplers_kdiffusion.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

(limited to 'modules/sd_samplers_kdiffusion.py')

diff --git a/modules/sd_samplers_kdiffusion.py b/modules/sd_samplers_kdiffusion.py
index eff2e32d..a4c797c6 100644
--- a/modules/sd_samplers_kdiffusion.py
+++ b/modules/sd_samplers_kdiffusion.py
@@ -46,7 +46,7 @@ sampler_extra_params = {
 
 k_diffusion_samplers_map = {x.name: x for x in samplers_data_k_diffusion}
 k_diffusion_scheduler = {
-    'None': None,
+    'Automatic': None,
     'karras': k_diffusion.sampling.get_sigmas_karras,
     'exponential': k_diffusion.sampling.get_sigmas_exponential,
     'polyexponential': k_diffusion.sampling.get_sigmas_polyexponential
@@ -296,7 +296,7 @@ class KDiffusionSampler:
 
         k_diffusion.sampling.torch = TorchHijack(self.sampler_noises if self.sampler_noises is not None else [])
 
-        if opts.k_sched_type != "None":
+        if opts.k_sched_type != "Automatic":
             p.extra_generation_params["KDiffusion Scheduler Type"] = opts.k_sched_type
             p.extra_generation_params["KDiffusion Scheduler sigma_max"] = opts.sigma_max
             p.extra_generation_params["KDiffusion Scheduler sigma_min"] = opts.sigma_min
@@ -325,7 +325,7 @@ class KDiffusionSampler:
 
         if p.sampler_noise_scheduler_override:
             sigmas = p.sampler_noise_scheduler_override(steps)
-        elif opts.k_sched_type != "None":
+        elif opts.k_sched_type != "Automatic":
            sigma_min, sigma_max = (0.1, 10) if opts.use_old_karras_scheduler_sigmas else (self.model_wrap.sigmas[0].item(), self.model_wrap.sigmas[-1].item())
            sigmas_func = k_diffusion_scheduler[opts.k_sched_type]
            sigmas_kwargs = {
--
cgit v1.2.3
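[Editor's note] After the two patches above, the separate enable flag is gone: the schedule-type dropdown itself is the switch, with 'Automatic' mapping to None and meaning "keep the sampler's built-in schedule". A small sketch of that sentinel pattern (the dictionary and function names are illustrative, not the actual webui option names):

    import k_diffusion.sampling as K

    k_sched_types = {
        'Automatic': None,                  # sentinel: fall through to the sampler's own schedule
        'karras': K.get_sigmas_karras,
        'exponential': K.get_sigmas_exponential,
        'polyexponential': K.get_sigmas_polyexponential,
    }

    def pick_schedule(k_sched_type):
        if k_sched_type != 'Automatic':
            return k_sched_types[k_sched_type]   # explicit override chosen in the UI
        return None                              # caller keeps its default behaviour

    print(pick_schedule('Automatic'))       # None
    print(pick_schedule('karras'))          # <function get_sigmas_karras ...>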
From 4b88e24ebe776680b327e33fe96d7fcf38e2e5d2 Mon Sep 17 00:00:00 2001
From: Kohaku-Blueleaf <59680068+KohakuBlueleaf@users.noreply.github.com>
Date: Wed, 24 May 2023 20:35:58 +0800
Subject: improvements

See: https://github.com/AUTOMATIC1111/stable-diffusion-webui/pull/10649#issuecomment-1561047723
---
 modules/sd_samplers_kdiffusion.py | 27 +++++++++++++++++----------
 1 file changed, 17 insertions(+), 10 deletions(-)

(limited to 'modules/sd_samplers_kdiffusion.py')

diff --git a/modules/sd_samplers_kdiffusion.py b/modules/sd_samplers_kdiffusion.py
index a4c797c6..d2d172e4 100644
--- a/modules/sd_samplers_kdiffusion.py
+++ b/modules/sd_samplers_kdiffusion.py
@@ -296,12 +296,6 @@ class KDiffusionSampler:
 
         k_diffusion.sampling.torch = TorchHijack(self.sampler_noises if self.sampler_noises is not None else [])
 
-        if opts.k_sched_type != "Automatic":
-            p.extra_generation_params["KDiffusion Scheduler Type"] = opts.k_sched_type
-            p.extra_generation_params["KDiffusion Scheduler sigma_max"] = opts.sigma_max
-            p.extra_generation_params["KDiffusion Scheduler sigma_min"] = opts.sigma_min
-            p.extra_generation_params["KDiffusion Scheduler rho"] = opts.rho
-
         extra_params_kwargs = {}
         for param_name in self.extra_params:
             if hasattr(p, param_name) and param_name in inspect.signature(self.func).parameters:
@@ -326,14 +320,27 @@ class KDiffusionSampler:
         if p.sampler_noise_scheduler_override:
             sigmas = p.sampler_noise_scheduler_override(steps)
         elif opts.k_sched_type != "Automatic":
-            sigma_min, sigma_max = (0.1, 10) if opts.use_old_karras_scheduler_sigmas else (self.model_wrap.sigmas[0].item(), self.model_wrap.sigmas[-1].item())
-            sigmas_func = k_diffusion_scheduler[opts.k_sched_type]
+            m_sigma_min, m_sigma_max = (self.model_wrap.sigmas[0].item(), self.model_wrap.sigmas[-1].item())
+            sigma_min, sigma_max = (0.1, 10)
             sigmas_kwargs = {
-                'sigma_min': opts.sigma_min or sigma_min,
-                'sigma_max': opts.sigma_max or sigma_max
+                'sigma_min': sigma_min if opts.use_old_karras_scheduler_sigmas else m_sigma_min,
+                'sigma_max': sigma_max if opts.use_old_karras_scheduler_sigmas else m_sigma_max
             }
+
+            sigmas_func = k_diffusion_scheduler[opts.k_sched_type]
+            p.extra_generation_params["KDiff Sched Type"] = opts.k_sched_type
+
+            if opts.sigma_min != 0.3:
+                # take 0.0 as model default
+                sigmas_kwargs['sigma_min'] = opts.sigma_min or m_sigma_min
+                p.extra_generation_params["KDiff Sched min sigma"] = opts.sigma_min
+            if opts.sigma_max != 14.6:
+                sigmas_kwargs['sigma_max'] = opts.sigma_max or m_sigma_max
+                p.extra_generation_params["KDiff Sched max sigma"] = opts.sigma_max
             if opts.k_sched_type != 'exponential':
                 sigmas_kwargs['rho'] = opts.rho
+                p.extra_generation_params["KDiff Sched rho"] = opts.rho
+
             sigmas = sigmas_func(n=steps, **sigmas_kwargs, device=shared.device)
         elif self.config is not None and self.config.options.get('scheduler', None) == 'karras':
             sigma_min, sigma_max = (0.1, 10) if opts.use_old_karras_scheduler_sigmas else (self.model_wrap.sigmas[0].item(), self.model_wrap.sigmas[-1].item())
--
cgit v1.2.3
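[Editor's note] The 'improvements' patch above starts writing schedule parameters into the generation-info dict only when they differ from the hard-coded option defaults (0.3 and 14.6 in the patch), so unchanged settings do not clutter the reproduced parameters. A hedged sketch of that record-only-overrides idea (the keys and defaults are invented for illustration):

    def record_overrides(settings, defaults):
        # Keep only the settings whose values differ from their defaults.
        return {key: value for key, value in settings.items() if value != defaults.get(key)}

    defaults = {'sigma_min': 0.3, 'sigma_max': 14.6, 'rho': 7.0}
    settings = {'sigma_min': 0.3, 'sigma_max': 10.0, 'rho': 7.0}

    extra_generation_params = record_overrides(settings, defaults)
    print(extra_generation_params)          # {'sigma_max': 10.0} - only the overridden value is kept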
From a69b71a37f1fd32a60fbd87beed13f4f280400bd Mon Sep 17 00:00:00 2001
From: Kohaku-Blueleaf <59680068+KohakuBlueleaf@users.noreply.github.com>
Date: Wed, 24 May 2023 20:40:37 +0800
Subject: use Schedule instead of Sched

---
 modules/sd_samplers_kdiffusion.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

(limited to 'modules/sd_samplers_kdiffusion.py')

diff --git a/modules/sd_samplers_kdiffusion.py b/modules/sd_samplers_kdiffusion.py
index d2d172e4..9c9d9f17 100644
--- a/modules/sd_samplers_kdiffusion.py
+++ b/modules/sd_samplers_kdiffusion.py
@@ -328,18 +328,18 @@ class KDiffusionSampler:
             }
 
             sigmas_func = k_diffusion_scheduler[opts.k_sched_type]
-            p.extra_generation_params["KDiff Sched Type"] = opts.k_sched_type
+            p.extra_generation_params["KDiff Schedule Type"] = opts.k_sched_type
 
             if opts.sigma_min != 0.3:
                 # take 0.0 as model default
                 sigmas_kwargs['sigma_min'] = opts.sigma_min or m_sigma_min
-                p.extra_generation_params["KDiff Sched min sigma"] = opts.sigma_min
+                p.extra_generation_params["KDiff Schedule min sigma"] = opts.sigma_min
             if opts.sigma_max != 14.6:
                 sigmas_kwargs['sigma_max'] = opts.sigma_max or m_sigma_max
-                p.extra_generation_params["KDiff Sched max sigma"] = opts.sigma_max
+                p.extra_generation_params["KDiff Schedule max sigma"] = opts.sigma_max
             if opts.k_sched_type != 'exponential':
                 sigmas_kwargs['rho'] = opts.rho
-                p.extra_generation_params["KDiff Sched rho"] = opts.rho
+                p.extra_generation_params["KDiff Schedule rho"] = opts.rho
 
             sigmas = sigmas_func(n=steps, **sigmas_kwargs, device=shared.device)
         elif self.config is not None and self.config.options.get('scheduler', None) == 'karras':
--
cgit v1.2.3
From e8e7fe11e903115a706187f8301df2e06fa018f8 Mon Sep 17 00:00:00 2001
From: AUTOMATIC <16777216c@gmail.com>
Date: Sat, 27 May 2023 19:53:09 +0300
Subject: updates for the noise schedule settings

---
 modules/sd_samplers_kdiffusion.py | 30 ++++++++++++++--------------
 1 file changed, 16 insertions(+), 14 deletions(-)

(limited to 'modules/sd_samplers_kdiffusion.py')

diff --git a/modules/sd_samplers_kdiffusion.py b/modules/sd_samplers_kdiffusion.py
index 9c9d9f17..e9ba2c61 100644
--- a/modules/sd_samplers_kdiffusion.py
+++ b/modules/sd_samplers_kdiffusion.py
@@ -321,25 +321,27 @@ class KDiffusionSampler:
             sigmas = p.sampler_noise_scheduler_override(steps)
         elif opts.k_sched_type != "Automatic":
             m_sigma_min, m_sigma_max = (self.model_wrap.sigmas[0].item(), self.model_wrap.sigmas[-1].item())
-            sigma_min, sigma_max = (0.1, 10)
+            sigma_min, sigma_max = (0.1, 10) if opts.use_old_karras_scheduler_sigmas else (m_sigma_min, m_sigma_max)
             sigmas_kwargs = {
-                'sigma_min': sigma_min if opts.use_old_karras_scheduler_sigmas else m_sigma_min,
-                'sigma_max': sigma_max if opts.use_old_karras_scheduler_sigmas else m_sigma_max
+                'sigma_min': sigma_min,
+                'sigma_max': sigma_max,
             }
 
             sigmas_func = k_diffusion_scheduler[opts.k_sched_type]
-            p.extra_generation_params["KDiff Schedule Type"] = opts.k_sched_type
-
-            if opts.sigma_min != 0.3:
-                # take 0.0 as model default
-                sigmas_kwargs['sigma_min'] = opts.sigma_min or m_sigma_min
-                p.extra_generation_params["KDiff Schedule min sigma"] = opts.sigma_min
-            if opts.sigma_max != 14.6:
-                sigmas_kwargs['sigma_max'] = opts.sigma_max or m_sigma_max
-                p.extra_generation_params["KDiff Schedule max sigma"] = opts.sigma_max
-            if opts.k_sched_type != 'exponential':
+            p.extra_generation_params["Schedule type"] = opts.k_sched_type
+
+            if opts.sigma_min != m_sigma_min and opts.sigma_min != 0:
+                sigmas_kwargs['sigma_min'] = opts.sigma_min
+                p.extra_generation_params["Schedule min sigma"] = opts.sigma_min
+            if opts.sigma_max != m_sigma_max and opts.sigma_max != 0:
+                sigmas_kwargs['sigma_max'] = opts.sigma_max
+                p.extra_generation_params["Schedule max sigma"] = opts.sigma_max
+
+            default_rho = 1. if opts.k_sched_type == "polyexponential" else 7.
+
+            if opts.k_sched_type != 'exponential' and opts.rho != 0 and opts.rho != default_rho:
                 sigmas_kwargs['rho'] = opts.rho
-                p.extra_generation_params["KDiff Schedule rho"] = opts.rho
+                p.extra_generation_params["Schedule rho"] = opts.rho
 
             sigmas = sigmas_func(n=steps, **sigmas_kwargs, device=shared.device)
         elif self.config is not None and self.config.options.get('scheduler', None) == 'karras':
--
cgit v1.2.3


From 4bd490c28dd8f17b7df943eb3963c34d725084fc Mon Sep 17 00:00:00 2001
From: AUTOMATIC <16777216c@gmail.com>
Date: Tue, 27 Jun 2023 06:18:43 +0300
Subject: add missing infotext entry for the pad cond/uncond option

---
 modules/sd_samplers_kdiffusion.py | 11 ++++++++++-
 1 file changed, 10 insertions(+), 1 deletion(-)

(limited to 'modules/sd_samplers_kdiffusion.py')

diff --git a/modules/sd_samplers_kdiffusion.py b/modules/sd_samplers_kdiffusion.py
index f8a0c7ba..71581b76 100644
--- a/modules/sd_samplers_kdiffusion.py
+++ b/modules/sd_samplers_kdiffusion.py
@@ -69,6 +69,7 @@ class CFGDenoiser(torch.nn.Module):
         self.init_latent = None
         self.step = 0
         self.image_cfg_scale = None
+        self.padded_cond_uncond = False
 
     def combine_denoised(self, x_out, conds_list, uncond, cond_scale):
         denoised_uncond = x_out[-uncond.shape[0]:]
@@ -133,15 +134,17 @@ class CFGDenoiser(torch.nn.Module):
             x_in = x_in[:-batch_size]
             sigma_in = sigma_in[:-batch_size]
 
-        # TODO add infotext entry
+        self.padded_cond_uncond = False
         if shared.opts.pad_cond_uncond and tensor.shape[1] != uncond.shape[1]:
             empty = shared.sd_model.cond_stage_model_empty_prompt
             num_repeats = (tensor.shape[1] - uncond.shape[1]) // empty.shape[1]
 
             if num_repeats < 0:
                 tensor = torch.cat([tensor, empty.repeat((tensor.shape[0], -num_repeats, 1))], axis=1)
+                self.padded_cond_uncond = True
             elif num_repeats > 0:
                 uncond = torch.cat([uncond, empty.repeat((uncond.shape[0], num_repeats, 1))], axis=1)
+                self.padded_cond_uncond = True
 
         if tensor.shape[1] == uncond.shape[1] or skip_uncond:
             if is_edit_model:
@@ -405,6 +408,9 @@ class KDiffusionSampler:
 
         samples = self.launch_sampling(t_enc + 1, lambda: self.func(self.model_wrap_cfg, xi, extra_args=extra_args, disable=False, callback=self.callback_state, **extra_params_kwargs))
 
+        if self.model_wrap_cfg.padded_cond_uncond:
+            p.extra_generation_params["Pad conds"] = True
+
         return samples
 
     def sample(self, p, x, conditioning, unconditional_conditioning, steps=None, image_conditioning=None):
@@ -438,5 +444,8 @@ class KDiffusionSampler:
             's_min_uncond': self.s_min_uncond
         }, disable=False, callback=self.callback_state, **extra_params_kwargs))
 
+        if self.model_wrap_cfg.padded_cond_uncond:
+            p.extra_generation_params["Pad conds"] = True
+
         return samples
--
cgit v1.2.3
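[Editor's note] In the final form of the schedule settings above, rho is only passed on (and recorded in the infotext) when it is meaningful and actually overridden: karras defaults to rho=7, polyexponential to rho=1, and the exponential schedule has no rho at all. A sketch of that decision logic, assuming those k-diffusion defaults:

    def rho_override(k_sched_type, rho):
        # Return the rho to pass to the schedule builder, or None to keep the library default.
        if k_sched_type == 'exponential':
            return None                              # get_sigmas_exponential takes no rho
        default_rho = 1.0 if k_sched_type == 'polyexponential' else 7.0
        if rho == 0 or rho == default_rho:
            return None                              # 0 or the default: nothing to override
        return rho

    print(rho_override('karras', 7.0))               # None -> keep the default
    print(rho_override('karras', 5.0))               # 5.0 -> passed on and written to infotext
    print(rho_override('polyexponential', 1.0))      # None

The last patch in the series separately records "Pad conds" in the generation parameters whenever the cond/uncond padding from the first patch actually triggered, closing the TODO left there.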