| | | |
|---|---|---|
| author | Beinsezii <39478211+Beinsezii@users.noreply.github.com> | 2023-06-27 22:29:47 +0000 |
| committer | GitHub <noreply@github.com> | 2023-06-27 22:29:47 +0000 |
| commit | 9d8af4bd6aaf09b8a94dc10dd5e99c82e23dec38 (patch) | |
| tree | d86d470db2289a31e589a7bd777a38784605313c /modules/sd_samplers_kdiffusion.py | |
| parent | 1d7c51fb9f757b5dcdc506f8fc003e6047151567 (diff) | |
| parent | fab73f2e7d388ca99cdb3d5de7f36c0b9a1a3b1c (diff) | |
Merge branch 'AUTOMATIC1111:dev' into dev
Diffstat (limited to 'modules/sd_samplers_kdiffusion.py')
-rw-r--r-- | modules/sd_samplers_kdiffusion.py | 11 |
1 file changed, 10 insertions(+), 1 deletion(-)
```diff
diff --git a/modules/sd_samplers_kdiffusion.py b/modules/sd_samplers_kdiffusion.py
index f8a0c7ba..71581b76 100644
--- a/modules/sd_samplers_kdiffusion.py
+++ b/modules/sd_samplers_kdiffusion.py
@@ -69,6 +69,7 @@ class CFGDenoiser(torch.nn.Module):
         self.init_latent = None
         self.step = 0
         self.image_cfg_scale = None
+        self.padded_cond_uncond = False
 
     def combine_denoised(self, x_out, conds_list, uncond, cond_scale):
         denoised_uncond = x_out[-uncond.shape[0]:]
```
```diff
@@ -133,15 +134,17 @@ class CFGDenoiser(torch.nn.Module):
             x_in = x_in[:-batch_size]
             sigma_in = sigma_in[:-batch_size]
 
-        # TODO add infotext entry
+        self.padded_cond_uncond = False
         if shared.opts.pad_cond_uncond and tensor.shape[1] != uncond.shape[1]:
             empty = shared.sd_model.cond_stage_model_empty_prompt
             num_repeats = (tensor.shape[1] - uncond.shape[1]) // empty.shape[1]
 
             if num_repeats < 0:
                 tensor = torch.cat([tensor, empty.repeat((tensor.shape[0], -num_repeats, 1))], axis=1)
+                self.padded_cond_uncond = True
             elif num_repeats > 0:
                 uncond = torch.cat([uncond, empty.repeat((uncond.shape[0], num_repeats, 1))], axis=1)
+                self.padded_cond_uncond = True
 
         if tensor.shape[1] == uncond.shape[1] or skip_uncond:
             if is_edit_model:
```
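The padding hunk above is the core of the change: when `pad_cond_uncond` is enabled and the positive and negative prompt embeddings have different token lengths, the shorter one is extended with repeats of the empty-prompt embedding, and the new `padded_cond_uncond` flag records that this happened. Below is a minimal standalone sketch of that step using dummy tensors; the shapes and the `empty` tensor are assumptions for illustration only (in the webui, `empty` comes from `shared.sd_model.cond_stage_model_empty_prompt`).

```python
import torch

# Hypothetical stand-ins: cond spans 3 chunks of 77 tokens, uncond only 1 chunk.
# Each chunk is empty.shape[1] tokens wide, matching the num_repeats logic above.
cond = torch.randn(2, 231, 768)    # batch of 2, 3 * 77 tokens
uncond = torch.randn(2, 77, 768)   # batch of 2, 1 * 77 tokens
empty = torch.randn(1, 77, 768)    # embedding of the empty prompt (assumed shape)

# Same computation as the patch: how many empty-prompt chunks the shorter side needs.
num_repeats = (cond.shape[1] - uncond.shape[1]) // empty.shape[1]

padded = False
if num_repeats < 0:
    # cond is shorter: pad it with repeated empty-prompt chunks.
    cond = torch.cat([cond, empty.repeat((cond.shape[0], -num_repeats, 1))], axis=1)
    padded = True
elif num_repeats > 0:
    # uncond is shorter: pad it instead.
    uncond = torch.cat([uncond, empty.repeat((uncond.shape[0], num_repeats, 1))], axis=1)
    padded = True

assert cond.shape[1] == uncond.shape[1]  # both can now go through the model in one batch
print(padded, cond.shape, uncond.shape)  # True torch.Size([2, 231, 768]) torch.Size([2, 231, 768])
```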
```diff
@@ -405,6 +408,9 @@ class KDiffusionSampler:
 
         samples = self.launch_sampling(t_enc + 1, lambda: self.func(self.model_wrap_cfg, xi, extra_args=extra_args, disable=False, callback=self.callback_state, **extra_params_kwargs))
 
+        if self.model_wrap_cfg.padded_cond_uncond:
+            p.extra_generation_params["Pad conds"] = True
+
         return samples
 
     def sample(self, p, x, conditioning, unconditional_conditioning, steps=None, image_conditioning=None):
```
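The two `KDiffusionSampler` hunks then read the flag back after sampling and, when padding occurred, record a `Pad conds` entry in `p.extra_generation_params`, which is how the setting surfaces in the image's infotext. A rough sketch of that pattern with simplified stand-in classes (not the actual webui objects):

```python
class FakeDenoiser:
    """Stand-in for CFGDenoiser: the real one sets this flag during its forward pass."""
    padded_cond_uncond = True

class FakeProcessing:
    """Stand-in for the processing object `p`; extra_generation_params feeds the infotext."""
    def __init__(self):
        self.extra_generation_params = {}

model_wrap_cfg = FakeDenoiser()
p = FakeProcessing()

# Same pattern as both sampler hunks: only add the entry when padding actually happened.
if model_wrap_cfg.padded_cond_uncond:
    p.extra_generation_params["Pad conds"] = True

print(p.extra_generation_params)  # {'Pad conds': True}
```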
```diff
@@ -438,5 +444,8 @@ class KDiffusionSampler:
             's_min_uncond': self.s_min_uncond
         }, disable=False, callback=self.callback_state, **extra_params_kwargs))
 
+        if self.model_wrap_cfg.padded_cond_uncond:
+            p.extra_generation_params["Pad conds"] = True
+
         return samples
```