| author    | DepFA <35278260+dfaker@users.noreply.github.com> | 2022-09-26 14:43:16 +0000 |
| committer | AUTOMATIC1111 <16777216c@gmail.com>              | 2022-09-27 06:30:45 +0000 |
| commit    | a860839f1ffa827cd0a9356d6d5e9125585a66e0 (patch)  |                           |
| tree      | 17d92564c449d385e9c94514f6c07101a5006da3          |                           |
| parent    | 4ea36a37d69fafcb60cf3f250b6c0e1bcfe82bb1 (diff)   |                           |
take extra sampler properties from StableDiffusionProcessing
| -rw-r--r-- | modules/sd_samplers.py | 12 |
1 file changed, 6 insertions(+), 6 deletions(-)
diff --git a/modules/sd_samplers.py b/modules/sd_samplers.py
index 2ac44f6c..b18beb21 100644
--- a/modules/sd_samplers.py
+++ b/modules/sd_samplers.py
@@ -125,9 +125,9 @@ class VanillaStableDiffusionSampler:
         # existing code fails with cetain step counts, like 9
         try:
-            self.sampler.make_schedule(ddim_num_steps=steps, ddim_eta=opts.ddim_eta, ddim_discretize=opts.ddim_discretize, verbose=False)
+            self.sampler.make_schedule(ddim_num_steps=steps, ddim_eta=p.ddim_eta, ddim_discretize=p.ddim_discretize, verbose=False)
         except Exception:
-            self.sampler.make_schedule(ddim_num_steps=steps+1,ddim_eta=opts.ddim_eta, ddim_discretize=opts.ddim_discretize, verbose=False)
+            self.sampler.make_schedule(ddim_num_steps=steps+1,ddim_eta=p.ddim_eta, ddim_discretize=p.ddim_discretize, verbose=False)

         x1 = self.sampler.stochastic_encode(x, torch.tensor([t_enc] * int(x.shape[0])).to(shared.device), noise=noise)
@@ -277,8 +277,8 @@ class KDiffusionSampler:
         extra_params_kwargs = {}
         for val in self.extra_params:
-            if hasattr(opts,val):
-                extra_params_kwargs[val] = getattr(opts,val)
+            if hasattr(p,val):
+                extra_params_kwargs[val] = getattr(p,val)

         return self.func(self.model_wrap_cfg, xi, sigma_sched, extra_args={'cond': conditioning, 'uncond': unconditional_conditioning, 'cond_scale': p.cfg_scale}, disable=False, callback=self.callback_state, **extra_params_kwargs)
@@ -299,8 +299,8 @@ class KDiffusionSampler:
         extra_params_kwargs = {}
         for val in self.extra_params:
-            if hasattr(opts,val):
-                extra_params_kwargs[val] = getattr(opts,val)
+            if hasattr(p,val):
+                extra_params_kwargs[val] = getattr(p,val)

         samples = self.func(self.model_wrap_cfg, x, sigmas, extra_args={'cond': conditioning, 'uncond': unconditional_conditioning, 'cond_scale': p.cfg_scale}, disable=False, callback=self.callback_state, **extra_params_kwargs)
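The first hunk's effect can be seen in isolation: the DDIM schedule parameters (ddim_eta, ddim_discretize) now come from the per-job processing object `p` rather than the global `opts`, keeping the existing steps+1 fallback for step counts that make_schedule rejects. The sketch below is illustrative only; `FakeDDIMSampler`, `FakeProcessing`, and `make_schedule_from_p` are stand-ins and not webui code.

```python
# Minimal sketch of the schedule setup in the VanillaStableDiffusionSampler hunk.
# All names here are hypothetical stand-ins for the real classes.

class FakeDDIMSampler:
    """Stands in for the DDIM sampler; only records the schedule call."""
    def make_schedule(self, ddim_num_steps, ddim_eta, ddim_discretize, verbose=False):
        if ddim_num_steps == 9:  # mimic a step count that the real code fails on
            raise ValueError("unsupported step count")
        self.schedule = (ddim_num_steps, ddim_eta, ddim_discretize)

class FakeProcessing:
    """Stands in for StableDiffusionProcessing: per-job sampler settings."""
    ddim_eta = 0.0
    ddim_discretize = "uniform"

def make_schedule_from_p(sampler, p, steps):
    # Same fallback as the patch: retry with steps+1 when the exact count fails.
    try:
        sampler.make_schedule(ddim_num_steps=steps, ddim_eta=p.ddim_eta,
                              ddim_discretize=p.ddim_discretize, verbose=False)
    except Exception:
        sampler.make_schedule(ddim_num_steps=steps + 1, ddim_eta=p.ddim_eta,
                              ddim_discretize=p.ddim_discretize, verbose=False)

sampler, p = FakeDDIMSampler(), FakeProcessing()
make_schedule_from_p(sampler, p, steps=9)
print(sampler.schedule)  # (10, 0.0, 'uniform') -- fell back to steps + 1
```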
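The two KDiffusionSampler hunks use the same attribute-forwarding pattern: each name in the sampler's `extra_params` whitelist that also exists on the processing object `p` is copied into the keyword arguments passed to the sampling function, so per-job values override nothing that `p` does not define. A minimal sketch of that pattern, assuming hypothetical helpers (`SimpleProcessing`, `collect_extra_params`, `run_sampler` are not webui code):

```python
# Sketch of the hasattr/getattr forwarding loop from the KDiffusionSampler hunks.

class SimpleProcessing:
    # per-job overrides, e.g. set from the UI or an API request (illustrative)
    s_churn = 0.5
    s_noise = 1.0

def collect_extra_params(p, extra_params):
    # Copy only the whitelisted attributes that the processing object actually has.
    extra_params_kwargs = {}
    for val in extra_params:
        if hasattr(p, val):
            extra_params_kwargs[val] = getattr(p, val)
    return extra_params_kwargs

def run_sampler(func, p, extra_params, **fixed_kwargs):
    # Fixed arguments plus whatever per-job extras were collected.
    return func(**fixed_kwargs, **collect_extra_params(p, extra_params))

# 's_tmin' is whitelisted but not set on p, so it is simply skipped.
print(collect_extra_params(SimpleProcessing(), ["s_churn", "s_tmin", "s_noise"]))
# {'s_churn': 0.5, 's_noise': 1.0}

print(run_sampler(lambda **kw: sorted(kw), SimpleProcessing(),
                  ["s_churn", "s_noise"], sigma=1.5))
# ['s_churn', 's_noise', 'sigma']
```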