| author | Vladimir Mandic <mandic00@live.com> | 2023-02-19 14:19:25 +0000 |
| --- | --- | --- |
| committer | GitHub <noreply@github.com> | 2023-02-19 14:19:25 +0000 |
| commit | 8affa42588e85d10da86c401e3558143b05121af (patch) | |
| tree | e6b9640f52787f41ba675ec0fb98e580dbad1d85 /modules/sd_hijack_unet.py | |
| parent | a320d157ec0221fa4e9c756327e31d881b9921ae (diff) | |
| parent | 076d624a297532d6e4abebe5807fd7c7504d7a73 (diff) | |
Merge branch 'AUTOMATIC1111:master' into hide_ui_tabs
Diffstat (limited to 'modules/sd_hijack_unet.py')
| -rw-r--r-- | modules/sd_hijack_unet.py | 11 |

1 file changed, 11 insertions, 0 deletions
diff --git a/modules/sd_hijack_unet.py b/modules/sd_hijack_unet.py
index 45cf2b18..843ab66c 100644
--- a/modules/sd_hijack_unet.py
+++ b/modules/sd_hijack_unet.py
@@ -44,6 +44,7 @@ def apply_model(orig_func, self, x_noisy, t, cond, **kwargs):
     with devices.autocast():
         return orig_func(self, x_noisy.to(devices.dtype_unet), t.to(devices.dtype_unet), cond, **kwargs).float()
 
+
 class GELUHijack(torch.nn.GELU, torch.nn.Module):
     def __init__(self, *args, **kwargs):
         torch.nn.GELU.__init__(self, *args, **kwargs)
@@ -53,6 +54,16 @@ class GELUHijack(torch.nn.GELU, torch.nn.Module):
         else:
             return torch.nn.GELU.forward(self, x)
 
+
+ddpm_edit_hijack = None
+def hijack_ddpm_edit():
+    global ddpm_edit_hijack
+    if not ddpm_edit_hijack:
+        CondFunc('modules.models.diffusion.ddpm_edit.LatentDiffusion.decode_first_stage', first_stage_sub, first_stage_cond)
+        CondFunc('modules.models.diffusion.ddpm_edit.LatentDiffusion.encode_first_stage', first_stage_sub, first_stage_cond)
+        ddpm_edit_hijack = CondFunc('modules.models.diffusion.ddpm_edit.LatentDiffusion.apply_model', apply_model, unet_needs_upcast)
+
+
 unet_needs_upcast = lambda *args, **kwargs: devices.unet_needs_upcast
 CondFunc('ldm.models.diffusion.ddpm.LatentDiffusion.apply_model', apply_model, unet_needs_upcast)
 CondFunc('ldm.modules.diffusionmodules.openaimodel.timestep_embedding', lambda orig_func, timesteps, *args, **kwargs: orig_func(timesteps, *args, **kwargs).to(torch.float32 if timesteps.dtype == torch.int64 else devices.dtype_unet), unet_needs_upcast)
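The new `hijack_ddpm_edit()` helper registers, on demand and only once (guarded by the module-level `ddpm_edit_hijack` flag), the same first-stage autocast and UNet upcast wrappers for the instruct-pix2pix `ddpm_edit` variant of `LatentDiffusion` that the module already applies to `ldm.models.diffusion.ddpm`. For readers unfamiliar with the `CondFunc` helper this file relies on, the snippet below is a minimal, self-contained sketch of the conditional monkey-patching pattern it represents; the function name `cond_patch` and its path-resolution details are illustrative assumptions, not the webui's actual `CondFunc` implementation.

```python
import importlib

def cond_patch(orig_path, sub_func, cond_func):
    """Illustrative sketch only (not the webui's CondFunc): replace the callable
    at a dotted path with a wrapper that calls sub_func while cond_func returns
    a truthy value, and the untouched original otherwise."""
    parts = orig_path.split('.')
    # Import the longest importable prefix of the path, then walk the remaining
    # attribute names (this lets the path cross a class, e.g. Module.Class.method).
    for i in range(len(parts) - 1, 0, -1):
        try:
            owner = importlib.import_module('.'.join(parts[:i]))
            remaining = parts[i:]
            break
        except ImportError:
            continue
    else:
        raise ImportError(f'cannot resolve a module prefix of {orig_path}')
    for name in remaining[:-1]:
        owner = getattr(owner, name)
    attr = remaining[-1]
    orig = getattr(owner, attr)

    def wrapper(*args, **kwargs):
        # For a patched method, normal attribute binding passes the instance as
        # args[0], matching hijacks written as sub_func(orig_func, self, ...).
        if cond_func(orig, *args, **kwargs):
            return sub_func(orig, *args, **kwargs)
        return orig(*args, **kwargs)

    setattr(owner, attr, wrapper)
    return wrapper
```

Read this way, the added function simply installs three such wrappers on `modules.models.diffusion.ddpm_edit.LatentDiffusion`, and the `unet_needs_upcast` condition keeps the float32 upcast path inert unless `devices.unet_needs_upcast` is set.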