aboutsummaryrefslogtreecommitdiffstats
path: root/modules/sd_hijack.py
diff options
context:
space:
mode:
authorBilly Cao <aliencaocao@gmail.com>2022-11-23 10:11:24 +0000
committerBilly Cao <aliencaocao@gmail.com>2022-11-23 10:11:24 +0000
commitadb6cb7619989cbc7a271cc6c2ae27bb936c43d9 (patch)
tree164da7276d0dcb00d3f6871c9099604a05151277 /modules/sd_hijack.py
parent828438b4a190759807f9054932cae3a8b880ddf1 (diff)
downloadstable-diffusion-webui-gfx803-adb6cb7619989cbc7a271cc6c2ae27bb936c43d9.tar.gz
stable-diffusion-webui-gfx803-adb6cb7619989cbc7a271cc6c2ae27bb936c43d9.tar.bz2
stable-diffusion-webui-gfx803-adb6cb7619989cbc7a271cc6c2ae27bb936c43d9.zip
Patch UNet Forward to support resolutions that are not multiples of 64
Also modified the UI to no longer step in 64
Diffstat (limited to 'modules/sd_hijack.py')
-rw-r--r--modules/sd_hijack.py2
1 file changed, 2 insertions, 0 deletions
diff --git a/modules/sd_hijack.py b/modules/sd_hijack.py
index eaedac13..6141f705 100644
--- a/modules/sd_hijack.py
+++ b/modules/sd_hijack.py
@@ -16,6 +16,7 @@ import ldm.modules.attention
import ldm.modules.diffusionmodules.model
import ldm.models.diffusion.ddim
import ldm.models.diffusion.plms
+import ldm.modules.diffusionmodules.openaimodel
attention_CrossAttention_forward = ldm.modules.attention.CrossAttention.forward
diffusionmodules_model_nonlinearity = ldm.modules.diffusionmodules.model.nonlinearity
@@ -26,6 +27,7 @@ def apply_optimizations():
undo_optimizations()
ldm.modules.diffusionmodules.model.nonlinearity = silu
+ ldm.modules.diffusionmodules.openaimodel.UNetModel.forward = sd_hijack_optimizations.patched_unet_forward
if cmd_opts.force_enable_xformers or (cmd_opts.xformers and shared.xformers_available and torch.version.cuda and (6, 0) <= torch.cuda.get_device_capability(shared.device) <= (9, 0)):
print("Applying xformers cross attention optimization.")