diff options
author | AUTOMATIC <16777216c@gmail.com> | 2022-10-07 13:39:51 +0000 |
---|---|---|
committer | AUTOMATIC <16777216c@gmail.com> | 2022-10-07 13:39:51 +0000 |
commit | f7c787eb7c295c27439f4fbdf78c26b8389560be (patch) | |
tree | 699c9721baa119af3f8f6e888fa25373f46c6042 /modules/sd_hijack.py | |
parent | 97bc0b9504572d2df80598d0b694703bcd626de6 (diff) | |
download | stable-diffusion-webui-gfx803-f7c787eb7c295c27439f4fbdf78c26b8389560be.tar.gz stable-diffusion-webui-gfx803-f7c787eb7c295c27439f4fbdf78c26b8389560be.tar.bz2 stable-diffusion-webui-gfx803-f7c787eb7c295c27439f4fbdf78c26b8389560be.zip |
make it possible to use hypernetworks without opt split attention
Diffstat (limited to 'modules/sd_hijack.py')
-rw-r--r-- | modules/sd_hijack.py | 6 |
1 file changed, 4 insertions, 2 deletions
diff --git a/modules/sd_hijack.py b/modules/sd_hijack.py
index a6fa890c..d68f89cc 100644
--- a/modules/sd_hijack.py
+++ b/modules/sd_hijack.py
@@ -8,7 +8,7 @@ from torch import einsum
 from torch.nn.functional import silu
import modules.textual_inversion.textual_inversion
-from modules import prompt_parser, devices, sd_hijack_optimizations, shared
+from modules import prompt_parser, devices, sd_hijack_optimizations, shared, hypernetwork
from modules.shared import opts, device, cmd_opts
import ldm.modules.attention
@@ -20,6 +20,8 @@ diffusionmodules_model_AttnBlock_forward = ldm.modules.diffusionmodules.model.At
 def apply_optimizations():
+ undo_optimizations()
+
ldm.modules.diffusionmodules.model.nonlinearity = silu
if cmd_opts.opt_split_attention_v1:
@@ -30,7 +32,7 @@ def apply_optimizations():
 def undo_optimizations():
- ldm.modules.attention.CrossAttention.forward = attention_CrossAttention_forward
+ ldm.modules.attention.CrossAttention.forward = hypernetwork.attention_CrossAttention_forward
ldm.modules.diffusionmodules.model.nonlinearity = diffusionmodules_model_nonlinearity
ldm.modules.diffusionmodules.model.AttnBlock.forward = diffusionmodules_model_AttnBlock_forward
|