diff options
author | brkirch <brkirch@users.noreply.github.com> | 2022-10-11 02:48:54 +0000 |
---|---|---|
committer | AUTOMATIC1111 <16777216c@gmail.com> | 2022-10-11 14:24:00 +0000 |
commit | c0484f1b986ce7acb0e3596f6089a191279f5442 (patch) | |
tree | fe510abe4f3a389ffdf3b363dfcfd9a68506fd80 /modules/sd_hijack.py | |
parent | f7e86aa420a90bede8a6fbb9bb40a21c807cafcb (diff) | |
download | stable-diffusion-webui-gfx803-c0484f1b986ce7acb0e3596f6089a191279f5442.tar.gz stable-diffusion-webui-gfx803-c0484f1b986ce7acb0e3596f6089a191279f5442.tar.bz2 stable-diffusion-webui-gfx803-c0484f1b986ce7acb0e3596f6089a191279f5442.zip |
Add cross-attention optimization from InvokeAI
* Add cross-attention optimization from InvokeAI (~30% speed improvement on MPS)
* Add command line option for it
* Make it default when CUDA is unavailable
Diffstat (limited to 'modules/sd_hijack.py')
-rw-r--r-- | modules/sd_hijack.py | 5 |
1 file changed, 4 insertions(+), 1 deletion(-)
diff --git a/modules/sd_hijack.py b/modules/sd_hijack.py
index f07ec041..5a1b167f 100644
--- a/modules/sd_hijack.py
+++ b/modules/sd_hijack.py
@@ -30,8 +30,11 @@ def apply_optimizations():
     elif cmd_opts.opt_split_attention_v1:
print("Applying v1 cross attention optimization.")
ldm.modules.attention.CrossAttention.forward = sd_hijack_optimizations.split_cross_attention_forward_v1
+ elif not cmd_opts.disable_opt_split_attention and (cmd_opts.opt_split_attention_invokeai or not torch.cuda.is_available()):
+ print("Applying cross attention optimization (InvokeAI).")
+ ldm.modules.attention.CrossAttention.forward = sd_hijack_optimizations.split_cross_attention_forward_invokeAI
elif not cmd_opts.disable_opt_split_attention and (cmd_opts.opt_split_attention or torch.cuda.is_available()):
- print("Applying cross attention optimization.")
+ print("Applying cross attention optimization (Doggettx).")
ldm.modules.attention.CrossAttention.forward = sd_hijack_optimizations.split_cross_attention_forward
ldm.modules.diffusionmodules.model.AttnBlock.forward = sd_hijack_optimizations.cross_attention_attnblock_forward
|