| field | value | date |
|---|---|---|
| author | Pam <pamhome21@gmail.com> | 2023-03-06 19:33:13 +0000 |
| committer | Pam <pamhome21@gmail.com> | 2023-03-06 19:33:13 +0000 |
| commit | fec0a895119a124a295e3dad5205de5766031dc7 (patch) | |
| tree | 000a8ea99831b164435454761d1e24830317bc89 /modules/sd_hijack.py | |
| parent | 0cc0ee1bcb4c24a8c9715f66cede06601bfc00c8 (diff) | |
scaled dot product attention
Diffstat (limited to 'modules/sd_hijack.py')
| mode | file | lines changed |
|---|---|---|
| -rw-r--r-- | modules/sd_hijack.py | 4 |

1 file changed, 4 insertions(+), 0 deletions(-)
diff --git a/modules/sd_hijack.py b/modules/sd_hijack.py
index 79476783..76cb9120 100644
--- a/modules/sd_hijack.py
+++ b/modules/sd_hijack.py
@@ -42,6 +42,10 @@ def apply_optimizations():
         ldm.modules.attention.CrossAttention.forward = sd_hijack_optimizations.xformers_attention_forward
         ldm.modules.diffusionmodules.model.AttnBlock.forward = sd_hijack_optimizations.xformers_attnblock_forward
         optimization_method = 'xformers'
+    elif cmd_opts.opt_sdp_attention and (hasattr(torch.nn.functional, "scaled_dot_product_attention") and callable(getattr(torch.nn.functional, "scaled_dot_product_attention"))):
+        print("Applying scaled dot product cross attention optimization.")
+        ldm.modules.attention.CrossAttention.forward = sd_hijack_optimizations.scaled_dot_product_attention_forward
+        optimization_method = 'sdp'
     elif cmd_opts.opt_sub_quad_attention:
         print("Applying sub-quadratic cross attention optimization.")
         ldm.modules.attention.CrossAttention.forward = sd_hijack_optimizations.sub_quad_attention_forward
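The diff only rewires `CrossAttention.forward`; the body of `sd_hijack_optimizations.scaled_dot_product_attention_forward` lives in `modules/sd_hijack_optimizations.py` and is not part of this patch. As a rough, hypothetical sketch of what such a forward can look like on PyTorch 2.0+, assuming the usual ldm `CrossAttention` layout (`to_q`/`to_k`/`to_v` projections, a `heads` attribute, and a `to_out` output projection):

```python
# Hypothetical sketch, NOT the repository's actual implementation.
# Assumes ldm's CrossAttention attributes: to_q, to_k, to_v, heads, to_out.
import torch
import torch.nn.functional as F
from einops import rearrange


def scaled_dot_product_attention_forward(self, x, context=None, mask=None):
    h = self.heads

    q = self.to_q(x)
    context = context if context is not None else x  # self-attention case
    k = self.to_k(context)
    v = self.to_v(context)

    # (batch, tokens, heads*dim) -> (batch, heads, tokens, dim)
    q, k, v = (rearrange(t, 'b n (h d) -> b h n d', h=h) for t in (q, k, v))

    # Fused attention kernel introduced in PyTorch 2.0; the 1/sqrt(d) scaling
    # and the softmax(QK^T)V product both happen inside this single call.
    out = F.scaled_dot_product_attention(q, k, v, attn_mask=mask)

    out = rearrange(out, 'b h n d -> b n (h d)')
    return self.to_out(out)
```

The `hasattr(...)`/`callable(...)` guard in the patched `apply_optimizations` is a capability probe: `torch.nn.functional.scaled_dot_product_attention` does not exist in PyTorch 1.x builds, so on older installs the `cmd_opts.opt_sdp_attention` flag quietly falls through to the next optimization branch.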