path: root/modules/sd_hijack.py
author    Pam <pamhome21@gmail.com>  2023-03-06 19:33:13 +0000
committer Pam <pamhome21@gmail.com>  2023-03-06 19:33:13 +0000
commit    fec0a895119a124a295e3dad5205de5766031dc7 (patch)
tree      000a8ea99831b164435454761d1e24830317bc89 /modules/sd_hijack.py
parent    0cc0ee1bcb4c24a8c9715f66cede06601bfc00c8 (diff)
scaled dot product attention
Diffstat (limited to 'modules/sd_hijack.py')
-rw-r--r--  modules/sd_hijack.py | 4 ++++
1 file changed, 4 insertions(+), 0 deletions(-)
diff --git a/modules/sd_hijack.py b/modules/sd_hijack.py
index 79476783..76cb9120 100644
--- a/modules/sd_hijack.py
+++ b/modules/sd_hijack.py
@@ -42,6 +42,10 @@ def apply_optimizations():
ldm.modules.attention.CrossAttention.forward = sd_hijack_optimizations.xformers_attention_forward
ldm.modules.diffusionmodules.model.AttnBlock.forward = sd_hijack_optimizations.xformers_attnblock_forward
optimization_method = 'xformers'
+ elif cmd_opts.opt_sdp_attention and (hasattr(torch.nn.functional, "scaled_dot_product_attention") and callable(getattr(torch.nn.functional, "scaled_dot_product_attention"))):
+ print("Applying scaled dot product cross attention optimization.")
+ ldm.modules.attention.CrossAttention.forward = sd_hijack_optimizations.scaled_dot_product_attention_forward
+ optimization_method = 'sdp'
elif cmd_opts.opt_sub_quad_attention:
print("Applying sub-quadratic cross attention optimization.")
ldm.modules.attention.CrossAttention.forward = sd_hijack_optimizations.sub_quad_attention_forward
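
The new branch only activates when --opt-sdp-attention is passed and PyTorch exposes torch.nn.functional.scaled_dot_product_attention (added in PyTorch 2.0). The patched forward itself lives in sd_hijack_optimizations.py and is not part of this diff; the following is a minimal sketch, assuming the usual ldm CrossAttention attributes (to_q, to_k, to_v, to_out, heads) and einops, of what such a replacement forward could look like. It is an illustration, not the repository's actual implementation.

import torch
import torch.nn.functional as F
from einops import rearrange

def scaled_dot_product_attention_forward(self, x, context=None, mask=None):
    # Project input (and optional cross-attention context) to queries, keys, values.
    h = self.heads
    q = self.to_q(x)
    context = x if context is None else context
    k = self.to_k(context)
    v = self.to_v(context)

    # Split the embedding dimension into attention heads: (b, n, h*d) -> (b, h, n, d).
    q, k, v = (rearrange(t, 'b n (h d) -> b h n d', h=h) for t in (q, k, v))

    # Let PyTorch pick a fused kernel (Flash, memory-efficient, or math fallback).
    out = F.scaled_dot_product_attention(q, k, v, attn_mask=None, dropout_p=0.0, is_causal=False)

    # Merge heads back and apply the output projection.
    out = rearrange(out, 'b h n d -> b n (h d)')
    return self.to_out(out)

As in the hunk above, the hijack would then monkey-patch this function onto ldm.modules.attention.CrossAttention.forward when the 'sdp' optimization is selected.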