From 3ee12386307bbedb51265028e2e9af246094a12c Mon Sep 17 00:00:00 2001
From: AUTOMATIC <16777216c@gmail.com>
Date: Thu, 1 Jun 2023 08:12:06 +0300
Subject: revert default cross attention optimization to Doggettx

make --disable-opt-split-attention command line option work again
---
 modules/sd_hijack_optimizations.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/modules/sd_hijack_optimizations.py b/modules/sd_hijack_optimizations.py
index 2ec0b049..80e48a42 100644
--- a/modules/sd_hijack_optimizations.py
+++ b/modules/sd_hijack_optimizations.py
@@ -59,7 +59,7 @@ class SdOptimizationSdpNoMem(SdOptimization):
     name = "sdp-no-mem"
     label = "scaled dot product without memory efficient attention"
     cmd_opt = "opt_sdp_no_mem_attention"
-    priority = 90
+    priority = 80
 
     def is_available(self):
         return hasattr(torch.nn.functional, "scaled_dot_product_attention") and callable(torch.nn.functional.scaled_dot_product_attention)
@@ -73,7 +73,7 @@ class SdOptimizationSdp(SdOptimizationSdpNoMem):
     name = "sdp"
     label = "scaled dot product"
     cmd_opt = "opt_sdp_attention"
-    priority = 80
+    priority = 70
 
     def apply(self):
         ldm.modules.attention.CrossAttention.forward = scaled_dot_product_attention_forward
@@ -116,7 +116,7 @@ class SdOptimizationInvokeAI(SdOptimization):
 class SdOptimizationDoggettx(SdOptimization):
     name = "Doggettx"
    cmd_opt = "opt_split_attention"
-    priority = 20
+    priority = 90
 
     def apply(self):
         ldm.modules.attention.CrossAttention.forward = split_cross_attention_forward
-- 
cgit v1.2.3
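
Note: the patch touches only priority values. Doggettx goes from 20 to 90 and becomes the highest-priority optimization, while sdp-no-mem drops from 90 to 80 and sdp from 80 to 70, so Doggettx is selected as the default again and --disable-opt-split-attention (which disables Doggettx) has an effect once more. A minimal sketch of priority-based selection under that assumption follows; select_optimization and the simplified base class are illustrative stand-ins, not the actual webui code:

class SdOptimization:
    # Simplified stand-in for the base class in sd_hijack_optimizations.py;
    # real subclasses also carry label, cmd_opt, and an apply() method.
    name = None
    priority = 0

    def is_available(self):
        return True


def select_optimization(optimizations, forced_name=None):
    # If a command line option forces a specific optimization, use it
    # when it is available on this system.
    if forced_name is not None:
        for opt in optimizations:
            if opt.name == forced_name and opt.is_available():
                return opt
    # Otherwise fall back to the highest-priority available optimization,
    # which after this patch is Doggettx (priority 90).
    available = [opt for opt in optimizations if opt.is_available()]
    return max(available, key=lambda opt: opt.priority, default=None)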