| author    | brkirch <brkirch@users.noreply.github.com> | 2023-01-05 04:10:31 +0000 |
| --------- | --- | --- |
| committer | brkirch <brkirch@users.noreply.github.com> | 2023-01-06 05:15:22 +0000 |
| commit    | 3bfe2bb5498241c4873cdd71b3f0a5bac5f64d7f (patch) | |
| tree      | c296183ec50286cf3552add8c1c967c2f843ec47 /modules/sd_hijack.py | |
| parent    | f6ab5a39d762a7791573d1c52ae5a3024b10e8ed (diff) | |
| parent    | 5f4fa942b8ec3ed3b15a352903489d6f9e6eb46e (diff) | |
| download  | stable-diffusion-webui-gfx803-3bfe2bb5498241c4873cdd71b3f0a5bac5f64d7f.tar.gz stable-diffusion-webui-gfx803-3bfe2bb5498241c4873cdd71b3f0a5bac5f64d7f.tar.bz2 stable-diffusion-webui-gfx803-3bfe2bb5498241c4873cdd71b3f0a5bac5f64d7f.zip | |
Merge remote-tracking branch 'upstream/master' into sub-quad_attn_opt
Diffstat (limited to 'modules/sd_hijack.py')
-rw-r--r-- | modules/sd_hijack.py | 12 |
1 file changed, 11 insertions(+), 1 deletion(-)
diff --git a/modules/sd_hijack.py b/modules/sd_hijack.py
index 55a684cc..ef25dadb 100644
--- a/modules/sd_hijack.py
+++ b/modules/sd_hijack.py
@@ -33,25 +33,34 @@ def apply_optimizations():
     ldm.modules.diffusionmodules.model.nonlinearity = silu
     ldm.modules.diffusionmodules.openaimodel.th = sd_hijack_unet.th
+
+    optimization_method = None
 
     if cmd_opts.force_enable_xformers or (cmd_opts.xformers and shared.xformers_available and torch.version.cuda and (6, 0) <= torch.cuda.get_device_capability(shared.device) <= (9, 0)):
         print("Applying xformers cross attention optimization.")
         ldm.modules.attention.CrossAttention.forward = sd_hijack_optimizations.xformers_attention_forward
         ldm.modules.diffusionmodules.model.AttnBlock.forward = sd_hijack_optimizations.xformers_attnblock_forward
+        optimization_method = 'xformers'
     elif cmd_opts.opt_sub_quad_attention:
         print("Applying sub-quadratic cross attention optimization.")
         ldm.modules.attention.CrossAttention.forward = sd_hijack_optimizations.sub_quad_attention_forward
         ldm.modules.diffusionmodules.model.AttnBlock.forward = sd_hijack_optimizations.sub_quad_attnblock_forward
+        optimization_method = 'sub-quadratic'
     elif cmd_opts.opt_split_attention_v1:
         print("Applying v1 cross attention optimization.")
         ldm.modules.attention.CrossAttention.forward = sd_hijack_optimizations.split_cross_attention_forward_v1
+        optimization_method = 'V1'
     elif not cmd_opts.disable_opt_split_attention and (cmd_opts.opt_split_attention_invokeai or not torch.cuda.is_available()):
         print("Applying cross attention optimization (InvokeAI).")
         ldm.modules.attention.CrossAttention.forward = sd_hijack_optimizations.split_cross_attention_forward_invokeAI
+        optimization_method = 'InvokeAI'
     elif not cmd_opts.disable_opt_split_attention and (cmd_opts.opt_split_attention or torch.cuda.is_available()):
         print("Applying cross attention optimization (Doggettx).")
         ldm.modules.attention.CrossAttention.forward = sd_hijack_optimizations.split_cross_attention_forward
         ldm.modules.diffusionmodules.model.AttnBlock.forward = sd_hijack_optimizations.cross_attention_attnblock_forward
+        optimization_method = 'Doggettx'
+
+    return optimization_method
 
 
 def undo_optimizations():
@@ -72,6 +81,7 @@ class StableDiffusionModelHijack:
     layers = None
     circular_enabled = False
     clip = None
+    optimization_method = None
 
     embedding_db = modules.textual_inversion.textual_inversion.EmbeddingDatabase(cmd_opts.embeddings_dir)
@@ -91,7 +101,7 @@ class StableDiffusionModelHijack:
             m.cond_stage_model.model.token_embedding = EmbeddingsWithFixes(m.cond_stage_model.model.token_embedding, self)
             m.cond_stage_model = sd_hijack_open_clip.FrozenOpenCLIPEmbedderWithCustomWords(m.cond_stage_model, self)
 
-        apply_optimizations()
+        self.optimization_method = apply_optimizations()
 
         self.clip = m.cond_stage_model
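In short, this merge makes `apply_optimizations()` report which cross attention optimization it selected: each branch now records a label, the function returns it, and `StableDiffusionModelHijack` stores it in a new `optimization_method` attribute. Below is a minimal standalone sketch of that select-and-record pattern; the names here (`Opts`, `apply_optimizations_sketch`, `ModelHijackSketch`) are illustrative stand-ins, not code from the repository.

```python
from typing import Optional


class Opts:
    """Hypothetical stand-in for the webui's cmd_opts flags."""
    xformers = False
    opt_sub_quad_attention = True
    opt_split_attention_v1 = False


def apply_optimizations_sketch(opts: Opts) -> Optional[str]:
    """Apply the first matching optimization and return its label.

    Returning the label (instead of only printing it) lets the caller
    remember which optimization is active.
    """
    optimization_method = None
    if opts.xformers:
        # ...would monkey-patch CrossAttention.forward with an xformers kernel...
        optimization_method = 'xformers'
    elif opts.opt_sub_quad_attention:
        # ...would monkey-patch with the sub-quadratic implementation...
        optimization_method = 'sub-quadratic'
    elif opts.opt_split_attention_v1:
        # ...would monkey-patch with the v1 split-attention implementation...
        optimization_method = 'V1'
    return optimization_method


class ModelHijackSketch:
    """Hypothetical stand-in for StableDiffusionModelHijack."""
    optimization_method = None  # class-level default, as in the diff

    def hijack(self, opts: Opts) -> None:
        # Store whichever optimization was selected so other code
        # can later query which one is in effect.
        self.optimization_method = apply_optimizations_sketch(opts)


hijack = ModelHijackSketch()
hijack.hijack(Opts())
print(hijack.optimization_method)  # -> 'sub-quadratic' with these defaults
```

The design choice is simply to thread the selection result back to the hijack object rather than leaving it implicit in a print statement, so the active optimization can be inspected after startup.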