author    | AUTOMATIC <16777216c@gmail.com> | 2022-09-21 06:49:02 +0000
committer | AUTOMATIC <16777216c@gmail.com> | 2022-09-21 06:49:02 +0000
commit    | 254da5d127d1ac42803fb5db61857bfb5005b655 (patch)
tree      | 485cb0b49ac7bee13d37315d587f3844a9667bd7 /modules/sd_hijack.py
parent    | 6785fabefb57e95f563527ce6171ec4fa9ea6b7d (diff)
--opt-split-attention is now on by default when torch.cuda is available, and off for other devices (CPU and MPS), where the option reportedly does not work
Diffstat (limited to 'modules/sd_hijack.py')
-rw-r--r-- | modules/sd_hijack.py | 2
1 file changed, 1 insertion, 1 deletion
diff --git a/modules/sd_hijack.py b/modules/sd_hijack.py
index bd1a0936..62ba9101 100644
--- a/modules/sd_hijack.py
+++ b/modules/sd_hijack.py
@@ -245,7 +245,7 @@ class StableDiffusionModelHijack:
         if cmd_opts.opt_split_attention_v1:
             ldm.modules.attention.CrossAttention.forward = split_cross_attention_forward_v1
-        elif not cmd_opts.disable_opt_split_attention:
+        elif not cmd_opts.disable_opt_split_attention and (cmd_opts.opt_split_attention or torch.cuda.is_available()):
             ldm.modules.attention.CrossAttention.forward = split_cross_attention_forward
             ldm.modules.diffusionmodules.model.nonlinearity = nonlinearity_hijack
             ldm.modules.diffusionmodules.model.AttnBlock.forward = cross_attention_attnblock_forward
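For reference, a minimal Python sketch of the default behaviour this change produces. It is not part of the commit: should_use_split_attention is a hypothetical helper written for illustration, and only the cmd_opts attribute names are taken from the diff above.

import torch
from types import SimpleNamespace

def should_use_split_attention(cmd_opts) -> bool:
    # Hypothetical helper mirroring the new condition: the optimization is used
    # unless explicitly disabled, and is on by default only when CUDA is
    # available, since it reportedly does not work on CPU/MPS.
    if cmd_opts.disable_opt_split_attention:
        return False  # explicit opt-out always wins
    return cmd_opts.opt_split_attention or torch.cuda.is_available()

# Example: with no flags passed, the default now depends on the backend.
opts = SimpleNamespace(disable_opt_split_attention=False, opt_split_attention=False)
print(should_use_split_attention(opts))  # True only when torch.cuda.is_available()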