author    AUTOMATIC <16777216c@gmail.com>  2022-10-08 13:33:39 +0000
committer AUTOMATIC <16777216c@gmail.com>  2022-10-08 13:33:39 +0000
commit    7ff1170a2e11b6f00f587407326db0b9f8f51adf (patch)
tree      caf085b089c8f11989de4c9413a05ecf72140e7c
parent    48feae37ff36915df9a3502a0a5aa1b7f146ab14 (diff)
emergency fix for xformers (continue + shared)
-rw-r--r--  modules/sd_hijack_optimizations.py | 16
1 file changed, 8 insertions(+), 8 deletions(-)
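
What the diff below does: the previous revision placed the xformers import guard above `from modules import shared`, so its `shared.xformers_available = True` assignment referenced a name that did not exist yet, and the block ended with a stray `continue`, which is a syntax error outside a loop. The fix moves the guard below the imports it depends on, narrows the bare `except:` to `except Exception:`, and rewords the fallback hint to mention the `--xformers` commandline argument.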
diff --git a/modules/sd_hijack_optimizations.py b/modules/sd_hijack_optimizations.py
index e43e2c7a..05023b6f 100644
--- a/modules/sd_hijack_optimizations.py
+++ b/modules/sd_hijack_optimizations.py
@@ -1,19 +1,19 @@
 import math
 import torch
 from torch import einsum
-try:
-    import xformers.ops
-    import functorch
-    xformers._is_functorch_available = True
-    shared.xformers_available = True
-except:
-    print('Cannot find xformers, defaulting to split attention. Try setting --xformers in your webui-user file if you wish to install it.')
-    continue
+
 from ldm.util import default
 from einops import rearrange
 from modules import shared
+try:
+    import xformers.ops
+    import functorch
+    xformers._is_functorch_available = True
+    shared.xformers_available = True
+except Exception:
+    print('Cannot find xformers, defaulting to split attention. Try adding --xformers commandline argument to your webui-user file if you wish to install it.')
 # see https://github.com/basujindal/stable-diffusion/pull/117 for discussion
 def split_cross_attention_forward_v1(self, x, context=None, mask=None):
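
For reference, the guard the new code lands on is the standard optional-dependency pattern in Python: import if available, record availability on a shared flag, and fall back gracefully otherwise. Below is a minimal, self-contained sketch of that pattern, using a SimpleNamespace stand-in for the repository's `modules.shared` settings module (which carries far more state in the real codebase):

import types

# Stand-in for modules.shared; in the webui it is a real settings module
# and must be defined *before* the guard, since the guard writes to it --
# importing it afterwards was exactly the bug this commit fixes.
shared = types.SimpleNamespace(xformers_available=False)

try:
    import xformers.ops  # memory-efficient attention, present only if installed
    shared.xformers_available = True
except Exception:
    # `except Exception` (rather than a bare `except:`) avoids swallowing
    # KeyboardInterrupt/SystemExit. Nothing else belongs here: the removed
    # `continue` was a module-level statement, which is a SyntaxError
    # outside a loop and broke the whole file at import time.
    print('Cannot find xformers, defaulting to split attention.')

Downstream code can then branch on `shared.xformers_available` to pick the xformers attention path or the split-attention fallback.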