author | AUTOMATIC1111 <16777216c@gmail.com> | 2023-07-08 12:10:10 +0000
committer | GitHub <noreply@github.com> | 2023-07-08 12:10:10 +0000
commit | ec9bbda3da846b4aa2f03b1e0f0952ffbc61f4f6 (patch)
tree | 5ae676b6ac7242ad082ab7421cf8ae9d50d52818 /modules/sd_hijack_optimizations.py
parent | c4c63dd5e4760c56405cef2e71abc5c3604c4578 (diff)
parent | 18256c5f0174126cb103afece2b39b6b831e034a (diff)
Merge branch 'dev' into img2img-batch-png-info
Diffstat (limited to 'modules/sd_hijack_optimizations.py')
-rw-r--r-- | modules/sd_hijack_optimizations.py | 9
1 file changed, 3 insertions(+), 6 deletions(-)
diff --git a/modules/sd_hijack_optimizations.py b/modules/sd_hijack_optimizations.py
index 80e48a42..53e27ade 100644
--- a/modules/sd_hijack_optimizations.py
+++ b/modules/sd_hijack_optimizations.py
@@ -1,7 +1,5 @@
 from __future__ import annotations
 import math
-import sys
-import traceback
 import psutil

 import torch
@@ -48,7 +46,7 @@ class SdOptimizationXformers(SdOptimization):
     priority = 100

     def is_available(self):
-        return shared.cmd_opts.force_enable_xformers or (shared.xformers_available and torch.version.cuda and (6, 0) <= torch.cuda.get_device_capability(shared.device) <= (9, 0))
+        return shared.cmd_opts.force_enable_xformers or (shared.xformers_available and torch.cuda.is_available() and (6, 0) <= torch.cuda.get_device_capability(shared.device) <= (9, 0))

     def apply(self):
         ldm.modules.attention.CrossAttention.forward = xformers_attention_forward
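The change in `is_available` swaps a build-time check for a runtime one: `torch.version.cuda` is set whenever the installed torch wheel was built with CUDA support, even when no usable GPU is present, in which case the following `torch.cuda.get_device_capability(shared.device)` call would fail; `torch.cuda.is_available()` verifies that a device can actually be used. A minimal standalone sketch of the same guard (the function name and the `force` parameter are illustrative, not from the repo):

```python
import torch

def xformers_usable(force: bool = False) -> bool:
    # Same logic as SdOptimizationXformers.is_available, restated outside the webui:
    # either xformers is forced on, or a CUDA device is actually usable and its
    # compute capability lies in the range the xformers kernels support.
    if force:
        return True
    if not torch.cuda.is_available():  # runtime check, not just "torch was built with CUDA"
        return False
    capability = torch.cuda.get_device_capability()  # e.g. (8, 6) on an RTX 3090
    return (6, 0) <= capability <= (9, 0)
```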
@@ -140,8 +138,7 @@ if shared.cmd_opts.xformers or shared.cmd_opts.force_enable_xformers:
         import xformers.ops
         shared.xformers_available = True
     except Exception:
-        print("Cannot import xformers", file=sys.stderr)
-        print(traceback.format_exc(), file=sys.stderr)
+        errors.report("Cannot import xformers", exc_info=True)


 def get_available_vram():
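The two `print(..., file=sys.stderr)` calls are folded into the project's shared error helper, which is what makes the `sys` and `traceback` imports removed in the first hunk unnecessary. The actual body of `errors.report` lives in `modules/errors.py` and is not shown in this diff; a hypothetical stand-in with the same observable behaviour as the removed code might look like this:

```python
import sys
import traceback

def report(message: str, *, exc_info: bool = False) -> None:
    # Hypothetical equivalent of modules.errors.report as used above:
    # print the message to stderr and, when exc_info=True, follow it with
    # the traceback of the exception currently being handled.
    print(message, file=sys.stderr)
    if exc_info:
        print(traceback.format_exc(), file=sys.stderr)
```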
@@ -605,7 +602,7 @@ def sdp_attnblock_forward(self, x):
     q, k, v = (rearrange(t, 'b c h w -> b (h w) c') for t in (q, k, v))
     dtype = q.dtype
     if shared.opts.upcast_attn:
-        q, k = q.float(), k.float()
+        q, k, v = q.float(), k.float(), v.float()
     q = q.contiguous()
     k = k.contiguous()
     v = v.contiguous()
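The last hunk fixes `sdp_attnblock_forward` when `upcast_attn` is enabled: `torch.nn.functional.scaled_dot_product_attention` expects `q`, `k` and `v` to share a dtype, so upcasting only `q` and `k` left `v` in half precision and made the attention call fail. A minimal illustration of the corrected behaviour (standalone, not webui code; tensor shapes are arbitrary):

```python
import torch
import torch.nn.functional as F

# Half-precision activations, as an fp16 model would produce.
q = torch.randn(1, 4096, 512, dtype=torch.float16)
k = torch.randn(1, 4096, 512, dtype=torch.float16)
v = torch.randn(1, 4096, 512, dtype=torch.float16)

dtype = q.dtype                                # remembered so the result can be cast back
q, k, v = q.float(), k.float(), v.float()      # upcast all three, as the fix does
out = F.scaled_dot_product_attention(q, k, v)  # dtypes now match
out = out.to(dtype)                            # back to the model's working precision
```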