| author | AUTOMATIC <16777216c@gmail.com> | 2023-06-27 05:38:14 +0000 |
|---|---|---|
| committer | AUTOMATIC <16777216c@gmail.com> | 2023-06-27 05:38:14 +0000 |
| commit | 394ffa7b0a7fff3ec484bcd084e673a8b301ccc8 (patch) | |
| tree | b0e9b9d93f90b5d50084292a48578bd4f9a83ec6 /modules/sd_hijack_optimizations.py | |
| parent | baf6946e06249c5af9851c60171692c44ef633e0 (diff) | |
| parent | dbc88c96450793b08b520f3b86cd46d6aeaaae52 (diff) | |
Merge branch 'release_candidate'
Diffstat (limited to 'modules/sd_hijack_optimizations.py')
-rw-r--r-- | modules/sd_hijack_optimizations.py | 9
1 file changed, 3 insertions(+), 6 deletions(-)
diff --git a/modules/sd_hijack_optimizations.py b/modules/sd_hijack_optimizations.py
index 80e48a42..53e27ade 100644
--- a/modules/sd_hijack_optimizations.py
+++ b/modules/sd_hijack_optimizations.py
@@ -1,7 +1,5 @@
 from __future__ import annotations
 import math
-import sys
-import traceback
 
 import psutil
 import torch
@@ -48,7 +46,7 @@ class SdOptimizationXformers(SdOptimization):
     priority = 100
 
     def is_available(self):
-        return shared.cmd_opts.force_enable_xformers or (shared.xformers_available and torch.version.cuda and (6, 0) <= torch.cuda.get_device_capability(shared.device) <= (9, 0))
+        return shared.cmd_opts.force_enable_xformers or (shared.xformers_available and torch.cuda.is_available() and (6, 0) <= torch.cuda.get_device_capability(shared.device) <= (9, 0))
 
     def apply(self):
         ldm.modules.attention.CrossAttention.forward = xformers_attention_forward
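
The hunk above swaps `torch.version.cuda` (a build-time constant that is set even when no GPU is present) for `torch.cuda.is_available()`, which also confirms a usable device at runtime. A minimal sketch of the resulting test, using a hypothetical helper name and assuming a standard PyTorch install:

```python
import torch

def xformers_compat(device=None) -> bool:
    # Illustrative sketch, not the repo's method.
    # torch.version.cuda only says the wheel was built against CUDA;
    # torch.cuda.is_available() additionally checks for a usable GPU at runtime.
    if not torch.cuda.is_available():
        return False
    major, minor = torch.cuda.get_device_capability(device)  # e.g. (8, 6) on an RTX 3090
    # Tuple comparison bounds the compute capability: Pascal (6.0) through Hopper (9.0).
    return (6, 0) <= (major, minor) <= (9, 0)
```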
@@ -140,8 +138,7 @@ if shared.cmd_opts.xformers or shared.cmd_opts.force_enable_xformers:
         import xformers.ops
         shared.xformers_available = True
     except Exception:
-        print("Cannot import xformers", file=sys.stderr)
-        print(traceback.format_exc(), file=sys.stderr)
+        errors.report("Cannot import xformers", exc_info=True)
 
 
 def get_available_vram():
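
This hunk replaces the manual stderr/traceback printing (and the now-unused `sys`/`traceback` imports removed above) with `errors.report` from the webui's `modules/errors.py`. That helper's implementation is not part of this diff; a purely illustrative stand-in might look like:

```python
import sys
import traceback

def report(message: str, *, exc_info: bool = False) -> None:
    # Hypothetical stand-in for errors.report(); the real helper lives in
    # modules/errors.py and is not shown in this diff.
    print(message, file=sys.stderr)
    if exc_info:
        # traceback.format_exc() renders the exception currently being handled,
        # so this is meant to be called from inside an `except` block.
        print(traceback.format_exc(), file=sys.stderr)
```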
@@ -605,7 +602,7 @@ def sdp_attnblock_forward(self, x):
     q, k, v = (rearrange(t, 'b c h w -> b (h w) c') for t in (q, k, v))
     dtype = q.dtype
     if shared.opts.upcast_attn:
-        q, k = q.float(), k.float()
+        q, k, v = q.float(), k.float(), v.float()
     q = q.contiguous()
     k = k.contiguous()
     v = v.contiguous()
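
The last hunk upcasts `v` alongside `q` and `k` when `upcast_attn` is enabled, so the subsequent scaled dot-product attention call receives tensors of a consistent dtype (mixed float16/float32 inputs are typically rejected). A simplified sketch of the pattern, not the repo's function:

```python
import torch
import torch.nn.functional as F

def sdp_attention(q, k, v, upcast=True):
    # Illustrative sketch: upcasting only q and k would leave v in half precision,
    # which scaled_dot_product_attention generally rejects as a dtype mismatch.
    dtype = q.dtype
    if upcast:
        q, k, v = q.float(), k.float(), v.float()
    out = F.scaled_dot_product_attention(q.contiguous(), k.contiguous(), v.contiguous())
    return out.to(dtype)  # cast back to the original precision afterwards
```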