author    | AUTOMATIC1111 <16777216c@gmail.com> | 2023-06-27 06:05:53 +0000
committer | GitHub <noreply@github.com>         | 2023-06-27 06:05:53 +0000
commit    | 4147fd6b2f905f76c6bc20c3d9de2ea0842fa853 (patch)
tree      | e7915af4d068912cd8509f1638e05460445a5eea /modules/sd_hijack_optimizations.py
parent    | f603275d84301b5ee952683e951dd1aad72ba615 (diff)
parent    | bedcd2f377a38ef4da58c11dbe222d32b954be2f (diff)
Merge branch 'dev' into 10141-gradio-user-exif
Diffstat (limited to 'modules/sd_hijack_optimizations.py')
-rw-r--r-- | modules/sd_hijack_optimizations.py | 9
1 file changed, 3 insertions(+), 6 deletions(-)
diff --git a/modules/sd_hijack_optimizations.py b/modules/sd_hijack_optimizations.py
index 80e48a42..53e27ade 100644
--- a/modules/sd_hijack_optimizations.py
+++ b/modules/sd_hijack_optimizations.py
@@ -1,7 +1,5 @@
 from __future__ import annotations
 import math
-import sys
-import traceback

 import psutil
 import torch
@@ -48,7 +46,7 @@ class SdOptimizationXformers(SdOptimization):
     priority = 100

     def is_available(self):
-        return shared.cmd_opts.force_enable_xformers or (shared.xformers_available and torch.version.cuda and (6, 0) <= torch.cuda.get_device_capability(shared.device) <= (9, 0))
+        return shared.cmd_opts.force_enable_xformers or (shared.xformers_available and torch.cuda.is_available() and (6, 0) <= torch.cuda.get_device_capability(shared.device) <= (9, 0))

     def apply(self):
         ldm.modules.attention.CrossAttention.forward = xformers_attention_forward
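The is_available() change above swaps torch.version.cuda for torch.cuda.is_available(): the former is a compile-time version string that is truthy on any CUDA-built PyTorch wheel even when no GPU is usable, while the latter verifies at runtime that a CUDA device is present, so torch.cuda.get_device_capability() is no longer reached on CPU-only or ROCm setups. A minimal sketch of the same guard in isolation (the force flag and device argument stand in for the webui's shared.cmd_opts.force_enable_xformers and shared.device; this standalone form is an assumption for illustration):

    import torch

    def xformers_usable(device="cuda", force=False):
        # Explicit override first, mirroring --force-enable-xformers.
        if force:
            return True
        # Runtime check: False on CPU-only wheels, where torch.version.cuda
        # can still be a truthy string such as "11.8".
        if not torch.cuda.is_available():
            return False
        # Compute capability must fall inside the supported window.
        return (6, 0) <= torch.cuda.get_device_capability(device) <= (9, 0)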
@@ -140,8 +138,7 @@ if shared.cmd_opts.xformers or shared.cmd_opts.force_enable_xformers:
         import xformers.ops
         shared.xformers_available = True
     except Exception:
-        print("Cannot import xformers", file=sys.stderr)
-        print(traceback.format_exc(), file=sys.stderr)
+        errors.report("Cannot import xformers", exc_info=True)


 def get_available_vram():
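This hunk pairs with the import removal at the top of the diff: once the two stderr prints are replaced by the repo's errors.report() helper, sys and traceback are no longer needed in this module. As a hedged sketch, a helper with that call shape reduces to roughly the following (the real implementation lives in modules/errors.py and may differ):

    import sys
    import traceback

    def report(message, *, exc_info=False):
        # One call replaces the removed pair of print(..., file=sys.stderr)
        # statements; exc_info=True appends the formatted traceback.
        print(message, file=sys.stderr)
        if exc_info:
            print(traceback.format_exc(), file=sys.stderr)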
@@ -605,7 +602,7 @@ def sdp_attnblock_forward(self, x):
     q, k, v = (rearrange(t, 'b c h w -> b (h w) c') for t in (q, k, v))
     dtype = q.dtype
     if shared.opts.upcast_attn:
-        q, k = q.float(), k.float()
+        q, k, v = q.float(), k.float(), v.float()
     q = q.contiguous()
     k = k.contiguous()
     v = v.contiguous()
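The final hunk upcasts v alongside q and k: torch.nn.functional.scaled_dot_product_attention requires query, key, and value to share a dtype, so upcasting only q and k left v at half precision and produced a dtype-mismatch error whenever upcast_attn was enabled. A standalone reproduction of the constraint (shapes are arbitrary; this snippet is illustrative, not the webui code path):

    import torch
    import torch.nn.functional as F

    q = torch.randn(1, 8, 64, 40)         # float32, as after upcasting
    k = torch.randn(1, 8, 64, 40)
    v = torch.randn(1, 8, 64, 40).half()  # float16: dtype disagrees with q/k

    try:
        F.scaled_dot_product_attention(q, k, v)
    except RuntimeError as e:
        print("rejected:", e)  # PyTorch refuses mismatched q/k/v dtypes

    out = F.scaled_dot_product_attention(q, k, v.float())  # upcast v as well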