Diffstat (limited to 'modules/sd_hijack_optimizations.py')
-rw-r--r-- | modules/sd_hijack_optimizations.py | 11
1 file changed, 4 insertions, 7 deletions
diff --git a/modules/sd_hijack_optimizations.py b/modules/sd_hijack_optimizations.py
index 2ec0b049..b41aa419 100644
--- a/modules/sd_hijack_optimizations.py
+++ b/modules/sd_hijack_optimizations.py
@@ -1,7 +1,5 @@
from __future__ import annotations
import math
-import sys
-import traceback
import psutil
import torch
@@ -59,7 +57,7 @@ class SdOptimizationSdpNoMem(SdOptimization):
name = "sdp-no-mem"
label = "scaled dot product without memory efficient attention"
cmd_opt = "opt_sdp_no_mem_attention"
- priority = 90
+ priority = 80
def is_available(self):
return hasattr(torch.nn.functional, "scaled_dot_product_attention") and callable(torch.nn.functional.scaled_dot_product_attention)
@@ -73,7 +71,7 @@ class SdOptimizationSdp(SdOptimizationSdpNoMem):
name = "sdp"
label = "scaled dot product"
cmd_opt = "opt_sdp_attention"
- priority = 80
+ priority = 70
def apply(self):
ldm.modules.attention.CrossAttention.forward = scaled_dot_product_attention_forward
@@ -116,7 +114,7 @@ class SdOptimizationInvokeAI(SdOptimization):
class SdOptimizationDoggettx(SdOptimization):
name = "Doggettx"
cmd_opt = "opt_split_attention"
- priority = 20
+ priority = 90
def apply(self):
ldm.modules.attention.CrossAttention.forward = split_cross_attention_forward
@@ -140,8 +138,7 @@ if shared.cmd_opts.xformers or shared.cmd_opts.force_enable_xformers:
import xformers.ops
shared.xformers_available = True
except Exception:
- print("Cannot import xformers", file=sys.stderr)
- print(traceback.format_exc(), file=sys.stderr)
+ errors.report("Cannot import xformers", exc_info=True)
def get_available_vram():
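For context, the priority changes in this diff reorder the optimizations so that Doggettx (90) now sorts ahead of sdp-no-mem (80) and sdp (70). A minimal sketch of that ordering, assuming the highest available priority wins; the pick() helper below is illustrative, not the project's actual selection code:

    # Hypothetical sketch (not the webui's real selection logic): sort candidate
    # optimizations by priority, highest first, and apply the first available one.
    candidates = [
        ("Doggettx", 90),     # opt_split_attention, raised from 20 in this change
        ("sdp-no-mem", 80),   # lowered from 90
        ("sdp", 70),          # lowered from 80
    ]

    def pick(candidates, is_available):
        for name, _prio in sorted(candidates, key=lambda c: c[1], reverse=True):
            if is_available(name):
                return name
        return None

    # With every optimization available, Doggettx is now chosen first.
    assert pick(candidates, lambda name: True) == "Doggettx"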
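The xformers import failure is now routed through the shared error helper instead of two manual stderr prints. A rough sketch of the behaviour the removed code provided, which a helper like errors.report is assumed to reproduce; this standalone report() function is illustrative, not the module's real implementation:

    import sys
    import traceback

    def report(message: str, *, exc_info: bool = False) -> None:
        # Illustrative stand-in for modules.errors.report: print the message to
        # stderr and, when requested, append the current exception's traceback.
        print(message, file=sys.stderr)
        if exc_info:
            print(traceback.format_exc(), file=sys.stderr)

    try:
        import xformers.ops  # noqa: F401
    except Exception:
        report("Cannot import xformers", exc_info=True)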