author | brkirch <brkirch@users.noreply.github.com> | 2022-10-11 03:55:48 +0000
committer | AUTOMATIC1111 <16777216c@gmail.com> | 2022-10-11 14:24:00 +0000
commit | 98fd5cde72d5bda1620ab78416c7828fdc3dc10b (patch)
tree | e618b7844c85f153e7ac7798425a5ca360f372d8 /modules/sd_hijack_optimizations.py
parent | c0484f1b986ce7acb0e3596f6089a191279f5442 (diff)
download | stable-diffusion-webui-gfx803-98fd5cde72d5bda1620ab78416c7828fdc3dc10b.tar.gz, stable-diffusion-webui-gfx803-98fd5cde72d5bda1620ab78416c7828fdc3dc10b.tar.bz2, stable-diffusion-webui-gfx803-98fd5cde72d5bda1620ab78416c7828fdc3dc10b.zip
Add check for psutil
Diffstat (limited to 'modules/sd_hijack_optimizations.py')
-rw-r--r-- | modules/sd_hijack_optimizations.py | 19
1 file changed, 15 insertions(+), 4 deletions(-)
diff --git a/modules/sd_hijack_optimizations.py b/modules/sd_hijack_optimizations.py
index 870226c5..2a4ac7e0 100644
--- a/modules/sd_hijack_optimizations.py
+++ b/modules/sd_hijack_optimizations.py
@@ -1,7 +1,7 @@
 import math
 import sys
 import traceback
-import psutil
+import importlib
 
 import torch
 from torch import einsum
@@ -117,9 +117,20 @@ def split_cross_attention_forward(self, x, context=None, mask=None):
     return self.to_out(r2)
-# -- From https://github.com/invoke-ai/InvokeAI/blob/main/ldm/modules/attention.py (with hypernetworks support added) --
-mem_total_gb = psutil.virtual_memory().total // (1 << 30)
+def check_for_psutil():
+    try:
+        spec = importlib.util.find_spec('psutil')
+        return spec is not None
+    except ModuleNotFoundError:
+        return False
+
+invokeAI_mps_available = check_for_psutil()
+
+# -- Taken from https://github.com/invoke-ai/InvokeAI --
+if invokeAI_mps_available:
+    import psutil
+    mem_total_gb = psutil.virtual_memory().total // (1 << 30)
 def einsum_op_compvis(q, k, v):
     s = einsum('b i d, b j d -> b i j', q, k)
@@ -193,7 +204,7 @@ def split_cross_attention_forward_invokeAI(self, x, context=None, mask=None):
     r = einsum_op(q, k, v)
     return self.to_out(rearrange(r, '(b h) n d -> b n (h d)', h=h))
-# -- End of code from https://github.com/invoke-ai/InvokeAI/blob/main/ldm/modules/attention.py --
+# -- End of code from https://github.com/invoke-ai/InvokeAI --
 def xformers_attention_forward(self, x, context=None, mask=None):
     h = self.heads
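
The new check_for_psutil() helper gates the InvokeAI cross-attention path on whether the optional psutil dependency is installed, using importlib.util.find_spec so nothing is imported unless the package is actually present. Below is a minimal, standalone sketch of that optional-dependency pattern; it is not part of the commit, the print calls are illustrative only, and importlib.util is imported explicitly so the snippet runs on its own.

import importlib.util

def check_for_psutil():
    # True only if the 'psutil' package can be located; find_spec does not import it.
    try:
        return importlib.util.find_spec('psutil') is not None
    except ModuleNotFoundError:
        return False

invokeAI_mps_available = check_for_psutil()

if invokeAI_mps_available:
    import psutil
    # Total system RAM in whole GiB (integer division by 1 << 30 bytes).
    mem_total_gb = psutil.virtual_memory().total // (1 << 30)
    print(f"psutil available, system RAM: {mem_total_gb} GiB")
else:
    print("psutil not installed; InvokeAI MPS optimization stays disabled")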