author    Fampai <unknown>                        2022-10-09 02:37:35 +0000
committer AUTOMATIC1111 <16777216c@gmail.com>     2022-10-09 08:08:11 +0000
commit    122d42687b97ec4df4c2a8c335d2de385cd1f1a1 (patch)
tree      c8d496050b6f8d75f3696a98a5cfa86377f3f365 /modules/sd_hijack_optimizations.py
parent    e00b4df7c6f0a13941d6f6ea425eebdaa2bc9318 (diff)
Fix VRAM issue by only loading the hypernetwork when it is selected in settings
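The subject line describes the behavioural change: instead of resolving the hypernetwork on every attention call via `shared.selected_hypernetwork()`, the web UI keeps a single `shared.loaded_hypernetwork` that is populated only when a hypernetwork is picked in settings, so unused networks never occupy VRAM. Below is a minimal sketch of that load-on-selection pattern, assuming a `SharedState` holder and a `load_hypernetwork` settings callback (both illustrative names, not the repository's exact API):

# Minimal sketch (illustrative, not the repository's exact code) of keeping at
# most one hypernetwork resident and loading it only when selected in settings.
import torch


class SharedState:
    """Stand-in for modules.shared: holds the single loaded hypernetwork."""
    hypernetworks = {}            # name -> checkpoint path, discovered at startup
    loaded_hypernetwork = None    # what the attention forwards read


shared = SharedState()


def load_hypernetwork(name=None):
    """Hypothetical settings callback: load the chosen network, drop the old one."""
    shared.loaded_hypernetwork = None          # release previous weights first
    if not name:
        return                                 # nothing selected: nothing stays in VRAM
    path = shared.hypernetworks[name]
    # The real project wraps the checkpoint in an object exposing `.layers`;
    # the raw state dict is enough to illustrate the lifecycle here.
    shared.loaded_hypernetwork = torch.load(path, map_location="cpu")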
Diffstat (limited to 'modules/sd_hijack_optimizations.py')
 modules/sd_hijack_optimizations.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/modules/sd_hijack_optimizations.py b/modules/sd_hijack_optimizations.py
index c4396bb9..634fb4b2 100644
--- a/modules/sd_hijack_optimizations.py
+++ b/modules/sd_hijack_optimizations.py
@@ -28,7 +28,7 @@ def split_cross_attention_forward_v1(self, x, context=None, mask=None):
     q_in = self.to_q(x)
     context = default(context, x)
 
-    hypernetwork = shared.selected_hypernetwork()
+    hypernetwork = shared.loaded_hypernetwork
     hypernetwork_layers = (hypernetwork.layers if hypernetwork is not None else {}).get(context.shape[2], None)
 
     if hypernetwork_layers is not None:
@@ -68,7 +68,7 @@ def split_cross_attention_forward(self, x, context=None, mask=None):
     q_in = self.to_q(x)
     context = default(context, x)
 
-    hypernetwork = shared.selected_hypernetwork()
+    hypernetwork = shared.loaded_hypernetwork
     hypernetwork_layers = (hypernetwork.layers if hypernetwork is not None else {}).get(context.shape[2], None)
 
     if hypernetwork_layers is not None:
@@ -132,7 +132,7 @@ def xformers_attention_forward(self, x, context=None, mask=None):
     h = self.heads
     q_in = self.to_q(x)
     context = default(context, x)
-    hypernetwork = shared.selected_hypernetwork()
+    hypernetwork = shared.loaded_hypernetwork
     hypernetwork_layers = (hypernetwork.layers if hypernetwork is not None else {}).get(context.shape[2], None)
     if hypernetwork_layers is not None:
         k_in = self.to_k(hypernetwork_layers[0](context))
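
All three hunks apply the same lookup: read the global once, and if it exposes a pair of layers registered for the current context width, feed the key and value projections through them. A condensed sketch of that shared pattern follows; the helper name `hypernetwork_kv` and the `v_in`/else branch are inferred from the visible `k_in` line, not copied from the file:

# Condensed sketch of the lookup the three attention forwards share.  `self` is
# a cross-attention module with to_k/to_v projections; `shared.loaded_hypernetwork`
# is either None or an object whose `.layers` dict maps a context width to a
# pair of modules applied before the key and value projections.
def hypernetwork_kv(self, context, shared):
    hypernetwork = shared.loaded_hypernetwork
    layers = (hypernetwork.layers if hypernetwork is not None else {}).get(context.shape[2], None)

    if layers is not None:
        # Route the context through the hypernetwork's k/v transforms first.
        k_in = self.to_k(layers[0](context))
        v_in = self.to_v(layers[1](context))
    else:
        k_in = self.to_k(context)
        v_in = self.to_v(context)
    return k_in, v_in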