author     Fampai <unknown>                      2022-10-09 02:37:35 +0000
committer  AUTOMATIC1111 <16777216c@gmail.com>   2022-10-09 08:08:11 +0000
commit     122d42687b97ec4df4c2a8c335d2de385cd1f1a1 (patch)
tree       c8d496050b6f8d75f3696a98a5cfa86377f3f365 /modules/sd_hijack_optimizations.py
parent     e00b4df7c6f0a13941d6f6ea425eebdaa2bc9318 (diff)
Fix VRAM Issue by only loading in hypernetwork when selected in settings
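The commit message indicates the VRAM saving comes from loading a hypernetwork only when one is actually selected in settings; correspondingly, the diff switches the attention code from calling shared.selected_hypernetwork() to reading a shared.loaded_hypernetwork reference. A minimal sketch of how such a load-on-selection slot might be maintained, assuming a hypothetical load_hypernetwork helper and Hypernetwork container (neither appears in this diff):

import torch


class Hypernetwork:
    # Hypothetical container: maps a context embedding width to a pair of
    # modules that transform the context before the k/v projections.
    def __init__(self, layers):
        self.layers = layers


# Module-level slot analogous to shared.loaded_hypernetwork: it stays None
# until a hypernetwork is actually selected, so nothing sits in VRAM by default.
loaded_hypernetwork = None


def load_hypernetwork(name, checkpoint_paths):
    """Hypothetical settings callback: load only the selected hypernetwork."""
    global loaded_hypernetwork
    if not name or name not in checkpoint_paths:
        loaded_hypernetwork = None          # nothing selected -> keep memory free
        return
    state = torch.load(checkpoint_paths[name], map_location="cpu")
    loaded_hypernetwork = Hypernetwork(state)   # the real code rebuilds nn.Modules from the checkpoint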
Diffstat (limited to 'modules/sd_hijack_optimizations.py')
-rw-r--r--  modules/sd_hijack_optimizations.py | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/modules/sd_hijack_optimizations.py b/modules/sd_hijack_optimizations.py
index c4396bb9..634fb4b2 100644
--- a/modules/sd_hijack_optimizations.py
+++ b/modules/sd_hijack_optimizations.py
@@ -28,7 +28,7 @@ def split_cross_attention_forward_v1(self, x, context=None, mask=None):
     q_in = self.to_q(x)
     context = default(context, x)
 
-    hypernetwork = shared.selected_hypernetwork()
+    hypernetwork = shared.loaded_hypernetwork
     hypernetwork_layers = (hypernetwork.layers if hypernetwork is not None else {}).get(context.shape[2], None)
 
     if hypernetwork_layers is not None:
@@ -68,7 +68,7 @@ def split_cross_attention_forward(self, x, context=None, mask=None):
     q_in = self.to_q(x)
     context = default(context, x)
 
-    hypernetwork = shared.selected_hypernetwork()
+    hypernetwork = shared.loaded_hypernetwork
     hypernetwork_layers = (hypernetwork.layers if hypernetwork is not None else {}).get(context.shape[2], None)
 
     if hypernetwork_layers is not None:
@@ -132,7 +132,7 @@ def xformers_attention_forward(self, x, context=None, mask=None):
     h = self.heads
     q_in = self.to_q(x)
     context = default(context, x)
-    hypernetwork = shared.selected_hypernetwork()
+    hypernetwork = shared.loaded_hypernetwork
     hypernetwork_layers = (hypernetwork.layers if hypernetwork is not None else {}).get(context.shape[2], None)
     if hypernetwork_layers is not None:
         k_in = self.to_k(hypernetwork_layers[0](context))
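For context, the pattern touched by each hunk is the same: inside the cross-attention forward, the loaded hypernetwork (if any) contributes a pair of layers, keyed by the context embedding width, that transform the context before the key and value projections, while the query path is untouched. A self-contained sketch of that pattern, with toy dimensions and the TinyCrossAttention module as assumptions for illustration:

import torch
import torch.nn as nn


class TinyCrossAttention(nn.Module):
    def __init__(self, dim=8, context_dim=8):
        super().__init__()
        self.to_q = nn.Linear(dim, dim, bias=False)
        self.to_k = nn.Linear(context_dim, dim, bias=False)
        self.to_v = nn.Linear(context_dim, dim, bias=False)

    def forward(self, x, context=None, loaded_hypernetwork=None):
        context = x if context is None else context
        q_in = self.to_q(x)

        # Same lookup as the patched code: pick the layer pair keyed by the
        # context embedding width, or fall back to an empty dict when nothing is loaded.
        hypernetwork_layers = (loaded_hypernetwork.layers
                               if loaded_hypernetwork is not None else {}).get(context.shape[2], None)

        if hypernetwork_layers is not None:
            # Hypernetwork path: transform the context before the k/v projections.
            k_in = self.to_k(hypernetwork_layers[0](context))
            v_in = self.to_v(hypernetwork_layers[1](context))
        else:
            # Default path: project the raw context.
            k_in = self.to_k(context)
            v_in = self.to_v(context)
        return q_in, k_in, v_in


# Usage: with no hypernetwork loaded, the k/v projections see the raw context.
attn = TinyCrossAttention()
x = torch.randn(1, 4, 8)
q, k, v = attn(x)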