aboutsummaryrefslogtreecommitdiffstats
path: root/modules/hypernetwork.py
diff options
context:
space:
mode:
authorFampai <unknown>2022-10-09 02:37:35 +0000
committerAUTOMATIC1111 <16777216c@gmail.com>2022-10-09 08:08:11 +0000
commit122d42687b97ec4df4c2a8c335d2de385cd1f1a1 (patch)
treec8d496050b6f8d75f3696a98a5cfa86377f3f365 /modules/hypernetwork.py
parente00b4df7c6f0a13941d6f6ea425eebdaa2bc9318 (diff)
downloadstable-diffusion-webui-gfx803-122d42687b97ec4df4c2a8c335d2de385cd1f1a1.tar.gz
stable-diffusion-webui-gfx803-122d42687b97ec4df4c2a8c335d2de385cd1f1a1.tar.bz2
stable-diffusion-webui-gfx803-122d42687b97ec4df4c2a8c335d2de385cd1f1a1.zip
Fix VRAM Issue by only loading in hypernetwork when selected in settings
Diffstat (limited to 'modules/hypernetwork.py')
-rw-r--r--modules/hypernetwork.py23
1 file changed, 15 insertions, 8 deletions
diff --git a/modules/hypernetwork.py b/modules/hypernetwork.py
index 7f062242..19f1c227 100644
--- a/modules/hypernetwork.py
+++ b/modules/hypernetwork.py
@@ -40,18 +40,25 @@ class Hypernetwork:
self.layers[size] = (HypernetworkModule(size, sd[0]), HypernetworkModule(size, sd[1]))
-def load_hypernetworks(path):
+def list_hypernetworks(path):
res = {}
-
for filename in glob.iglob(os.path.join(path, '**/*.pt'), recursive=True):
+ name = os.path.splitext(os.path.basename(filename))[0]
+ res[name] = filename
+ return res
+
+
+def load_hypernetwork(filename):
+ print(f"Loading hypernetwork {filename}")
+ path = shared.hypernetworks.get(filename, None)
+ if (path is not None):
try:
- hn = Hypernetwork(filename)
- res[hn.name] = hn
+ shared.loaded_hypernetwork = Hypernetwork(path)
except Exception:
- print(f"Error loading hypernetwork {filename}", file=sys.stderr)
+ print(f"Error loading hypernetwork {path}", file=sys.stderr)
print(traceback.format_exc(), file=sys.stderr)
-
- return res
+ else:
+ shared.loaded_hypernetwork = None
def attention_CrossAttention_forward(self, x, context=None, mask=None):
@@ -60,7 +67,7 @@ def attention_CrossAttention_forward(self, x, context=None, mask=None):
q = self.to_q(x)
context = default(context, x)
- hypernetwork = shared.selected_hypernetwork()
+ hypernetwork = shared.loaded_hypernetwork
hypernetwork_layers = (hypernetwork.layers if hypernetwork is not None else {}).get(context.shape[2], None)
if hypernetwork_layers is not None: