author    ssysm <me@theeditorstudio.com>    2022-10-10 03:20:39 +0000
committer ssysm <me@theeditorstudio.com>    2022-10-10 03:20:39 +0000
commit    6fdad291bd5a5edacedec73cf4d0e3852d00300e (patch)
tree      5901473f27ea0410728150a1a6f2da373ea949b1 /modules/hypernetwork.py
parent    cc92dc1f8d73dd4d574c4c8ccab78b7fc61e440b (diff)
parent    45fbd1c5fec887988ab555aac75a999d4f3aff40 (diff)
Merge branch 'master' of https://github.com/AUTOMATIC1111/stable-diffusion-webui into upstream-master
Diffstat (limited to 'modules/hypernetwork.py')
-rw-r--r--  modules/hypernetwork.py  24
1 file changed, 17 insertions(+), 7 deletions(-)
diff --git a/modules/hypernetwork.py b/modules/hypernetwork.py
index 7f062242..498bc9d8 100644
--- a/modules/hypernetwork.py
+++ b/modules/hypernetwork.py
@@ -40,18 +40,28 @@ class Hypernetwork:
                 self.layers[size] = (HypernetworkModule(size, sd[0]), HypernetworkModule(size, sd[1]))
 
 
-def load_hypernetworks(path):
+def list_hypernetworks(path):
     res = {}
-
     for filename in glob.iglob(os.path.join(path, '**/*.pt'), recursive=True):
+        name = os.path.splitext(os.path.basename(filename))[0]
+        res[name] = filename
+    return res
+
+
+def load_hypernetwork(filename):
+    path = shared.hypernetworks.get(filename, None)
+    if path is not None:
+        print(f"Loading hypernetwork {filename}")
         try:
-            hn = Hypernetwork(filename)
-            res[hn.name] = hn
+            shared.loaded_hypernetwork = Hypernetwork(path)
         except Exception:
-            print(f"Error loading hypernetwork {filename}", file=sys.stderr)
+            print(f"Error loading hypernetwork {path}", file=sys.stderr)
             print(traceback.format_exc(), file=sys.stderr)
+    else:
+        if shared.loaded_hypernetwork is not None:
+            print(f"Unloading hypernetwork")
 
-    return res
+        shared.loaded_hypernetwork = None
 
 
 def attention_CrossAttention_forward(self, x, context=None, mask=None):
@@ -60,7 +70,7 @@ def attention_CrossAttention_forward(self, x, context=None, mask=None):
     q = self.to_q(x)
     context = default(context, x)
 
-    hypernetwork = shared.selected_hypernetwork()
+    hypernetwork = shared.loaded_hypernetwork
     hypernetwork_layers = (hypernetwork.layers if hypernetwork is not None else {}).get(context.shape[2], None)
 
     if hypernetwork_layers is not None:
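For context, a minimal sketch of how the refactored functions fit together, assuming `shared.hypernetworks` holds the name-to-path dict and `shared.loaded_hypernetwork` is the active network, as the diff sets up; the directory path and network name below are illustrative, not taken from the commit:

    # Sketch (not part of the commit): wiring the refactored API together.
    # "models/hypernetworks" and "my_hypernet" are hypothetical examples.
    from modules import hypernetwork, shared

    # Build the name -> path dict once; list_hypernetworks no longer loads
    # the checkpoints themselves, it only scans the directory for *.pt files.
    shared.hypernetworks = hypernetwork.list_hypernetworks("models/hypernetworks")

    # Activate a network by name. A name missing from the dict makes
    # load_hypernetwork unload whatever is currently active instead.
    hypernetwork.load_hypernetwork("my_hypernet")

    # The patched CrossAttention forward then reads shared.loaded_hypernetwork
    # directly, replacing the old shared.selected_hypernetwork() lookup.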