author    | DepFA <35278260+dfaker@users.noreply.github.com> | 2022-10-09 23:38:54 +0000
committer | GitHub <noreply@github.com> | 2022-10-09 23:38:54 +0000
commit    | 4117afff11c7b0a2162c73ea02be8cfa30d02640 (patch)
tree      | af26f1b0c9eac8c024d2e51ec8fb5ca4a4d45ed3 /modules/hypernetwork.py
parent    | e2c2925eb4d634b186de2c76798162ec56e2f869 (diff)
parent    | 45fbd1c5fec887988ab555aac75a999d4f3aff40 (diff)
Merge branch 'master' into embed-embeddings-in-images
Diffstat (limited to 'modules/hypernetwork.py')
-rw-r--r-- | modules/hypernetwork.py | 24
1 file changed, 17 insertions(+), 7 deletions(-)
diff --git a/modules/hypernetwork.py b/modules/hypernetwork.py
index 7f062242..498bc9d8 100644
--- a/modules/hypernetwork.py
+++ b/modules/hypernetwork.py
@@ -40,18 +40,28 @@ class Hypernetwork:
             self.layers[size] = (HypernetworkModule(size, sd[0]), HypernetworkModule(size, sd[1]))
 
 
-def load_hypernetworks(path):
+def list_hypernetworks(path):
     res = {}
-
     for filename in glob.iglob(os.path.join(path, '**/*.pt'), recursive=True):
+        name = os.path.splitext(os.path.basename(filename))[0]
+        res[name] = filename
+    return res
+
+
+def load_hypernetwork(filename):
+    path = shared.hypernetworks.get(filename, None)
+    if path is not None:
+        print(f"Loading hypernetwork {filename}")
         try:
-            hn = Hypernetwork(filename)
-            res[hn.name] = hn
+            shared.loaded_hypernetwork = Hypernetwork(path)
         except Exception:
-            print(f"Error loading hypernetwork {filename}", file=sys.stderr)
+            print(f"Error loading hypernetwork {path}", file=sys.stderr)
             print(traceback.format_exc(), file=sys.stderr)
+    else:
+        if shared.loaded_hypernetwork is not None:
+            print(f"Unloading hypernetwork")
 
-    return res
+        shared.loaded_hypernetwork = None
 
 
 def attention_CrossAttention_forward(self, x, context=None, mask=None):
@@ -60,7 +70,7 @@ def attention_CrossAttention_forward(self, x, context=None, mask=None):
     q = self.to_q(x)
     context = default(context, x)
 
-    hypernetwork = shared.selected_hypernetwork()
+    hypernetwork = shared.loaded_hypernetwork
     hypernetwork_layers = (hypernetwork.layers if hypernetwork is not None else {}).get(context.shape[2], None)
 
     if hypernetwork_layers is not None:
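
The first hunk splits discovery from loading: `list_hypernetworks` builds a name → path map, and `load_hypernetwork` swaps the single active network held in `shared.loaded_hypernetwork`. Below is a minimal runnable sketch of that flow; the `Shared` class, the stub `Hypernetwork`, and the `models/hypernetworks` directory are stand-ins assumed for illustration, not the repo's actual `modules/shared.py`.

```python
# Minimal sketch of the list/load split introduced by this commit.
# `Shared` is a hypothetical stand-in for modules/shared.py, which the diff
# assumes exposes `hypernetworks` (name -> path) and `loaded_hypernetwork`.
import glob
import os
import sys
import traceback


class Shared:
    hypernetworks = {}
    loaded_hypernetwork = None


shared = Shared()


class Hypernetwork:
    """Stand-in: the real class loads torch weights from a .pt file."""
    def __init__(self, path):
        self.path = path


def list_hypernetworks(path):
    # Map bare filenames (extension stripped) to full .pt paths, recursively.
    res = {}
    for filename in glob.iglob(os.path.join(path, '**/*.pt'), recursive=True):
        name = os.path.splitext(os.path.basename(filename))[0]
        res[name] = filename
    return res


def load_hypernetwork(filename):
    # Load by name if known; any unknown name unloads the current network.
    path = shared.hypernetworks.get(filename, None)
    if path is not None:
        print(f"Loading hypernetwork {filename}")
        try:
            shared.loaded_hypernetwork = Hypernetwork(path)
        except Exception:
            print(f"Error loading hypernetwork {path}", file=sys.stderr)
            print(traceback.format_exc(), file=sys.stderr)
    else:
        if shared.loaded_hypernetwork is not None:
            print("Unloading hypernetwork")
        shared.loaded_hypernetwork = None


# Discover once at startup, then switch by name (e.g. from a UI dropdown):
shared.hypernetworks = list_hypernetworks('models/hypernetworks')  # assumed dir
load_hypernetwork('None')  # unknown name -> unloads, matching the diff's behavior
```

One consequence of keying the registry by basename: two `.pt` files with the same name in different subfolders collide, and whichever the glob yields last wins.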
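The second hunk reads the active network straight from `shared.loaded_hypernetwork` rather than calling a `selected_hypernetwork()` accessor. The lookup keys `hypernetwork.layers` by the context embedding width (`context.shape[2]`) and falls through when no pair is registered for that width. A sketch of just that dispatch, with identity callables standing in for the real `HypernetworkModule` pair:

```python
# Sketch of the width-keyed layer lookup from attention_CrossAttention_forward.
# Identity callables stand in for the (key, value) HypernetworkModule pair so
# the dispatch is runnable on its own; 768 is SD1's CLIP embedding width.


class FakeHypernetwork:
    def __init__(self):
        self.layers = {768: (lambda k: k, lambda v: v)}


loaded_hypernetwork = None  # mirrors shared.loaded_hypernetwork


def lookup_layers(context_width):
    hypernetwork = loaded_hypernetwork
    # Same expression as the diff: empty dict when nothing is loaded.
    return (hypernetwork.layers if hypernetwork is not None else {}).get(context_width, None)


assert lookup_layers(768) is None       # nothing loaded: attention runs unmodified
loaded_hypernetwork = FakeHypernetwork()
assert lookup_layers(768) is not None   # pair of transforms for keys and values
assert lookup_layers(1024) is None      # unknown width: fall through untouched
```

In the full function, when a pair is found it is applied to the context before the key and value projections respectively, which is how the hypernetwork steers cross-attention without touching the base model's weights.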