diff options
author | Faber <faber8164@gmail.com> | 2023-01-05 20:38:37 +0000 |
---|---|---|
committer | GitHub <noreply@github.com> | 2023-01-05 20:38:37 +0000 |
commit | 81133d4168ae0bae9bf8bf1a1d4983319a589112 (patch) | |
tree | 12f6e58192c1dd7f21d1cfd41c88a792290ac2f0 /modules/textual_inversion/textual_inversion.py | |
parent | 310b71f669e4f2cea11b023c47f7ffedd82ab464 (diff) | |
download | stable-diffusion-webui-gfx803-81133d4168ae0bae9bf8bf1a1d4983319a589112.tar.gz stable-diffusion-webui-gfx803-81133d4168ae0bae9bf8bf1a1d4983319a589112.tar.bz2 stable-diffusion-webui-gfx803-81133d4168ae0bae9bf8bf1a1d4983319a589112.zip |
allow loading embeddings from subdirectories
Diffstat (limited to 'modules/textual_inversion/textual_inversion.py')
-rw-r--r-- | modules/textual_inversion/textual_inversion.py | 23 |
1 file changed, 12 insertions, 11 deletions
diff --git a/modules/textual_inversion/textual_inversion.py b/modules/textual_inversion/textual_inversion.py index 24b43045..0a059044 100644 --- a/modules/textual_inversion/textual_inversion.py +++ b/modules/textual_inversion/textual_inversion.py @@ -149,19 +149,20 @@ class EmbeddingDatabase: else:
self.skipped_embeddings[name] = embedding
- for fn in os.listdir(self.embeddings_dir):
- try:
- fullfn = os.path.join(self.embeddings_dir, fn)
-
- if os.stat(fullfn).st_size == 0:
+ for root, dirs, fns in os.walk(self.embeddings_dir):
+ for fn in fns:
+ try:
+ fullfn = os.path.join(root, fn)
+
+ if os.stat(fullfn).st_size == 0:
+ continue
+
+ process_file(fullfn, fn)
+ except Exception:
+ print(f"Error loading embedding {fn}:", file=sys.stderr)
+ print(traceback.format_exc(), file=sys.stderr)
continue
- process_file(fullfn, fn)
- except Exception:
- print(f"Error loading embedding {fn}:", file=sys.stderr)
- print(traceback.format_exc(), file=sys.stderr)
- continue
-
print(f"Textual inversion embeddings loaded({len(self.word_embeddings)}): {', '.join(self.word_embeddings.keys())}")
if len(self.skipped_embeddings) > 0:
print(f"Textual inversion embeddings skipped({len(self.skipped_embeddings)}): {', '.join(self.skipped_embeddings.keys())}")
|