path: root/modules/textual_inversion
author    Yevhenii Hurin <evgeny.gurin@gmail.com>    2023-01-29 13:47:14 +0000
committer Yevhenii Hurin <evgeny.gurin@gmail.com>    2023-01-29 13:47:14 +0000
commit    c46f3ad98b1171d7ad42bf1653e6770b52147bfd (patch)
tree      3eb596481afd559bf94a8c696b93d3f22b0e65c8 /modules/textual_inversion
parent    7c53f81caf817a7e7dc9c2fafebfcce269ecb1d7 (diff)
parent    00dab8f10defbbda579a1bc89c8d4e972c58a20d (diff)
Merge branch 'master' of https://github.com/AUTOMATIC1111/stable-diffusion-webui
Diffstat (limited to 'modules/textual_inversion')
-rw-r--r--  modules/textual_inversion/textual_inversion.py  10
1 file changed, 7 insertions(+), 3 deletions(-)
diff --git a/modules/textual_inversion/textual_inversion.py b/modules/textual_inversion/textual_inversion.py
index 6cf00e65..a1a406c2 100644
--- a/modules/textual_inversion/textual_inversion.py
+++ b/modules/textual_inversion/textual_inversion.py
@@ -112,6 +112,7 @@ class EmbeddingDatabase:
         self.skipped_embeddings = {}
         self.expected_shape = -1
         self.embedding_dirs = {}
+        self.previously_displayed_embeddings = ()
 
     def add_embedding_dir(self, path):
         self.embedding_dirs[path] = DirWithTextualInversionEmbeddings(path)
@@ -228,9 +229,12 @@ class EmbeddingDatabase:
             self.load_from_dir(embdir)
             embdir.update()
 
-        print(f"Textual inversion embeddings loaded({len(self.word_embeddings)}): {', '.join(self.word_embeddings.keys())}")
-        if len(self.skipped_embeddings) > 0:
-            print(f"Textual inversion embeddings skipped({len(self.skipped_embeddings)}): {', '.join(self.skipped_embeddings.keys())}")
+        displayed_embeddings = (tuple(self.word_embeddings.keys()), tuple(self.skipped_embeddings.keys()))
+        if self.previously_displayed_embeddings != displayed_embeddings:
+            self.previously_displayed_embeddings = displayed_embeddings
+            print(f"Textual inversion embeddings loaded({len(self.word_embeddings)}): {', '.join(self.word_embeddings.keys())}")
+            if len(self.skipped_embeddings) > 0:
+                print(f"Textual inversion embeddings skipped({len(self.skipped_embeddings)}): {', '.join(self.skipped_embeddings.keys())}")
 
     def find_embedding_at_position(self, tokens, offset):
         token = tokens[offset]
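
The patch is a print-deduplication pattern: the loader snapshots the loaded and skipped embedding names into hashable tuples, and only reprints the status lines when that snapshot differs from the one shown last time, so repeated model reloads with an unchanged embedding set stay quiet. Below is a minimal standalone sketch of the same pattern; the class and attribute names follow the diff, while the report() method and the driver at the bottom are hypothetical, added here only to make the snippet runnable.

class EmbeddingDatabase:
    """Stripped-down sketch of the deduplicated status printing."""

    def __init__(self):
        self.word_embeddings = {}      # name -> embedding (successfully loaded)
        self.skipped_embeddings = {}   # name -> embedding (skipped, e.g. wrong shape)
        # Tuples are immutable and comparable, so the last-printed state
        # can be stored and checked with a single != comparison.
        self.previously_displayed_embeddings = ()

    def report(self):
        displayed_embeddings = (tuple(self.word_embeddings.keys()), tuple(self.skipped_embeddings.keys()))
        if self.previously_displayed_embeddings != displayed_embeddings:
            self.previously_displayed_embeddings = displayed_embeddings
            print(f"Textual inversion embeddings loaded({len(self.word_embeddings)}): {', '.join(self.word_embeddings.keys())}")
            if len(self.skipped_embeddings) > 0:
                print(f"Textual inversion embeddings skipped({len(self.skipped_embeddings)}): {', '.join(self.skipped_embeddings.keys())}")

db = EmbeddingDatabase()
db.word_embeddings["style-a"] = object()
db.report()   # prints: Textual inversion embeddings loaded(1): style-a
db.report()   # silent: the set of names has not changed
db.word_embeddings["style-b"] = object()
db.report()   # prints: Textual inversion embeddings loaded(2): style-a, style-b

One consequence of comparing key tuples rather than key sets: dict keys preserve insertion order in Python 3.7+, so reloading the same embeddings in a different order counts as a change and triggers a reprint, which matches the behavior of the patched code.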