author     AUTOMATIC <16777216c@gmail.com>   2023-01-29 08:53:05 +0000
committer  AUTOMATIC <16777216c@gmail.com>   2023-01-29 08:53:05 +0000
commit     aa6e55e00140da6d73d3d360a5628c1b1316550d (patch)
tree       2994d945a5b7270bc6b66d963ff42543e5384244 /modules/textual_inversion/textual_inversion.py
parent     8d7382ab24756cdcc37e71406832814f4713c55e (diff)
do not display the message for TI unless the list of loaded embeddings changed
Diffstat (limited to 'modules/textual_inversion/textual_inversion.py')
 -rw-r--r--  modules/textual_inversion/textual_inversion.py | 10 +++++++---
 1 file changed, 7 insertions(+), 3 deletions(-)
diff --git a/modules/textual_inversion/textual_inversion.py b/modules/textual_inversion/textual_inversion.py
index 6cf00e65..a1a406c2 100644
--- a/modules/textual_inversion/textual_inversion.py
+++ b/modules/textual_inversion/textual_inversion.py
@@ -112,6 +112,7 @@ class EmbeddingDatabase:
         self.skipped_embeddings = {}
         self.expected_shape = -1
         self.embedding_dirs = {}
+        self.previously_displayed_embeddings = ()
 
     def add_embedding_dir(self, path):
         self.embedding_dirs[path] = DirWithTextualInversionEmbeddings(path)
@@ -228,9 +229,12 @@ class EmbeddingDatabase:
             self.load_from_dir(embdir)
             embdir.update()
 
-        print(f"Textual inversion embeddings loaded({len(self.word_embeddings)}): {', '.join(self.word_embeddings.keys())}")
-        if len(self.skipped_embeddings) > 0:
-            print(f"Textual inversion embeddings skipped({len(self.skipped_embeddings)}): {', '.join(self.skipped_embeddings.keys())}")
+        displayed_embeddings = (tuple(self.word_embeddings.keys()), tuple(self.skipped_embeddings.keys()))
+        if self.previously_displayed_embeddings != displayed_embeddings:
+            self.previously_displayed_embeddings = displayed_embeddings
+            print(f"Textual inversion embeddings loaded({len(self.word_embeddings)}): {', '.join(self.word_embeddings.keys())}")
+            if len(self.skipped_embeddings) > 0:
+                print(f"Textual inversion embeddings skipped({len(self.skipped_embeddings)}): {', '.join(self.skipped_embeddings.keys())}")
 
     def find_embedding_at_position(self, tokens, offset):
         token = tokens[offset]
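The change applies a simple change-detection pattern: snapshot the state to be reported as hashable tuples, compare against the snapshot kept from the previous call, and print only when the two differ, so repeated reloads with an unchanged embedding list stay silent. Below is a minimal standalone sketch of that pattern, assuming nothing beyond the diff itself; the Reporter class and all of its names are illustrative, not part of the repository.

# Minimal sketch of the "print only when the state changed" pattern used in
# the commit above. Reporter and its names are illustrative, not from the repo.
class Reporter:
    def __init__(self):
        # Empty tuple: nothing has been reported yet, so the first call prints.
        self.previously_displayed = ()

    def report(self, loaded, skipped):
        # Take tuple snapshots of the dict keys: tuples are immutable and
        # compare by value, so they can be stored and checked across calls.
        displayed = (tuple(loaded), tuple(skipped))
        if self.previously_displayed != displayed:
            self.previously_displayed = displayed
            print(f"loaded({len(loaded)}): {', '.join(loaded)}")
            if len(skipped) > 0:
                print(f"skipped({len(skipped)}): {', '.join(skipped)}")

r = Reporter()
r.report({"a": 1, "b": 2}, {})  # first call: prints the loaded list
r.report({"a": 1, "b": 2}, {})  # identical state: prints nothing
r.report({"a": 1}, {"b": 2})    # state changed: prints again, now with skipped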