author | Philpax <me@philpax.me> | 2023-01-02 01:21:22 +0000
committer | Philpax <me@philpax.me> | 2023-01-02 01:21:48 +0000
commit | c65909ad16a1962129114c6251de092f49479b06 (patch)
tree | cf01139d7014b00bbeba986d188bfa4f1b7c2d1d /modules/textual_inversion/textual_inversion.py
parent | b5819d9bf1794071139c640b5f1e72c84a0e051a (diff)
download | stable-diffusion-webui-gfx803-c65909ad16a1962129114c6251de092f49479b06.tar.gz stable-diffusion-webui-gfx803-c65909ad16a1962129114c6251de092f49479b06.tar.bz2 stable-diffusion-webui-gfx803-c65909ad16a1962129114c6251de092f49479b06.zip
feat(api): return more data for embeddings
Diffstat (limited to 'modules/textual_inversion/textual_inversion.py')
-rw-r--r-- | modules/textual_inversion/textual_inversion.py | 8
1 file changed, 4 insertions(+), 4 deletions(-)
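In brief: `skipped_embeddings` goes from a list of names to a dict mapping each name to its `Embedding` object, so callers such as the web API can report more than the name. A minimal Python sketch of the shape change (the stand-in value below is illustrative, not code from the repository):

```python
embedding = object()  # stand-in for an Embedding instance loaded from disk

# Before: only the names of skipped embeddings were kept.
skipped = []
skipped.append("my-embedding")

# After: the objects themselves are kept, keyed by name, so details such
# as shape and vector count remain available after loading finishes.
skipped = {}
skipped["my-embedding"] = embedding
```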
diff --git a/modules/textual_inversion/textual_inversion.py b/modules/textual_inversion/textual_inversion.py
index 1e5722e7..fd253477 100644
--- a/modules/textual_inversion/textual_inversion.py
+++ b/modules/textual_inversion/textual_inversion.py
@@ -59,7 +59,7 @@ class EmbeddingDatabase:
     def __init__(self, embeddings_dir):
         self.ids_lookup = {}
         self.word_embeddings = {}
-        self.skipped_embeddings = []
+        self.skipped_embeddings = {}
         self.dir_mtime = None
         self.embeddings_dir = embeddings_dir
         self.expected_shape = -1
@@ -91,7 +91,7 @@ class EmbeddingDatabase:
         self.dir_mtime = mt
         self.ids_lookup.clear()
         self.word_embeddings.clear()
-        self.skipped_embeddings = []
+        self.skipped_embeddings.clear()
         self.expected_shape = self.get_expected_shape()

         def process_file(path, filename):
@@ -136,7 +136,7 @@ class EmbeddingDatabase:
             if self.expected_shape == -1 or self.expected_shape == embedding.shape:
                 self.register_embedding(embedding, shared.sd_model)
             else:
-                self.skipped_embeddings.append(name)
+                self.skipped_embeddings[name] = embedding

         for fn in os.listdir(self.embeddings_dir):
             try:
@@ -153,7 +153,7 @@ class EmbeddingDatabase:
         print(f"Textual inversion embeddings loaded({len(self.word_embeddings)}): {', '.join(self.word_embeddings.keys())}")
         if len(self.skipped_embeddings) > 0:
-            print(f"Textual inversion embeddings skipped({len(self.skipped_embeddings)}): {', '.join(self.skipped_embeddings)}")
+            print(f"Textual inversion embeddings skipped({len(self.skipped_embeddings)}): {', '.join(self.skipped_embeddings.keys())}")

     def find_embedding_at_position(self, tokens, offset):
         token = tokens[offset]
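With the dict in place, exposing the extra data is straightforward. A minimal sketch, assuming a hypothetical `describe_skipped` helper (this is not the repository's actual API code; the `shape`, `vectors`, and `step` attributes follow the `Embedding` class populated in `process_file` in this same file):

```python
def describe_skipped(db):
    """Serialize skipped embeddings for an API response.

    Hypothetical helper: `db` is assumed to be an EmbeddingDatabase
    after load_textual_inversion_embeddings() has run.
    """
    return {
        name: {
            "shape": emb.shape,      # per-vector dimensionality
            "vectors": emb.vectors,  # number of vectors in the embedding
            "step": emb.step,        # training step, if recorded
        }
        for name, emb in db.skipped_embeddings.items()
    }
```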