author     AUTOMATIC1111 <16777216c@gmail.com>      2023-01-02 03:11:10 +0000
committer  GitHub <noreply@github.com>              2023-01-02 03:11:10 +0000
commit     fd4461d44c7256d56889f5b5ed9fb660a859172f (patch)
tree       b768bab8ba99b90099bdf1db82947be9c3b6623b /modules/textual_inversion
parent     f39a79d1430cb2a07853b9e1ba0fde0db8a0d5d5 (diff)
parent     c65909ad16a1962129114c6251de092f49479b06 (diff)
Merge pull request #6196 from philpax/add-embeddings-api
feat(api): add /sdapi/v1/embeddings
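For context, a minimal sketch of calling the new endpoint. The URL path comes from this PR; the default port, API-enabled launch, and the loaded/skipped response keys are assumptions based on the word_embeddings/skipped_embeddings split in the diff below, not taken from this page.

import requests  # third-party HTTP client, assumed installed

# Query the endpoint added by this PR against a local webui instance.
# Host and port assume a default launch with the API enabled.
resp = requests.get("http://127.0.0.1:7860/sdapi/v1/embeddings")
resp.raise_for_status()
data = resp.json()

# Assumed response shape: one mapping per database bucket, keyed by
# embedding name, mirroring word_embeddings/skipped_embeddings below.
print("loaded:", sorted(data.get("loaded", {})))
print("skipped:", sorted(data.get("skipped", {})))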
Diffstat (limited to 'modules/textual_inversion')
-rw-r--r--   modules/textual_inversion/textual_inversion.py | 8 ++++----
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/modules/textual_inversion/textual_inversion.py b/modules/textual_inversion/textual_inversion.py
index 1e5722e7..fd253477 100644
--- a/modules/textual_inversion/textual_inversion.py
+++ b/modules/textual_inversion/textual_inversion.py
@@ -59,7 +59,7 @@ class EmbeddingDatabase:
     def __init__(self, embeddings_dir):
         self.ids_lookup = {}
         self.word_embeddings = {}
-        self.skipped_embeddings = []
+        self.skipped_embeddings = {}
         self.dir_mtime = None
         self.embeddings_dir = embeddings_dir
         self.expected_shape = -1
@@ -91,7 +91,7 @@ class EmbeddingDatabase:
         self.dir_mtime = mt
         self.ids_lookup.clear()
         self.word_embeddings.clear()
-        self.skipped_embeddings = []
+        self.skipped_embeddings.clear()
         self.expected_shape = self.get_expected_shape()

         def process_file(path, filename):
@@ -136,7 +136,7 @@ class EmbeddingDatabase:
             if self.expected_shape == -1 or self.expected_shape == embedding.shape:
                 self.register_embedding(embedding, shared.sd_model)
             else:
-                self.skipped_embeddings.append(name)
+                self.skipped_embeddings[name] = embedding

         for fn in os.listdir(self.embeddings_dir):
             try:
@@ -153,7 +153,7 @@ class EmbeddingDatabase:
         print(f"Textual inversion embeddings loaded({len(self.word_embeddings)}): {', '.join(self.word_embeddings.keys())}")
         if len(self.skipped_embeddings) > 0:
-            print(f"Textual inversion embeddings skipped({len(self.skipped_embeddings)}): {', '.join(self.skipped_embeddings)}")
+            print(f"Textual inversion embeddings skipped({len(self.skipped_embeddings)}): {', '.join(self.skipped_embeddings.keys())}")

     def find_embedding_at_position(self, tokens, offset):
         token = tokens[offset]
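The list-to-dict change is what enables the new endpoint: skipped embeddings now retain their Embedding objects keyed by name, so both buckets can be serialized the same way. A hypothetical consumer sketch follows; this is not the actual api.py handler from the PR, and the fields read in describe() are assumptions about the Embedding class, not shown in this diff.

# Illustrative consumer of the dict-valued skipped_embeddings (assumed
# handler shape; not code from this PR). Both lookups are now
# name -> Embedding, so one serializer covers loaded and skipped alike.
def embeddings_response(db):
    def describe(embedding):
        # Attribute names here (shape, vectors) are assumptions about
        # the Embedding class, hence the defensive getattr defaults.
        return {"shape": getattr(embedding, "shape", None),
                "vectors": getattr(embedding, "vectors", 0)}

    return {
        "loaded": {name: describe(e) for name, e in db.word_embeddings.items()},
        "skipped": {name: describe(e) for name, e in db.skipped_embeddings.items()},
    }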