author    Brad Smith <bradster@infinitewarp.com>  2023-04-14 03:12:33 +0000
committer Brad Smith <bradster@infinitewarp.com>  2023-04-14 03:19:10 +0000
commit    dab5002c59ce1f68deae5e6e0c03e5e2c27155db (patch)
tree      bf5130519d6e7c79f4bc9a8bbb31d613cda76d69
parent    27b9ec60e4ede748ec23615fecddb70e48daa623 (diff)
sort self.word_embeddings without instantiating a new dict
-rw-r--r--  modules/textual_inversion/textual_inversion.py  9
1 file changed, 6 insertions, 3 deletions
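
The patch drops OrderedDict because, since Python 3.7, the built-in dict preserves insertion order, so a plain dict comprehension over a sorted sequence already yields a deterministically ordered mapping. A minimal standalone sketch of that idiom, using the same key function as the patch; the Embedding namedtuple and the sample names here are hypothetical stand-ins, not taken from the webui codebase:

    from collections import namedtuple

    Embedding = namedtuple("Embedding", ["name"])

    # Hypothetical sample data; in the real code these come from load_from_dir().
    word_embeddings = {e.name: e for e in (Embedding("Zebra"), Embedding("apple"))}

    # Rebuild in case-insensitive alphabetical order, as in the patch.
    word_embeddings = {e.name: e for e in sorted(word_embeddings.values(),
                                                 key=lambda e: e.name.lower())}

    print(list(word_embeddings))  # ['apple', 'Zebra']
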
diff --git a/modules/textual_inversion/textual_inversion.py b/modules/textual_inversion/textual_inversion.py
index 7c50839f..379df243 100644
--- a/modules/textual_inversion/textual_inversion.py
+++ b/modules/textual_inversion/textual_inversion.py
@@ -2,7 +2,7 @@ import os
import sys
import traceback
import inspect
-from collections import namedtuple, OrderedDict
+from collections import namedtuple
import torch
import tqdm
@@ -108,7 +108,7 @@ class DirWithTextualInversionEmbeddings:
class EmbeddingDatabase:
def __init__(self):
self.ids_lookup = {}
- self.word_embeddings = OrderedDict()
+ self.word_embeddings = {}
self.skipped_embeddings = {}
self.expected_shape = -1
self.embedding_dirs = {}
@@ -234,7 +234,10 @@ class EmbeddingDatabase:
embdir.update()
# re-sort word_embeddings because load_from_dir may not load in alphabetic order.
- self.word_embeddings = {e.name: e for e in sorted(self.word_embeddings.values(), key=lambda e: e.name.lower())}
+ # using a temporary copy so we don't reinitialize self.word_embeddings in case other objects have a reference to it.
+ sorted_word_embeddings = {e.name: e for e in sorted(self.word_embeddings.values(), key=lambda e: e.name.lower())}
+ self.word_embeddings.clear()
+ self.word_embeddings.update(sorted_word_embeddings)
displayed_embeddings = (tuple(self.word_embeddings.keys()), tuple(self.skipped_embeddings.keys()))
if self.previously_displayed_embeddings != displayed_embeddings:
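
Why clear() and update() instead of simply rebinding the attribute: rebinding creates a new dict object, so any other object that captured a reference to the old one would keep seeing stale, unsorted contents. A minimal sketch of the aliasing behavior the added comment guards against; the variable names are illustrative, not from the codebase:

    db = {"b": 2, "a": 1}          # stands in for self.word_embeddings
    alias = db                     # e.g. another module that cached the mapping

    # Build the sorted copy first, then mutate the original object in place.
    sorted_copy = {k: db[k] for k in sorted(db, key=str.lower)}
    db.clear()
    db.update(sorted_copy)

    assert alias is db             # still the very same object,
    print(list(alias))             # ['a', 'b']: the alias sees the new order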