author     AUTOMATIC1111 <16777216c@gmail.com>   2023-07-29 12:15:06 +0000
committer  AUTOMATIC1111 <16777216c@gmail.com>   2023-07-29 12:15:06 +0000
commit     6f0abbb71a3f29d6df63fed82d5d5e196ca0d4de (patch)
tree       a0e78ef494964799b8e7bdd3347fd7047cdd0508 /modules/sd_models_xl.py
parent     4ca9f70b592c219c93a25d7ede187284d625f1e6 (diff)
textual inversion support for SDXL
Diffstat (limited to 'modules/sd_models_xl.py')
-rw-r--r--   modules/sd_models_xl.py   9 +++++++++
1 file changed, 9 insertions(+), 0 deletions(-)
diff --git a/modules/sd_models_xl.py b/modules/sd_models_xl.py
index 40559208..bc219508 100644
--- a/modules/sd_models_xl.py
+++ b/modules/sd_models_xl.py
@@ -56,6 +56,14 @@ def encode_embedding_init_text(self: sgm.modules.GeneralConditioner, init_text,
     return torch.cat(res, dim=1)
 
 
+def tokenize(self: sgm.modules.GeneralConditioner, texts):
+    for embedder in [embedder for embedder in self.embedders if hasattr(embedder, 'tokenize')]:
+        return embedder.tokenize(texts)
+
+    raise AssertionError('no tokenizer available')
+
+
+
 def process_texts(self, texts):
     for embedder in [embedder for embedder in self.embedders if hasattr(embedder, 'process_texts')]:
         return embedder.process_texts(texts)
@@ -68,6 +76,7 @@ def get_target_prompt_token_count(self, token_count):
 
 
 # those additions to GeneralConditioner make it possible to use it as model.cond_stage_model from SD1.5 in exist
 sgm.modules.GeneralConditioner.encode_embedding_init_text = encode_embedding_init_text
+sgm.modules.GeneralConditioner.tokenize = tokenize
 sgm.modules.GeneralConditioner.process_texts = process_texts
 sgm.modules.GeneralConditioner.get_target_prompt_token_count = get_target_prompt_token_count
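
For readers skimming the diff: the new tokenize helper follows the same delegation pattern as process_texts and get_target_prompt_token_count. It forwards the call to the first embedder that exposes a tokenize method, so textual inversion code written against SD1.5's cond_stage_model keeps working with SDXL's GeneralConditioner. Below is a minimal standalone sketch of that pattern; the dummy class names are hypothetical stand-ins, not the commit's code.

# Standalone illustration of the delegation + monkey-patching pattern used above.
# _DummyConditioner and _DummyClipEmbedder are placeholders for
# sgm.modules.GeneralConditioner and its CLIP embedders.
class _DummyClipEmbedder:
    def tokenize(self, texts):
        # real embedders return batches of token ids; plain lists suffice for the sketch
        return [[ord(c) for c in text] for text in texts]

class _DummyConditioner:
    def __init__(self, embedders):
        self.embedders = embedders

def tokenize(self, texts):
    # same logic as the added method: delegate to the first embedder that can tokenize
    for embedder in [embedder for embedder in self.embedders if hasattr(embedder, 'tokenize')]:
        return embedder.tokenize(texts)

    raise AssertionError('no tokenizer available')

# monkey-patch, mirroring the assignments at the bottom of the diff
_DummyConditioner.tokenize = tokenize

conditioner = _DummyConditioner([_DummyClipEmbedder()])
print(conditioner.tokenize(["a photo of a cat"]))  # delegated to the embedder; raises if no embedder can tokenize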