author    AUTOMATIC <16777216c@gmail.com>    2022-10-21 13:38:06 +0000
committer AUTOMATIC <16777216c@gmail.com>    2022-10-21 13:38:06 +0000
commit    9286fe53de2eef91f13cc3ad5938ddf67ecc8413 (patch)
tree      57f93412b2e1a5e4446d9fe83ba1d8c72d52f493
parent    e89e2f7c2c89ba25b9976af38ad648cbfb793a82 (diff)
make aesthetic embedding compatible with prompts longer than 75 tokens
-rw-r--r--  modules/sd_hijack.py | 2
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/modules/sd_hijack.py b/modules/sd_hijack.py
index 36198a3c..1f8587d1 100644
--- a/modules/sd_hijack.py
+++ b/modules/sd_hijack.py
@@ -332,8 +332,8 @@ class FrozenCLIPEmbedderWithCustomWords(torch.nn.Module):
                     multipliers.append([1.0] * 75)
 
             z1 = self.process_tokens(tokens, multipliers)
+            z1 = shared.aesthetic_clip(z1, remade_batch_tokens)
             z = z1 if z is None else torch.cat((z, z1), axis=-2)
-            z = shared.aesthetic_clip(z, remade_batch_tokens)
 
             remade_batch_tokens = rem_tokens
             batch_multipliers = rem_multipliers
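
For context, this hunk sits inside the loop that encodes long prompts in 75-token chunks: each chunk is turned into an embedding z1 and the chunks are concatenated along the token axis to form z. The removed line applied shared.aesthetic_clip to the already-concatenated z, which only lines up with a single chunk's tokens; the added line applies it to each chunk embedding z1 before concatenation, so prompts longer than 75 tokens are handled as well. Below is a minimal sketch of that loop under stated assumptions: encode_in_chunks and its process_tokens / aesthetic_clip parameters are hypothetical stand-ins for self.process_tokens and shared.aesthetic_clip, and the EOS padding and attention-fix bookkeeping from sd_hijack.py are omitted.

import torch

def encode_in_chunks(process_tokens, aesthetic_clip,
                     remade_batch_tokens, batch_multipliers, chunk_size=75):
    # Hypothetical sketch of the chunked-encoding loop after this commit.
    z = None
    while max(map(len, remade_batch_tokens)) != 0:
        # Take the next 75-token slice of every prompt in the batch.
        tokens = [t[:chunk_size] for t in remade_batch_tokens]
        multipliers = [m[:chunk_size] for m in batch_multipliers]

        z1 = process_tokens(tokens, multipliers)
        # After this commit the aesthetic adjustment is applied to each
        # chunk embedding before concatenation, not to the growing z.
        z1 = aesthetic_clip(z1, remade_batch_tokens)
        z = z1 if z is None else torch.cat((z, z1), axis=-2)

        # Drop the slice that was just encoded and continue with the rest.
        remade_batch_tokens = [t[chunk_size:] for t in remade_batch_tokens]
        batch_multipliers = [m[chunk_size:] for m in batch_multipliers]

    return z

Applying the adjustment per chunk presumably keeps the tensor passed to the aesthetic CLIP at the fixed width produced by a single process_tokens call, which is why the call is moved above the torch.cat rather than left on the concatenated result.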