author    guaneec <guaneec@users.noreply.github.com>  2022-09-25 06:13:03 +0000
committer AUTOMATIC1111 <16777216c@gmail.com>         2022-09-25 06:30:02 +0000
commit    615b2fc9ce8cb0c61424aa03655f82209f425d21 (patch)
tree      d82a048bfc74311f757a25291f115681a9fce444
parent    b8eae5de9324df0c1cd30561ec75d85490aced59 (diff)
download  stable-diffusion-webui-gfx803-615b2fc9ce8cb0c61424aa03655f82209f425d21.tar.gz
          stable-diffusion-webui-gfx803-615b2fc9ce8cb0c61424aa03655f82209f425d21.tar.bz2
          stable-diffusion-webui-gfx803-615b2fc9ce8cb0c61424aa03655f82209f425d21.zip
Fix token max length
-rw-r--r-- modules/sd_hijack.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/modules/sd_hijack.py b/modules/sd_hijack.py
index 62ba9101..ccbaa9ad 100644
--- a/modules/sd_hijack.py
+++ b/modules/sd_hijack.py
@@ -300,7 +300,7 @@ class FrozenCLIPEmbedderWithCustomWords(torch.nn.Module):
         remade_batch_tokens = []
         id_start = self.wrapped.tokenizer.bos_token_id
         id_end = self.wrapped.tokenizer.eos_token_id
-        maxlen = self.wrapped.max_length - 2
+        maxlen = self.wrapped.max_length
         used_custom_terms = []
         cache = {}
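
Why the one-line change matters (a sketch for context, not part of the patch): later in this same method the code already reserves two slots for the BOS/EOS special tokens by truncating to maxlen - 2 before wrapping the prompt as [BOS] + tokens + [EOS]. Initialising maxlen as max_length - 2 therefore subtracted the special tokens twice, shrinking the usable prompt window. A minimal illustration, assuming CLIP's usual 77-token context; the constant and helper name below are hypothetical, not taken from the patch:

    # Hypothetical sketch of the off-by-two; CLIP tokenizers commonly use 77.
    MAX_LENGTH = 77  # context size, counting the BOS and EOS special tokens

    def usable_prompt_tokens(maxlen: int) -> int:
        # The surrounding code truncates to maxlen - 2 and then wraps the
        # result as [BOS] + tokens + [EOS], so content capacity is maxlen - 2.
        return maxlen - 2

    print(usable_prompt_tokens(MAX_LENGTH - 2))  # before the fix: 73 (slots reserved twice)
    print(usable_prompt_tokens(MAX_LENGTH))      # after the fix:  75 (intended budget)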