author | guaneec <guaneec@users.noreply.github.com> | 2022-09-25 06:13:03 +0000 |
---|---|---|
committer | AUTOMATIC1111 <16777216c@gmail.com> | 2022-09-25 06:30:02 +0000 |
commit | 615b2fc9ce8cb0c61424aa03655f82209f425d21 (patch) | |
tree | d82a048bfc74311f757a25291f115681a9fce444 | |
parent | b8eae5de9324df0c1cd30561ec75d85490aced59 (diff) | |
Fix token max length
-rw-r--r-- | modules/sd_hijack.py | 2 |
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/modules/sd_hijack.py b/modules/sd_hijack.py
index 62ba9101..ccbaa9ad 100644
--- a/modules/sd_hijack.py
+++ b/modules/sd_hijack.py
@@ -300,7 +300,7 @@ class FrozenCLIPEmbedderWithCustomWords(torch.nn.Module):
         remade_batch_tokens = []
         id_start = self.wrapped.tokenizer.bos_token_id
         id_end = self.wrapped.tokenizer.eos_token_id
-        maxlen = self.wrapped.max_length - 2
+        maxlen = self.wrapped.max_length
         used_custom_terms = []
         cache = {}
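For context, the CLIP text encoder consumes fixed-length sequences of `max_length` token ids (77 for the tokenizer wrapped here), and two of those positions are taken by the begin/end markers fetched above as `id_start` and `id_end`. The sketch below is illustrative only, not code from this repository; it assumes the Hugging Face transformers `CLIPTokenizer` (the tokenizer type used by the wrapped FrozenCLIPEmbedder), and the model id and prompt are made-up examples. It shows the BOS/EOS framing that the `maxlen` budget in the diff is concerned with.

```python
# Illustrative sketch only, not code from this repository.
# Assumes the Hugging Face transformers CLIPTokenizer; the model id and
# prompt below are examples, not values taken from the webui.
from transformers import CLIPTokenizer

tokenizer = CLIPTokenizer.from_pretrained("openai/clip-vit-large-patch14")

max_length = tokenizer.model_max_length   # 77 for CLIP text encoders
id_start = tokenizer.bos_token_id         # <|startoftext|>
id_end = tokenizer.eos_token_id           # <|endoftext|>

# Tokenize without special tokens so the framing below is explicit.
prompt = "a photograph of an astronaut riding a horse"
tokens = tokenizer(prompt, add_special_tokens=False)["input_ids"]

# BOS and EOS take two of the max_length positions, so at most
# max_length - 2 prompt tokens fit in one context window.
tokens = tokens[: max_length - 2]

ids = [id_start] + tokens + [id_end]
ids += [id_end] * (max_length - len(ids))  # pad the rest of the window
assert len(ids) == max_length
```

The sketch only shows why a two-token allowance exists at all; the diff does not show how the surrounding code accounts for it downstream, so this is not a statement about which value of `maxlen` is correct.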