author     AUTOMATIC <16777216c@gmail.com>  2022-09-20 21:20:11 +0000
committer  AUTOMATIC <16777216c@gmail.com>  2022-09-20 21:20:11 +0000
commit     1578859305f3593fa213309e505905e607a0e52c
tree       1ad1d6724a9a784594c870b98b701c9a4da7d202
parent     e452c961edb333d3a00af65561d9b1d3f8e6f94d
fix for too large embeddings causing an error
Diffstat (limited to 'modules/sd_hijack.py')
 modules/sd_hijack.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/modules/sd_hijack.py b/modules/sd_hijack.py
index ec83c0cb..bd1a0936 100644
--- a/modules/sd_hijack.py
+++ b/modules/sd_hijack.py
@@ -400,7 +400,7 @@ class EmbeddingsWithFixes(torch.nn.Module):
             for fixes, tensor in zip(batch_fixes, inputs_embeds):
                 for offset, word in fixes:
                     emb = self.embeddings.word_embeddings[word]
-                    emb_len = min(tensor.shape[0]-offset, emb.shape[0])
+                    emb_len = min(tensor.shape[0]-offset-1, emb.shape[0])
                     tensor[offset+1:offset+1+emb_len] = self.embeddings.word_embeddings[word][0:emb_len]
 
         return inputs_embeds
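
The change is a one-row off-by-one fix: the embedding is written starting at offset+1, but the available length was measured from offset. When an embedding was long enough to reach past the end of the tensor, emb_len came out one larger than the target slice could hold, and the slice assignment failed with a shape-mismatch RuntimeError. A minimal standalone sketch of the failure and the fix (sizes and variable names here are illustrative, not the webui's actual objects):

import torch

seq_len, dim = 77, 768              # e.g. CLIP context length and embedding width
tensor = torch.zeros(seq_len, dim)  # stands in for one row of inputs_embeds
offset = 73                         # placeholder token near the end of the prompt
emb = torch.ones(8, dim)            # an embedding longer than the space left

# Old computation: measures the room from `offset`, but writing starts at
# offset+1, so the target slice is one row shorter than emb_len.
emb_len = min(tensor.shape[0] - offset, emb.shape[0])      # -> 4
try:
    tensor[offset+1:offset+1+emb_len] = emb[0:emb_len]     # tensor[74:78] holds only 3 rows
except RuntimeError as e:
    print("shape mismatch:", e)

# Fixed computation: subtract the extra 1 so the slice always fits.
emb_len = min(tensor.shape[0] - offset - 1, emb.shape[0])  # -> 3
tensor[offset+1:offset+1+emb_len] = emb[0:emb_len]         # ok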