author    | hentailord85ez <112723046+hentailord85ez@users.noreply.github.com> | 2022-10-10 15:09:06 +0000
committer | AUTOMATIC1111 <16777216c@gmail.com> | 2022-10-10 15:54:48 +0000
commit    | 460bbae58726c177beddfcddf351f27e205d3fb2 (patch)
tree      | 8063b1fb0d18ef4838760ff9067286fd7fc88b1a /modules/sd_hijack.py
parent    | b340439586d844e76782149ca1857c8de35773ec (diff)
Pad beginning of textual inversion embedding
Diffstat (limited to 'modules/sd_hijack.py')
-rw-r--r-- | modules/sd_hijack.py | 5
1 file changed, 5 insertions, 0 deletions
diff --git a/modules/sd_hijack.py b/modules/sd_hijack.py
index 8d5c77d8..3a60cd63 100644
--- a/modules/sd_hijack.py
+++ b/modules/sd_hijack.py
@@ -151,6 +151,11 @@ class FrozenCLIPEmbedderWithCustomWords(torch.nn.Module):
                 else:
                     emb_len = int(embedding.vec.shape[0])
                     iteration = len(remade_tokens) // 75
+                    if (len(remade_tokens) + emb_len) // 75 != iteration:
+                        rem = (75 * (iteration + 1) - len(remade_tokens))
+                        remade_tokens += [id_end] * rem
+                        multipliers += [1.0] * rem
+                        iteration += 1
                     fixes.append((iteration, (len(remade_tokens) % 75, embedding)))
                     remade_tokens += [0] * emb_len
                     multipliers += [weight] * emb_len
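The hunk above keeps a textual inversion embedding from straddling a 75-token chunk boundary: if appending the embedding's vectors would spill into the next chunk, the current chunk is first padded to the boundary with end tokens (multiplier 1.0), and the embedding is recorded at offset 0 of the following chunk. Below is a minimal, self-contained sketch of that logic; `CHUNK_SIZE`, `ID_END`, and `append_embedding` are illustrative stand-ins rather than names from sd_hijack.py, and 49407 is assumed to be CLIP's end-of-text token id.

```python
# Minimal sketch of the chunk-boundary padding introduced by this commit.
# All names here are illustrative assumptions, not sd_hijack.py identifiers.

CHUNK_SIZE = 75   # prompt tokens are processed in 75-token chunks
ID_END = 49407    # assumed CLIP end-of-text token id, used as padding


def append_embedding(remade_tokens, multipliers, fixes, emb_len, weight, embedding=None):
    """Append an embedding's token slots, padding to the next 75-token
    boundary first if the embedding would otherwise straddle two chunks."""
    iteration = len(remade_tokens) // CHUNK_SIZE

    # If adding emb_len tokens would cross into the next chunk, fill the
    # remainder of the current chunk with end tokens (multiplier 1.0) so the
    # embedding starts at the beginning of the next chunk instead.
    if (len(remade_tokens) + emb_len) // CHUNK_SIZE != iteration:
        rem = CHUNK_SIZE * (iteration + 1) - len(remade_tokens)
        remade_tokens += [ID_END] * rem
        multipliers += [1.0] * rem
        iteration += 1

    # Record where the embedding vectors should later be injected:
    # (chunk index, offset within that chunk, embedding object).
    fixes.append((iteration, (len(remade_tokens) % CHUNK_SIZE, embedding)))
    remade_tokens += [0] * emb_len          # placeholder token ids
    multipliers += [weight] * emb_len


if __name__ == "__main__":
    # Worked example: 70 tokens already emitted, embedding needs 8 vectors.
    # 70 + 8 would cross the 75-token boundary, so 5 pad tokens are added
    # and the embedding lands at offset 0 of chunk 1.
    remade_tokens, multipliers, fixes = list(range(70)), [1.0] * 70, []
    append_embedding(remade_tokens, multipliers, fixes, emb_len=8, weight=1.0)
    print(fixes)                 # -> [(1, (0, None))]
    print(len(remade_tokens))    # -> 83  (70 original + 5 pad + 8 placeholders)
```

The worked example mirrors the case the patch addresses: an 8-vector embedding following 70 prompt tokens now starts at the beginning of the second chunk instead of being split 5/3 across the chunk boundary.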