aboutsummaryrefslogtreecommitdiffstats
path: root/modules/sd_hijack.py
diff options
context:
space:
mode:
authorMrCheeze <fishycheeze@yahoo.ca>2022-10-07 21:48:34 +0000
committerAUTOMATIC1111 <16777216c@gmail.com>2022-10-08 12:48:04 +0000
commit5f85a74b00c0154bfd559dc67edfa7e30342b7c9 (patch)
tree8b89b19ce3c3b599376f6ff24490d73e83024932 /modules/sd_hijack.py
parent32e428ff19c28c87bb2ed362316b928b372e3a70 (diff)
downloadstable-diffusion-webui-gfx803-5f85a74b00c0154bfd559dc67edfa7e30342b7c9.tar.gz
stable-diffusion-webui-gfx803-5f85a74b00c0154bfd559dc67edfa7e30342b7c9.tar.bz2
stable-diffusion-webui-gfx803-5f85a74b00c0154bfd559dc67edfa7e30342b7c9.zip
fix bug where, when using prompt composition, hijack_comments generated before the final AND would be dropped
Diffstat (limited to 'modules/sd_hijack.py')
-rw-r--r--modules/sd_hijack.py5
1 file changed, 4 insertions(+), 1 deletion(-)
diff --git a/modules/sd_hijack.py b/modules/sd_hijack.py
index 7e7fde0f..ba808a39 100644
--- a/modules/sd_hijack.py
+++ b/modules/sd_hijack.py
@@ -88,6 +88,9 @@ class StableDiffusionModelHijack:
for layer in [layer for layer in self.layers if type(layer) == torch.nn.Conv2d]:
layer.padding_mode = 'circular' if enable else 'zeros'
+ def clear_comments(self):
+ self.comments = []
+
def tokenize(self, text):
_, remade_batch_tokens, _, _, _, token_count = self.clip.process_text([text])
return remade_batch_tokens[0], token_count, get_target_prompt_token_count(token_count)
@@ -260,7 +263,7 @@ class FrozenCLIPEmbedderWithCustomWords(torch.nn.Module):
batch_multipliers, remade_batch_tokens, used_custom_terms, hijack_comments, hijack_fixes, token_count = self.process_text(text)
self.hijack.fixes = hijack_fixes
- self.hijack.comments = hijack_comments
+ self.hijack.comments += hijack_comments
if len(used_custom_terms) > 0:
self.hijack.comments.append("Used embeddings: " + ", ".join([f'{word} [{checksum}]' for word, checksum in used_custom_terms]))