about summary refs log tree commit diff stats
path: root/modules/ui.py
diff options
context:
space:
mode:
author: AUTOMATIC <16777216c@gmail.com> 2023-01-06 22:45:28 +0000
committer: AUTOMATIC <16777216c@gmail.com> 2023-01-06 22:46:13 +0000
commit79e39fae6110c20a3ee6255e2841c877f65e8cbd (patch)
tree8211b701f49da2b970d653789b3b008ef69a4c38 /modules/ui.py
parent3246a2d6b898da6a98fe9df4dc67944635a41bd3 (diff)
downloadstable-diffusion-webui-gfx803-79e39fae6110c20a3ee6255e2841c877f65e8cbd.tar.gz
stable-diffusion-webui-gfx803-79e39fae6110c20a3ee6255e2841c877f65e8cbd.tar.bz2
stable-diffusion-webui-gfx803-79e39fae6110c20a3ee6255e2841c877f65e8cbd.zip
CLIP hijack rework
Diffstat (limited to 'modules/ui.py')
-rw-r--r-- modules/ui.py | 2
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/modules/ui.py b/modules/ui.py
index b79d24ee..5d2f5bad 100644
--- a/modules/ui.py
+++ b/modules/ui.py
@@ -368,7 +368,7 @@ def update_token_counter(text, steps):
flat_prompts = reduce(lambda list1, list2: list1+list2, prompt_schedules)
prompts = [prompt_text for step, prompt_text in flat_prompts]
- tokens, token_count, max_length = max([model_hijack.tokenize(prompt) for prompt in prompts], key=lambda args: args[1])
+ token_count, max_length = max([model_hijack.get_prompt_lengths(prompt) for prompt in prompts], key=lambda args: args[0])
style_class = ' class="red"' if (token_count > max_length) else ""
return f"<span {style_class}>{token_count}/{max_length}</span>"