author    | ljleb <set> | 2023-07-24 19:43:06 +0000
committer | ljleb <set> | 2023-07-24 19:43:06 +0000
commit    | 5b06607476d1ef2c9d16fe8b21c786b2ca13b95c (patch)
tree      | 1f889b348ce267bde81ba36d9e3ee8e2f46bbf97
parent    | 6b68b590321fcac2ad6d71c5aee1ac02687328d7 (diff)
simplify
-rw-r--r-- | modules/processing.py | 8
1 file changed, 5 insertions, 3 deletions
diff --git a/modules/processing.py b/modules/processing.py
index 7043477f..6dc178e1 100644
--- a/modules/processing.py
+++ b/modules/processing.py
@@ -718,24 +718,26 @@ def process_images_inner(p: StableDiffusionProcessing) -> Processed:
     def infotext(iteration=0, position_in_batch=0, use_main_prompt=False):
         all_prompts = p.all_prompts[:]
+        all_negative_prompts = p.all_negative_prompts[:]
         all_seeds = p.all_seeds[:]
         all_subseeds = p.all_subseeds[:]
 
         # apply changes to generation data
         all_prompts[iteration * p.batch_size:(iteration + 1) * p.batch_size] = p.prompts
+        all_negative_prompts[iteration * p.batch_size:(iteration + 1) * p.batch_size] = p.negative_prompts
         all_seeds[iteration * p.batch_size:(iteration + 1) * p.batch_size] = p.seeds
         all_subseeds[iteration * p.batch_size:(iteration + 1) * p.batch_size] = p.subseeds
 
         # update p.all_negative_prompts in case extensions changed the size of the batch
         # create_infotext below uses it
-        old_negative_prompts = p.all_negative_prompts[iteration * p.batch_size:(iteration + 1) * p.batch_size]
-        p.all_negative_prompts[iteration * p.batch_size:(iteration + 1) * p.batch_size] = p.negative_prompts
+        old_negative_prompts = p.all_negative_prompts
+        p.all_negative_prompts = all_negative_prompts
 
         try:
             return create_infotext(p, all_prompts, all_seeds, all_subseeds, comments, iteration, position_in_batch, use_main_prompt)
         finally:
             # restore p.all_negative_prompts in case extensions changed the size of the batch
-            p.all_negative_prompts[iteration * p.batch_size:iteration * p.batch_size + len(p.negative_prompts)] = old_negative_prompts
+            p.all_negative_prompts = old_negative_prompts
 
    if os.path.exists(cmd_opts.embeddings_dir) and not p.do_not_reload_embeddings:
        model_hijack.embedding_db.load_textual_inversion_embeddings()
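
The change makes `infotext` handle `p.all_negative_prompts` the same way it already handles the prompt, seed, and subseed lists: it splices the current batch into a private copy and temporarily swaps the whole attribute, then restores the saved reference in `finally`. A whole-attribute swap cannot mis-align the way the old slice-assignment restore could when an extension resized the batch. Below is a minimal sketch of that save/override/restore pattern; `p`, `batch_size`, and the `render()` callback are stand-ins for illustration, where the real code calls `create_infotext` in modules/processing.py.

    # Sketch of the swap-and-restore pattern used in the diff above (names are
    # hypothetical stand-ins, not the actual API).
    def infotext_for_batch(p, iteration, render):
        # Splice the current batch into a copy, never into p's own list.
        all_negative_prompts = p.all_negative_prompts[:]
        start = iteration * p.batch_size
        all_negative_prompts[start:start + p.batch_size] = p.negative_prompts

        # Swap the whole attribute so downstream code sees the updated list,
        # even if an extension changed the number of prompts in this batch.
        old_negative_prompts = p.all_negative_prompts
        p.all_negative_prompts = all_negative_prompts
        try:
            return render()
        finally:
            # Restoring the saved reference is always safe; slicing the old
            # values back in could mis-align if the batch length changed.
            p.all_negative_prompts = old_negative_prompts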