From 27fbf3de4adf6ba8dfa43876db3599bb8159ef44 Mon Sep 17 00:00:00 2001
From: shirase-0
Date: Sun, 2 Oct 2022 00:43:24 +1000
Subject: Added tag parsing for prompts from file

---
 scripts/prompts_from_file.py | 58 +++++++++++++++++++++++++++++++++++++++++++-
 1 file changed, 57 insertions(+), 1 deletion(-)

(limited to 'scripts/prompts_from_file.py')

diff --git a/scripts/prompts_from_file.py b/scripts/prompts_from_file.py
index 513d9a1c..36e199b3 100644
--- a/scripts/prompts_from_file.py
+++ b/scripts/prompts_from_file.py
@@ -2,6 +2,7 @@ import math
 import os
 import sys
 import traceback
+from xml.etree.ElementTree import tostring

 import modules.scripts as scripts
 import gradio as gr
@@ -29,6 +30,44 @@ class Script(scripts.Script):
         checkbox_txt.change(fn=lambda x: [gr.File.update(visible = not x), gr.TextArea.update(visible = x)], inputs=[checkbox_txt], outputs=[file, prompt_txt])
         return [checkbox_txt, file, prompt_txt]

+    def process_string_tag(self, tag):
+        return tag[1:-2]
+
+    def process_int_tag(self, tag):
+        return int(tag)
+
+    def process_float_tag(self, tag):
+        return float(tag)
+
+    def process_boolean_tag(self, tag):
+        return True if (tag == "true") else False
+
+    prompt_tags = {
+        "sd_model": None,
+        "outpath_samples": process_string_tag,
+        "outpath_grids": process_string_tag,
+        "prompt_for_display": process_string_tag,
+        "prompt": process_string_tag,
+        "negative_prompt": process_string_tag,
+        "styles": process_string_tag,
+        "seed": process_int_tag,
+        "subseed_strength": process_float_tag,
+        "subseed": process_int_tag,
+        "seed_resize_from_h": process_int_tag,
+        "seed_resize_from_w": process_int_tag,
+        "sampler_index": process_int_tag,
+        "batch_size": process_int_tag,
+        "n_iter": process_int_tag,
+        "steps": process_int_tag,
+        "cfg_scale": process_float_tag,
+        "width": process_int_tag,
+        "height": process_int_tag,
+        "restore_faces": process_boolean_tag,
+        "tiling": process_boolean_tag,
+        "do_not_save_samples": process_boolean_tag,
+        "do_not_save_grid": process_boolean_tag
+    }
+
     def run(self, p, checkbox_txt, data: bytes, prompt_txt: str):
         if (checkbox_txt):
             lines = [x.strip() for x in prompt_txt.splitlines()]
@@ -39,6 +78,7 @@ class Script(scripts.Script):
         img_count = len(lines) * p.n_iter
         batch_count = math.ceil(img_count / p.batch_size)
         loop_count = math.ceil(batch_count / p.n_iter)
+        # These numbers no longer accurately reflect the total images and number of batches
         print(f"Will process {img_count} images in {batch_count} batches.")

         p.do_not_save_grid = True
@@ -48,7 +88,23 @@ class Script(scripts.Script):
         images = []
         for loop_no in range(loop_count):
             state.job = f"{loop_no + 1} out of {loop_count}"
-            p.prompt = lines[loop_no*p.batch_size:(loop_no+1)*p.batch_size] * p.n_iter
+            # The following line may need revising to remove batch_size references
+            current_line = lines[loop_no*p.batch_size:(loop_no+1)*p.batch_size] * p.n_iter
+            if(current_line[0][:2] != "--"):
+                p.prompt = current_line
+            else:
+                tokenized_line = current_line[0].split("--")
+
+                for tag in tokenized_line:
+                    tag_split = tag.split(" ", 1)
+                    if(tag_split[0] != ''):
+                        value_func = self.prompt_tags.get(tag_split[0], None)
+                        if(value_func != None):
+                            value = value_func(self, tag_split[1])
+                            setattr(p, tag_split[0], value)
+                        else:
+                            print(f"Unknown option \"{tag_split}\"")
+
             proc = process_images(p)
             images += proc.images
-- cgit v1.2.3


From 0e77ee24b0b651d6a564245243850e4fb9831e31 Mon Sep 17 00:00:00 2001
From: shirase-0
Date: Sun, 2 Oct 2022 00:57:29 +1000
Subject: Removed unnecessary library call and added some comments

---
 scripts/prompts_from_file.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

(limited to 'scripts/prompts_from_file.py')

diff --git a/scripts/prompts_from_file.py b/scripts/prompts_from_file.py
index 36e199b3..0a862a5b 100644
--- a/scripts/prompts_from_file.py
+++ b/scripts/prompts_from_file.py
@@ -2,7 +2,6 @@ import math
 import os
 import sys
 import traceback
-from xml.etree.ElementTree import tostring

 import modules.scripts as scripts
 import gradio as gr
@@ -90,6 +89,8 @@ class Script(scripts.Script):
             state.job = f"{loop_no + 1} out of {loop_count}"
             # The following line may need revising to remove batch_size references
             current_line = lines[loop_no*p.batch_size:(loop_no+1)*p.batch_size] * p.n_iter
+
+            # If the current line has no tags, parse the whole line as a prompt, else parse each tag
             if(current_line[0][:2] != "--"):
                 p.prompt = current_line
             else:
-- cgit v1.2.3


From e2930f9821c197da94e208b5ae73711002844efc Mon Sep 17 00:00:00 2001
From: Tony Beeman
Date: Fri, 7 Oct 2022 17:46:39 -0700
Subject: Fix for Prompts_from_file showing extra textbox.

---
 scripts/prompts_from_file.py | 4 ++++
 1 file changed, 4 insertions(+)

(limited to 'scripts/prompts_from_file.py')

diff --git a/scripts/prompts_from_file.py b/scripts/prompts_from_file.py
index 513d9a1c..110889a6 100644
--- a/scripts/prompts_from_file.py
+++ b/scripts/prompts_from_file.py
@@ -10,6 +10,7 @@ from modules.processing import Processed, process_images
 from PIL import Image
 from modules.shared import opts, cmd_opts, state

+g_txt_mode = False

 class Script(scripts.Script):
     def title(self):
@@ -29,6 +30,9 @@ class Script(scripts.Script):
         checkbox_txt.change(fn=lambda x: [gr.File.update(visible = not x), gr.TextArea.update(visible = x)], inputs=[checkbox_txt], outputs=[file, prompt_txt])
         return [checkbox_txt, file, prompt_txt]

+    def on_show(self, checkbox_txt, file, prompt_txt):
+        return [ gr.Checkbox.update(visible = True), gr.File.update(visible = not checkbox_txt), gr.TextArea.update(visible = checkbox_txt) ]
+
     def run(self, p, checkbox_txt, data: bytes, prompt_txt: str):
         if (checkbox_txt):
             lines = [x.strip() for x in prompt_txt.splitlines()]
-- cgit v1.2.3


From 86cb16886f8f48169cee4658ad0c5e5443beed2a Mon Sep 17 00:00:00 2001
From: Tony Beeman
Date: Fri, 7 Oct 2022 23:51:50 -0700
Subject: Pull Request Code Review Fixes

---
 scripts/prompts_from_file.py | 2 --
 1 file changed, 2 deletions(-)

(limited to 'scripts/prompts_from_file.py')

diff --git a/scripts/prompts_from_file.py b/scripts/prompts_from_file.py
index 110889a6..b24f1a80 100644
--- a/scripts/prompts_from_file.py
+++ b/scripts/prompts_from_file.py
@@ -10,8 +10,6 @@ from modules.processing import Processed, process_images
 from PIL import Image
 from modules.shared import opts, cmd_opts, state

-g_txt_mode = False
-
 class Script(scripts.Script):
     def title(self):
         return "Prompts from file or textbox"
-- cgit v1.2.3


From 7d6042b908c064774ee10961309d396eabdc6c4a Mon Sep 17 00:00:00 2001
From: AUTOMATIC <16777216c@gmail.com>
Date: Sat, 15 Oct 2022 12:00:31 +0300
Subject: update for commandline args for batch prompts to parse string properly

---
 scripts/prompts_from_file.py | 172 ++++++++++++++++++++++++++-----------------
 1 file changed, 104 insertions(+), 68 deletions(-)

(limited to 'scripts/prompts_from_file.py')

diff --git a/scripts/prompts_from_file.py b/scripts/prompts_from_file.py
index 5732623f..1266be6f 100644
--- a/scripts/prompts_from_file.py
+++ b/scripts/prompts_from_file.py
@@ -1,7 +1,9 @@
+import copy
 import math
 import os
 import sys
 import traceback
+import shlex

 import modules.scripts as scripts
 import gradio as gr
@@ -10,6 +12,75 @@ from modules.processing import Processed, process_images
 from PIL import Image
 from modules.shared import opts, cmd_opts, state

+
+def process_string_tag(tag):
+    return tag
+
+
+def process_int_tag(tag):
+    return int(tag)
+
+
+def process_float_tag(tag):
+    return float(tag)
+
+
+def process_boolean_tag(tag):
+    return True if (tag == "true") else False
+
+
+prompt_tags = {
+    "sd_model": None,
+    "outpath_samples": process_string_tag,
+    "outpath_grids": process_string_tag,
+    "prompt_for_display": process_string_tag,
+    "prompt": process_string_tag,
+    "negative_prompt": process_string_tag,
+    "styles": process_string_tag,
+    "seed": process_int_tag,
+    "subseed_strength": process_float_tag,
+    "subseed": process_int_tag,
+    "seed_resize_from_h": process_int_tag,
+    "seed_resize_from_w": process_int_tag,
+    "sampler_index": process_int_tag,
+    "batch_size": process_int_tag,
+    "n_iter": process_int_tag,
+    "steps": process_int_tag,
+    "cfg_scale": process_float_tag,
+    "width": process_int_tag,
+    "height": process_int_tag,
+    "restore_faces": process_boolean_tag,
+    "tiling": process_boolean_tag,
+    "do_not_save_samples": process_boolean_tag,
+    "do_not_save_grid": process_boolean_tag
+}
+
+
+def cmdargs(line):
+    args = shlex.split(line)
+    pos = 0
+    res = {}
+
+    while pos < len(args):
+        arg = args[pos]
+
+        assert arg.startswith("--"), f'must start with "--": {arg}'
+        tag = arg[2:]
+
+        func = prompt_tags.get(tag, None)
+        assert func, f'unknown commandline option: {arg}'
+
+        assert pos+1 < len(args), f'missing argument for command line option {arg}'
+
+        val = args[pos+1]
+
+        res[tag] = func(val)
+
+        pos += 2
+
+    return res
+
+
 class Script(scripts.Script):
     def title(self):
         return "Prompts from file or textbox"
@@ -28,87 +99,52 @@ class Script(scripts.Script):
         checkbox_txt.change(fn=lambda x: [gr.File.update(visible = not x), gr.TextArea.update(visible = x)], inputs=[checkbox_txt], outputs=[file, prompt_txt])
         return [checkbox_txt, file, prompt_txt]

-    def process_string_tag(self, tag):
-        return tag[1:-2]
-
-    def process_int_tag(self, tag):
-        return int(tag)
-
-    def process_float_tag(self, tag):
-        return float(tag)
-
-    def process_boolean_tag(self, tag):
-        return True if (tag == "true") else False
-
-    prompt_tags = {
-        "sd_model": None,
-        "outpath_samples": process_string_tag,
-        "outpath_grids": process_string_tag,
-        "prompt_for_display": process_string_tag,
-        "prompt": process_string_tag,
-        "negative_prompt": process_string_tag,
-        "styles": process_string_tag,
-        "seed": process_int_tag,
-        "subseed_strength": process_float_tag,
-        "subseed": process_int_tag,
-        "seed_resize_from_h": process_int_tag,
-        "seed_resize_from_w": process_int_tag,
-        "sampler_index": process_int_tag,
-        "batch_size": process_int_tag,
-        "n_iter": process_int_tag,
-        "steps": process_int_tag,
-        "cfg_scale": process_float_tag,
-        "width": process_int_tag,
-        "height": process_int_tag,
-        "restore_faces": process_boolean_tag,
-        "tiling": process_boolean_tag,
-        "do_not_save_samples": process_boolean_tag,
-        "do_not_save_grid": process_boolean_tag
-    }
-
     def on_show(self, checkbox_txt, file, prompt_txt):
         return [ gr.Checkbox.update(visible = True), gr.File.update(visible = not checkbox_txt), gr.TextArea.update(visible = checkbox_txt) ]

     def run(self, p, checkbox_txt, data: bytes, prompt_txt: str):
-        if (checkbox_txt):
+        if checkbox_txt:
             lines = [x.strip() for x in prompt_txt.splitlines()]
         else:
             lines = [x.strip() for x in data.decode('utf8', errors='ignore').split("\n")]

         lines = [x for x in lines if len(x) > 0]

-        img_count = len(lines) * p.n_iter
-        batch_count = math.ceil(img_count / p.batch_size)
-        loop_count = math.ceil(batch_count / p.n_iter)
-        # These numbers no longer accurately reflect the total images and number of batches
-        print(f"Will process {img_count} images in {batch_count} batches.")
-
         p.do_not_save_grid = True

-        state.job_count = batch_count
+        job_count = 0
+        jobs = []
+
+        for line in lines:
+            if "--" in line:
+                try:
+                    args = cmdargs(line)
+                except Exception:
+                    print(f"Error parsing line [line] as commandline:", file=sys.stderr)
+                    print(traceback.format_exc(), file=sys.stderr)
+                    args = {"prompt": line}
+            else:
+                args = {"prompt": line}

-        images = []
-        for loop_no in range(loop_count):
-            state.job = f"{loop_no + 1} out of {loop_count}"
-            # The following line may need revising to remove batch_size references
-            current_line = lines[loop_no*p.batch_size:(loop_no+1)*p.batch_size] * p.n_iter
-
-            # If the current line has no tags, parse the whole line as a prompt, else parse each tag
-            if(current_line[0][:2] != "--"):
-                p.prompt = current_line
+            n_iter = args.get("n_iter", 1)
+            if n_iter != 1:
+                job_count += n_iter
             else:
-                tokenized_line = current_line[0].split("--")
-
-                for tag in tokenized_line:
-                    tag_split = tag.split(" ", 1)
-                    if(tag_split[0] != ''):
-                        value_func = self.prompt_tags.get(tag_split[0], None)
-                        if(value_func != None):
-                            value = value_func(self, tag_split[1])
-                            setattr(p, tag_split[0], value)
-                        else:
-                            print(f"Unknown option \"{tag_split}\"")
-
-            proc = process_images(p)
+                job_count += 1
+
+            jobs.append(args)
+
+        print(f"Will process {len(lines)} lines in {job_count} jobs.")
+        state.job_count = job_count
+
+        images = []
+        for n, args in enumerate(jobs):
+            state.job = f"{state.job_no + 1} out of {state.job_count}"
+
+            copy_p = copy.copy(p)
+            for k, v in args.items():
+                setattr(copy_p, k, v)
+
+            proc = process_images(copy_p)
             images += proc.images

         return Processed(p, images, p.seed, "")
-- cgit v1.2.3


From 99d728b5b18829c8a6b7b2d69c9b9327dd257896 Mon Sep 17 00:00:00 2001
From: Tony Beeman
Date: Sun, 23 Oct 2022 23:16:47 -0700
Subject: Add Iterate Button and Improve PFF UI

---
 scripts/prompts_from_file.py | 54 ++++++++++++++++++++++++++------------------
 1 file changed, 32 insertions(+), 22 deletions(-)

(limited to 'scripts/prompts_from_file.py')

diff --git a/scripts/prompts_from_file.py b/scripts/prompts_from_file.py
index 1266be6f..1be22960 100644
--- a/scripts/prompts_from_file.py
+++ b/scripts/prompts_from_file.py
@@ -1,6 +1,7 @@
 import copy
 import math
 import os
+import random
 import sys
 import traceback
 import shlex
@@ -81,32 +82,34 @@ def cmdargs(line):
     return res


+def load_prompt_file(file):
+    if (file is None):
+        lines = []
+    else:
+        lines = [x.strip() for x in file.decode('utf8', errors='ignore').split("\n")]
+
+    return None, "\n".join(lines), gr.update(lines=7)
+
 class Script(scripts.Script):
     def title(self):
         return "Prompts from file or textbox"

     def ui(self, is_img2img):
-        # This checkbox would look nicer as two tabs, but there are two problems:
-        # 1) There is a bug in Gradio 3.3 that prevents visibility from working on Tabs
-        # 2) Even with Gradio 3.3.1, returning a control (like Tabs) that can't be used as input
-        #    causes a AttributeError: 'Tabs' object has no attribute 'preprocess' assert,
-        #    due to the way Script assumes all controls returned can be used as inputs.
-        # Therefore, there's no good way to use grouping components right now,
-        # so we will use a checkbox! :)
-        checkbox_txt = gr.Checkbox(label="Show Textbox", value=False)
-        file = gr.File(label="File with inputs", type='bytes')
-        prompt_txt = gr.TextArea(label="Prompts")
-        checkbox_txt.change(fn=lambda x: [gr.File.update(visible = not x), gr.TextArea.update(visible = x)], inputs=[checkbox_txt], outputs=[file, prompt_txt])
-        return [checkbox_txt, file, prompt_txt]
-
-    def on_show(self, checkbox_txt, file, prompt_txt):
-        return [ gr.Checkbox.update(visible = True), gr.File.update(visible = not checkbox_txt), gr.TextArea.update(visible = checkbox_txt) ]
-
-    def run(self, p, checkbox_txt, data: bytes, prompt_txt: str):
-        if checkbox_txt:
-            lines = [x.strip() for x in prompt_txt.splitlines()]
-        else:
-            lines = [x.strip() for x in data.decode('utf8', errors='ignore').split("\n")]
+        checkbox_iterate = gr.Checkbox(label="Iterate seed every line", value=False)
+
+        prompt_txt = gr.Textbox(label="List of prompt inputs", lines=1)
+        file = gr.File(label="Upload prompt inputs", type='bytes')
+
+        file.change(fn=load_prompt_file, inputs=[file], outputs=[file, prompt_txt, prompt_txt])
+
+        # We start at one line. When the text changes, we jump to seven lines, or two lines if no \n.
+        # We don't shrink back to 1, because that causes the control to ignore [enter], and it may
+        # be unclear to the user that shift-enter is needed.
+        prompt_txt.change(lambda tb: gr.update(lines=7) if ("\n" in tb) else gr.update(lines=2), inputs=[prompt_txt], outputs=[prompt_txt])
+        return [checkbox_iterate, file, prompt_txt]
+
+    def run(self, p, checkbox_iterate, file, prompt_txt: str):
+        lines = [x.strip() for x in prompt_txt.splitlines()]
         lines = [x for x in lines if len(x) > 0]

         p.do_not_save_grid = True
@@ -134,6 +137,9 @@ class Script(scripts.Script):
             jobs.append(args)

         print(f"Will process {len(lines)} lines in {job_count} jobs.")
+        if (checkbox_iterate and p.seed == -1):
+            p.seed = int(random.randrange(4294967294))
+
         state.job_count = job_count

         images = []
@@ -146,5 +152,9 @@ class Script(scripts.Script):
             proc = process_images(copy_p)
             images += proc.images
+
+            if (checkbox_iterate):
+                p.seed = p.seed + (p.batch_size * p.n_iter)
+
-        return Processed(p, images, p.seed, "")
+        return Processed(p, images, p.seed, "")
\ No newline at end of file
-- cgit v1.2.3


From 315bd7c9e8a20a28fa7fd1ddd5fddbf3b5a9b41c Mon Sep 17 00:00:00 2001
From: Keith Dreibelbis
Date: Tue, 1 Nov 2022 19:45:35 -0700
Subject: prompts_from_file: allow random seeds to be preserved for the list of prompts

---
 scripts/prompts_from_file.py | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

(limited to 'scripts/prompts_from_file.py')

diff --git a/scripts/prompts_from_file.py b/scripts/prompts_from_file.py
index 1be22960..8d4911ae 100644
--- a/scripts/prompts_from_file.py
+++ b/scripts/prompts_from_file.py
@@ -96,6 +96,7 @@ class Script(scripts.Script):

     def ui(self, is_img2img):
         checkbox_iterate = gr.Checkbox(label="Iterate seed every line", value=False)
+        checkbox_iterate_batch = gr.Checkbox(label="Preserve random seed across lines (for use with \"Generate Forever\")", value=False)

         prompt_txt = gr.Textbox(label="List of prompt inputs", lines=1)
         file = gr.File(label="Upload prompt inputs", type='bytes')

         file.change(fn=load_prompt_file, inputs=[file], outputs=[file, prompt_txt, prompt_txt])

         # We start at one line. When the text changes, we jump to seven lines, or two lines if no \n.
         # We don't shrink back to 1, because that causes the control to ignore [enter], and it may
         # be unclear to the user that shift-enter is needed.
         prompt_txt.change(lambda tb: gr.update(lines=7) if ("\n" in tb) else gr.update(lines=2), inputs=[prompt_txt], outputs=[prompt_txt])
-        return [checkbox_iterate, file, prompt_txt]
+        return [checkbox_iterate, checkbox_iterate_batch, file, prompt_txt]

-    def run(self, p, checkbox_iterate, file, prompt_txt: str):
+    def run(self, p, checkbox_iterate, checkbox_iterate_batch, file, prompt_txt: str):
         lines = [x.strip() for x in prompt_txt.splitlines()]
         lines = [x for x in lines if len(x) > 0]

@@ -134,7 +138,7 @@ class Script(scripts.Script):
             jobs.append(args)

         print(f"Will process {len(lines)} lines in {job_count} jobs.")
-        if (checkbox_iterate and p.seed == -1):
+        if ((checkbox_iterate or checkbox_iterate_batch) and p.seed == -1):
             p.seed = int(random.randrange(4294967294))

         state.job_count = job_count
-- cgit v1.2.3


From 55688c48806f9383f3a56f6b9a0ab8fbf205edd2 Mon Sep 17 00:00:00 2001
From: AUTOMATIC <16777216c@gmail.com>
Date: Wed, 2 Nov 2022 07:02:45 +0300
Subject: rename the seed option from #4146

---
 scripts/prompts_from_file.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

(limited to 'scripts/prompts_from_file.py')

diff --git a/scripts/prompts_from_file.py b/scripts/prompts_from_file.py
index 8d4911ae..d187cd9c 100644
--- a/scripts/prompts_from_file.py
+++ b/scripts/prompts_from_file.py
@@ -96,7 +96,7 @@ class Script(scripts.Script):

     def ui(self, is_img2img):
         checkbox_iterate = gr.Checkbox(label="Iterate seed every line", value=False)
-        checkbox_iterate_batch = gr.Checkbox(label="Preserve random seed across lines (for use with \"Generate Forever\")", value=False)
+        checkbox_iterate_batch = gr.Checkbox(label="Use same random seed for all lines", value=False)

         prompt_txt = gr.Textbox(label="List of prompt inputs", lines=1)
         file = gr.File(label="Upload prompt inputs", type='bytes')
@@ -138,7 +138,7 @@ class Script(scripts.Script):
             jobs.append(args)

         print(f"Will process {len(lines)} lines in {job_count} jobs.")
-        if ((checkbox_iterate or checkbox_iterate_batch) and p.seed == -1):
+        if (checkbox_iterate or checkbox_iterate_batch) and p.seed == -1:
             p.seed = int(random.randrange(4294967294))

         state.job_count = job_count
@@ -154,7 +154,7 @@ class Script(scripts.Script):
             proc = process_images(copy_p)
             images += proc.images

-            if (checkbox_iterate):
+            if checkbox_iterate:
                 p.seed = p.seed + (p.batch_size * p.n_iter)
-- cgit v1.2.3


From 4dd898b8c15e342f817d3fb1c8dc9f2d5d111022 Mon Sep 17 00:00:00 2001
From: AUTOMATIC <16777216c@gmail.com>
Date: Fri, 4 Nov 2022 08:38:11 +0300
Subject: do not mess with components' visibility for scripts; instead create group components and show/hide those; this will break scripts that create invisible components and rely on UI but the earlier I make this change, the better

---
 scripts/prompts_from_file.py | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

(limited to 'scripts/prompts_from_file.py')

diff --git a/scripts/prompts_from_file.py b/scripts/prompts_from_file.py
index d187cd9c..3388bc77 100644
--- a/scripts/prompts_from_file.py
+++ b/scripts/prompts_from_file.py
@@ -83,13 +83,14 @@ def cmdargs(line):


 def load_prompt_file(file):
-    if (file is None):
+    if file is None:
         lines = []
     else:
         lines = [x.strip() for x in file.decode('utf8', errors='ignore').split("\n")]

     return None, "\n".join(lines), gr.update(lines=7)

+
 class Script(scripts.Script):
     def title(self):
         return "Prompts from file or textbox"
@@ -107,9 +108,9 @@ class Script(scripts.Script):
         # We don't shrink back to 1, because that causes the control to ignore [enter], and it may
         # be unclear to the user that shift-enter is needed.
         prompt_txt.change(lambda tb: gr.update(lines=7) if ("\n" in tb) else gr.update(lines=2), inputs=[prompt_txt], outputs=[prompt_txt])
-        return [checkbox_iterate, checkbox_iterate_batch, file, prompt_txt]
+        return [checkbox_iterate, checkbox_iterate_batch, prompt_txt]

-    def run(self, p, checkbox_iterate, checkbox_iterate_batch, file, prompt_txt: str):
+    def run(self, p, checkbox_iterate, checkbox_iterate_batch, prompt_txt: str):
         lines = [x.strip() for x in prompt_txt.splitlines()]
         lines = [x for x in lines if len(x) > 0]

@@ -157,5 +158,4 @@ class Script(scripts.Script):
         if checkbox_iterate:
             p.seed = p.seed + (p.batch_size * p.n_iter)

-
-        return Processed(p, images, p.seed, "")
\ No newline at end of file
+        return Processed(p, images, p.seed, "")
-- cgit v1.2.3


From 81f2575df91a50e4aa9ca816e02e3f77342eedc8 Mon Sep 17 00:00:00 2001
From: Liam
Date: Wed, 9 Nov 2022 15:24:31 -0500
Subject: updating the displayed generation info when user clicks images in the gallery. feature request 4415

---
 scripts/prompts_from_file.py | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

(limited to 'scripts/prompts_from_file.py')

diff --git a/scripts/prompts_from_file.py b/scripts/prompts_from_file.py
index 3388bc77..32fe6bdb 100644
--- a/scripts/prompts_from_file.py
+++ b/scripts/prompts_from_file.py
@@ -145,6 +145,8 @@ class Script(scripts.Script):
         state.job_count = job_count

         images = []
+        all_prompts = []
+        infotexts = []
         for n, args in enumerate(jobs):
             state.job = f"{state.job_no + 1} out of {state.job_count}"

@@ -157,5 +159,7 @@ class Script(scripts.Script):

         if checkbox_iterate:
             p.seed = p.seed + (p.batch_size * p.n_iter)
+            all_prompts += proc.all_prompts
+            infotexts += proc.infotexts

-        return Processed(p, images, p.seed, "")
+        return Processed(p, images, p.seed, "", all_prompts=all_prompts, infotexts=infotexts)
-- cgit v1.2.3


From 27c0504bc4d17eec6e58148ab33c75f5ed2e6f00 Mon Sep 17 00:00:00 2001
From: David Vorick
Date: Tue, 13 Dec 2022 12:03:16 -0500
Subject: add support for prompts, negative prompts, and sampler-by-name in text file script

---
 scripts/prompts_from_file.py | 20 ++++++++++++++++++--
 1 file changed, 18 insertions(+), 2 deletions(-)

(limited to 'scripts/prompts_from_file.py')

diff --git a/scripts/prompts_from_file.py b/scripts/prompts_from_file.py
index 32fe6bdb..6e118ddb 100644
--- a/scripts/prompts_from_file.py
+++ b/scripts/prompts_from_file.py
@@ -9,6 +9,7 @@ import shlex

 import modules.scripts as scripts
 import gradio as gr
+from modules import sd_samplers
 from modules.processing import Processed, process_images
 from PIL import Image
 from modules.shared import opts, cmd_opts, state
@@ -44,6 +45,7 @@ prompt_tags = {
     "seed_resize_from_h": process_int_tag,
     "seed_resize_from_w": process_int_tag,
     "sampler_index": process_int_tag,
+    "sampler_name": process_string_tag,
     "batch_size": process_int_tag,
     "n_iter": process_int_tag,
     "steps": process_int_tag,
@@ -66,14 +68,28 @@ def cmdargs(line):
         arg = args[pos]

         assert arg.startswith("--"), f'must start with "--": {arg}'
+        assert pos+1 < len(args), f'missing argument for command line option {arg}'
+
         tag = arg[2:]

+        if tag == "prompt" or tag == "negative_prompt":
+            pos += 1
+            prompt = args[pos]
+            pos += 1
+            while pos < len(args) and not args[pos].startswith("--"):
+                prompt += " "
+                prompt += args[pos]
+                pos += 1
+            res[tag] = prompt
+            continue
+
+
         func = prompt_tags.get(tag, None)
         assert func, f'unknown commandline option: {arg}'

-        assert pos+1 < len(args), f'missing argument for command line option {arg}'
-
         val = args[pos+1]
+        if tag == "sampler_name":
+            val = sd_samplers.samplers_map.get(val.lower(), None)

         res[tag] = func(val)
-- cgit v1.2.3


From 3bf5591efe9a9f219c6088be322a87adc4f48f95 Mon Sep 17 00:00:00 2001
From: Yuval Aboulafia
Date: Sat, 24 Dec 2022 21:35:29 +0200
Subject: fix F541 f-string without any placeholders

---
 scripts/prompts_from_file.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

(limited to 'scripts/prompts_from_file.py')

diff --git a/scripts/prompts_from_file.py b/scripts/prompts_from_file.py
index 6e118ddb..e8386ed2 100644
--- a/scripts/prompts_from_file.py
+++ b/scripts/prompts_from_file.py
@@ -140,7 +140,7 @@ class Script(scripts.Script):
             try:
                 args = cmdargs(line)
             except Exception:
-                print(f"Error parsing line [line] as commandline:", file=sys.stderr)
+                print(f"Error parsing line {line} as commandline:", file=sys.stderr)
                 print(traceback.format_exc(), file=sys.stderr)
                 args = {"prompt": line}
             else:
-- cgit v1.2.3
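
After these commits, a line in the prompt list can be either a plain prompt or a series of --option value pairs that cmdargs() splits with shlex and copies onto the processing object. The sketch below is illustrative only: it is not part of the patch series, the reduced prompt_tags table and the sample input line are invented for the example, and it omits the sampler_name lookup and the fallback behaviour of the real script. It is just a self-contained, runnable condensation of the parsing idea.

import shlex

# Hypothetical subset of the option table; the real script maps many more tags.
prompt_tags = {
    "prompt": str,
    "negative_prompt": str,
    "steps": int,
    "cfg_scale": float,
    "width": int,
    "height": int,
    "restore_faces": lambda v: v == "true",
}


def cmdargs(line):
    # shlex keeps quoted values together and splits on whitespace otherwise.
    args = shlex.split(line)
    pos = 0
    res = {}

    while pos < len(args):
        arg = args[pos]
        assert arg.startswith("--"), f'must start with "--": {arg}'
        tag = arg[2:]

        if tag in ("prompt", "negative_prompt"):
            # Prompts may span several unquoted words, so collect tokens
            # until the next --option and join them back into one string.
            pos += 1
            words = []
            while pos < len(args) and not args[pos].startswith("--"):
                words.append(args[pos])
                pos += 1
            res[tag] = " ".join(words)
            continue

        func = prompt_tags.get(tag)
        assert func, f"unknown commandline option: {arg}"
        assert pos + 1 < len(args), f"missing argument for command line option {arg}"
        res[tag] = func(args[pos + 1])  # convert the value with the tag's parser
        pos += 2

    return res


if __name__ == "__main__":
    line = "--prompt a lonely house in the woods --negative_prompt blurry, low quality --steps 30 --cfg_scale 7.5 --width 640 --height 448"
    print(cmdargs(line))
    # {'prompt': 'a lonely house in the woods', 'negative_prompt': 'blurry, low quality',
    #  'steps': 30, 'cfg_scale': 7.5, 'width': 640, 'height': 448}

In the actual script, any line containing "--" that fails to parse this way is caught and treated as a whole prompt, so plain prompt lines and option lines can be mixed freely in one file.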