author    timntorres <timothynarcisotorres@gmail.com>  2023-01-05 15:24:22 +0000
committer timntorres <timothynarcisotorres@gmail.com>  2023-01-05 15:24:22 +0000
commit    eea8fc40e16664ddc8a9aec77206da704a35dde0 (patch)
tree      95443d38af1000193427083ac7e85fbbd05340f4 /modules/shared.py
parent    f8d0cf6a6ec4911559cfecb9a9d1d46b547b38e8 (diff)
Add option to save ti settings to file.
Diffstat (limited to 'modules/shared.py')
-rw-r--r--  modules/shared.py  1
1 file changed, 1 insertion(+), 0 deletions(-)
diff --git a/modules/shared.py b/modules/shared.py
index e0f44c6d..933cd738 100644
--- a/modules/shared.py
+++ b/modules/shared.py
@@ -362,6 +362,7 @@ options_templates.update(options_section(('training', "Training"), {
"unload_models_when_training": OptionInfo(False, "Move VAE and CLIP to RAM when training if possible. Saves VRAM."),
"pin_memory": OptionInfo(False, "Turn on pin_memory for DataLoader. Makes training slightly faster but can increase memory usage."),
"save_optimizer_state": OptionInfo(False, "Saves Optimizer state as separate *.optim file. Training of embedding or HN can be resumed with the matching optim file."),
+ "save_train_settings_to_txt": OptionInfo(False, "Save textual inversion and hypernet settings to a text file when training starts."),
"dataset_filename_word_regex": OptionInfo("", "Filename word regex"),
"dataset_filename_join_string": OptionInfo(" ", "Filename join string"),
"training_image_repeats_per_epoch": OptionInfo(1, "Number of repeats for a single input image per epoch; used only for displaying epoch number", gr.Number, {"precision": 0}),