aboutsummaryrefslogtreecommitdiffstats
path: root/modules/shared.py
diff options
context:
space:
mode:
author: AUTOMATIC1111 <16777216c@gmail.com> 2022-11-05 13:15:00 +0000
committer: GitHub <noreply@github.com> 2022-11-05 13:15:00 +0000
commite96c4344952fba680aa0e7d919a8b3b1896fbeb5 (patch)
treef24476c5262f978d79c4bfff23deb02121d89b31 /modules/shared.py
parent477c09f4e73dd21eb598dfe072e42b700fcb757d (diff)
parent7278897982bfb640ee95f144c97ed25fb3f77ea3 (diff)
downloadstable-diffusion-webui-gfx803-e96c4344952fba680aa0e7d919a8b3b1896fbeb5.tar.gz
stable-diffusion-webui-gfx803-e96c4344952fba680aa0e7d919a8b3b1896fbeb5.tar.bz2
stable-diffusion-webui-gfx803-e96c4344952fba680aa0e7d919a8b3b1896fbeb5.zip
Merge pull request #3975 from aria1th/force-push-patch-13
Save/loading AdamW optimizer (for hypernetworks)
Diffstat (limited to 'modules/shared.py')
-rw-r--r-- modules/shared.py | 1
1 file changed, 1 insertion(+), 0 deletions(-)
diff --git a/modules/shared.py b/modules/shared.py
index 7a20c3af..71587557 100644
--- a/modules/shared.py
+++ b/modules/shared.py
@@ -320,6 +320,7 @@ options_templates.update(options_section(('system', "System"), {
options_templates.update(options_section(('training', "Training"), {
"unload_models_when_training": OptionInfo(False, "Move VAE and CLIP to RAM when training if possible. Saves VRAM."),
+ "save_optimizer_state": OptionInfo(False, "Saves Optimizer state as separate *.optim file. Training can be resumed with HN itself and matching optim file."),
"dataset_filename_word_regex": OptionInfo("", "Filename word regex"),
"dataset_filename_join_string": OptionInfo(" ", "Filename join string"),
"training_image_repeats_per_epoch": OptionInfo(1, "Number of repeats for a single input image per epoch; used only for displaying epoch number", gr.Number, {"precision": 0}),