author     Fampai <>   2022-10-31 11:26:08 +0000
committer  Fampai <>   2022-10-31 11:26:08 +0000
commit     006756f9cd6258eae418e9209cfc13f940ec53e1 (patch)
tree       79e555dbfecadf9ab6af4ad7f4fbbb80e81e1ca6 /modules/shared.py
parent     700162a603d7bc8cfe582ca5787e88ee0c6573c8 (diff)
Added TI training optimizations
option to use xattention optimizations when training
option to unload vae when training
Diffstat (limited to 'modules/shared.py')
-rw-r--r-- | modules/shared.py | 3
1 file changed, 2 insertions, 1 deletion
diff --git a/modules/shared.py b/modules/shared.py
index fb84afd8..4c3d0ce7 100644
--- a/modules/shared.py
+++ b/modules/shared.py
@@ -256,11 +256,12 @@ options_templates.update(options_section(('system', "System"), {
}))
options_templates.update(options_section(('training', "Training"), {
- "unload_models_when_training": OptionInfo(False, "Move VAE and CLIP to RAM when training hypernetwork. Saves VRAM."),
+ "unload_models_when_training": OptionInfo(False, "Move VAE and CLIP to RAM when training if possible. Saves VRAM."),
"dataset_filename_word_regex": OptionInfo("", "Filename word regex"),
"dataset_filename_join_string": OptionInfo(" ", "Filename join string"),
"training_image_repeats_per_epoch": OptionInfo(1, "Number of repeats for a single input image per epoch; used only for displaying epoch number", gr.Number, {"precision": 0}),
"training_write_csv_every": OptionInfo(500, "Save an csv containing the loss to log directory every N steps, 0 to disable"),
+ "training_xattention_optimizations": OptionInfo(False, "Use cross attention optimizations while training"),
}))
options_templates.update(options_section(('sd', "Stable Diffusion"), {
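For readers following the option wiring: keys added to `options_templates` in `modules/shared.py` become accessible as attributes on `shared.opts` (e.g. `shared.opts.training_xattention_optimizations`), which the training code can then consult. The snippet below is a minimal, self-contained sketch of the idea behind the two options in this commit, not the webui's actual training code; the function name `prepare_for_training` and the toy VAE are illustrative assumptions.

```python
# Minimal sketch (assumptions only, not the webui's actual training code) of what
# the two options touched above control: parking the VAE in system RAM while
# training to save VRAM, and signalling that cross attention optimizations
# should stay enabled during training.
import torch.nn as nn


def prepare_for_training(vae: nn.Module,
                         unload_models_when_training: bool,
                         training_xattention_optimizations: bool) -> bool:
    """Apply the memory-related settings and report whether cross attention
    optimizations should be (re)applied by the caller."""
    if unload_models_when_training:
        # Latents can be precomputed before the loop starts, so the VAE does
        # not need to occupy VRAM while the embedding itself is being trained.
        vae.to("cpu")
    # How the optimizations are applied (xformers, sliced attention, ...) is up
    # to the caller; this sketch only forwards the user's choice.
    return training_xattention_optimizations


if __name__ == "__main__":
    toy_vae = nn.Sequential(nn.Conv2d(3, 4, 3), nn.Conv2d(4, 3, 3))  # stand-in for the real VAE
    use_xattn = prepare_for_training(toy_vae,
                                     unload_models_when_training=True,
                                     training_xattention_optimizations=True)
    print("VAE device:", next(toy_vae.parameters()).device)   # -> cpu
    print("apply cross attention optimizations:", use_xattn)  # -> True
```

A real training path would also need to move the unloaded model parts back to the GPU once training finishes; the sketch omits that step.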