aboutsummaryrefslogtreecommitdiffstats
path: root/modules/textual_inversion/ui.py
diff options
context:
space:
mode:
authorFampai <>2022-10-31 11:26:08 +0000
committerFampai <>2022-10-31 11:26:08 +0000
commit006756f9cd6258eae418e9209cfc13f940ec53e1 (patch)
tree79e555dbfecadf9ab6af4ad7f4fbbb80e81e1ca6 /modules/textual_inversion/ui.py
parent700162a603d7bc8cfe582ca5787e88ee0c6573c8 (diff)
downloadstable-diffusion-webui-gfx803-006756f9cd6258eae418e9209cfc13f940ec53e1.tar.gz
stable-diffusion-webui-gfx803-006756f9cd6258eae418e9209cfc13f940ec53e1.tar.bz2
stable-diffusion-webui-gfx803-006756f9cd6258eae418e9209cfc13f940ec53e1.zip
Added TI training optimizations
Adds an option to use xattention optimizations when training, and an option to unload the VAE when training.
Diffstat (limited to 'modules/textual_inversion/ui.py')
-rw-r--r--modules/textual_inversion/ui.py7
1 file changed, 5 insertions, 2 deletions
diff --git a/modules/textual_inversion/ui.py b/modules/textual_inversion/ui.py
index e712284d..d679e6f4 100644
--- a/modules/textual_inversion/ui.py
+++ b/modules/textual_inversion/ui.py
@@ -25,8 +25,10 @@ def train_embedding(*args):
assert not shared.cmd_opts.lowvram, 'Training models with lowvram not possible'
+ apply_optimizations = shared.opts.training_xattention_optimizations
try:
- sd_hijack.undo_optimizations()
+ if not apply_optimizations:
+ sd_hijack.undo_optimizations()
embedding, filename = modules.textual_inversion.textual_inversion.train_embedding(*args)
@@ -38,5 +40,6 @@ Embedding saved to {html.escape(filename)}
except Exception:
raise
finally:
- sd_hijack.apply_optimizations()
+ if not apply_optimizations:
+ sd_hijack.apply_optimizations()