path: root/modules/textual_inversion/ui.py
author     AUTOMATIC1111 <16777216c@gmail.com>  2023-01-04 15:39:57 +0000
committer  GitHub <noreply@github.com>          2023-01-04 15:39:57 +0000
commit     37aafdb059fc038df7217a907048f7eb61f0beee (patch)
tree       5ddecfd6d96d6bb1fea556a75fff58782ffcec3b /modules/textual_inversion/ui.py
parent     4fbdbddc18b21f712acae58bf41740d27023285f (diff)
parent     a8eb9e3bf814f72293e474c11e9ff0098859a942 (diff)
Merge branch 'master' into master
Diffstat (limited to 'modules/textual_inversion/ui.py')
-rw-r--r--  modules/textual_inversion/ui.py  13
1 file changed, 8 insertions(+), 5 deletions(-)
diff --git a/modules/textual_inversion/ui.py b/modules/textual_inversion/ui.py
index 36881e7a..35c4feef 100644
--- a/modules/textual_inversion/ui.py
+++ b/modules/textual_inversion/ui.py
@@ -7,8 +7,8 @@ import modules.textual_inversion.preprocess
from modules import sd_hijack, shared
-def create_embedding(name, initialization_text, nvpt):
- filename = modules.textual_inversion.textual_inversion.create_embedding(name, nvpt, init_text=initialization_text)
+def create_embedding(name, initialization_text, nvpt, overwrite_old):
+ filename = modules.textual_inversion.textual_inversion.create_embedding(name, nvpt, overwrite_old, init_text=initialization_text)
sd_hijack.model_hijack.embedding_db.load_textual_inversion_embeddings()
@@ -18,15 +18,17 @@ def create_embedding(name, initialization_text, nvpt):
def preprocess(*args):
modules.textual_inversion.preprocess.preprocess(*args)
- return "Preprocessing finished.", ""
+ return f"Preprocessing {'interrupted' if shared.state.interrupted else 'finished'}.", ""
def train_embedding(*args):
assert not shared.cmd_opts.lowvram, 'Training models with lowvram not possible'
+ apply_optimizations = shared.opts.training_xattention_optimizations
try:
- sd_hijack.undo_optimizations()
+ if not apply_optimizations:
+ sd_hijack.undo_optimizations()
embedding, filename = modules.textual_inversion.textual_inversion.train_embedding(*args)
@@ -38,5 +40,6 @@ Embedding saved to {html.escape(filename)}
except Exception:
raise
finally:
- sd_hijack.apply_optimizations()
+ if not apply_optimizations:
+ sd_hijack.apply_optimizations()
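
Below is a minimal, self-contained sketch of the optimization-gating pattern this merge adds to train_embedding: cross-attention optimizations are undone before training and re-applied in the finally block only when the training_xattention_optimizations option is off. It is not the webui's actual code; the helper functions and print statements are stand-ins for sd_hijack.undo_optimizations(), sd_hijack.apply_optimizations(), and the real training call.

# Stand-in helpers (assumptions, not the webui API).
def undo_optimizations():
    print("cross-attention optimizations undone for training")

def apply_optimizations():
    print("cross-attention optimizations re-applied")

def run_training():
    print("training embedding...")
    return "embedding", "/path/to/embedding.pt"

def train_embedding_sketch(keep_optimizations: bool):
    # Mirrors the diff: skip the undo/apply round-trip entirely when the user
    # chose to keep optimizations enabled during training.
    if not keep_optimizations:
        undo_optimizations()
    try:
        embedding, filename = run_training()
        return embedding, filename
    finally:
        # Restore optimizations even if training raised, but only if we
        # actually removed them above.
        if not keep_optimizations:
            apply_optimizations()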