about summary refs log tree commit diff stats
path: root/modules/textual_inversion/textual_inversion.py
diff options
context:
space:
mode:
author    Kuma <36082288+KumiIT@users.noreply.github.com>  2023-01-05 17:44:19 +0000
committer GitHub <noreply@github.com>  2023-01-05 17:44:19 +0000
commit   fda04e620d529031e2134520e74756d0efa30464 (patch)
tree     697155b70c0a894f0c063dcbd064b466142f23a9 /modules/textual_inversion/textual_inversion.py
parent   f8d0cf6a6ec4911559cfecb9a9d1d46b547b38e8 (diff)
download stable-diffusion-webui-gfx803-fda04e620d529031e2134520e74756d0efa30464.tar.gz
stable-diffusion-webui-gfx803-fda04e620d529031e2134520e74756d0efa30464.tar.bz2
stable-diffusion-webui-gfx803-fda04e620d529031e2134520e74756d0efa30464.zip
typo in TI
Diffstat (limited to 'modules/textual_inversion/textual_inversion.py')
-rw-r--r--  modules/textual_inversion/textual_inversion.py | 2
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/modules/textual_inversion/textual_inversion.py b/modules/textual_inversion/textual_inversion.py
index 71e07bcc..24b43045 100644
--- a/modules/textual_inversion/textual_inversion.py
+++ b/modules/textual_inversion/textual_inversion.py
@@ -298,7 +298,7 @@ def train_embedding(embedding_name, learn_rate, batch_size, gradient_step, data_
torch.nn.utils.clip_grad_norm_ if clip_grad_mode == "norm" else \
None
if clip_grad:
- clip_grad_sched = LearnRateScheduler(clip_grad_value, steps, ititial_step, verbose=False)
+ clip_grad_sched = LearnRateScheduler(clip_grad_value, steps, initial_step, verbose=False)
# dataset loading may take a while, so input validations and early returns should be done before this
shared.state.textinfo = f"Preparing dataset from {html.escape(data_root)}..."
old_parallel_processing_allowed = shared.parallel_processing_allowed