diff options
author | AUTOMATIC1111 <16777216c@gmail.com> | 2023-01-05 19:10:07 +0000 |
---|---|---|
committer | GitHub <noreply@github.com> | 2023-01-05 19:10:07 +0000 |
commit | 310b71f669e4f2cea11b023c47f7ffedd82ab464 (patch) | |
tree | 9b0ae7a50d0a192850e5cd807d86ff1c3577b6e3 | |
parent | 847f869c67c7108e3e792fc193331d0e6acca29c (diff) | |
parent | fda04e620d529031e2134520e74756d0efa30464 (diff) | |
download | stable-diffusion-webui-gfx803-310b71f669e4f2cea11b023c47f7ffedd82ab464.tar.gz stable-diffusion-webui-gfx803-310b71f669e4f2cea11b023c47f7ffedd82ab464.tar.bz2 stable-diffusion-webui-gfx803-310b71f669e4f2cea11b023c47f7ffedd82ab464.zip |
Merge pull request #6376 from KumiIT/master
typo in TI
-rw-r--r-- | modules/textual_inversion/textual_inversion.py | 2 |
1 file changed, 1 insertion, 1 deletion
diff --git a/modules/textual_inversion/textual_inversion.py b/modules/textual_inversion/textual_inversion.py index 71e07bcc..24b43045 100644 --- a/modules/textual_inversion/textual_inversion.py +++ b/modules/textual_inversion/textual_inversion.py @@ -298,7 +298,7 @@ def train_embedding(embedding_name, learn_rate, batch_size, gradient_step, data_ torch.nn.utils.clip_grad_norm_ if clip_grad_mode == "norm" else \
None
if clip_grad:
- clip_grad_sched = LearnRateScheduler(clip_grad_value, steps, ititial_step, verbose=False)
+ clip_grad_sched = LearnRateScheduler(clip_grad_value, steps, initial_step, verbose=False)
# dataset loading may take a while, so input validations and early returns should be done before this
shared.state.textinfo = f"Preparing dataset from {html.escape(data_root)}..."
old_parallel_processing_allowed = shared.parallel_processing_allowed
|