path: root/modules/hypernetworks
author    dan <guaneec@gmail.com>  2023-01-07 17:36:00 +0000
committer dan <guaneec@gmail.com>  2023-01-07 18:57:36 +0000
commit    72497895b9b1948f86d9309fe897cbb70c20ba7e (patch)
tree      d4ee04160f107036da878db8f24b58a562fe7055 /modules/hypernetworks
parent    669fb18d5222f53ae48abe0f30393d846c50ad91 (diff)
Move batchsize check
Diffstat (limited to 'modules/hypernetworks')
-rw-r--r--  modules/hypernetworks/hypernetwork.py  2
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/modules/hypernetworks/hypernetwork.py b/modules/hypernetworks/hypernetwork.py
index dba52841..32c67ccc 100644
--- a/modules/hypernetworks/hypernetwork.py
+++ b/modules/hypernetworks/hypernetwork.py
@@ -456,7 +456,7 @@ def train_hypernetwork(hypernetwork_name, learn_rate, batch_size, gradient_step,
pin_memory = shared.opts.pin_memory
- ds = modules.textual_inversion.dataset.PersonalizedBase(data_root=data_root, width=training_width, height=training_height, repeats=shared.opts.training_image_repeats_per_epoch, placeholder_token=hypernetwork_name, model=shared.sd_model, cond_model=shared.sd_model.cond_stage_model, device=devices.device, template_file=template_file, include_cond=True, batch_size=batch_size, gradient_step=gradient_step, shuffle_tags=shuffle_tags, tag_drop_out=tag_drop_out, latent_sampling_method=latent_sampling_method)
+ ds = modules.textual_inversion.dataset.PersonalizedBase(data_root=data_root, width=training_width, height=training_height, repeats=shared.opts.training_image_repeats_per_epoch, placeholder_token=hypernetwork_name, model=shared.sd_model, cond_model=shared.sd_model.cond_stage_model, device=devices.device, template_file=template_file, include_cond=True, batch_size=batch_size, gradient_step=gradient_step, shuffle_tags=shuffle_tags, tag_drop_out=tag_drop_out, latent_sampling_method=latent_sampling_method, varsize=varsize)
if shared.opts.save_training_settings_to_txt:
saved_params = dict(
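
Note on the change above: the one-line edit forwards the varsize flag from train_hypernetwork into the PersonalizedBase dataset constructor, and the commit message suggests the batch-size check now lives with the dataset rather than at the call site. The following is a minimal, hypothetical Python sketch of that pattern only; every name except varsize and batch_size is invented for illustration, and the real check belongs to the dataset code in modules/textual_inversion/dataset.py.

# Hypothetical sketch -- not the actual PersonalizedBase implementation.
# It only illustrates the pattern the diff suggests: the caller forwards
# varsize, and the dataset constructor validates it against batch_size.

class ExampleDataset:
    def __init__(self, data_root, width, height, batch_size=1, varsize=False):
        # Assumed rationale: variable-size images cannot be stacked into a
        # single tensor, so batches larger than 1 are rejected here instead
        # of in the training loop.
        if varsize and batch_size > 1:
            raise ValueError("variable image size requires batch_size == 1")
        self.data_root = data_root
        self.width, self.height = width, height
        self.batch_size = batch_size
        self.varsize = varsize

# Call-site usage mirrors the added keyword argument in the diff:
ds = ExampleDataset(data_root="train/images", width=512, height=512,
                    batch_size=1, varsize=True)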