diff options
author | Aarni Koskela <akx@iki.fi> | 2023-06-13 10:07:39 +0000 |
---|---|---|
committer | Aarni Koskela <akx@iki.fi> | 2023-06-13 10:07:39 +0000 |
commit | d8071647760a2213aaf33a533addb4d84ba86816 (patch) | |
tree | 4331b04b3c87d48fdf22cc068d60051d0b3f552e | |
parent | 8ce9b36e0fe51002e72f90ec4dbdc53b564c8fad (diff) | |
download | stable-diffusion-webui-gfx803-d8071647760a2213aaf33a533addb4d84ba86816.tar.gz stable-diffusion-webui-gfx803-d8071647760a2213aaf33a533addb4d84ba86816.tar.bz2 stable-diffusion-webui-gfx803-d8071647760a2213aaf33a533addb4d84ba86816.zip |
textual_inversion/logging.py: clean up duplicate key from sets (and sort them) (Ruff B033)
-rw-r--r-- | modules/textual_inversion/logging.py | 48 |
1 file changed, 44 insertions, 4 deletions
diff --git a/modules/textual_inversion/logging.py b/modules/textual_inversion/logging.py index 734a4b6f..45823eb1 100644 --- a/modules/textual_inversion/logging.py +++ b/modules/textual_inversion/logging.py @@ -2,11 +2,51 @@ import datetime import json
import os
-saved_params_shared = {"model_name", "model_hash", "initial_step", "num_of_dataset_images", "learn_rate", "batch_size", "clip_grad_mode", "clip_grad_value", "gradient_step", "data_root", "log_directory", "training_width", "training_height", "steps", "create_image_every", "template_file", "gradient_step", "latent_sampling_method"}
-saved_params_ti = {"embedding_name", "num_vectors_per_token", "save_embedding_every", "save_image_with_stored_embedding"}
-saved_params_hypernet = {"hypernetwork_name", "layer_structure", "activation_func", "weight_init", "add_layer_norm", "use_dropout", "save_hypernetwork_every"}
+saved_params_shared = {
+ "batch_size",
+ "clip_grad_mode",
+ "clip_grad_value",
+ "create_image_every",
+ "data_root",
+ "gradient_step",
+ "initial_step",
+ "latent_sampling_method",
+ "learn_rate",
+ "log_directory",
+ "model_hash",
+ "model_name",
+ "num_of_dataset_images",
+ "steps",
+ "template_file",
+ "training_height",
+ "training_width",
+}
+saved_params_ti = {
+ "embedding_name",
+ "num_vectors_per_token",
+ "save_embedding_every",
+ "save_image_with_stored_embedding",
+}
+saved_params_hypernet = {
+ "activation_func",
+ "add_layer_norm",
+ "hypernetwork_name",
+ "layer_structure",
+ "save_hypernetwork_every",
+ "use_dropout",
+ "weight_init",
+}
saved_params_all = saved_params_shared | saved_params_ti | saved_params_hypernet
-saved_params_previews = {"preview_prompt", "preview_negative_prompt", "preview_steps", "preview_sampler_index", "preview_cfg_scale", "preview_seed", "preview_width", "preview_height"}
+saved_params_previews = {
+ "preview_cfg_scale",
+ "preview_height",
+ "preview_negative_prompt",
+ "preview_prompt",
+ "preview_sampler_index",
+ "preview_seed",
+ "preview_steps",
+ "preview_width",
+}
def save_settings_to_file(log_directory, all_params):
|