Diffstat (limited to 'modules/sd_vae.py')
-rw-r--r--  modules/sd_vae.py  66
1 file changed, 41 insertions(+), 25 deletions(-)
diff --git a/modules/sd_vae.py b/modules/sd_vae.py
index 9c120975..ac71d62d 100644
--- a/modules/sd_vae.py
+++ b/modules/sd_vae.py
@@ -1,9 +1,11 @@
import torch
import os
+import collections
from collections import namedtuple
from modules import shared, devices, script_callbacks
from modules.paths import models_path
import glob
+from copy import deepcopy
model_dir = "Stable-diffusion"
@@ -15,7 +17,7 @@ vae_path = os.path.abspath(os.path.join(models_path, vae_dir))
vae_ignore_keys = {"model_ema.decay", "model_ema.num_updates"}
-default_vae_dict = {"auto": "auto", "None": "None"}
+default_vae_dict = {"auto": "auto", "None": None, None: None}
default_vae_list = ["auto", "None"]
@@ -29,6 +31,7 @@ base_vae = None
loaded_vae_file = None
checkpoint_info = None
+checkpoints_loaded = collections.OrderedDict()
def get_base_vae(model):
if base_vae is not None and checkpoint_info == model.sd_checkpoint_info and model:
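The new module-level checkpoints_loaded is an OrderedDict that the code further down uses as a small LRU-style cache of VAE state dicts, keyed by file path. A minimal standalone sketch of that eviction pattern (the filenames and values here are placeholders, not webui code):

import collections

cache = collections.OrderedDict()
cache["vae-a.ckpt"] = {"weights": "..."}   # placeholder entries
cache["vae-b.ckpt"] = {"weights": "..."}
cache["vae-c.ckpt"] = {"weights": "..."}

max_entries = 2
while len(cache) > max_entries:
    cache.popitem(last=False)              # evicts "vae-a.ckpt", the oldest insertion

print(list(cache))                         # ['vae-b.ckpt', 'vae-c.ckpt']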
@@ -39,7 +42,8 @@ def get_base_vae(model):
def store_base_vae(model):
global base_vae, checkpoint_info
if checkpoint_info != model.sd_checkpoint_info:
- base_vae = model.first_stage_model.state_dict().copy()
+ assert not loaded_vae_file, "Trying to store non-base VAE!"
+ base_vae = deepcopy(model.first_stage_model.state_dict())
checkpoint_info = model.sd_checkpoint_info
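Switching from state_dict().copy() to deepcopy matters because load_state_dict() writes new values into the existing parameter tensors in place, so a shallow dict copy would still point at storage that gets overwritten once another VAE is loaded. An illustrative sketch of the difference, using a toy module rather than the webui model:

from copy import deepcopy
import torch

layer = torch.nn.Linear(2, 2)
shallow = layer.state_dict().copy()        # new dict, same tensor storage
deep = deepcopy(layer.state_dict())        # independent tensor storage

layer.load_state_dict({"weight": torch.zeros(2, 2), "bias": torch.zeros(2)})

print(shallow["weight"].abs().sum())       # 0 -- the shallow snapshot was clobbered
print(deep["weight"].abs().sum())          # non-zero -- the deep snapshot survived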
@@ -50,9 +54,11 @@ def delete_base_vae():
def restore_base_vae(model):
- global base_vae, checkpoint_info
+ global loaded_vae_file
if base_vae is not None and checkpoint_info == model.sd_checkpoint_info:
- load_vae_dict(model, base_vae)
+ print("Restoring base VAE")
+ _load_vae_dict(model, base_vae)
+ loaded_vae_file = None
delete_base_vae()
@@ -145,12 +151,30 @@ def load_vae(model, vae_file=None):
global first_load, vae_dict, vae_list, loaded_vae_file
# save_settings = False
+ cache_enabled = shared.opts.sd_vae_checkpoint_cache > 0
+
if vae_file:
- assert os.path.isfile(vae_file), f"VAE file doesn't exist: {vae_file}"
- print(f"Loading VAE weights from: {vae_file}")
- vae_ckpt = torch.load(vae_file, map_location=shared.weight_load_location)
- vae_dict_1 = {k: v for k, v in vae_ckpt["state_dict"].items() if k[0:4] != "loss" and k not in vae_ignore_keys}
- load_vae_dict(model, vae_dict_1)
+ if cache_enabled and vae_file in checkpoints_loaded:
+ # use vae checkpoint cache
+ print(f"Loading VAE weights [{get_filename(vae_file)}] from cache")
+ store_base_vae(model)
+ _load_vae_dict(model, checkpoints_loaded[vae_file])
+ else:
+ assert os.path.isfile(vae_file), f"VAE file doesn't exist: {vae_file}"
+ print(f"Loading VAE weights from: {vae_file}")
+ store_base_vae(model)
+ vae_ckpt = torch.load(vae_file, map_location=shared.weight_load_location)
+ vae_dict_1 = {k: v for k, v in vae_ckpt["state_dict"].items() if k[0:4] != "loss" and k not in vae_ignore_keys}
+ _load_vae_dict(model, vae_dict_1)
+
+ if cache_enabled:
+ # cache newly loaded vae
+ checkpoints_loaded[vae_file] = vae_dict_1.copy()
+
+ # clean up cache if limit is reached
+ if cache_enabled:
+ while len(checkpoints_loaded) > shared.opts.sd_vae_checkpoint_cache + 1: # we need to count the current model
+ checkpoints_loaded.popitem(last=False) # LRU
# If vae used is not in dict, update it
# It will be removed on refresh though
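A condensed, standalone sketch of the caching flow this hunk adds (load_from_disk and apply_to_model are placeholder callables, not the webui API): reuse a cached state dict when present, otherwise load from disk and cache it, then evict the oldest entries so at most cache_size + 1 remain, since the currently loaded VAE also occupies a slot.

import collections

checkpoints_loaded = collections.OrderedDict()

def load_vae_cached(vae_file, cache_size, load_from_disk, apply_to_model):
    cache_enabled = cache_size > 0
    if cache_enabled and vae_file in checkpoints_loaded:
        apply_to_model(checkpoints_loaded[vae_file])      # cache hit
    else:
        state = load_from_disk(vae_file)
        apply_to_model(state)
        if cache_enabled:
            checkpoints_loaded[vae_file] = state.copy()   # cache the newly loaded VAE
            while len(checkpoints_loaded) > cache_size + 1:
                checkpoints_loaded.popitem(last=False)    # drop the oldest entry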
@@ -158,30 +182,22 @@ def load_vae(model, vae_file=None):
if vae_opt not in vae_dict:
vae_dict[vae_opt] = vae_file
vae_list.append(vae_opt)
+ elif loaded_vae_file:
+ restore_base_vae(model)
loaded_vae_file = vae_file
- """
- # Save current VAE to VAE settings, maybe? will it work?
- if save_settings:
- if vae_file is None:
- vae_opt = "None"
-
- # shared.opts.sd_vae = vae_opt
- """
-
first_load = False
# don't call this from outside
-def load_vae_dict(model, vae_dict_1=None):
- if vae_dict_1:
- store_base_vae(model)
- model.first_stage_model.load_state_dict(vae_dict_1)
- else:
- restore_base_vae()
+def _load_vae_dict(model, vae_dict_1):
+ model.first_stage_model.load_state_dict(vae_dict_1)
model.first_stage_model.to(devices.dtype_vae)
+def clear_loaded_vae():
+ global loaded_vae_file
+ loaded_vae_file = None
def reload_vae_weights(sd_model=None, vae_file="auto"):
from modules import lowvram, devices, sd_hijack
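The refactor makes _load_vae_dict a pure "apply these weights" helper: snapshotting and restoring the checkpoint's own VAE is now the caller's responsibility. A hypothetical call sequence (an assumption for illustration, not taken from the diff), written as comments:

# store_base_vae(model)                  # snapshot the checkpoint's own VAE once
# _load_vae_dict(model, external_vae)    # load_state_dict + move to devices.dtype_vae
# ...generate with the external VAE...
# restore_base_vae(model)                # put the original VAE back; resets loaded_vae_file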
@@ -211,5 +227,5 @@ def reload_vae_weights(sd_model=None, vae_file="auto"):
if not shared.cmd_opts.lowvram and not shared.cmd_opts.medvram:
sd_model.to(devices.device)
- print(f"VAE Weights loaded.")
+ print("VAE Weights loaded.")
return sd_model