| author | AUTOMATIC1111 <16777216c@gmail.com> | 2023-01-09 19:45:39 +0000 |
|---|---|---|
| committer | GitHub <noreply@github.com> | 2023-01-09 19:45:39 +0000 |
| commit | 18c001792a3f034245c2a9c38cb568d31c147fed (patch) | |
| tree | f43e79374dd7e74074dcbf48fc579f1b12b4d1a8 /modules/sd_vae.py | |
| parent | 72497895b9b1948f86d9309fe897cbb70c20ba7e (diff) | |
| parent | 2b94ec78869db7d2beaad23bdff47340416edf85 (diff) | |
Merge branch 'master' into varsize
Diffstat (limited to 'modules/sd_vae.py')
| -rw-r--r-- | modules/sd_vae.py | 20 |
1 file changed, 16 insertions, 4 deletions
```diff
diff --git a/modules/sd_vae.py b/modules/sd_vae.py
index ac71d62d..0a49daa1 100644
--- a/modules/sd_vae.py
+++ b/modules/sd_vae.py
@@ -1,8 +1,9 @@
 import torch
+import safetensors.torch
 import os
 import collections
 from collections import namedtuple
-from modules import shared, devices, script_callbacks
+from modules import shared, devices, script_callbacks, sd_models
 from modules.paths import models_path
 import glob
 from copy import deepcopy
@@ -72,8 +73,10 @@ def refresh_vae_list(vae_path=vae_path, model_path=model_path):
     candidates = [
         *glob.iglob(os.path.join(model_path, '**/*.vae.ckpt'), recursive=True),
         *glob.iglob(os.path.join(model_path, '**/*.vae.pt'), recursive=True),
+        *glob.iglob(os.path.join(model_path, '**/*.vae.safetensors'), recursive=True),
         *glob.iglob(os.path.join(vae_path, '**/*.ckpt'), recursive=True),
-        *glob.iglob(os.path.join(vae_path, '**/*.pt'), recursive=True)
+        *glob.iglob(os.path.join(vae_path, '**/*.pt'), recursive=True),
+        *glob.iglob(os.path.join(vae_path, '**/*.safetensors'), recursive=True),
     ]
     if shared.cmd_opts.vae_path is not None and os.path.isfile(shared.cmd_opts.vae_path):
         candidates.append(shared.cmd_opts.vae_path)
@@ -137,6 +140,12 @@ def resolve_vae(checkpoint_file=None, vae_file="auto"):
         if os.path.isfile(vae_file_try):
             vae_file = vae_file_try
             print(f"Using VAE found similar to selected model: {vae_file}")
+    # if still not found, try look for ".vae.safetensors" beside model
+    if vae_file == "auto":
+        vae_file_try = model_path + ".vae.safetensors"
+        if os.path.isfile(vae_file_try):
+            vae_file = vae_file_try
+            print(f"Using VAE found similar to selected model: {vae_file}")
     # No more fallbacks for auto
     if vae_file == "auto":
         vae_file = None
@@ -163,8 +172,9 @@ def load_vae(model, vae_file=None):
         assert os.path.isfile(vae_file), f"VAE file doesn't exist: {vae_file}"
         print(f"Loading VAE weights from: {vae_file}")
         store_base_vae(model)
-        vae_ckpt = torch.load(vae_file, map_location=shared.weight_load_location)
-        vae_dict_1 = {k: v for k, v in vae_ckpt["state_dict"].items() if k[0:4] != "loss" and k not in vae_ignore_keys}
+
+        vae_ckpt = sd_models.read_state_dict(vae_file, map_location=shared.weight_load_location)
+        vae_dict_1 = {k: v for k, v in vae_ckpt.items() if k[0:4] != "loss" and k not in vae_ignore_keys}
         _load_vae_dict(model, vae_dict_1)
 
         if cache_enabled:
@@ -195,10 +205,12 @@ def _load_vae_dict(model, vae_dict_1):
     model.first_stage_model.load_state_dict(vae_dict_1)
     model.first_stage_model.to(devices.dtype_vae)
 
+
 def clear_loaded_vae():
     global loaded_vae_file
     loaded_vae_file = None
 
+
 def reload_vae_weights(sd_model=None, vae_file="auto"):
     from modules import lowvram, devices, sd_hijack
 
```
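The functional core of this merge is that `load_vae()` now reads weights through `sd_models.read_state_dict()` instead of calling `torch.load()` directly, which is what lets the new `*.vae.safetensors` candidates found by `refresh_vae_list()` and `resolve_vae()` actually load. The helper below is a minimal, hypothetical sketch of that kind of extension-based dispatch; `read_vae_state_dict` and `filter_vae_keys` are invented names, not the webui's API, and the only library calls assumed are the documented `safetensors.torch.load_file()` and `torch.load()`.

```python
# Hypothetical sketch of extension-based state-dict loading, in the spirit of
# what the diff switches load_vae() to; not the webui's actual implementation.
import os

import safetensors.torch
import torch


def read_vae_state_dict(path: str, map_location: str = "cpu") -> dict:
    """Return a flat VAE state dict from a .safetensors or pickled checkpoint."""
    _, ext = os.path.splitext(path)
    if ext.lower() == ".safetensors":
        # safetensors files are loaded without pickle and are already a flat
        # {name: tensor} mapping; the target device is given as a string.
        return safetensors.torch.load_file(path, device=map_location)

    checkpoint = torch.load(path, map_location=map_location)
    # Pickled .ckpt/.pt checkpoints usually nest the weights under "state_dict".
    return checkpoint.get("state_dict", checkpoint)


def filter_vae_keys(state_dict: dict, ignore_keys=()) -> dict:
    """Drop loss-related and explicitly ignored keys, mirroring the diff's filter."""
    return {k: v for k, v in state_dict.items()
            if not k.startswith("loss") and k not in ignore_keys}
```

Because a helper like this returns an already-unwrapped mapping for both formats, the dictionary comprehension in `load_vae()` no longer needs the `["state_dict"]` indexing, which is why the diff changes `vae_ckpt["state_dict"].items()` to `vae_ckpt.items()`.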