author | Dynamic <bradje@naver.com> | 2022-10-29 13:33:06 +0000
committer | GitHub <noreply@github.com> | 2022-10-29 13:33:06 +0000
commit | 3d36d62d61425a2538270c489dc5fe827c125ad1 (patch)
tree | d3ebb7d5f5fa23f355a02288f0c645c102b8e6a0 /modules/sd_models.py
parent | a668444110743cd163474ec563b0e69025dea3d2 (diff)
parent | 35c45df28b303a05d56a13cb56d4046f08cf8c25 (diff)
Merge branch 'AUTOMATIC1111:master' into kr-localization
Diffstat (limited to 'modules/sd_models.py')
-rw-r--r-- | modules/sd_models.py | 18
1 file changed, 12 insertions, 6 deletions
diff --git a/modules/sd_models.py b/modules/sd_models.py
index e697bb72..f86dc3ed 100644
--- a/modules/sd_models.py
+++ b/modules/sd_models.py
@@ -3,6 +3,7 @@
import os.path
import sys
from collections import namedtuple
import torch
+import re
from omegaconf import OmegaConf
from ldm.util import instantiate_from_config
@@ -35,8 +36,10 @@ def setup_model():
list_models()
-def checkpoint_tiles():
- return sorted([x.title for x in checkpoints_list.values()])
+def checkpoint_tiles():
+ convert = lambda name: int(name) if name.isdigit() else name.lower()
+ alphanumeric_key = lambda key: [convert(c) for c in re.split('([0-9]+)', key)]
+ return sorted([x.title for x in checkpoints_list.values()], key = alphanumeric_key)
def list_models():
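The new checkpoint_tiles() sorts titles with a natural (alphanumeric) key: each title is split into digit and non-digit runs, and the digit runs compare as integers rather than as strings. A minimal standalone sketch of the same technique, with made-up checkpoint titles:

```python
import re

def alphanumeric_key(key):
    # Split "model-10.ckpt" into ['model-', 10, '.ckpt'] so numeric parts
    # compare numerically instead of character by character.
    convert = lambda part: int(part) if part.isdigit() else part.lower()
    return [convert(part) for part in re.split('([0-9]+)', key)]

# Hypothetical checkpoint titles, only to show the ordering.
titles = ["model-10.ckpt", "model-2.ckpt", "Model-1.ckpt"]

print(sorted(titles))                        # ['Model-1.ckpt', 'model-10.ckpt', 'model-2.ckpt']
print(sorted(titles, key=alphanumeric_key))  # ['Model-1.ckpt', 'model-2.ckpt', 'model-10.ckpt']
```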
@@ -170,7 +173,9 @@ def load_model_weights(model, checkpoint_info):
print(f"Global Step: {pl_sd['global_step']}")
sd = get_state_dict_from_checkpoint(pl_sd)
- missing, extra = model.load_state_dict(sd, strict=False)
+ del pl_sd
+ model.load_state_dict(sd, strict=False)
+ del sd
if shared.cmd_opts.opt_channelslast:
model.to(memory_format=torch.channels_last)
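Besides dropping the unused missing, extra return value, this hunk deletes pl_sd and sd as soon as they are no longer needed, so the raw checkpoint dictionaries can be garbage-collected instead of staying alive alongside the loaded model. A rough sketch of that pattern; the function name, path argument, and state-dict extraction below are placeholders, not the webui's actual load_model_weights signature:

```python
import torch

def load_weights(model, checkpoint_path):
    # Load the full checkpoint file; this can hold several GB of tensors.
    pl_sd = torch.load(checkpoint_path, map_location="cpu")

    # Keep only the nested state dict and release the outer container,
    # which may also carry optimizer state, EMA weights, etc.
    sd = pl_sd.get("state_dict", pl_sd)
    del pl_sd

    # Copy the weights into the model, then drop the source dict so the
    # loaded model is the only remaining owner of that memory.
    model.load_state_dict(sd, strict=False)
    del sd
    return model
```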
@@ -194,9 +199,10 @@ def load_model_weights(model, checkpoint_info):
model.first_stage_model.to(devices.dtype_vae)
- checkpoints_loaded[checkpoint_info] = model.state_dict().copy()
- while len(checkpoints_loaded) > shared.opts.sd_checkpoint_cache:
- checkpoints_loaded.popitem(last=False) # LRU
+ if shared.opts.sd_checkpoint_cache > 0:
+ checkpoints_loaded[checkpoint_info] = model.state_dict().copy()
+ while len(checkpoints_loaded) > shared.opts.sd_checkpoint_cache:
+ checkpoints_loaded.popitem(last=False) # LRU
else:
print(f"Loading weights [{sd_model_hash}] from cache")
checkpoints_loaded.move_to_end(checkpoint_info)
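checkpoints_loaded acts as a small LRU cache backed by an OrderedDict: insertions go to the end, popitem(last=False) evicts the oldest entry once the cache exceeds shared.opts.sd_checkpoint_cache, and a cache hit is refreshed with move_to_end(). The change above simply skips caching when the option is 0. A minimal sketch of that pattern in isolation, with an invented cache size and keys:

```python
from collections import OrderedDict

CACHE_SIZE = 2  # stand-in for shared.opts.sd_checkpoint_cache

cache = OrderedDict()

def put(key, value):
    if CACHE_SIZE > 0:
        cache[key] = value
        # Evict from the front (least recently used) once over capacity.
        while len(cache) > CACHE_SIZE:
            cache.popitem(last=False)

def get(key):
    # A hit becomes the most recently used entry.
    cache.move_to_end(key)
    return cache[key]

put("model-a", "weights-a")
put("model-b", "weights-b")
get("model-a")               # refresh model-a
put("model-c", "weights-c")  # evicts model-b, the least recently used
print(list(cache))           # ['model-a', 'model-c']
```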