diff options
author | AUTOMATIC <16777216c@gmail.com> | 2022-10-09 07:23:31 +0000 |
---|---|---|
committer | AUTOMATIC <16777216c@gmail.com> | 2022-10-09 07:23:31 +0000 |
commit | c77c89cc83c618472ad352cf8a28fde28c3a1377 (patch) | |
tree | a5b6e4a15eee659a05b5bdc32343178ce5f9bb9a | |
parent | 050a6a798cec90ae2f881c2ddd3f0221e69907dc (diff) | |
download | stable-diffusion-webui-gfx803-c77c89cc83c618472ad352cf8a28fde28c3a1377.tar.gz stable-diffusion-webui-gfx803-c77c89cc83c618472ad352cf8a28fde28c3a1377.tar.bz2 stable-diffusion-webui-gfx803-c77c89cc83c618472ad352cf8a28fde28c3a1377.zip |
make main model loading and model merger use the same code
-rw-r--r-- | modules/extras.py | 6 | ||||
-rw-r--r-- | modules/sd_models.py | 14 |
2 files changed, 12 insertions, 8 deletions
diff --git a/modules/extras.py b/modules/extras.py
index 1d9e64e5..ef6e6de7 100644
--- a/modules/extras.py
+++ b/modules/extras.py
@@ -169,9 +169,9 @@ def run_modelmerger(primary_model_name, secondary_model_name, interp_method, int
     print(f"Loading {secondary_model_info.filename}...")
secondary_model = torch.load(secondary_model_info.filename, map_location='cpu')
-
- theta_0 = primary_model['state_dict']
- theta_1 = secondary_model['state_dict']
+
+ theta_0 = sd_models.get_state_dict_from_checkpoint(primary_model)
+ theta_1 = sd_models.get_state_dict_from_checkpoint(secondary_model)
theta_funcs = {
"Weighted Sum": weighted_sum,
diff --git a/modules/sd_models.py b/modules/sd_models.py
index cb3982b1..18fb8c2e 100644
--- a/modules/sd_models.py
+++ b/modules/sd_models.py
@@ -122,6 +122,13 @@ def select_checkpoint():
     return checkpoint_info
+def get_state_dict_from_checkpoint(pl_sd):
+ if "state_dict" in pl_sd:
+ return pl_sd["state_dict"]
+
+ return pl_sd
+
+
def load_model_weights(model, checkpoint_info):
checkpoint_file = checkpoint_info.filename
sd_model_hash = checkpoint_info.hash
@@ -131,11 +138,8 @@ def load_model_weights(model, checkpoint_info):
    pl_sd = torch.load(checkpoint_file, map_location="cpu")
if "global_step" in pl_sd:
print(f"Global Step: {pl_sd['global_step']}")
-
- if "state_dict" in pl_sd:
- sd = pl_sd["state_dict"]
- else:
- sd = pl_sd
+
+ sd = get_state_dict_from_checkpoint(pl_sd)
model.load_state_dict(sd, strict=False)
|