| author    | DepFA <35278260+dfaker@users.noreply.github.com> | 2022-10-10 00:38:54 +0100 |
| ---       | ---                                               | ---                       |
| committer | GitHub <noreply@github.com>                       | 2022-10-10 00:38:54 +0100 |
| commit    | 4117afff11c7b0a2162c73ea02be8cfa30d02640 (patch)  |                           |
| tree      | af26f1b0c9eac8c024d2e51ec8fb5ca4a4d45ed3 /modules/sd_models.py | |
| parent    | e2c2925eb4d634b186de2c76798162ec56e2f869 (diff)   |                           |
| parent    | 45fbd1c5fec887988ab555aac75a999d4f3aff40 (diff)   |                           |
Merge branch 'master' into embed-embeddings-in-images
Diffstat (limited to 'modules/sd_models.py')
-rw-r--r-- | modules/sd_models.py | 20 |
1 file changed, 12 insertions, 8 deletions
diff --git a/modules/sd_models.py b/modules/sd_models.py
index cb3982b1..e63d3c29 100644
--- a/modules/sd_models.py
+++ b/modules/sd_models.py
@@ -5,7 +5,6 @@ from collections import namedtuple
 import torch
 from omegaconf import OmegaConf
-
 
 from ldm.util import instantiate_from_config
 
 from modules import shared, modelloader, devices
@@ -122,6 +121,13 @@ def select_checkpoint():
     return checkpoint_info
 
 
+def get_state_dict_from_checkpoint(pl_sd):
+    if "state_dict" in pl_sd:
+        return pl_sd["state_dict"]
+
+    return pl_sd
+
+
 def load_model_weights(model, checkpoint_info):
     checkpoint_file = checkpoint_info.filename
     sd_model_hash = checkpoint_info.hash
@@ -131,11 +137,8 @@ def load_model_weights(model, checkpoint_info):
     pl_sd = torch.load(checkpoint_file, map_location="cpu")
     if "global_step" in pl_sd:
         print(f"Global Step: {pl_sd['global_step']}")
-
-    if "state_dict" in pl_sd:
-        sd = pl_sd["state_dict"]
-    else:
-        sd = pl_sd
+
+    sd = get_state_dict_from_checkpoint(pl_sd)
 
     model.load_state_dict(sd, strict=False)
 
@@ -165,7 +168,7 @@ def load_model():
     checkpoint_info = select_checkpoint()
 
     if checkpoint_info.config != shared.cmd_opts.config:
-        print(f"Loading config from: {shared.cmd_opts.config}")
+        print(f"Loading config from: {checkpoint_info.config}")
 
     sd_config = OmegaConf.load(checkpoint_info.config)
     sd_model = instantiate_from_config(sd_config.model)
@@ -192,7 +195,8 @@ def reload_model_weights(sd_model, info=None):
         return
 
     if sd_model.sd_checkpoint_info.config != checkpoint_info.config:
-        return load_model()
+        shared.sd_model = load_model()
+        return shared.sd_model
 
     if shared.cmd_opts.lowvram or shared.cmd_opts.medvram:
         lowvram.send_everything_to_cpu()
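For readers skimming the diff, the new `get_state_dict_from_checkpoint` helper only unwraps the optional `state_dict` layer that some `.ckpt` files place around their weights; checkpoints saved without that wrapper are returned untouched. Below is a minimal sketch of that behaviour, using plain dicts as stand-ins for real `torch.load()` results (the example values are illustrative assumptions, not data from the repository):

```python
# Sketch of the helper added in this merge: some checkpoints wrap their
# weights in a "state_dict" key, others store them at the top level.
def get_state_dict_from_checkpoint(pl_sd):
    if "state_dict" in pl_sd:
        return pl_sd["state_dict"]

    return pl_sd


# Illustrative stand-ins for torch.load(checkpoint_file, map_location="cpu") results.
wrapped = {"global_step": 470000, "state_dict": {"model.weight": [0.0]}}
flat = {"model.weight": [0.0]}

assert get_state_dict_from_checkpoint(wrapped) == {"model.weight": [0.0]}
assert get_state_dict_from_checkpoint(flat) is flat
```

In the patched `load_model_weights()`, the returned dict is then passed straight to `model.load_state_dict(sd, strict=False)`, so both checkpoint layouts load through the same code path.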