author     hako-mikan <122196982+hako-mikan@users.noreply.github.com>   2024-02-09 23:17:40 +0900
committer  GitHub <noreply@github.com>                                  2024-02-09 23:17:40 +0900
commit     0bc7867ccd4ac24f5f270cb767c4642d0a0c001c (patch)
tree       2ad13a0cf77bc189a8c9097bd507f9674f993da6 /modules/sd_models.py
parent     816096e642187a18b11e2729c42c0b5f677f047d (diff)
parent     cf2772fab0af5573da775e7437e6acdca424f26e (diff)
Merge branch 'AUTOMATIC1111:master' into master
Diffstat (limited to 'modules/sd_models.py')
-rw-r--r--   modules/sd_models.py   17
1 file changed, 13 insertions, 4 deletions
diff --git a/modules/sd_models.py b/modules/sd_models.py
index 841402e8..9355f1e1 100644
--- a/modules/sd_models.py
+++ b/modules/sd_models.py
@@ -230,15 +230,19 @@ def select_checkpoint():
     return checkpoint_info


-checkpoint_dict_replacements = {
+checkpoint_dict_replacements_sd1 = {
     'cond_stage_model.transformer.embeddings.': 'cond_stage_model.transformer.text_model.embeddings.',
     'cond_stage_model.transformer.encoder.': 'cond_stage_model.transformer.text_model.encoder.',
     'cond_stage_model.transformer.final_layer_norm.': 'cond_stage_model.transformer.text_model.final_layer_norm.',
 }

+checkpoint_dict_replacements_sd2_turbo = { # Converts SD 2.1 Turbo from SGM to LDM format.
+    'conditioner.embedders.0.': 'cond_stage_model.',
+}
+

-def transform_checkpoint_dict_key(k):
-    for text, replacement in checkpoint_dict_replacements.items():
+def transform_checkpoint_dict_key(k, replacements):
+    for text, replacement in replacements.items():
         if k.startswith(text):
             k = replacement + k[len(text):]

@@ -249,9 +253,14 @@ def get_state_dict_from_checkpoint(pl_sd):
     pl_sd = pl_sd.pop("state_dict", pl_sd)
     pl_sd.pop("state_dict", None)

+    is_sd2_turbo = 'conditioner.embedders.0.model.ln_final.weight' in pl_sd and pl_sd['conditioner.embedders.0.model.ln_final.weight'].size()[0] == 1024
+
     sd = {}
     for k, v in pl_sd.items():
-        new_key = transform_checkpoint_dict_key(k)
+        if is_sd2_turbo:
+            new_key = transform_checkpoint_dict_key(k, checkpoint_dict_replacements_sd2_turbo)
+        else:
+            new_key = transform_checkpoint_dict_key(k, checkpoint_dict_replacements_sd1)

         if new_key is not None:
             sd[new_key] = v
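For readers less familiar with this part of the checkpoint loader, the behaviour the patch adds can be illustrated with a small standalone Python sketch. This is not repository code: the state-dict keys and values below are hypothetical placeholders (real checkpoints hold torch tensors), and the SD 2.1 Turbo detection is simplified to a key-presence test, whereas the patch also verifies that the ln_final weight has a first dimension of 1024.

# Standalone sketch (not repository code) of the prefix remapping this patch
# adds: detect an SD 2.1 Turbo checkpoint and rewrite its SGM-style key
# prefixes to the LDM-style prefixes the rest of the webui expects.

checkpoint_dict_replacements_sd1 = {
    'cond_stage_model.transformer.embeddings.': 'cond_stage_model.transformer.text_model.embeddings.',
    'cond_stage_model.transformer.encoder.': 'cond_stage_model.transformer.text_model.encoder.',
    'cond_stage_model.transformer.final_layer_norm.': 'cond_stage_model.transformer.text_model.final_layer_norm.',
}

checkpoint_dict_replacements_sd2_turbo = {
    'conditioner.embedders.0.': 'cond_stage_model.',
}


def transform_checkpoint_dict_key(k, replacements):
    # Rewrite the first matching prefix; keys that match nothing pass through unchanged.
    for text, replacement in replacements.items():
        if k.startswith(text):
            k = replacement + k[len(text):]
    return k


# Hypothetical state dict with SGM-style keys, as they might appear in an
# SD 2.1 Turbo checkpoint; real values would be torch tensors.
pl_sd = {
    'conditioner.embedders.0.model.ln_final.weight': '<tensor, first dim 1024>',
    'conditioner.embedders.0.model.token_embedding.weight': '<tensor>',
    'first_stage_model.decoder.conv_in.weight': '<tensor>',
}

# Simplified detection: the patch additionally checks that this weight's
# first dimension equals 1024 before treating the checkpoint as SD 2.1 Turbo.
is_sd2_turbo = 'conditioner.embedders.0.model.ln_final.weight' in pl_sd

replacements = checkpoint_dict_replacements_sd2_turbo if is_sd2_turbo else checkpoint_dict_replacements_sd1
sd = {transform_checkpoint_dict_key(k, replacements): v for k, v in pl_sd.items()}

for key in sd:
    print(key)
# cond_stage_model.model.ln_final.weight
# cond_stage_model.model.token_embedding.weight
# first_stage_model.decoder.conv_in.weight

The design keeps the SD1 and SD 2.1 Turbo prefix tables separate and selects exactly one of them per checkpoint, so keys outside the chosen table are copied over untouched.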