path: root/modules/sd_models.py
author     missionfloyd <missionfloyd@users.noreply.github.com>   2023-03-26 21:47:05 -0600
committer  missionfloyd <missionfloyd@users.noreply.github.com>   2023-03-26 21:47:05 -0600
commit     efac2cf1ab6645f3f5134158c1401c6305c2ffea (patch)
tree       24b69e980c07d03618a6e5b2447704cdb30a6a20 /modules/sd_models.py
parent     1d096ed1456c9b9b662477839853621848705e68 (diff)
parent     a336c7fe233fa7dff062f5187c0f4d01ab26e80b (diff)
Merge branch 'extra-network-preview-lazyload' of https://github.com/missionfloyd/stable-diffusion-webui into extra-network-preview-lazyload
Diffstat (limited to 'modules/sd_models.py')
-rw-r--r--  modules/sd_models.py  26
1 file changed, 23 insertions, 3 deletions
diff --git a/modules/sd_models.py b/modules/sd_models.py
index f0cb1240..86218c08 100644
--- a/modules/sd_models.py
+++ b/modules/sd_models.py
@@ -178,7 +178,7 @@ def select_checkpoint():
    return checkpoint_info

-chckpoint_dict_replacements = {
+checkpoint_dict_replacements = {
    'cond_stage_model.transformer.embeddings.': 'cond_stage_model.transformer.text_model.embeddings.',
    'cond_stage_model.transformer.encoder.': 'cond_stage_model.transformer.text_model.encoder.',
    'cond_stage_model.transformer.final_layer_norm.': 'cond_stage_model.transformer.text_model.final_layer_norm.',
@@ -186,7 +186,7 @@ chckpoint_dict_replacements = {
def transform_checkpoint_dict_key(k):
-    for text, replacement in chckpoint_dict_replacements.items():
+    for text, replacement in checkpoint_dict_replacements.items():
        if k.startswith(text):
            k = replacement + k[len(text):]
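
For context, a minimal sketch of what the renamed mapping is used for: transform_checkpoint_dict_key rewrites legacy CLIP text-encoder keys into the newer text_model layout before a state dict is loaded. The sample key below is illustrative, not taken from this repository.

    # Hypothetical legacy key; the prefix match triggers the replacement above.
    legacy_key = "cond_stage_model.transformer.embeddings.position_embedding.weight"
    print(transform_checkpoint_dict_key(legacy_key))
    # -> cond_stage_model.transformer.text_model.embeddings.position_embedding.weight
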
@@ -494,7 +494,7 @@ def reload_model_weights(sd_model=None, info=None):
    if sd_model is None or checkpoint_config != sd_model.used_config:
        del sd_model
        checkpoints_loaded.clear()
-        load_model(checkpoint_info, already_loaded_state_dict=state_dict, time_taken_to_load_state_dict=timer.records["load weights from disk"])
+        load_model(checkpoint_info, already_loaded_state_dict=state_dict)
        return shared.sd_model

    try:
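
For orientation, a rough sketch of how this call path is typically reached, assuming the webui environment; get_closet_checkpoint_match and the checkpoint name are assumptions used for illustration and are not part of this diff.

    # Minimal sketch: switching to a named checkpoint via reload_model_weights.
    from modules import sd_models

    info = sd_models.get_closet_checkpoint_match("v1-5-pruned-emaonly")  # assumed lookup helper
    sd_models.reload_model_weights(info=info)  # falls through to load_model() when the config differs
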
@@ -517,3 +517,23 @@ def reload_model_weights(sd_model=None, info=None):
    print(f"Weights loaded in {timer.summary()}.")

    return sd_model
+
+def unload_model_weights(sd_model=None, info=None):
+    from modules import lowvram, devices, sd_hijack
+    timer = Timer()
+
+    if shared.sd_model:
+
+        # shared.sd_model.cond_stage_model.to(devices.cpu)
+        # shared.sd_model.first_stage_model.to(devices.cpu)
+        shared.sd_model.to(devices.cpu)
+        sd_hijack.model_hijack.undo_hijack(shared.sd_model)
+        shared.sd_model = None
+        sd_model = None
+        gc.collect()
+        devices.torch_gc()
+        torch.cuda.empty_cache()
+
+    print(f"Unloaded weights {timer.summary()}.")
+
+    return sd_model
\ No newline at end of file
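
A rough usage sketch of the new unload/reload cycle, assuming the webui modules are importable; the free_vram_between_jobs wrapper is a hypothetical helper, not part of this commit.

    # Minimal sketch: free VRAM while other GPU work runs, then restore the model.
    from modules import sd_models, shared

    def free_vram_between_jobs():
        sd_models.unload_model_weights()   # moves the model to CPU, undoes the hijack, clears shared.sd_model
        assert shared.sd_model is None     # memory is reclaimed via gc.collect()/torch.cuda.empty_cache()

        # ... run other GPU-heavy work here ...

        sd_models.reload_model_weights()   # reloads the currently selected checkpoint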