From 210cb4c128afdd65fa998229a97d0694154983ea Mon Sep 17 00:00:00 2001
From: Tim Patton <38817597+pattontim@users.noreply.github.com>
Date: Mon, 21 Nov 2022 16:40:18 -0500
Subject: Use GPU for loading safetensors, disable export

---
 modules/sd_models.py | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/modules/sd_models.py b/modules/sd_models.py
index 2f8c2c48..2bbb3bf5 100644
--- a/modules/sd_models.py
+++ b/modules/sd_models.py
@@ -147,8 +147,9 @@ def torch_load(model_filename, model_info, map_override=None):
     map_override=shared.weight_load_location if not map_override else map_override
     if(checkpoint_types[model_info.exttype] == 'safetensors'):
         # safely load weights
-        # TODO: safetensors supports zero copy fast load to gpu, see issue #684
-        return load_file(model_filename, device=map_override)
+        # TODO: safetensors supports zero copy fast load to gpu, see issue #684.
+        # GPU only for now, see https://github.com/huggingface/safetensors/issues/95
+        return load_file(model_filename, device='cuda')
     else:
         return torch.load(model_filename, map_location=map_override)

-- 
cgit v1.2.1
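
Note (not part of the patch): a minimal standalone sketch of the loading pattern the diff uses, with safetensors files loaded directly onto the GPU and other checkpoints falling back to torch.load. The load_weights helper and the "model.safetensors" filename are hypothetical; only load_file(path, device=...) and torch.load(path, map_location=...) are the real library calls the patch relies on.

    # sketch.py -- assumes a CUDA device and a local checkpoint file exist
    import torch
    from safetensors.torch import load_file

    def load_weights(path: str, device: str = "cuda"):
        """Return a state dict, using safetensors' direct-to-device loading
        when the file extension allows it."""
        if path.endswith(".safetensors"):
            # safetensors can place tensors on the target device without an
            # intermediate pickle step; the patch pins this to 'cuda' because
            # arbitrary device overrides were unreliable at the time
            # (see https://github.com/huggingface/safetensors/issues/95).
            return load_file(path, device=device)
        # Legacy .ckpt/.pt checkpoints go through torch.load as before.
        return torch.load(path, map_location=device)

    state_dict = load_weights("model.safetensors")  # hypothetical filename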