diff options
author | Zac Liu <liuguang@baai.ac.cn> | 2022-12-06 09:17:57 +0800 |
---|---|---|
committer | GitHub <noreply@github.com> | 2022-12-06 09:17:57 +0800 |
commit | a25dfebeed5b3411f2dc0f558c2b87a7c1cda420 (patch) | |
tree | 77f08a174aba15584282670ab3714ab88a76612c /modules | |
parent | 3ebf977a6e4f478ab918e44506974beee32da276 (diff) | |
parent | 4929503258d80abbc4b5f40da034298fe3803906 (diff) |
Merge pull request #3 from 920232796/master
fix device support for mps
update the support for SD2.0
Diffstat (limited to 'modules')
-rw-r--r-- | modules/devices.py | 4 | ||||
-rw-r--r-- | modules/sd_hijack.py | 2 | ||||
-rw-r--r-- | modules/shared.py | 6 |
3 files changed, 8 insertions, 4 deletions
diff --git a/modules/devices.py b/modules/devices.py
index 397b4b95..f8cffae1 100644
--- a/modules/devices.py
+++ b/modules/devices.py
@@ -38,8 +38,8 @@ def get_optimal_device():
    if torch.cuda.is_available():
        return torch.device(get_cuda_device_string())
-    # if has_mps():
-    #     return torch.device("mps")
+    if has_mps():
+        return torch.device("mps")
    return cpu
diff --git a/modules/sd_hijack.py b/modules/sd_hijack.py
index eb679ef9..9b5890e7 100644
--- a/modules/sd_hijack.py
+++ b/modules/sd_hijack.py
@@ -29,7 +29,7 @@ diffusionmodules_model_AttnBlock_forward = ldm.modules.diffusionmodules.model.At
# new memory efficient cross attention blocks do not support hypernets and we already
# have memory efficient cross attention anyway, so this disables SD2.0's memory efficient cross attention
ldm.modules.attention.MemoryEfficientCrossAttention = ldm.modules.attention.CrossAttention
-# ldm.modules.attention.BasicTransformerBlock.ATTENTION_MODES["softmax-xformers"] = ldm.modules.attention.CrossAttention
+ldm.modules.attention.BasicTransformerBlock.ATTENTION_MODES["softmax-xformers"] = ldm.modules.attention.CrossAttention
# silence new console spam from SD2
ldm.modules.attention.print = lambda *args: None
diff --git a/modules/shared.py b/modules/shared.py
index 7d82f4ee..522c56c1 100644
--- a/modules/shared.py
+++ b/modules/shared.py
@@ -110,7 +110,11 @@ restricted_opts = {
from omegaconf import OmegaConf
config = OmegaConf.load(f"{cmd_opts.config}")
# XLMR-Large
-text_model_name = config.model.params.cond_stage_config.params.name
+try:
+ text_model_name = config.model.params.cond_stage_config.params.name
+
+except :
+ text_model_name = "stable_diffusion"
cmd_opts.disable_extension_access = (cmd_opts.share or cmd_opts.listen or cmd_opts.server_name) and not cmd_opts.enable_insecure_extension_access
|