path: root/modules/sd_hijack.py
author    Zac Liu <liuguang@baai.ac.cn>    2022-12-06 09:17:57 +0800
committer GitHub <noreply@github.com>      2022-12-06 09:17:57 +0800
commit    a25dfebeed5b3411f2dc0f558c2b87a7c1cda420 (patch)
tree      77f08a174aba15584282670ab3714ab88a76612c /modules/sd_hijack.py
parent    3ebf977a6e4f478ab918e44506974beee32da276 (diff)
parent    4929503258d80abbc4b5f40da034298fe3803906 (diff)
Merge pull request #3 from 920232796/master
fix device support for mps; update the support for SD2.0
Diffstat (limited to 'modules/sd_hijack.py')
-rw-r--r--    modules/sd_hijack.py    2
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/modules/sd_hijack.py b/modules/sd_hijack.py
index eb679ef9..9b5890e7 100644
--- a/modules/sd_hijack.py
+++ b/modules/sd_hijack.py
@@ -29,7 +29,7 @@ diffusionmodules_model_AttnBlock_forward = ldm.modules.diffusionmodules.model.AttnBlock.forward
# new memory efficient cross attention blocks do not support hypernets and we already
# have memory efficient cross attention anyway, so this disables SD2.0's memory efficient cross attention
ldm.modules.attention.MemoryEfficientCrossAttention = ldm.modules.attention.CrossAttention
-# ldm.modules.attention.BasicTransformerBlock.ATTENTION_MODES["softmax-xformers"] = ldm.modules.attention.CrossAttention
+ldm.modules.attention.BasicTransformerBlock.ATTENTION_MODES["softmax-xformers"] = ldm.modules.attention.CrossAttention
# silence new console spam from SD2
ldm.modules.attention.print = lambda *args: None
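
Note: both the old and new lines here rely on plain attribute reassignment (monkey-patching). SD2.0 looks up its attention class by name in BasicTransformerBlock.ATTENTION_MODES, so rebinding both the module attribute and the "softmax-xformers" mapping entry routes every request back to the stock CrossAttention. Below is a minimal, self-contained sketch of that pattern; the module and classes are stand-ins invented for illustration, not the real ldm package.

import types

# Stand-in for ldm.modules.attention; the real hijack patches that module.
attention = types.ModuleType("attention")

class CrossAttention:
    """Stand-in for the stock softmax attention implementation."""

class MemoryEfficientCrossAttention(CrossAttention):
    """Stand-in for SD2.0's xformers-based attention."""

class BasicTransformerBlock:
    # Maps mode names to attention classes, mirroring ldm's layout.
    ATTENTION_MODES = {
        "softmax": CrossAttention,
        "softmax-xformers": MemoryEfficientCrossAttention,
    }

attention.CrossAttention = CrossAttention
attention.MemoryEfficientCrossAttention = MemoryEfficientCrossAttention
attention.BasicTransformerBlock = BasicTransformerBlock
attention.print = print  # module-level print that the hijack silences

# The hijack: point both names at the stock class so the memory-efficient
# variant is never constructed, then silence the module's console output.
attention.MemoryEfficientCrossAttention = attention.CrossAttention
attention.BasicTransformerBlock.ATTENTION_MODES["softmax-xformers"] = attention.CrossAttention
attention.print = lambda *args: None

assert attention.BasicTransformerBlock.ATTENTION_MODES["softmax-xformers"] is CrossAttention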