From ac4ccfa1369e74492b467294eab96c3f558b297b Mon Sep 17 00:00:00 2001
From: AUTOMATIC1111 <16777216c@gmail.com>
Date: Thu, 13 Jul 2023 09:30:33 +0300
Subject: get attention optimizations to work

---
 modules/sd_models_xl.py | 3 +++
 1 file changed, 3 insertions(+)

(limited to 'modules/sd_models_xl.py')

diff --git a/modules/sd_models_xl.py b/modules/sd_models_xl.py
index 9224c1a3..4d1aa497 100644
--- a/modules/sd_models_xl.py
+++ b/modules/sd_models_xl.py
@@ -55,3 +55,6 @@
 sgm.modules.diffusionmodules.model.print = lambda *args: None
 sgm.modules.diffusionmodules.openaimodel.print = lambda *args: None
 sgm.modules.encoders.modules.print = lambda *args: None
+# this gets the code to load the vanilla attention that we override
+sgm.modules.attention.SDP_IS_AVAILABLE = True
+sgm.modules.attention.XFORMERS_IS_AVAILABLE = False
\ No newline at end of file
--
cgit v1.2.1
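
For context, the pattern behind this patch is flipping a library's module-level availability flags before any model is built, so the library takes the plain attention code path that the caller then overrides with its own optimized implementation. The sketch below illustrates that pattern in a self-contained way; only the flag names SDP_IS_AVAILABLE and XFORMERS_IS_AVAILABLE come from the diff above, while the fake_attention module, build_cross_attention, and vanilla_forward are hypothetical stand-ins, not sgm's actual API.

# Minimal, self-contained sketch of the flag-flipping pattern, assuming a
# library that picks its attention backend from module-level flags at the
# time an attention block is constructed. "fake_attention" stands in for
# sgm.modules.attention; everything except the two flag names is illustrative.
import types

# --- stand-in for the library module ----------------------------------------
fake_attention = types.ModuleType("fake_attention")
fake_attention.SDP_IS_AVAILABLE = False
fake_attention.XFORMERS_IS_AVAILABLE = True
fake_attention.vanilla_forward = lambda q, k, v: "vanilla attention path"


def build_cross_attention():
    # Pretend constructor: chooses a forward function based on the flags,
    # the way a library might when an attention block is instantiated.
    if fake_attention.XFORMERS_IS_AVAILABLE:
        return lambda q, k, v: "xformers path"
    if fake_attention.SDP_IS_AVAILABLE:
        return fake_attention.vanilla_forward  # the path we plan to override
    raise RuntimeError("no attention backend available")


fake_attention.build_cross_attention = build_cross_attention

# --- what the caller-side override does --------------------------------------
# 1. Force the library onto the vanilla/SDP branch so the override is used.
fake_attention.SDP_IS_AVAILABLE = True
fake_attention.XFORMERS_IS_AVAILABLE = False

# 2. Replace the vanilla forward with an "optimized" implementation.
fake_attention.vanilla_forward = lambda q, k, v: "optimized attention override"

# 3. Any attention block built after this point picks up the override.
forward = fake_attention.build_cross_attention()
print(forward(None, None, None))  # -> "optimized attention override"

The key design point this illustrates is ordering: the flags (and the replacement forward) must be set before the library constructs its attention blocks, which is why the patch applies them at module import time in sd_models_xl.py.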