Diffstat (limited to 'modules/sd_hijack.py')
-rw-r--r-- | modules/sd_hijack.py | 12
1 file changed, 3 insertions, 9 deletions
diff --git a/modules/sd_hijack.py b/modules/sd_hijack.py
index fa7eaeb8..317e0c4c 100644
--- a/modules/sd_hijack.py
+++ b/modules/sd_hijack.py
@@ -13,6 +13,7 @@ from ldm.util import default
 from einops import rearrange
 import ldm.modules.attention
 import ldm.modules.diffusionmodules.model
+from torch.nn.functional import silu
 
 
 # see https://github.com/basujindal/stable-diffusion/pull/117 for discussion
@@ -101,14 +102,6 @@ def split_cross_attention_forward(self, x, context=None, mask=None):
     return self.to_out(r2)
 
 
-def nonlinearity_hijack(x):
-    # swish
-    t = torch.sigmoid(x)
-    x *= t
-    del t
-
-    return x
-
 def cross_attention_attnblock_forward(self, x):
     h_ = x
     h_ = self.norm(h_)
@@ -253,11 +246,12 @@ class StableDiffusionModelHijack:
         self.clip = m.cond_stage_model
 
+        ldm.modules.diffusionmodules.model.nonlinearity = silu
+
         if cmd_opts.opt_split_attention_v1:
             ldm.modules.attention.CrossAttention.forward = split_cross_attention_forward_v1
         elif not cmd_opts.disable_opt_split_attention and (cmd_opts.opt_split_attention or torch.cuda.is_available()):
             ldm.modules.attention.CrossAttention.forward = split_cross_attention_forward
-            ldm.modules.diffusionmodules.model.nonlinearity = nonlinearity_hijack
             ldm.modules.diffusionmodules.model.AttnBlock.forward = cross_attention_attnblock_forward
 
         def flatten(el):
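Note: the deleted nonlinearity_hijack helper computed swish, x * sigmoid(x), which is the same function torch.nn.functional.silu implements (PyTorch's name for swish is SiLU), so the monkey-patch of ldm.modules.diffusionmodules.model.nonlinearity is a drop-in replacement. A minimal sketch of the equivalence; the tensor below is just an illustration:

import torch
from torch.nn.functional import silu

def swish_reference(x):
    # same value the removed helper produced: x * sigmoid(x)
    return x * torch.sigmoid(x)

x = torch.randn(4, 8)  # illustrative input
assert torch.allclose(silu(x), swish_reference(x))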