diff options
author | AUTOMATIC <16777216c@gmail.com> | 2022-11-26 16:45:57 +0300 |
---|---|---|
committer | AUTOMATIC <16777216c@gmail.com> | 2022-11-26 16:45:57 +0300 |
commit | 64c7b7975cedeb2aaa1a9c8eb4a479fc575843f8 (patch) | |
tree | de42cf84bc0a1973abbdb14b32bcea390d3c1081 /modules/sd_hijack.py | |
parent | 1123f52cadf8d86c006177791b3191e5b8388b5a (diff) |
restore hypernetworks to seemingly working state
Diffstat (limited to 'modules/sd_hijack.py')
-rw-r--r-- | modules/sd_hijack.py | 3 |
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/modules/sd_hijack.py b/modules/sd_hijack.py
index d5243fd3..64655eb1 100644
--- a/modules/sd_hijack.py
+++ b/modules/sd_hijack.py
@@ -9,6 +9,7 @@ from torch.nn.functional import silu
 import modules.textual_inversion.textual_inversion
from modules import prompt_parser, devices, sd_hijack_optimizations, shared
+from modules.hypernetworks import hypernetwork
from modules.shared import cmd_opts
from modules import sd_hijack_clip, sd_hijack_open_clip
@@ -60,7 +61,7 @@ def apply_optimizations():
 def undo_optimizations():
- ldm.modules.attention.CrossAttention.forward = attention_CrossAttention_forward # this stops hypernets from working
+ ldm.modules.attention.CrossAttention.forward = hypernetwork.attention_CrossAttention_forward
ldm.modules.diffusionmodules.model.nonlinearity = diffusionmodules_model_nonlinearity
ldm.modules.diffusionmodules.model.AttnBlock.forward = diffusionmodules_model_AttnBlock_forward
|