diff options
author | AUTOMATIC <16777216c@gmail.com> | 2022-10-07 23:22:22 +0300 |
---|---|---|
committer | AUTOMATIC <16777216c@gmail.com> | 2022-10-07 23:22:22 +0300 |
commit | 12c4d5c6b5bf9dd50d0601c36af4f99b65316d58 (patch) | |
tree | 30bbd10c806afd8168cbc0cd0f33529bde8d1f47 /modules/sd_hijack_optimizations.py | |
parent | f7c787eb7c295c27439f4fbdf78c26b8389560be (diff) |
hypernetwork training mk1
Diffstat (limited to 'modules/sd_hijack_optimizations.py')
-rw-r--r-- | modules/sd_hijack_optimizations.py | 3 |
1 file changed, 1 insertion, 2 deletions
diff --git a/modules/sd_hijack_optimizations.py b/modules/sd_hijack_optimizations.py index d9cca485..3f32e020 100644 --- a/modules/sd_hijack_optimizations.py +++ b/modules/sd_hijack_optimizations.py @@ -45,8 +45,7 @@ def split_cross_attention_forward(self, x, context=None, mask=None): q_in = self.to_q(x)
context = default(context, x)
- hypernetwork = shared.selected_hypernetwork()
- hypernetwork_layers = (hypernetwork.layers if hypernetwork is not None else {}).get(context.shape[2], None)
+ hypernetwork_layers = (shared.hypernetwork.layers if shared.hypernetwork is not None else {}).get(context.shape[2], None)
if hypernetwork_layers is not None:
k_in = self.to_k(hypernetwork_layers[0](context))
|