| author | AUTOMATIC1111 <16777216c@gmail.com> | 2023-07-17 15:51:59 +0300 |
|---|---|---|
| committer | GitHub <noreply@github.com> | 2023-07-17 15:51:59 +0300 |
| commit | 2164578738edba6f56c4e95409c56f9ccef442e0 (patch) | |
| tree | eedb5d43663dc108f15e7b36ea51b0570e919c7b /extensions-builtin/Lora/lyco_helpers.py | |
| parent | 05d23c78376ce73d3de932c7e7b8871914295675 (diff) | |
| parent | 35510f7529dc05437a82496187ef06b852be9ab1 (diff) | |
Merge pull request #11821 from AUTOMATIC1111/lora_lyco
lora extension rework to include other types of networks
Diffstat (limited to 'extensions-builtin/Lora/lyco_helpers.py')
| -rw-r--r-- | extensions-builtin/Lora/lyco_helpers.py | 21 |

1 file changed, 21 insertions, 0 deletions
diff --git a/extensions-builtin/Lora/lyco_helpers.py b/extensions-builtin/Lora/lyco_helpers.py
new file mode 100644
index 00000000..279b34bc
--- /dev/null
+++ b/extensions-builtin/Lora/lyco_helpers.py
@@ -0,0 +1,21 @@
+import torch
+
+
+def make_weight_cp(t, wa, wb):
+ temp = torch.einsum('i j k l, j r -> i r k l', t, wb)
+ return torch.einsum('i j k l, i r -> r j k l', temp, wa)
+
+
+def rebuild_conventional(up, down, shape, dyn_dim=None):
+ up = up.reshape(up.size(0), -1)
+ down = down.reshape(down.size(0), -1)
+ if dyn_dim is not None:
+ up = up[:, :dyn_dim]
+ down = down[:dyn_dim, :]
+ return (up @ down).reshape(shape)
+
+
+def rebuild_cp_decomposition(up, down, mid):
+ up = up.reshape(up.size(0), -1)
+ down = down.reshape(down.size(0), -1)
+ return torch.einsum('n m k l, i n, m j -> i j k l', mid, up, down)
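The three helpers cover the weight-reconstruction paths the reworked extension needs: `rebuild_conventional` multiplies a LoRA's up/down matrices back into a full delta weight (optionally truncated to the first `dyn_dim` ranks), while `make_weight_cp` and `rebuild_cp_decomposition` contract a small core tensor with per-axis factors to rebuild convolution weights from a CP-style (LyCORIS) decomposition. A minimal usage sketch, assuming `lyco_helpers.py` is on the import path; the shapes below are illustrative choices, not taken from the webui code:

```python
import torch

# Assumed import; in the webui this module lives at
# extensions-builtin/Lora/lyco_helpers.py.
from lyco_helpers import make_weight_cp, rebuild_conventional, rebuild_cp_decomposition

rank, out_ch, in_ch, kh, kw = 4, 8, 16, 3, 3  # illustrative sizes

# Conventional LoRA: delta_W = up @ down, reshaped to the layer's weight shape.
up = torch.randn(out_ch, rank)
down = torch.randn(rank, in_ch)
delta = rebuild_conventional(up, down, shape=(out_ch, in_ch))
assert delta.shape == (out_ch, in_ch)

# dyn_dim keeps only the leading ranks, giving a cheaper low-rank approximation.
delta_dyn = rebuild_conventional(up, down, shape=(out_ch, in_ch), dyn_dim=2)
assert delta_dyn.shape == (out_ch, in_ch)

# CP-style conv rebuild: `mid` holds the (rank, rank, kh, kw) core; the einsum
# maps its two rank axes back to output and input channels.
mid = torch.randn(rank, rank, kh, kw)
delta_conv = rebuild_cp_decomposition(up, torch.randn(rank, in_ch), mid)
assert delta_conv.shape == (out_ch, in_ch, kh, kw)

# make_weight_cp contracts the same kind of core with (rank, channels) factors.
w = make_weight_cp(mid, torch.randn(rank, out_ch), torch.randn(rank, in_ch))
assert w.shape == (out_ch, in_ch, kh, kw)
```

Expressing each reconstruction as a single einsum keeps the contraction order explicit and avoids hand-rolled reshapes per decomposition type, which is what lets the reworked extension treat conventional LoRA and CP-decomposed conv weights through one small helper module.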