author    papuSpartan <30642826+papuSpartan@users.noreply.github.com>  2022-10-31 15:08:54 -0500
committer GitHub <noreply@github.com>  2022-10-31 15:08:54 -0500
commit    25de9df3648f6d936ec7dbbb91c6c04bc3939a62 (patch)
tree      8dbf732357d5ed094350827aff4caa7741f5a4cc /modules/hypernetworks/ui.py
parent    ce42879438bf2dbd76b5b346be656292e42ffb2b (diff)
parent    5c9b3625fa03f18649e1843b5e9f2df2d4de94f9 (diff)
Merge branch 'AUTOMATIC1111:master' into master
Diffstat (limited to 'modules/hypernetworks/ui.py')
-rw-r--r--  modules/hypernetworks/ui.py | 13
1 file changed, 8 insertions(+), 5 deletions(-)
diff --git a/modules/hypernetworks/ui.py b/modules/hypernetworks/ui.py
index e6f50a1f..aad09ffc 100644
--- a/modules/hypernetworks/ui.py
+++ b/modules/hypernetworks/ui.py
@@ -3,14 +3,15 @@ import os
import re
import gradio as gr
-
-import modules.textual_inversion.textual_inversion
import modules.textual_inversion.preprocess
-from modules import sd_hijack, shared, devices
+import modules.textual_inversion.textual_inversion
+from modules import devices, sd_hijack, shared
from modules.hypernetworks import hypernetwork
+not_available = ["hardswish", "multiheadattention"]
+keys = ["linear"] + list(x for x in hypernetwork.HypernetworkModule.activation_dict.keys() if x not in not_available)
-def create_hypernetwork(name, enable_sizes, overwrite_old, layer_structure=None, add_layer_norm=False, activation_func=None):
+def create_hypernetwork(name, enable_sizes, overwrite_old, layer_structure=None, activation_func=None, weight_init=None, add_layer_norm=False, use_dropout=False):
# Remove illegal characters from name.
name = "".join( x for x in name if (x.isalnum() or x in "._- "))
@@ -25,8 +26,10 @@ def create_hypernetwork(name, enable_sizes, overwrite_old, layer_structure=None,
name=name,
enable_sizes=[int(x) for x in enable_sizes],
layer_structure=layer_structure,
- add_layer_norm=add_layer_norm,
activation_func=activation_func,
+ weight_init=weight_init,
+ add_layer_norm=add_layer_norm,
+ use_dropout=use_dropout,
)
hypernet.save(fn)
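
Note: the merge reorders the imports, adds a module-level activation filter (not_available / keys), and extends create_hypernetwork with weight_init and use_dropout parameters while keeping add_layer_norm. Below is a minimal, self-contained sketch of the new filtering logic only; the activation_dict here is a hypothetical stand-in for hypernetwork.HypernetworkModule.activation_dict, whose real contents are defined in hypernetwork.py.

# Hypothetical stand-in for HypernetworkModule.activation_dict.
activation_dict = {
    "relu": None,
    "leakyrelu": None,
    "hardswish": None,
    "multiheadattention": None,
}

# Mirrors the added lines in ui.py: exclude activations flagged as
# not available and always offer "linear" as the first choice.
not_available = ["hardswish", "multiheadattention"]
keys = ["linear"] + [x for x in activation_dict.keys() if x not in not_available]

print(keys)  # ['linear', 'relu', 'leakyrelu']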