author     AUTOMATIC <16777216c@gmail.com>   2023-05-13 20:25:03 +0300
committer  AUTOMATIC <16777216c@gmail.com>   2023-05-13 20:25:03 +0300
commit     2053745c8fc29a0d3c1bbfa07858fb0b21b32e6d (patch)
tree       8a80f841fc4a5d588e1214111990aca7472a8cc1
parent     231562ea13e4f697953bdbabd6b76b22a88c587b (diff)
parent     27f7fbf35cd72d547d830f97828ee13d3d2009aa (diff)
Merge branch 'v1.2.0-hotfix' into release_candidate
-rw-r--r--  CHANGELOG.md                                       | 11
-rw-r--r--  extensions-builtin/Lora/lora.py                    |  6
-rw-r--r--  extensions-builtin/Lora/scripts/lora_script.py     |  1
-rw-r--r--  extensions-builtin/Lora/ui_extra_networks_lora.py  |  8
-rw-r--r--  modules/modelloader.py                             | 27
-rw-r--r--  modules/safe.py                                    |  2
-rw-r--r--  modules/shared.py                                  |  9
-rw-r--r--  style.css                                          |  3
-rw-r--r--  webui.py                                           |  6
9 files changed, 47 insertions, 26 deletions
diff --git a/CHANGELOG.md b/CHANGELOG.md
index d1727864..b586b271 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,14 @@
+## Upcoming 1.2.1
+
+### Features:
+ * add an option to always refer to loras by filename
+
+### Bug Fixes:
+ * never refer to a lora by its alias if multiple loras share that alias or the alias is "none"
+ * fix upscalers disappearing after the user reloads UI
+ * allow bf16 in safe unpickler (resolves problems with loading some loras)
+ * allow web UI to be run fully offline
+
## 1.2.0
### Features:
diff --git a/extensions-builtin/Lora/lora.py b/extensions-builtin/Lora/lora.py
index ba1293df..6fa80006 100644
--- a/extensions-builtin/Lora/lora.py
+++ b/extensions-builtin/Lora/lora.py
@@ -393,6 +393,8 @@ def lora_MultiheadAttention_load_state_dict(self, *args, **kwargs):
def list_available_loras():
available_loras.clear()
available_lora_aliases.clear()
+ forbidden_lora_aliases.clear()
+ forbidden_lora_aliases.update({"none": 1})
os.makedirs(shared.cmd_opts.lora_dir, exist_ok=True)
@@ -406,6 +408,9 @@ def list_available_loras():
available_loras[name] = entry
+ if entry.alias in available_lora_aliases:
+ forbidden_lora_aliases[entry.alias.lower()] = 1
+
available_lora_aliases[name] = entry
available_lora_aliases[entry.alias] = entry
@@ -445,6 +450,7 @@ def infotext_pasted(infotext, params):
available_loras = {}
available_lora_aliases = {}
+forbidden_lora_aliases = {}
loaded_loras = []
list_available_loras()
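The hunk above marks an alias as forbidden only once a second file claims it, and pre-seeds the table with "none" so that reserved dropdown value can never be used as an alias. A minimal, self-contained sketch of that bookkeeping (FakeLora is a hypothetical stand-in for the webui's on-disk lora entries):

```python
# Sketch of the alias bookkeeping above; FakeLora is a hypothetical stand-in
# for the entries the real list_available_loras() builds from lora_dir.
class FakeLora:
    def __init__(self, name, alias):
        self.name = name
        self.alias = alias

available_loras = {}
available_lora_aliases = {}
forbidden_lora_aliases = {"none": 1}  # "none" is reserved by the prompt dropdown

for entry in [FakeLora("styleA", "shared"), FakeLora("styleB", "shared")]:
    available_loras[entry.name] = entry
    if entry.alias in available_lora_aliases:
        # a second file claims the same alias: no lora may be referenced by it
        forbidden_lora_aliases[entry.alias.lower()] = 1
    available_lora_aliases[entry.name] = entry
    available_lora_aliases[entry.alias] = entry

print(forbidden_lora_aliases)  # {'none': 1, 'shared': 1}
```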
diff --git a/extensions-builtin/Lora/scripts/lora_script.py b/extensions-builtin/Lora/scripts/lora_script.py
index 7db971fd..060bda05 100644
--- a/extensions-builtin/Lora/scripts/lora_script.py
+++ b/extensions-builtin/Lora/scripts/lora_script.py
@@ -54,6 +54,7 @@ script_callbacks.on_infotext_pasted(lora.infotext_pasted)
shared.options_templates.update(shared.options_section(('extra_networks', "Extra Networks"), {
"sd_lora": shared.OptionInfo("None", "Add Lora to prompt", gr.Dropdown, lambda: {"choices": ["None"] + [x for x in lora.available_loras]}, refresh=lora.list_available_loras),
+ "lora_preferred_name": shared.OptionInfo("Alias from file", "When adding to prompt, refer to lora by", gr.Radio, {"choices": ["Alias from file", "Filename"]}),
}))
diff --git a/extensions-builtin/Lora/ui_extra_networks_lora.py b/extensions-builtin/Lora/ui_extra_networks_lora.py
index a0edbc1e..2050e3fa 100644
--- a/extensions-builtin/Lora/ui_extra_networks_lora.py
+++ b/extensions-builtin/Lora/ui_extra_networks_lora.py
@@ -15,13 +15,19 @@ class ExtraNetworksPageLora(ui_extra_networks.ExtraNetworksPage):
def list_items(self):
for name, lora_on_disk in lora.available_loras.items():
path, ext = os.path.splitext(lora_on_disk.filename)
+
+ if shared.opts.lora_preferred_name == "Filename" or lora_on_disk.alias.lower() in lora.forbidden_lora_aliases:
+ alias = name
+ else:
+ alias = lora_on_disk.alias
+
yield {
"name": name,
"filename": path,
"preview": self.find_preview(path),
"description": self.find_description(path),
"search_term": self.search_terms_from_path(lora_on_disk.filename),
- "prompt": json.dumps(f"<lora:{lora_on_disk.alias}:") + " + opts.extra_networks_default_multiplier + " + json.dumps(">"),
+ "prompt": json.dumps(f"<lora:{alias}:") + " + opts.extra_networks_default_multiplier + " + json.dumps(">"),
"local_preview": f"{path}.{shared.opts.samples_format}",
"metadata": json.dumps(lora_on_disk.metadata, indent=4) if lora_on_disk.metadata else None,
}
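Note that the "prompt" value is not a finished prompt fragment: json.dumps quotes the literal pieces so the stored string reads as a JavaScript expression, letting the frontend splice in the user's configured multiplier when the card is clicked. A quick illustration of what gets produced (hypothetical alias, runnable outside the webui):

```python
import json

alias = "my_style"  # hypothetical alias picked by the logic above
prompt = json.dumps(f"<lora:{alias}:") + " + opts.extra_networks_default_multiplier + " + json.dumps(">")
print(prompt)
# output: "<lora:my_style:" + opts.extra_networks_default_multiplier + ">"
```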
diff --git a/modules/modelloader.py b/modules/modelloader.py
index cb85ac4f..a70aa0e3 100644
--- a/modules/modelloader.py
+++ b/modules/modelloader.py
@@ -117,20 +117,6 @@ def move_files(src_path: str, dest_path: str, ext_filter: str = None):
pass
-builtin_upscaler_classes = []
-forbidden_upscaler_classes = set()
-
-
-def list_builtin_upscalers():
- builtin_upscaler_classes.clear()
- builtin_upscaler_classes.extend(Upscaler.__subclasses__())
-
-def forbid_loaded_nonbuiltin_upscalers():
- for cls in Upscaler.__subclasses__():
- if cls not in builtin_upscaler_classes:
- forbidden_upscaler_classes.add(cls)
-
-
def load_upscalers():
# We can only do this 'magic' method to dynamically load upscalers if they are referenced,
# so we'll try to import any _model.py files before looking in __subclasses__
@@ -146,10 +132,17 @@ def load_upscalers():
datas = []
commandline_options = vars(shared.cmd_opts)
- for cls in Upscaler.__subclasses__():
- if cls in forbidden_upscaler_classes:
- continue
+ # some of upscaler classes will not go away after reloading their modules, and we'll end
+ # up with two copies of those classes. The newest copy will always be the last in the list,
+ # so we go from end to beginning and ignore duplicates
+ used_classes = {}
+ for cls in reversed(Upscaler.__subclasses__()):
+ classname = str(cls)
+ if classname not in used_classes:
+ used_classes[classname] = cls
+
+ for cls in reversed(used_classes.values()):
name = cls.__name__
cmd_name = f"{name.lower().replace('upscaler', '')}_models_path"
scaler = cls(commandline_options.get(cmd_name, None))
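The comment in this hunk describes a real pitfall of reloading modules: the pre-reload class object stays registered in Upscaler.__subclasses__() next to its post-reload replacement, and only the string form of the class is stable across the two copies. A minimal sketch of the same last-copy-wins dedup, using plain classes to fake the collision a reload would cause:

```python
# After a module reload, __subclasses__() holds both stale and fresh copies of
# the same class; the freshest copy is always last in the list.
class Base:
    pass

class StaleUpscalerA(Base): pass   # stands in for the pre-reload copy
class UpscalerB(Base): pass
class UpscalerA(Base): pass        # stands in for the post-reload copy
StaleUpscalerA.__qualname__ = "UpscalerA"  # make str(cls) collide, as a reload would

used_classes = {}
for cls in reversed(Base.__subclasses__()):
    classname = str(cls)
    if classname not in used_classes:
        used_classes[classname] = cls  # first hit going backwards = newest copy

survivors = list(reversed(used_classes.values()))
assert UpscalerA in survivors and StaleUpscalerA not in survivors
```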
diff --git a/modules/safe.py b/modules/safe.py
index e6c2f2c0..e1a67f73 100644
--- a/modules/safe.py
+++ b/modules/safe.py
@@ -40,7 +40,7 @@ class RestrictedUnpickler(pickle.Unpickler):
return getattr(collections, name)
if module == 'torch._utils' and name in ['_rebuild_tensor_v2', '_rebuild_parameter', '_rebuild_device_tensor_from_numpy']:
return getattr(torch._utils, name)
- if module == 'torch' and name in ['FloatStorage', 'HalfStorage', 'IntStorage', 'LongStorage', 'DoubleStorage', 'ByteStorage', 'float32']:
+ if module == 'torch' and name in ['FloatStorage', 'HalfStorage', 'IntStorage', 'LongStorage', 'DoubleStorage', 'ByteStorage', 'float32', 'BFloat16Storage']:
return getattr(torch, name)
if module == 'torch.nn.modules.container' and name in ['ParameterDict']:
return getattr(torch.nn.modules.container, name)
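For context, RestrictedUnpickler works by overriding pickle.Unpickler.find_class and resolving only an explicit allow-list of module/name pairs, which is why each newly needed storage type (here BFloat16Storage) has to be added by hand. A stripped-down sketch of the pattern, with a deliberately tiny allow-list rather than the webui's real one:

```python
import collections
import io
import pickle

class RestrictedUnpickler(pickle.Unpickler):
    # tiny allow-list for the sketch; the webui's covers torch storages etc.
    ALLOWED = {("collections", "OrderedDict")}

    def find_class(self, module, name):
        # resolve only explicitly allowed globals; refuse everything else
        if (module, name) in self.ALLOWED:
            return super().find_class(module, name)
        raise pickle.UnpicklingError(f"global '{module}.{name}' is forbidden")

data = pickle.dumps(collections.OrderedDict(a=1))
print(RestrictedUnpickler(io.BytesIO(data)).load())  # OrderedDict([('a', 1)])

evil = pickle.dumps(print)  # pickles a reference to a callable by name
try:
    RestrictedUnpickler(io.BytesIO(evil)).load()
except pickle.UnpicklingError as e:
    print(e)  # global 'builtins.print' is forbidden
```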
diff --git a/modules/shared.py b/modules/shared.py
index 4631965b..b3508883 100644
--- a/modules/shared.py
+++ b/modules/shared.py
@@ -667,14 +667,19 @@ def reload_gradio_theme(theme_name=None):
if not theme_name:
theme_name = opts.gradio_theme
+ default_theme_args = dict(
+ font=["Source Sans Pro", 'ui-sans-serif', 'system-ui', 'sans-serif'],
+ font_mono=['IBM Plex Mono', 'ui-monospace', 'Consolas', 'monospace'],
+ )
+
if theme_name == "Default":
- gradio_theme = gr.themes.Default()
+ gradio_theme = gr.themes.Default(**default_theme_args)
else:
try:
gradio_theme = gr.themes.ThemeClass.from_hub(theme_name)
except Exception as e:
errors.display(e, "changing gradio theme")
- gradio_theme = gr.themes.Default()
+ gradio_theme = gr.themes.Default(**default_theme_args)
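This hunk keeps the webui's Source Sans Pro / IBM Plex Mono font stack on both the "Default" path and a failed hub download. A condensed sketch of the fallback pattern, assuming gradio 3.x theming (gr.themes.Default and gr.themes.ThemeClass.from_hub):

```python
import gradio as gr

default_theme_args = dict(
    font=["Source Sans Pro", "ui-sans-serif", "system-ui", "sans-serif"],
    font_mono=["IBM Plex Mono", "ui-monospace", "Consolas", "monospace"],
)

def load_theme(theme_name=None):
    # hub themes are fetched by name; anything missing or broken falls back
    if not theme_name or theme_name == "Default":
        return gr.themes.Default(**default_theme_args)
    try:
        return gr.themes.ThemeClass.from_hub(theme_name)
    except Exception:
        # keep the UI usable if the hub is unreachable (e.g. fully offline)
        return gr.themes.Default(**default_theme_args)
```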
diff --git a/style.css b/style.css
--- a/style.css
+++ b/style.css
@@ -1,3 +1,6 @@
+/* temporary fix to load default gradio font in frontend instead of backend */
+
+@import url('https://fonts.googleapis.com/css2?family=Source+Sans+Pro:wght@400;600&display=swap');
/* general gradio fixes */
diff --git a/webui.py b/webui.py
--- a/webui.py
+++ b/webui.py
@@ -181,14 +181,11 @@ def initialize():
gfpgan.setup_model(cmd_opts.gfpgan_models_path)
startup_timer.record("setup gfpgan")
- modelloader.list_builtin_upscalers()
- startup_timer.record("list builtin upscalers")
-
modules.scripts.load_scripts()
startup_timer.record("load scripts")
modelloader.load_upscalers()
- #startup_timer.record("load upscalers") #Is this necessary? I don't know.
+ startup_timer.record("load upscalers") #Is this necessary? I don't know.
modules.sd_vae.refresh_vae_list()
startup_timer.record("refresh VAE")
@@ -388,7 +385,6 @@ def webui():
localization.list_localizations(cmd_opts.localizations_dir)
- modelloader.forbid_loaded_nonbuiltin_upscalers()
modules.scripts.reload_scripts()
startup_timer.record("load scripts")