From 740070ea9cdb254209f66417418f2a4af8b099d6 Mon Sep 17 00:00:00 2001 From: d8ahazard Date: Mon, 26 Sep 2022 09:29:50 -0500 Subject: Re-implement universal model loading --- modules/ldsr_model.py | 45 ++++++++++++++++++++++++++------------------- 1 file changed, 26 insertions(+), 19 deletions(-) (limited to 'modules/ldsr_model.py') diff --git a/modules/ldsr_model.py b/modules/ldsr_model.py index 95e84659..e6e7ff74 100644 --- a/modules/ldsr_model.py +++ b/modules/ldsr_model.py @@ -3,11 +3,14 @@ import sys import traceback from collections import namedtuple -from basicsr.utils.download_util import load_file_from_url +from modules import shared, images, modelloader, paths +from modules.paths import models_path -import modules.images -from modules import shared -from modules.paths import script_path +model_dir = "LDSR" +model_path = os.path.join(models_path, model_dir) +cmd_path = None +model_url = "https://heibox.uni-heidelberg.de/f/578df07c8fc04ffbadf3/?dl=1" +yaml_url = "https://heibox.uni-heidelberg.de/f/31a76b13ea27482981b4/?dl=1" LDSRModelInfo = namedtuple("LDSRModelInfo", ["name", "location", "model", "netscale"]) @@ -25,28 +28,32 @@ class UpscalerLDSR(modules.images.Upscaler): return upscale_with_ldsr(img) -def add_lsdr(): - modules.shared.sd_upscalers.append(UpscalerLDSR(100)) +def setup_model(dirname): + global cmd_path + global model_path + if not os.path.exists(model_path): + os.makedirs(model_path) + cmd_path = dirname + shared.sd_upscalers.append(UpscalerLDSR(100)) -def setup_ldsr(): - path = modules.paths.paths.get("LDSR", None) +def prepare_ldsr(): + path = paths.paths.get("LDSR", None) if path is None: return global have_ldsr global LDSR_obj try: from LDSR import LDSR - model_url = "https://heibox.uni-heidelberg.de/f/578df07c8fc04ffbadf3/?dl=1" - yaml_url = "https://heibox.uni-heidelberg.de/f/31a76b13ea27482981b4/?dl=1" - repo_path = 'latent-diffusion/experiments/pretrained_models/' - model_path = load_file_from_url(url=model_url, 
model_dir=os.path.join("repositories", repo_path), - progress=True, file_name="model.chkpt") - yaml_path = load_file_from_url(url=yaml_url, model_dir=os.path.join("repositories", repo_path), - progress=True, file_name="project.yaml") - have_ldsr = True - LDSR_obj = LDSR(model_path, yaml_path) - + model_files = modelloader.load_models(model_path, model_url, cmd_path, dl_name="model.ckpt", ext_filter=[".ckpt"]) + yaml_files = modelloader.load_models(model_path, yaml_url, cmd_path, dl_name="project.yaml", ext_filter=[".yaml"]) + if len(model_files) != 0 and len(yaml_files) != 0: + model_file = model_files[0] + yaml_file = yaml_files[0] + have_ldsr = True + LDSR_obj = LDSR(model_file, yaml_file) + else: + return except Exception: print("Error importing LDSR:", file=sys.stderr) @@ -55,7 +62,7 @@ def setup_ldsr(): def upscale_with_ldsr(image): - setup_ldsr() + prepare_ldsr() if not have_ldsr or LDSR_obj is None: return image -- cgit v1.2.1 From 7d5c29b674bacc5654f8613af134632b7cbdb158 Mon Sep 17 00:00:00 2001 From: d8ahazard Date: Mon, 26 Sep 2022 10:27:18 -0500 Subject: Cleanup existing directories, fixes --- modules/ldsr_model.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'modules/ldsr_model.py') diff --git a/modules/ldsr_model.py b/modules/ldsr_model.py index e6e7ff74..4f9b1657 100644 --- a/modules/ldsr_model.py +++ b/modules/ldsr_model.py @@ -19,7 +19,7 @@ have_ldsr = False LDSR_obj = None -class UpscalerLDSR(modules.images.Upscaler): +class UpscalerLDSR(images.Upscaler): def __init__(self, steps): self.steps = steps self.name = "LDSR" -- cgit v1.2.1 From 0dce0df1ee63b2f158805c1a1f1a3743cc4a104b Mon Sep 17 00:00:00 2001 From: d8ahazard Date: Thu, 29 Sep 2022 17:46:23 -0500 Subject: Holy $hit. Yep. Fix gfpgan_model_arch requirement(s). Add Upscaler base class, move from images. Add a lot of methods to Upscaler. Re-work all the child upscalers to be proper classes. Add BSRGAN scaler. 
Add ldsr_model_arch class, removing the dependency for another repo that just uses regular latent-diffusion stuff. Add one universal method that will always find and load new upscaler models without having to add new "setup_model" calls. Still need to add command line params, but that could probably be automated. Add a "self.scale" property to all Upscalers so the scalers themselves can do "things" in response to the requested upscaling size. Ensure LDSR doesn't get stuck in a longer loop of "upscale/downscale/upscale" as we try to reach the target upscale size. Add typehints for IDE sanity. PEP-8 improvements. Moar. --- modules/ldsr_model.py | 103 ++++++++++++++++++-------------------------------- 1 file changed, 37 insertions(+), 66 deletions(-) (limited to 'modules/ldsr_model.py') diff --git a/modules/ldsr_model.py b/modules/ldsr_model.py index 4f9b1657..969d1a0d 100644 --- a/modules/ldsr_model.py +++ b/modules/ldsr_model.py @@ -1,74 +1,45 @@ import os import sys import traceback -from collections import namedtuple -from modules import shared, images, modelloader, paths -from modules.paths import models_path - -model_dir = "LDSR" -model_path = os.path.join(models_path, model_dir) -cmd_path = None -model_url = "https://heibox.uni-heidelberg.de/f/578df07c8fc04ffbadf3/?dl=1" -yaml_url = "https://heibox.uni-heidelberg.de/f/31a76b13ea27482981b4/?dl=1" +from basicsr.utils.download_util import load_file_from_url -LDSRModelInfo = namedtuple("LDSRModelInfo", ["name", "location", "model", "netscale"]) - -ldsr_models = [] -have_ldsr = False -LDSR_obj = None +from modules.upscaler import Upscaler, UpscalerData +from modules.ldsr_model_arch import LDSR +from modules import shared +from modules.paths import models_path -class UpscalerLDSR(images.Upscaler): - def __init__(self, steps): - self.steps = steps +class UpscalerLDSR(Upscaler): + def __init__(self, user_path): self.name = "LDSR" - - def do_upscale(self, img): - return upscale_with_ldsr(img) - - -def 
setup_model(dirname): - global cmd_path - global model_path - if not os.path.exists(model_path): - os.makedirs(model_path) - cmd_path = dirname - shared.sd_upscalers.append(UpscalerLDSR(100)) - - -def prepare_ldsr(): - path = paths.paths.get("LDSR", None) - if path is None: - return - global have_ldsr - global LDSR_obj - try: - from LDSR import LDSR - model_files = modelloader.load_models(model_path, model_url, cmd_path, dl_name="model.ckpt", ext_filter=[".ckpt"]) - yaml_files = modelloader.load_models(model_path, yaml_url, cmd_path, dl_name="project.yaml", ext_filter=[".yaml"]) - if len(model_files) != 0 and len(yaml_files) != 0: - model_file = model_files[0] - yaml_file = yaml_files[0] - have_ldsr = True - LDSR_obj = LDSR(model_file, yaml_file) - else: - return - - except Exception: - print("Error importing LDSR:", file=sys.stderr) - print(traceback.format_exc(), file=sys.stderr) - have_ldsr = False - - -def upscale_with_ldsr(image): - prepare_ldsr() - if not have_ldsr or LDSR_obj is None: - return image - - ddim_steps = shared.opts.ldsr_steps - pre_scale = shared.opts.ldsr_pre_down - post_scale = shared.opts.ldsr_post_down - - image = LDSR_obj.super_resolution(image, ddim_steps, pre_scale, post_scale) - return image + self.model_path = os.path.join(models_path, self.name) + self.user_path = user_path + self.model_url = "https://heibox.uni-heidelberg.de/f/578df07c8fc04ffbadf3/?dl=1" + self.yaml_url = "https://heibox.uni-heidelberg.de/f/31a76b13ea27482981b4/?dl=1" + super().__init__() + scaler_data = UpscalerData("LDSR", None, self) + self.scalers = [scaler_data] + + def load_model(self, path: str): + model = load_file_from_url(url=self.model_url, model_dir=self.model_path, + file_name="model.pth", progress=True) + yaml = load_file_from_url(url=self.yaml_url, model_dir=self.model_path, + file_name="project.yaml", progress=True) + + try: + return LDSR(model, yaml) + + except Exception: + print("Error importing LDSR:", file=sys.stderr) + 
print(traceback.format_exc(), file=sys.stderr) + return None + + def do_upscale(self, img, path): + ldsr = self.load_model(path) + if ldsr is None: + print("NO LDSR!") + return img + ddim_steps = shared.opts.ldsr_steps + pre_scale = shared.opts.ldsr_pre_down + return ldsr.super_resolution(img, ddim_steps, self.scale) -- cgit v1.2.1