From 904121fecc0a1f11db76a73ca8649fb21e05ac5b Mon Sep 17 00:00:00 2001
From: Nandaka
Date: Thu, 24 Nov 2022 02:39:09 +0000
Subject: Support NAI exif for PNG Info

---
 modules/extras.py | 14 ++++++++++++++
 1 file changed, 14 insertions(+)

(limited to 'modules/extras.py')

diff --git a/modules/extras.py b/modules/extras.py
index 71b93a06..af4cd97d 100644
--- a/modules/extras.py
+++ b/modules/extras.py
@@ -233,6 +233,20 @@ def run_pnginfo(image):
 
     geninfo = items.get('parameters', geninfo)
 
+    # nai prompt
+    if "Software" in items.keys() and items["Software"] == "NovelAI":
+        import json
+        json_info = json.loads(items["Comment"])
+        geninfo = f'{items["Description"]}\r\nNegative prompt: {json_info["uc"]}\r\n'
+        sampler = "Euler a"
+        if json_info["sampler"] == "k_euler_ancestral":
+            sampler = "Euler a"
+        elif json_info["sampler"] == "k_euler":
+            sampler = "Euler"
+        model_hash = '925997e9' # assuming this is the correct model hash
+        # not sure with noise and strength parameter
+        geninfo += f'Steps: {json_info["steps"]}, Sampler: {sampler}, CFG scale: {json_info["scale"]}, Seed: {json_info["seed"]}, Size: {image.width}x{image.height}, Model hash: {model_hash}' # , Denoising strength: {json_info["noise"]}'
+
     info = ''
     for key, text in items.items():
         info += f"""
--
cgit v1.2.1


From dac9b6f15de5e675053d9490a20e0457dcd1a23e Mon Sep 17 00:00:00 2001
From: AUTOMATIC <16777216c@gmail.com>
Date: Sun, 27 Nov 2022 15:51:29 +0300
Subject: add safetensors support for model merging #4869

---
 modules/extras.py | 26 ++++++++++++++------------
 1 file changed, 14 insertions(+), 12 deletions(-)

(limited to 'modules/extras.py')

diff --git a/modules/extras.py b/modules/extras.py
index 71b93a06..3d65d90a 100644
--- a/modules/extras.py
+++ b/modules/extras.py
@@ -20,6 +20,7 @@ import modules.codeformer_model
 import piexif
 import piexif.helper
 import gradio as gr
+import safetensors.torch
 
 
 class LruCache(OrderedDict):
@@ -249,7 +250,7 @@ def run_pnginfo(image):
     return '', geninfo, info
 
 
-def run_modelmerger(primary_model_name, secondary_model_name, teritary_model_name, interp_method, multiplier, save_as_half, custom_name):
+def run_modelmerger(primary_model_name, secondary_model_name, teritary_model_name, interp_method, multiplier, save_as_half, custom_name, checkpoint_format):
     def weighted_sum(theta0, theta1, alpha):
         return ((1 - alpha) * theta0) + (alpha * theta1)
 
@@ -264,19 +265,15 @@ def run_modelmerger(primary_model_name, secondary_model_name, teritary_model_nam
     teritary_model_info = sd_models.checkpoints_list.get(teritary_model_name, None)
 
     print(f"Loading {primary_model_info.filename}...")
-    primary_model = torch.load(primary_model_info.filename, map_location='cpu')
-    theta_0 = sd_models.get_state_dict_from_checkpoint(primary_model)
+    theta_0 = sd_models.read_state_dict(primary_model_info.filename, map_location='cpu')
 
     print(f"Loading {secondary_model_info.filename}...")
-    secondary_model = torch.load(secondary_model_info.filename, map_location='cpu')
-    theta_1 = sd_models.get_state_dict_from_checkpoint(secondary_model)
+    theta_1 = sd_models.read_state_dict(secondary_model_info.filename, map_location='cpu')
 
     if teritary_model_info is not None:
         print(f"Loading {teritary_model_info.filename}...")
-        teritary_model = torch.load(teritary_model_info.filename, map_location='cpu')
-        theta_2 = sd_models.get_state_dict_from_checkpoint(teritary_model)
+        theta_2 = sd_models.read_state_dict(teritary_model_info.filename, map_location='cpu')
     else:
-        teritary_model = None
         theta_2 = None
 
     theta_funcs = {
@@ -295,7 +292,7 @@ def run_modelmerger(primary_model_name, secondary_model_name, teritary_model_nam
                 theta_1[key] = theta_func1(theta_1[key], t2)
             else:
                 theta_1[key] = torch.zeros_like(theta_1[key])
-        del theta_2, teritary_model
+        del theta_2
 
     for key in tqdm.tqdm(theta_0.keys()):
         if 'model' in key and key in theta_1:
@@ -314,12 +311,17 @@ def run_modelmerger(primary_model_name, secondary_model_name, teritary_model_nam
     ckpt_dir = shared.cmd_opts.ckpt_dir or sd_models.model_path
 
-    filename = primary_model_info.model_name + '_' + str(round(1-multiplier, 2)) + '-' + secondary_model_info.model_name + '_' + str(round(multiplier, 2)) + '-' + interp_method.replace(" ", "_") + '-merged.ckpt'
-    filename = filename if custom_name == '' else (custom_name + '.ckpt')
+    filename = primary_model_info.model_name + '_' + str(round(1-multiplier, 2)) + '-' + secondary_model_info.model_name + '_' + str(round(multiplier, 2)) + '-' + interp_method.replace(" ", "_") + '-merged.' + checkpoint_format
+    filename = filename if custom_name == '' else (custom_name + '.' + checkpoint_format)
     output_modelname = os.path.join(ckpt_dir, filename)
 
     print(f"Saving to {output_modelname}...")
-    torch.save(primary_model, output_modelname)
+
+    _, extension = os.path.splitext(output_modelname)
+    if extension.lower() == ".safetensors":
+        safetensors.torch.save_file(theta_0, output_modelname, metadata={"format": "pt"})
+    else:
+        torch.save(theta_0, output_modelname)
 
     sd_models.list_models()
--
cgit v1.2.1
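The first patch above converts NovelAI's PNG metadata into a webui-style parameters string inside run_pnginfo. A minimal standalone sketch of the same conversion is shown below; the chunk names (Software, Description, Comment) and the JSON fields (uc, steps, sampler, scale, seed) come straight from the patch, while the helper name is only illustrative and the patch's hard-coded model hash ('925997e9') is the original author's guess, so it is left out here.

    import json
    from PIL import Image

    def nai_png_to_parameters(path):
        # Rough sketch: turn NovelAI PNG text chunks into a webui-style parameters string.
        image = Image.open(path)
        items = image.info
        if items.get("Software") != "NovelAI":
            return None

        meta = json.loads(items["Comment"])  # JSON with uc, steps, sampler, scale, seed, ...
        sampler = {"k_euler_ancestral": "Euler a", "k_euler": "Euler"}.get(meta["sampler"], "Euler a")

        text = f'{items["Description"]}\r\nNegative prompt: {meta["uc"]}\r\n'
        text += (
            f'Steps: {meta["steps"]}, Sampler: {sampler}, CFG scale: {meta["scale"]}, '
            f'Seed: {meta["seed"]}, Size: {image.width}x{image.height}'
        )
        return text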
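The second patch switches run_modelmerger to work on plain state dicts and to pick the output format from the file extension. A small sketch of that save-path dispatch, assuming the merged state dict is already built (save_state_dict is an illustrative name, not a function from the repository):

    import os
    import torch
    import safetensors.torch

    def save_state_dict(state_dict, output_path):
        # Save a merged state dict either as .safetensors or as a classic torch checkpoint,
        # choosing the writer based on the output file extension, as in the patch above.
        _, extension = os.path.splitext(output_path)
        if extension.lower() == ".safetensors":
            # safetensors stores a flat {name: tensor} mapping plus optional string metadata
            safetensors.torch.save_file(state_dict, output_path, metadata={"format": "pt"})
        else:
            torch.save(state_dict, output_path)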
From 506d529d19f135f57e142371271f84d4971b456f Mon Sep 17 00:00:00 2001
From: AUTOMATIC <16777216c@gmail.com>
Date: Sun, 27 Nov 2022 16:28:32 +0300
Subject: rework #5012 to also work for pictures dragged into the prompt and also add Clip skip + ENSD to parameters

---
 modules/extras.py | 40 +++++-----------------------------------
 1 file changed, 5 insertions(+), 35 deletions(-)

(limited to 'modules/extras.py')

diff --git a/modules/extras.py b/modules/extras.py
index 0057bf9c..6021a024 100644
--- a/modules/extras.py
+++ b/modules/extras.py
@@ -1,6 +1,8 @@
 from __future__ import annotations
 import math
 import os
+import sys
+import traceback
 
 import numpy as np
 from PIL import Image
@@ -12,7 +14,7 @@ from typing import Callable, List, OrderedDict, Tuple
 from functools import partial
 from dataclasses import dataclass
 
-from modules import processing, shared, images, devices, sd_models
+from modules import processing, shared, images, devices, sd_models, sd_samplers
 from modules.shared import opts
 import modules.gfpgan_model
 from modules.ui import plaintext_to_html
@@ -22,7 +24,6 @@ import piexif.helper
 import gradio as gr
 import safetensors.torch
 
-
 class LruCache(OrderedDict):
     @dataclass(frozen=True)
     class Key:
@@ -214,39 +215,8 @@ def run_pnginfo(image):
     if image is None:
         return '', '', ''
 
-    items = image.info
-    geninfo = ''
-
-    if "exif" in image.info:
-        exif = piexif.load(image.info["exif"])
-        exif_comment = (exif or {}).get("Exif", {}).get(piexif.ExifIFD.UserComment, b'')
-        try:
-            exif_comment = piexif.helper.UserComment.load(exif_comment)
-        except ValueError:
-            exif_comment = exif_comment.decode('utf8', errors="ignore")
-
-        items['exif comment'] = exif_comment
-        geninfo = exif_comment
-
-    for field in ['jfif', 'jfif_version', 'jfif_unit', 'jfif_density', 'dpi', 'exif',
-                  'loop', 'background', 'timestamp', 'duration']:
-        items.pop(field, None)
-
-    geninfo = items.get('parameters', geninfo)
-
-    # nai prompt
-    if "Software" in items.keys() and items["Software"] == "NovelAI":
-        import json
-        json_info = json.loads(items["Comment"])
-        geninfo = f'{items["Description"]}\r\nNegative prompt: {json_info["uc"]}\r\n'
-        sampler = "Euler a"
-        if json_info["sampler"] == "k_euler_ancestral":
-            sampler = "Euler a"
-        elif json_info["sampler"] == "k_euler":
-            sampler = "Euler"
-        model_hash = '925997e9' # assuming this is the correct model hash
-        # not sure with noise and strength parameter
-        geninfo += f'Steps: {json_info["steps"]}, Sampler: {sampler}, CFG scale: {json_info["scale"]}, Seed: {json_info["seed"]}, Size: {image.width}x{image.height}, Model hash: {model_hash}' # , Denoising strength: {json_info["noise"]}'
+    geninfo, items = images.read_info_from_image(image)
+    items = {**{'parameters': geninfo}, **items}
 
     info = ''
     for key, text in items.items():
--
cgit v1.2.1