From df004be2fc4b2c68adfb75565d97551a1a5e7ed6 Mon Sep 17 00:00:00 2001
From: Aarni Koskela
Date: Sun, 21 May 2023 00:26:16 +0300
Subject: Add a couple `from __future__ import annotations`es for Py3.9 compat

---
 modules/sd_hijack_optimizations.py | 1 +
 1 file changed, 1 insertion(+)

(limited to 'modules/sd_hijack_optimizations.py')

diff --git a/modules/sd_hijack_optimizations.py b/modules/sd_hijack_optimizations.py
index 0eb4c525..2ec0b049 100644
--- a/modules/sd_hijack_optimizations.py
+++ b/modules/sd_hijack_optimizations.py
@@ -1,3 +1,4 @@
+from __future__ import annotations
 import math
 import sys
 import traceback
--
cgit v1.2.1


From 00dfe27f59727407c5b408a80ff2a262934df495 Mon Sep 17 00:00:00 2001
From: Aarni Koskela
Date: Mon, 29 May 2023 08:54:13 +0300
Subject: Add & use modules.errors.print_error where currently printing exception info by hand

---
 modules/sd_hijack_optimizations.py | 6 ++----
 1 file changed, 2 insertions(+), 4 deletions(-)

(limited to 'modules/sd_hijack_optimizations.py')

diff --git a/modules/sd_hijack_optimizations.py b/modules/sd_hijack_optimizations.py
index 2ec0b049..fd186fa2 100644
--- a/modules/sd_hijack_optimizations.py
+++ b/modules/sd_hijack_optimizations.py
@@ -1,7 +1,5 @@
 from __future__ import annotations
 import math
-import sys
-import traceback

 import psutil
 import torch
@@ -11,6 +9,7 @@ from ldm.util import default
 from einops import rearrange

 from modules import shared, errors, devices, sub_quadratic_attention
+from modules.errors import print_error
 from modules.hypernetworks import hypernetwork

 import ldm.modules.attention
@@ -140,8 +139,7 @@ if shared.cmd_opts.xformers or shared.cmd_opts.force_enable_xformers:
         import xformers.ops
         shared.xformers_available = True
     except Exception:
-        print("Cannot import xformers", file=sys.stderr)
-        print(traceback.format_exc(), file=sys.stderr)
+        print_error("Cannot import xformers", exc_info=True)


 def get_available_vram():
--
cgit v1.2.1


From 05933840f0676dd1a90a7e2ad3f2a0672624b2cd Mon Sep 17 00:00:00 2001
From: AUTOMATIC <16777216c@gmail.com>
Date: Wed, 31 May 2023 19:56:37 +0300
Subject: rename print_error to report, use it together with package name

---
 modules/sd_hijack_optimizations.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

(limited to 'modules/sd_hijack_optimizations.py')

diff --git a/modules/sd_hijack_optimizations.py b/modules/sd_hijack_optimizations.py
index fd186fa2..5f0ff513 100644
--- a/modules/sd_hijack_optimizations.py
+++ b/modules/sd_hijack_optimizations.py
@@ -9,7 +9,6 @@ from ldm.util import default
 from einops import rearrange

 from modules import shared, errors, devices, sub_quadratic_attention
-from modules.errors import print_error
 from modules.hypernetworks import hypernetwork

 import ldm.modules.attention
@@ -139,7 +138,7 @@ if shared.cmd_opts.xformers or shared.cmd_opts.force_enable_xformers:
         import xformers.ops
         shared.xformers_available = True
     except Exception:
-        print_error("Cannot import xformers", exc_info=True)
+        errors.report("Cannot import xformers", exc_info=True)


 def get_available_vram():
--
cgit v1.2.1
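The two commits above converge on a single centralized call, errors.report("Cannot import xformers", exc_info=True), in place of hand-rolled sys.stderr/traceback printing. A minimal sketch of what such a helper might look like follows; the function body is an assumption for illustration, and the real modules/errors.py may differ.

# Hypothetical sketch of a centralized error reporter in the spirit of
# modules.errors.report; not the repository's actual implementation.
import sys
import traceback


def report(message: str, *, exc_info: bool = False) -> None:
    # Send the message to stderr; with exc_info=True, also print the
    # traceback of the exception currently being handled.
    print(message, file=sys.stderr)
    if exc_info:
        print(traceback.format_exc(), file=sys.stderr)


# Usage mirroring the hunk above:
try:
    import xformers.ops  # noqa: F401
except Exception:
    report("Cannot import xformers", exc_info=True)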
From 36888092afa82ee248bc947229f813b453629317 Mon Sep 17 00:00:00 2001
From: AUTOMATIC <16777216c@gmail.com>
Date: Thu, 1 Jun 2023 08:12:06 +0300
Subject: revert default cross attention optimization to Doggettx

make --disable-opt-split-attention command line option work again
---
 modules/sd_hijack_optimizations.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

(limited to 'modules/sd_hijack_optimizations.py')

diff --git a/modules/sd_hijack_optimizations.py b/modules/sd_hijack_optimizations.py
index 5f0ff513..b41aa419 100644
--- a/modules/sd_hijack_optimizations.py
+++ b/modules/sd_hijack_optimizations.py
@@ -57,7 +57,7 @@ class SdOptimizationSdpNoMem(SdOptimization):
     name = "sdp-no-mem"
     label = "scaled dot product without memory efficient attention"
     cmd_opt = "opt_sdp_no_mem_attention"
-    priority = 90
+    priority = 80

     def is_available(self):
         return hasattr(torch.nn.functional, "scaled_dot_product_attention") and callable(torch.nn.functional.scaled_dot_product_attention)
@@ -71,7 +71,7 @@ class SdOptimizationSdp(SdOptimizationSdpNoMem):
     name = "sdp"
     label = "scaled dot product"
    cmd_opt = "opt_sdp_attention"
-    priority = 80
+    priority = 70

     def apply(self):
         ldm.modules.attention.CrossAttention.forward = scaled_dot_product_attention_forward
@@ -114,7 +114,7 @@ class SdOptimizationInvokeAI(SdOptimization):
 class SdOptimizationDoggettx(SdOptimization):
     name = "Doggettx"
     cmd_opt = "opt_split_attention"
-    priority = 20
+    priority = 90

     def apply(self):
         ldm.modules.attention.CrossAttention.forward = split_cross_attention_forward
--
cgit v1.2.1
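The priority shuffle above is what restores Doggettx as the default: the webui chooses the available optimization with the highest priority unless one is requested explicitly, so raising Doggettx from 20 to 90 puts it ahead of the sdp variants. The sketch below illustrates that selection rule with a trimmed-down stand-in for SdOptimization; the select_optimizer helper and the optimizers list are assumptions for illustration, not the repository's actual selection code.

# Simplified illustration of priority-based selection; only the optimization
# names and priorities are taken from the diff, everything else is hypothetical.
from __future__ import annotations  # keeps the `| None` annotations valid on Python 3.9

from dataclasses import dataclass


@dataclass
class SdOptimization:
    name: str
    priority: int = 0

    def is_available(self) -> bool:
        return True


# Priorities as they stand after this commit.
optimizers = [
    SdOptimization("sdp-no-mem", priority=80),
    SdOptimization("sdp", priority=70),
    SdOptimization("Doggettx", priority=90),
]


def select_optimizer(forced_name: str | None = None) -> SdOptimization | None:
    # Honor an explicitly requested optimization first, otherwise fall back
    # to the available candidate with the highest priority.
    candidates = [o for o in optimizers if o.is_available()]
    if forced_name is not None:
        for o in candidates:
            if o.name == forced_name:
                return o
    return max(candidates, key=lambda o: o.priority, default=None)


print(select_optimizer().name)       # Doggettx wins on priority (90)
print(select_optimizer("sdp").name)  # an explicit request overrides priority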