From 9d40212485febe05a662dd0346e6def83e456288 Mon Sep 17 00:00:00 2001
From: AUTOMATIC <16777216c@gmail.com>
Date: Tue, 13 Sep 2022 21:49:58 +0300
Subject: first attempt to produce correct seeds in batch

---
 modules/sd_samplers.py | 25 +++++++++++++++++++++++++
 1 file changed, 25 insertions(+)

(limited to 'modules/sd_samplers.py')

diff --git a/modules/sd_samplers.py b/modules/sd_samplers.py
index 7ef507f1..f77fe43f 100644
--- a/modules/sd_samplers.py
+++ b/modules/sd_samplers.py
@@ -93,6 +93,10 @@ class VanillaStableDiffusionSampler:
         self.mask = None
         self.nmask = None
         self.init_latent = None
+        self.sampler_noises = None
+
+    def number_of_needed_noises(self, p):
+        return 0
 
     def sample_img2img(self, p, x, noise, conditioning, unconditional_conditioning):
         t_enc = int(min(p.denoising_strength, 0.999) * p.steps)
@@ -171,16 +175,37 @@ def extended_trange(count, *args, **kwargs):
         shared.total_tqdm.update()
 
 
+original_randn_like = torch.randn_like
+
 class KDiffusionSampler:
     def __init__(self, funcname, sd_model):
         self.model_wrap = k_diffusion.external.CompVisDenoiser(sd_model)
         self.funcname = funcname
         self.func = getattr(k_diffusion.sampling, self.funcname)
         self.model_wrap_cfg = CFGDenoiser(self.model_wrap)
+        self.sampler_noises = None
+        self.sampler_noise_index = 0
+
+        k_diffusion.sampling.torch.randn_like = self.randn_like
 
     def callback_state(self, d):
         store_latent(d["denoised"])
 
+    def number_of_needed_noises(self, p):
+        return p.steps
+
+    def randn_like(self, x):
+        noise = self.sampler_noises[self.sampler_noise_index] if self.sampler_noises is not None and self.sampler_noise_index < len(self.sampler_noises) else None
+
+        if noise is not None and x.shape == noise.shape:
+            res = noise
+        else:
+            print('generating')
+            res = original_randn_like(x)
+
+        self.sampler_noise_index += 1
+        return res
+
     def sample_img2img(self, p, x, noise, conditioning, unconditional_conditioning):
         t_enc = int(min(p.denoising_strength, 0.999) * p.steps)
         sigmas = self.model_wrap.get_sigmas(p.steps)
--
cgit v1.2.1


From 87e8b9a2ab3f033e7fdadbb2fe258857915980ac Mon Sep 17 00:00:00 2001
From: AUTOMATIC <16777216c@gmail.com>
Date: Fri, 16 Sep 2022 09:47:03 +0300
Subject: prevent replacing torch.randn_like globally (instead replace
 k_diffusion.sampling.torch) and add a setting to disable all of this

---
 modules/sd_samplers.py | 25 ++++++++++++++++++++-----
 1 file changed, 20 insertions(+), 5 deletions(-)

(limited to 'modules/sd_samplers.py')

diff --git a/modules/sd_samplers.py b/modules/sd_samplers.py
index f77fe43f..d478c5bc 100644
--- a/modules/sd_samplers.py
+++ b/modules/sd_samplers.py
@@ -175,7 +175,19 @@ def extended_trange(count, *args, **kwargs):
         shared.total_tqdm.update()
 
 
-original_randn_like = torch.randn_like
+class TorchHijack:
+    def __init__(self, kdiff_sampler):
+        self.kdiff_sampler = kdiff_sampler
+
+    def __getattr__(self, item):
+        if item == 'randn_like':
+            return self.kdiff_sampler.randn_like
+
+        if hasattr(torch, item):
+            return getattr(torch, item)
+
+        raise AttributeError("'{}' object has no attribute '{}'".format(type(self).__name__, item))
+
 
 class KDiffusionSampler:
     def __init__(self, funcname, sd_model):
@@ -186,8 +198,6 @@ class KDiffusionSampler:
         self.sampler_noises = None
         self.sampler_noise_index = 0
 
-        k_diffusion.sampling.torch.randn_like = self.randn_like
-
     def callback_state(self, d):
         store_latent(d["denoised"])
 
@@ -200,8 +210,7 @@ class KDiffusionSampler:
         if noise is not None and x.shape == noise.shape:
             res = noise
         else:
-            print('generating')
-            res = original_randn_like(x)
+            res = torch.randn_like(x)
 
         self.sampler_noise_index += 1
         return res
@@ -223,6 +232,9 @@ class KDiffusionSampler:
         if hasattr(k_diffusion.sampling, 'trange'):
             k_diffusion.sampling.trange = lambda *args, **kwargs: extended_trange(*args, **kwargs)
 
+        if self.sampler_noises is not None:
+            k_diffusion.sampling.torch = TorchHijack(self)
+
         return self.func(self.model_wrap_cfg, xi, sigma_sched, extra_args={'cond': conditioning, 'uncond': unconditional_conditioning, 'cond_scale': p.cfg_scale}, disable=False, callback=self.callback_state)
 
     def sample(self, p, x, conditioning, unconditional_conditioning):
@@ -232,6 +244,9 @@ class KDiffusionSampler:
         if hasattr(k_diffusion.sampling, 'trange'):
             k_diffusion.sampling.trange = lambda *args, **kwargs: extended_trange(*args, **kwargs)
 
+        if self.sampler_noises is not None:
+            k_diffusion.sampling.torch = TorchHijack(self)
+
         samples_ddim = self.func(self.model_wrap_cfg, x, sigmas, extra_args={'cond': conditioning, 'uncond': unconditional_conditioning, 'cond_scale': p.cfg_scale}, disable=False, callback=self.callback_state)
         return samples_ddim
 
--
cgit v1.2.1
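
Taken together, these two patches route k-diffusion's randn_like calls through the sampler: KDiffusionSampler holds a list of pre-generated noise tensors (sampler_noises) and serves them in order, and a TorchHijack instance is assigned to k_diffusion.sampling.torch so that only that module sees the substituted randn_like, instead of torch.randn_like being patched globally. Below is a minimal, self-contained sketch of that mechanism, not the actual modules/sd_samplers.py code; the FakeKDiffusionSampler name and the per-seed noise preparation in the demo are illustrative assumptions.

# Minimal sketch (assumptions noted above) of the noise-substitution mechanism
# introduced by the two patches; not the webui implementation itself.

import torch


class FakeKDiffusionSampler:
    # Stand-in for KDiffusionSampler: holds pre-generated noises and serves them in order.
    def __init__(self, sampler_noises):
        self.sampler_noises = sampler_noises
        self.sampler_noise_index = 0

    def randn_like(self, x):
        # Use the next stored noise tensor if one exists and its shape matches;
        # otherwise fall back to the real torch.randn_like.
        noise = None
        if self.sampler_noises is not None and self.sampler_noise_index < len(self.sampler_noises):
            noise = self.sampler_noises[self.sampler_noise_index]

        res = noise if noise is not None and noise.shape == x.shape else torch.randn_like(x)
        self.sampler_noise_index += 1
        return res


class TorchHijack:
    # Proxy that forwards every attribute to torch, except randn_like, which is
    # redirected to the sampler. Assigning an instance to k_diffusion.sampling.torch
    # affects lookups inside that module only, instead of patching torch globally.
    def __init__(self, kdiff_sampler):
        self.kdiff_sampler = kdiff_sampler

    def __getattr__(self, item):
        if item == 'randn_like':
            return self.kdiff_sampler.randn_like

        if hasattr(torch, item):
            return getattr(torch, item)

        raise AttributeError("'{}' object has no attribute '{}'".format(type(self).__name__, item))


if __name__ == '__main__':
    # Pre-generate one noise tensor per expected randn_like call, each from its own
    # seeded generator, so ancestral samplers draw noise that does not depend on batch size.
    shape = (4, 8, 8)
    noises = [torch.randn(shape, generator=torch.Generator().manual_seed(1000 + i)) for i in range(3)]

    hijack = TorchHijack(FakeKDiffusionSampler(noises))
    x = torch.zeros(shape)

    assert torch.equal(hijack.randn_like(x), noises[0])  # first call returns the stored noise
    assert hijack.zeros(2).shape == (2,)                 # everything else passes through to torch

The point of the proxy over the earlier monkey-patch is scope: k_diffusion.sampling.torch = TorchHijack(self) changes what that one module resolves, and skipping the assignment when sampler_noises is None (presumably tied to the new setting mentioned in the second commit) leaves stock torch behaviour in place.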