import torch
import inspect
import k_diffusion.sampling
from modules import sd_samplers_common, sd_samplers_extra, sd_samplers_cfg_denoiser

from modules.shared import opts
import modules.shared as shared

# Master list of k-diffusion samplers. Each entry is:
#   (UI label, function name in k_diffusion.sampling (or a callable), list of unique aliases, options dict)
# Options consumed within this file:
#   'scheduler': which sigma schedule get_sigmas() builds ('karras' / 'exponential')
#   'brownian_noise': a dedicated noise_sampler is created and passed to the sampler function
#   'discard_next_to_last_sigma': get_sigmas() drops the penultimate sigma
# Options consumed elsewhere in the project (NOTE(review): semantics inferred from
# names — confirm in sd_samplers_common): 'second_order', 'uses_ensd'
samplers_k_diffusion = [
    ('DPM++ 2M Karras', 'sample_dpmpp_2m', ['k_dpmpp_2m_ka'], {'scheduler': 'karras'}),
    ('DPM++ SDE Karras', 'sample_dpmpp_sde', ['k_dpmpp_sde_ka'], {'scheduler': 'karras', "second_order": True, "brownian_noise": True}),
    ('DPM++ 2M SDE Exponential', 'sample_dpmpp_2m_sde', ['k_dpmpp_2m_sde_exp'], {'scheduler': 'exponential', "brownian_noise": True}),
    ('DPM++ 2M SDE Karras', 'sample_dpmpp_2m_sde', ['k_dpmpp_2m_sde_ka'], {'scheduler': 'karras', "brownian_noise": True}),
    ('Euler a', 'sample_euler_ancestral', ['k_euler_a', 'k_euler_ancestral'], {"uses_ensd": True}),
    ('Euler', 'sample_euler', ['k_euler'], {}),
    ('LMS', 'sample_lms', ['k_lms'], {}),
    ('Heun', 'sample_heun', ['k_heun'], {"second_order": True}),
    ('DPM2', 'sample_dpm_2', ['k_dpm_2'], {'discard_next_to_last_sigma': True}),
    ('DPM2 a', 'sample_dpm_2_ancestral', ['k_dpm_2_a'], {'discard_next_to_last_sigma': True, "uses_ensd": True}),
    ('DPM++ 2S a', 'sample_dpmpp_2s_ancestral', ['k_dpmpp_2s_a'], {"uses_ensd": True, "second_order": True}),
    ('DPM++ 2M', 'sample_dpmpp_2m', ['k_dpmpp_2m'], {}),
    ('DPM++ SDE', 'sample_dpmpp_sde', ['k_dpmpp_sde'], {"second_order": True, "brownian_noise": True}),
    # Fix: alias was 'k_dpmpp_2m_sde_ka', duplicating the 'DPM++ 2M SDE Karras'
    # entry's alias above; aliases are lookup keys and must be unique, and this
    # entry is the non-Karras variant.
    ('DPM++ 2M SDE', 'sample_dpmpp_2m_sde', ['k_dpmpp_2m_sde'], {"brownian_noise": True}),
    ('DPM fast', 'sample_dpm_fast', ['k_dpm_fast'], {"uses_ensd": True}),
    ('DPM adaptive', 'sample_dpm_adaptive', ['k_dpm_ad'], {"uses_ensd": True}),
    ('LMS Karras', 'sample_lms', ['k_lms_ka'], {'scheduler': 'karras'}),
    ('DPM2 Karras', 'sample_dpm_2', ['k_dpm_2_ka'], {'scheduler': 'karras', 'discard_next_to_last_sigma': True, "uses_ensd": True, "second_order": True}),
    ('DPM2 a Karras', 'sample_dpm_2_ancestral', ['k_dpm_2_a_ka'], {'scheduler': 'karras', 'discard_next_to_last_sigma': True, "uses_ensd": True, "second_order": True}),
    ('DPM++ 2S a Karras', 'sample_dpmpp_2s_ancestral', ['k_dpmpp_2s_a_ka'], {'scheduler': 'karras', "uses_ensd": True, "second_order": True}),
    ('Restart', sd_samplers_extra.restart_sampler, ['restart'], {'scheduler': 'karras'}),
]

def _funcname_available(funcname):
    # An entry is usable when it is itself a callable sampler, or names a
    # function that exists in the installed k_diffusion.sampling module.
    return callable(funcname) or hasattr(k_diffusion.sampling, funcname)


# One SamplerData per usable sampler entry. funcname is bound as a lambda
# default argument so each constructor captures its own sampler instead of
# the comprehension's final value.
samplers_data_k_diffusion = [
    sd_samplers_common.SamplerData(label, lambda model, funcname=funcname: KDiffusionSampler(funcname, model), aliases, options)
    for label, funcname, aliases, options in samplers_k_diffusion
    if _funcname_available(funcname)
]

40
# Extra keyword-argument names for samplers that accept stochastic "churn"
# parameters; presumably consumed by Sampler.initialize() when building
# extra_params_kwargs — confirm in sd_samplers_common.
sampler_extra_params = {
    'sample_euler': ['s_churn', 's_tmin', 's_tmax', 's_noise'],
    'sample_heun': ['s_churn', 's_tmin', 's_tmax', 's_noise'],
    'sample_dpm_2': ['s_churn', 's_tmin', 's_tmax', 's_noise'],
}
# Quick lookup of SamplerData entries by their visible name.
k_diffusion_samplers_map = {x.name: x for x in samplers_data_k_diffusion}

# Maps the user-selected schedule type (opts.k_sched_type) to the k-diffusion
# function that builds the sigma schedule; 'Automatic' means "use the sampler
# config's own scheduler" (see KDiffusionSampler.get_sigmas).
k_diffusion_scheduler = {
    'Automatic': None,
    'karras': k_diffusion.sampling.get_sigmas_karras,
    'exponential': k_diffusion.sampling.get_sigmas_exponential,
    'polyexponential': k_diffusion.sampling.get_sigmas_polyexponential
}

class CFGDenoiserKDiffusion(sd_samplers_cfg_denoiser.CFGDenoiser):
    """CFG denoiser whose inner model is a k-diffusion CompVis wrapper."""

    @property
    def inner_model(self):
        """Lazily wrap the shared SD model on first access and cache it in self.model_wrap."""
        if self.model_wrap is None:
            # v-prediction models need the V-denoiser wrapper; eps-prediction
            # models use the plain one.
            if shared.sd_model.parameterization == "v":
                wrapper_cls = k_diffusion.external.CompVisVDenoiser
            else:
                wrapper_cls = k_diffusion.external.CompVisDenoiser
            self.model_wrap = wrapper_cls(shared.sd_model, quantize=shared.opts.enable_quantization)

        return self.model_wrap


class KDiffusionSampler(sd_samplers_common.Sampler):
    """Sampler that drives one of k_diffusion.sampling's sampler functions (or a custom callable)."""

    def __init__(self, funcname, sd_model):
        # funcname: name of a function in k_diffusion.sampling, or a callable sampler itself.
        # sd_model: accepted for interface compatibility; the model itself is
        # obtained from shared state by CFGDenoiserKDiffusion.inner_model.
        super().__init__(funcname)

        self.func = funcname if callable(funcname) else getattr(k_diffusion.sampling, self.funcname)

        # CFG denoiser plus the (lazily created) k-diffusion model wrapper it holds.
        self.model_wrap_cfg = CFGDenoiserKDiffusion(self)
        self.model_wrap = self.model_wrap_cfg.inner_model

    def get_sigmas(self, p, steps):
        """Build the sigma (noise level) schedule for `steps` steps of processing object `p`.

        Resolution order: p's scheduler override, then the user-selected
        schedule type (opts.k_sched_type), then the sampler config's own
        'scheduler' option, then the model's default schedule. Records any
        non-default choices in p.extra_generation_params.
        """
        discard_next_to_last_sigma = self.config is not None and self.config.options.get('discard_next_to_last_sigma', False)
        if opts.always_discard_next_to_last_sigma and not discard_next_to_last_sigma:
            discard_next_to_last_sigma = True
            p.extra_generation_params["Discard penultimate sigma"] = True

        # Generate one extra sigma so that dropping the penultimate one below
        # still leaves the requested number of steps.
        steps += 1 if discard_next_to_last_sigma else 0

        if p.sampler_noise_scheduler_override:
            sigmas = p.sampler_noise_scheduler_override(steps)
        elif opts.k_sched_type != "Automatic":
            # User picked an explicit schedule type; start from the model's own sigma range.
            m_sigma_min, m_sigma_max = (self.model_wrap.sigmas[0].item(), self.model_wrap.sigmas[-1].item())
            sigma_min, sigma_max = (0.1, 10) if opts.use_old_karras_scheduler_sigmas else (m_sigma_min, m_sigma_max)
            sigmas_kwargs = {
                'sigma_min': sigma_min,
                'sigma_max': sigma_max,
            }

            sigmas_func = k_diffusion_scheduler[opts.k_sched_type]
            p.extra_generation_params["Schedule type"] = opts.k_sched_type

            # Apply/record user sigma overrides only when they differ from the
            # model defaults; 0 means "unset".
            if opts.sigma_min != m_sigma_min and opts.sigma_min != 0:
                sigmas_kwargs['sigma_min'] = opts.sigma_min
                p.extra_generation_params["Schedule min sigma"] = opts.sigma_min
            if opts.sigma_max != m_sigma_max and opts.sigma_max != 0:
                sigmas_kwargs['sigma_max'] = opts.sigma_max
                p.extra_generation_params["Schedule max sigma"] = opts.sigma_max

            # rho is only meaningful for karras/polyexponential; defaults differ per type.
            default_rho = 1. if opts.k_sched_type == "polyexponential" else 7.

            if opts.k_sched_type != 'exponential' and opts.rho != 0 and opts.rho != default_rho:
                sigmas_kwargs['rho'] = opts.rho
                p.extra_generation_params["Schedule rho"] = opts.rho

            sigmas = sigmas_func(n=steps, **sigmas_kwargs, device=shared.device)
        elif self.config is not None and self.config.options.get('scheduler', None) == 'karras':
            # (0.1, 10) reproduces the legacy hard-coded range for old-seed reproducibility.
            sigma_min, sigma_max = (0.1, 10) if opts.use_old_karras_scheduler_sigmas else (self.model_wrap.sigmas[0].item(), self.model_wrap.sigmas[-1].item())

            sigmas = k_diffusion.sampling.get_sigmas_karras(n=steps, sigma_min=sigma_min, sigma_max=sigma_max, device=shared.device)
        elif self.config is not None and self.config.options.get('scheduler', None) == 'exponential':
            m_sigma_min, m_sigma_max = (self.model_wrap.sigmas[0].item(), self.model_wrap.sigmas[-1].item())
            sigmas = k_diffusion.sampling.get_sigmas_exponential(n=steps, sigma_min=m_sigma_min, sigma_max=m_sigma_max, device=shared.device)
        else:
            sigmas = self.model_wrap.get_sigmas(steps)

        if discard_next_to_last_sigma:
            # Drop the next-to-last sigma (keep the final one, typically zero).
            sigmas = torch.cat([sigmas[:-2], sigmas[-1:]])

        return sigmas

    def sample_img2img(self, p, x, noise, conditioning, unconditional_conditioning, steps=None, image_conditioning=None):
        """img2img: noise latent `x` to the schedule point for `t_enc` steps, then denoise from there."""
        steps, t_enc = sd_samplers_common.setup_img2img_steps(p, steps)

        sigmas = self.get_sigmas(p, steps)
        # Only the tail of the schedule is used: the image is partially noised, not pure noise.
        sigma_sched = sigmas[steps - t_enc - 1:]

        xi = x + noise * sigma_sched[0]

        extra_params_kwargs = self.initialize(p)
        # Pass schedule-related kwargs only if the chosen sampler function accepts them.
        parameters = inspect.signature(self.func).parameters

        if 'sigma_min' in parameters:
            ## last sigma is zero which isn't allowed by DPM Fast & Adaptive so taking value before last
            extra_params_kwargs['sigma_min'] = sigma_sched[-2]
        if 'sigma_max' in parameters:
            extra_params_kwargs['sigma_max'] = sigma_sched[0]
        if 'n' in parameters:
            extra_params_kwargs['n'] = len(sigma_sched) - 1
        if 'sigma_sched' in parameters:
            extra_params_kwargs['sigma_sched'] = sigma_sched
        if 'sigmas' in parameters:
            extra_params_kwargs['sigmas'] = sigma_sched

        if self.config.options.get('brownian_noise', False):
            noise_sampler = self.create_noise_sampler(x, sigmas, p)
            extra_params_kwargs['noise_sampler'] = noise_sampler

        self.model_wrap_cfg.init_latent = x
        self.last_latent = x
        # Stored on self so external code (e.g. interrupted/resumed sampling) can reuse the args.
        self.sampler_extra_args = {
            'cond': conditioning,
            'image_cond': image_conditioning,
            'uncond': unconditional_conditioning,
            'cond_scale': p.cfg_scale,
            's_min_uncond': self.s_min_uncond
        }

        samples = self.launch_sampling(t_enc + 1, lambda: self.func(self.model_wrap_cfg, xi, extra_args=self.sampler_extra_args, disable=False, callback=self.callback_state, **extra_params_kwargs))

        if self.model_wrap_cfg.padded_cond_uncond:
            p.extra_generation_params["Pad conds"] = True

        return samples

    def sample(self, p, x, conditioning, unconditional_conditioning, steps=None, image_conditioning=None):
        """txt2img: denoise from pure noise `x` over the full sigma schedule."""
        steps = steps or p.steps

        sigmas = self.get_sigmas(p, steps)

        # Scale initial noise to the first (largest) sigma.
        x = x * sigmas[0]

        extra_params_kwargs = self.initialize(p)
        # Pass schedule-related kwargs only if the chosen sampler function accepts them.
        parameters = inspect.signature(self.func).parameters

        if 'n' in parameters:
            extra_params_kwargs['n'] = steps

        if 'sigma_min' in parameters:
            extra_params_kwargs['sigma_min'] = self.model_wrap.sigmas[0].item()
            extra_params_kwargs['sigma_max'] = self.model_wrap.sigmas[-1].item()

        if 'sigmas' in parameters:
            extra_params_kwargs['sigmas'] = sigmas

        if self.config.options.get('brownian_noise', False):
            noise_sampler = self.create_noise_sampler(x, sigmas, p)
            extra_params_kwargs['noise_sampler'] = noise_sampler

        self.last_latent = x
        # Stored on self so external code (e.g. interrupted/resumed sampling) can reuse the args.
        self.sampler_extra_args = {
            'cond': conditioning,
            'image_cond': image_conditioning,
            'uncond': unconditional_conditioning,
            'cond_scale': p.cfg_scale,
            's_min_uncond': self.s_min_uncond
        }
        samples = self.launch_sampling(steps, lambda: self.func(self.model_wrap_cfg, x, extra_args=self.sampler_extra_args, disable=False, callback=self.callback_state, **extra_params_kwargs))

        if self.model_wrap_cfg.padded_cond_uncond:
            p.extra_generation_params["Pad conds"] = True

        return samples