From f4b2d46925501c3670d72f207eb44323c5d70a6a Mon Sep 17 00:00:00 2001
From: Varuna Jayasiri
Date: Sat, 24 Sep 2022 14:40:06 +0530
Subject: [PATCH] fix

---
 docs/diffusion/stable_diffusion/model/unet_attention.html   | 2 +-
 labml_nn/diffusion/stable_diffusion/model/unet_attention.py | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/docs/diffusion/stable_diffusion/model/unet_attention.html b/docs/diffusion/stable_diffusion/model/unet_attention.html
index b7118b0a..4399937e 100644
--- a/docs/diffusion/stable_diffusion/model/unet_attention.html
+++ b/docs/diffusion/stable_diffusion/model/unet_attention.html
@@ -602,7 +602,7 @@
 173 k = self.to_k(cond)
 174 v = self.to_v(cond)
 175
-176 print('use flash', CrossAttention.use_flash_attention)
+176 print('use flash', CrossAttention.use_flash_attention, self.flash)
 177
 178 if CrossAttention.use_flash_attention and self.flash is not None and cond is None and self.d_head <= 128:
 179 return self.flash_attention(q, k, v)
diff --git a/labml_nn/diffusion/stable_diffusion/model/unet_attention.py b/labml_nn/diffusion/stable_diffusion/model/unet_attention.py
index 6c8b1b1a..79baa603 100644
--- a/labml_nn/diffusion/stable_diffusion/model/unet_attention.py
+++ b/labml_nn/diffusion/stable_diffusion/model/unet_attention.py
@@ -173,7 +173,7 @@ class CrossAttention(nn.Module):
         k = self.to_k(cond)
         v = self.to_v(cond)

-        print('use flash', CrossAttention.use_flash_attention)
+        print('use flash', CrossAttention.use_flash_attention, self.flash)

         if CrossAttention.use_flash_attention and self.flash is not None and cond is None and self.d_head <= 128:
             return self.flash_attention(q, k, v)
--
GitLab
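
For reference, the condition inspected by the debug print above gates between two attention paths: the flash kernel is used only when the class-level switch is on, the optional flash module was actually constructed (self.flash is not None), the call is self-attention (cond is None), and the head size is at most 128. The following is a minimal, self-contained sketch of that dispatch, not the full module from labml_nn; the simplified constructor, the stubbed flash path, and the normal_attention body shown here are illustrative assumptions.

import torch
import torch.nn as nn


class CrossAttention(nn.Module):
    # Class-level switch, toggled externally, as in the patch above.
    use_flash_attention: bool = False

    def __init__(self, d_model: int, n_heads: int, d_head: int):
        super().__init__()
        self.n_heads = n_heads
        self.d_head = d_head
        self.scale = d_head ** -0.5
        d_attn = n_heads * d_head
        self.to_q = nn.Linear(d_model, d_attn, bias=False)
        self.to_k = nn.Linear(d_model, d_attn, bias=False)
        self.to_v = nn.Linear(d_model, d_attn, bias=False)
        self.to_out = nn.Linear(d_attn, d_model)
        # Set to a flash-attention module when the optional dependency is
        # installed; left as None here, so the normal path is always taken.
        self.flash = None

    def forward(self, x: torch.Tensor, cond: torch.Tensor = None) -> torch.Tensor:
        # Self-attention when no conditioning is given: keys/values from x.
        kv = x if cond is None else cond
        q, k, v = self.to_q(x), self.to_k(kv), self.to_v(kv)

        # The gate from the patch: flash attention only if enabled, built,
        # self-attention, and head size small enough for the kernel.
        if CrossAttention.use_flash_attention and self.flash is not None \
                and cond is None and self.d_head <= 128:
            return self.flash_attention(q, k, v)
        return self.normal_attention(q, k, v)

    def flash_attention(self, q, k, v):
        # Placeholder for the optional flash-attention kernel.
        raise NotImplementedError

    def normal_attention(self, q, k, v):
        # Standard scaled dot-product attention with split heads.
        b, n, _ = q.shape

        def split(t):
            return t.view(b, -1, self.n_heads, self.d_head).transpose(1, 2)

        q, k, v = split(q), split(k), split(v)
        attn = torch.softmax(q @ k.transpose(-2, -1) * self.scale, dim=-1)
        out = (attn @ v).transpose(1, 2).reshape(b, n, -1)
        return self.to_out(out)

In this sketch, running with use_flash_attention=True while flash is None falls through to the normal path; printing self.flash alongside the switch, as the patch does, is what makes that silent fallback visible when debugging.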