diff --git a/docs/diffusion/stable_diffusion/model/unet_attention.html b/docs/diffusion/stable_diffusion/model/unet_attention.html
index 4399937e..3bb8e819 100644
--- a/docs/diffusion/stable_diffusion/model/unet_attention.html
+++ b/docs/diffusion/stable_diffusion/model/unet_attention.html
@@ -579,14 +579,10 @@
-If cond is None we perform self attention
-168 if cond is None:
-169 cond = x
+167 has_cond = cond is not None
+If cond is None we perform self attention
+170 if not has_cond:
+171 cond = x
Get query, key and value vectors
-172 q = self.to_q(x)
-173 k = self.to_k(cond)
-174 v = self.to_v(cond)
-175
-176 print('use flash', CrossAttention.use_flash_attention, self.flash)
+174 q = self.to_q(x)
+175 k = self.to_k(cond)
+176 v = self.to_v(cond)
177
-178 if CrossAttention.use_flash_attention and self.flash is not None and cond is None and self.d_head <= 128:
+178 if CrossAttention.use_flash_attention and self.flash is not None and not has_cond and self.d_head <= 128:
179 return self.flash_attention(q, k, v)
180 else:
181 return self.normal_attention(q, k, v)
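For context, a minimal sketch of what the underlying forward pass in unet_attention.py looks like after this change, reconstructed from the rendered line numbers above. The method signature, the imports, and the surrounding CrossAttention members (to_q, to_k, to_v, flash, flash_attention, normal_attention, use_flash_attention, d_head) are assumptions inferred from the names visible in this hunk; the sketch is shown standalone for readability, while in the source it is a method of CrossAttention.

from typing import Optional
import torch

def forward(self, x: torch.Tensor, cond: Optional[torch.Tensor] = None):
    # Record whether conditioning was supplied *before* `cond` is overwritten,
    # so the flash-attention check below can still tell self attention apart.
    has_cond = cond is not None
    # If `cond` is `None` we perform self attention
    if not has_cond:
        cond = x
    # Get query, key and value vectors
    q = self.to_q(x)
    k = self.to_k(cond)
    v = self.to_v(cond)
    # Use flash attention only for self attention with head size <= 128.
    # The old test `cond is None` was always False at this point, because `cond`
    # had just been set to `x`, so the flash path was never taken; `not has_cond`
    # fixes that, and the debug print at old line 176 is dropped.
    if CrossAttention.use_flash_attention and self.flash is not None and not has_cond and self.d_head <= 128:
        return self.flash_attention(q, k, v)
    else:
        return self.normal_attention(q, k, v)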