Let Hunyuan DiT work with all prompt lengths.

comfyanonymous 2024-07-26 12:11:32 -04:00
parent afe732bef9
commit 8328a2d8cd
1 changed file with 1 addition and 0 deletions

@@ -16,6 +16,7 @@ class AttentionPool(nn.Module):
         self.embed_dim = embed_dim
 
     def forward(self, x):
+        x = x[:,:self.positional_embedding.shape[0] - 1]
         x = x.permute(1, 0, 2)  # NLC -> LNC
         x = torch.cat([x.mean(dim=0, keepdim=True), x], dim=0)  # (L+1)NC
         x = x + self.positional_embedding[:, None, :].to(dtype=x.dtype, device=x.device)  # (L+1)NC
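
For context, here is a minimal, self-contained sketch of why the one added line matters. It is not the ComfyUI module verbatim: the toy AttentionPoolSketch below keeps only the positional-embedding path and drops the real module's q/k/v/c projections and attention call, and spacial_dim=77 is an arbitrary illustration value. The positional embedding has a fixed spacial_dim + 1 rows (one extra slot for the prepended mean token), so before this commit a prompt embedding longer than spacial_dim tokens made the (L+1)NC addition fail with a shape mismatch; the new slice clamps the sequence first.

import torch
import torch.nn as nn

class AttentionPoolSketch(nn.Module):
    def __init__(self, spacial_dim: int, embed_dim: int):
        super().__init__()
        # Fixed-size table: spacial_dim token slots plus one for the mean token.
        self.positional_embedding = nn.Parameter(torch.randn(spacial_dim + 1, embed_dim))

    def forward(self, x):
        # The added line: clamp the prompt to at most spacial_dim tokens so the
        # (L+1) positional rows always cover the concatenated sequence.
        x = x[:, :self.positional_embedding.shape[0] - 1]
        x = x.permute(1, 0, 2)                                   # NLC -> LNC
        x = torch.cat([x.mean(dim=0, keepdim=True), x], dim=0)   # (L+1)NC
        x = x + self.positional_embedding[:, None, :].to(dtype=x.dtype, device=x.device)
        return x

pool = AttentionPoolSketch(spacial_dim=77, embed_dim=16)
long_prompt = torch.randn(2, 120, 16)  # 120 tokens > 77: a shape error without the slice
print(pool(long_prompt).shape)         # torch.Size([78, 2, 16])

Prompts shorter than spacial_dim are assumed to be padded to full length upstream, since the full positional table is still added unsliced; the fix targets the over-long case.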