Fix contiguous issue with pytorch nightly. (#8729)

comfyanonymous 2025-06-29 03:38:40 -07:00 committed by GitHub
parent 2a0b138feb
commit 170c7bb90c

@@ -146,7 +146,7 @@ class T5Attention(torch.nn.Module):
         )
         values = self.relative_attention_bias(relative_position_bucket, out_dtype=dtype)  # shape (query_length, key_length, num_heads)
         values = values.permute([2, 0, 1]).unsqueeze(0)  # shape (1, num_heads, query_length, key_length)
-        return values
+        return values.contiguous()

     def forward(self, x, mask=None, past_bias=None, optimized_attention=None):
         q = self.q(x)
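
For context: torch.Tensor.permute returns a strided view rather than a copy, so the relative-attention bias leaves compute_bias non-contiguous; the commit title indicates that a downstream op in PyTorch nightly trips over that layout, which the added .contiguous() call avoids by materializing a densely-strided copy. A minimal standalone sketch of the behavior (the shape below is illustrative, not taken from the model):

import torch

# A bias-like tensor shaped (query_length, key_length, num_heads),
# mirroring the shape produced by relative_attention_bias above.
values = torch.randn(4, 4, 8)

# permute rearranges strides without copying, so the result is a
# non-contiguous view of the original storage.
permuted = values.permute([2, 0, 1]).unsqueeze(0)  # (1, num_heads, q_len, k_len)
print(permuted.is_contiguous())               # False

# .contiguous() copies the data into a fresh, densely-strided tensor,
# matching what the patched compute_bias now returns.
print(permuted.contiguous().is_contiguous())  # True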