Fix another small bug in attention_bias redux (#6737)

* fix a bug in the attn_masked redux code when using weight=1.0

* oh shit wait there was another bug
Raphael Walker 2025-02-07 20:44:43 +01:00 committed by GitHub
parent 079eccc92a
commit 832e3f5ca3


@@ -1065,10 +1065,10 @@ class StyleModelApply:
             (txt, keys) = t
             keys = keys.copy()
             # even if the strength is 1.0 (i.e, no change), if there's already a mask, we have to add to it
-            if strength_type == "attn_bias" and strength != 1.0 and "attention_mask" not in keys:
+            if "attention_mask" in keys or (strength_type == "attn_bias" and strength != 1.0):
                 # math.log raises an error if the argument is zero
                 # torch.log returns -inf, which is what we want
-                attn_bias = torch.log(torch.Tensor([strength]))
+                attn_bias = torch.log(torch.Tensor([strength if strength_type == "attn_bias" else 1.0]))
                 # get the size of the mask image
                 mask_ref_size = keys.get("attention_mask_img_shape", (1, 1))
                 n_ref = mask_ref_size[0] * mask_ref_size[1]
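
For context, here is a minimal standalone sketch of the fixed branch logic. The helper name `compute_attn_bias` is hypothetical; `strength`, `strength_type`, and `keys` stand in for the locals inside `StyleModelApply`, and this is not the full node code:

```python
import torch

def compute_attn_bias(strength: float, strength_type: str, keys: dict):
    # Fixed condition: enter the branch whenever a mask already exists
    # (it must be extended to cover the new conditioning tokens), or when
    # an attn_bias strength other than 1.0 actually changes attention.
    if "attention_mask" in keys or (strength_type == "attn_bias" and strength != 1.0):
        # torch.log returns -inf for 0.0 (a fully masked-out bias),
        # whereas math.log(0.0) would raise ValueError.
        # When strength_type is not "attn_bias", the bias is log(1.0) == 0.0,
        # i.e. the existing mask is extended without altering attention.
        return torch.log(torch.Tensor([strength if strength_type == "attn_bias" else 1.0]))
    return None

# log(1.0) == 0.0: no change, yet the branch still runs because a mask exists
print(compute_attn_bias(1.0, "attn_bias", {"attention_mask": torch.zeros(1)}))  # tensor([0.])
print(compute_attn_bias(0.0, "attn_bias", {}))  # tensor([-inf])
```

As the in-code comment notes, the point of the new condition is that a pre-existing `attention_mask` must always be extended for the new tokens, even when the bias itself is zero (strength 1.0); the old condition skipped that case entirely.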