From 832e3f5ca3c357e527fdf811502357bd2798425e Mon Sep 17 00:00:00 2001
From: Raphael Walker
Date: Fri, 7 Feb 2025 20:44:43 +0100
Subject: [PATCH] Fix another small bug in attention_bias redux (#6737)

* fix a bug in the attn_masked redux code when using weight=1.0

* oh shit wait there was another bug

---
 nodes.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/nodes.py b/nodes.py
index 9779d5fd..1d2b1f9f 100644
--- a/nodes.py
+++ b/nodes.py
@@ -1065,10 +1065,10 @@ class StyleModelApply:
             (txt, keys) = t
             keys = keys.copy()
             # even if the strength is 1.0 (i.e, no change), if there's already a mask, we have to add to it
-            if strength_type == "attn_bias" and strength != 1.0 and "attention_mask" not in keys:
+            if "attention_mask" in keys or (strength_type == "attn_bias" and strength != 1.0):
                 # math.log raises an error if the argument is zero
                 # torch.log returns -inf, which is what we want
-                attn_bias = torch.log(torch.Tensor([strength]))
+                attn_bias = torch.log(torch.Tensor([strength if strength_type == "attn_bias" else 1.0]))
                 # get the size of the mask image
                 mask_ref_size = keys.get("attention_mask_img_shape", (1, 1))
                 n_ref = mask_ref_size[0] * mask_ref_size[1]
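
A minimal standalone sketch of what the two fixes do, not part of the patch itself. The needs_bias helper and the keys dict below are illustrative stand-ins, not ComfyUI's actual conditioning structure; only the condition and the log expression are taken from the diff.

    import math
    import torch

    # Bug 1: the old condition skipped the bias path whenever strength == 1.0,
    # but a pre-existing "attention_mask" still needs the reference tokens
    # appended even when the added bias is log(1.0) == 0. The fixed condition:
    def needs_bias(keys, strength_type, strength):
        return "attention_mask" in keys or (strength_type == "attn_bias" and strength != 1.0)

    print(needs_bias({"attention_mask": object()}, "attn_bias", 1.0))  # True (old code: False)
    print(needs_bias({}, "attn_bias", 1.0))                            # False, nothing to do

    # Bug 2 context: why torch.log instead of math.log for strength == 0.
    # math.log(0) raises ValueError; torch.log of a zero tensor returns -inf,
    # which is exactly the "fully masked" bias value wanted here.
    print(torch.log(torch.Tensor([0.0])))  # tensor([-inf])
    try:
        math.log(0.0)
    except ValueError as e:
        print("math.log(0) raises:", e)

    # With the fix, strength only feeds the bias when strength_type is
    # "attn_bias"; otherwise log(1.0) == 0 leaves an existing mask unchanged.
    for strength_type, strength in [("attn_bias", 0.5), ("multiply", 0.5)]:
        attn_bias = torch.log(torch.Tensor([strength if strength_type == "attn_bias" else 1.0]))
        print(strength_type, attn_bias)

The design point of the second hunk: the bias-append path now also runs for strength_type values other than "attn_bias" (whenever a mask already exists), so strength must be neutralized to 1.0 there, since for those modes strength has already been applied elsewhere.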