Don't pass adm to model when it doesn't support it.

comfyanonymous 2023-04-19 21:11:38 -04:00
parent 2d546d510d
commit 96b57a9ad6


@@ -36,8 +36,8 @@ def sampling_function(model_function, x, timestep, uncond, cond, cond_scale, con
                strength = cond[1]['strength']

            adm_cond = None
-            if 'adm' in cond[1]:
-                adm_cond = cond[1]['adm']
+            if 'adm_encoded' in cond[1]:
+                adm_cond = cond[1]['adm_encoded']

            input_x = x_in[:,:,area[2]:area[0] + area[2],area[3]:area[1] + area[3]]
            mult = torch.ones_like(input_x) * strength
@@ -405,7 +405,7 @@ def encode_adm(noise_augmentor, conds, batch_size, device):
        else:
            adm_out = torch.zeros((1, noise_augmentor.time_embed.dim * 2), device=device)
        x[1] = x[1].copy()
-        x[1]["adm"] = torch.cat([adm_out] * batch_size)
+        x[1]["adm_encoded"] = torch.cat([adm_out] * batch_size)

    return conds
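
What the rename buys: encode_adm now writes its noise-augmented output under 'adm_encoded', and sampling_function only reads that key, so a raw 'adm' entry left on a cond dict is never forwarded as a model input when no encoded version exists. Below is a minimal sketch of that keying scheme, not ComfyUI's actual code: the noise augmentation is elided, and encode_adm_sketch, get_adm_cond, and embed_dim are illustrative names standing in for the real encode_adm, the lookup inside sampling_function, and noise_augmentor.time_embed.dim.

import torch

def encode_adm_sketch(conds, batch_size, embed_dim, device="cpu"):
    # Sketch of the encode step: the encoded conditioning is stored under the
    # new "adm_encoded" key.  The real noise augmentation is elided here and
    # embed_dim stands in for noise_augmentor.time_embed.dim (both assumptions).
    for x in conds:
        x[1] = x[1].copy()
        if 'adm' in x[1]:
            adm_out = x[1]['adm'].to(device)  # placeholder for the augmented embed
        else:
            adm_out = torch.zeros((1, embed_dim * 2), device=device)
        x[1]["adm_encoded"] = torch.cat([adm_out] * batch_size)
    return conds

def get_adm_cond(cond):
    # Mirrors the sampling_function change: only 'adm_encoded' is consulted, so
    # a raw 'adm' entry by itself never reaches the model as an input.
    adm_cond = None
    if 'adm_encoded' in cond[1]:
        adm_cond = cond[1]['adm_encoded']
    return adm_cond

# A cond that only carries a raw 'adm' yields no model-level adm input until
# the encode step has produced 'adm_encoded' for it.
cond = [torch.randn(1, 4), {'strength': 1.0, 'adm': torch.randn(1, 8)}]
assert get_adm_cond(cond) is None
encode_adm_sketch([cond], batch_size=2, embed_dim=4)
assert get_adm_cond(cond) is not None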