Load T5 in fp8 if it's in fp8 in the Flux checkpoint.

comfyanonymous 2024-08-03 12:39:33 -04:00
parent 63a7e8edba
commit f123328b82


@@ -652,7 +652,11 @@ class Flux(supported_models_base.BASE):
         return out
 
     def clip_target(self, state_dict={}):
-        return supported_models_base.ClipTarget(comfy.text_encoders.flux.FluxTokenizer, comfy.text_encoders.flux.FluxClipModel)
+        pref = self.text_encoder_key_prefix[0]
+        t5_key = "{}t5xxl.transformer.encoder.final_layer_norm.weight".format(pref)
+        if t5_key in state_dict:
+            dtype_t5 = state_dict[t5_key].dtype
+        return supported_models_base.ClipTarget(comfy.text_encoders.flux.FluxTokenizer, comfy.text_encoders.flux.flux_clip(dtype_t5=dtype_t5))
 
 class FluxSchnell(Flux):
     unet_config = {
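
For context, the change peeks at one known T5-XXL weight in the checkpoint's state dict and reuses its stored dtype when building the text encoder, so fp8 checkpoints keep T5 in fp8. Below is a minimal standalone sketch of that detection idea, assuming PyTorch 2.1+ for the float8 dtypes; the detect_t5_dtype helper, the example prefix, and the fake state dict are illustrative and not part of ComfyUI, while the key name and the dtype check come from the diff above.

    # Minimal sketch: detect the stored dtype of T5 weights in a checkpoint.
    import torch

    def detect_t5_dtype(state_dict, prefix="text_encoders."):
        # Peek at one known T5-XXL weight to learn how the checkpoint stored it.
        key = "{}t5xxl.transformer.encoder.final_layer_norm.weight".format(prefix)
        if key in state_dict:
            return state_dict[key].dtype  # e.g. torch.float8_e4m3fn for fp8 checkpoints
        return None  # no T5 weights found; let the loader pick its default dtype

    # Example: a fake state dict whose T5 weights were saved in fp8 (requires PyTorch 2.1+).
    fake_sd = {
        "text_encoders.t5xxl.transformer.encoder.final_layer_norm.weight":
            torch.zeros(4096, dtype=torch.float8_e4m3fn),
    }
    print(detect_t5_dtype(fake_sd))  # torch.float8_e4m3fn

In the commit itself, the detected dtype is forwarded through flux_clip(dtype_t5=...), so the T5 encoder is instantiated with the same precision the checkpoint was saved in instead of being upcast.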