Skip to content

Commit

Permalink
Fix flux TE not loading t5 embeddings.
Browse files Browse the repository at this point in the history
  • Loading branch information
comfyanonymous committed Sep 25, 2024
1 parent fdf3756 commit bdd4a22
Showing 1 changed file with 1 addition and 1 deletion.
2 changes: 1 addition & 1 deletion comfy/text_encoders/flux.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ def __init__(self, device="cpu", layer="last", layer_idx=None, dtype=None, model
class T5XXLTokenizer(sd1_clip.SDTokenizer):
    """Tokenizer for the T5-XXL text encoder used by the Flux pipeline.

    Thin wrapper around sd1_clip.SDTokenizer configured for T5-XXL:
    loads the bundled ``t5_tokenizer`` files shipped next to this module
    and keys embeddings under ``'t5xxl'`` with a 4096-dim embedding size.
    """

    # NOTE: tokenizer_data={} keeps the upstream signature byte-compatible;
    # it is not read here, only accepted for interface parity with siblings.
    def __init__(self, embedding_directory=None, tokenizer_data={}):
        # Tokenizer assets live in ./t5_tokenizer relative to this file.
        tokenizer_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "t5_tokenizer")
        # embedding_directory MUST be forwarded — omitting it silently
        # disables textual-inversion (t5xxl) embedding loading.
        # min_length=256 pads short prompts; max_length is effectively
        # unbounded since pad_to_max_length is off.
        super().__init__(tokenizer_path, embedding_directory=embedding_directory, pad_with_end=False, embedding_size=4096, embedding_key='t5xxl', tokenizer_class=T5TokenizerFast, has_start_token=False, pad_to_max_length=False, max_length=99999999, min_length=256)


class FluxTokenizer:
Expand Down

0 comments on commit bdd4a22

Please sign in to comment.