Skip to content

Commit

Permalink
Removed the Python converter from the `__init__` files
Browse files Browse the repository at this point in the history
  • Loading branch information
danbider committed Jun 23, 2023
1 parent de5b5ad commit aba2921
Show file tree
Hide file tree
Showing 2 changed files with 3 additions and 6 deletions.
5 changes: 2 additions & 3 deletions llmfoundry/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,9 +10,8 @@
Seq2SeqFinetuningCollator,
build_finetuning_dataloader,
build_text_denoising_dataloader)
from llmfoundry.models.hf import (ComposerHFCausalLM,
ComposerHFCausalLMFromPython,
ComposerHFPrefixLM, ComposerHFT5)
from llmfoundry.models.hf import (ComposerHFCausalLM, ComposerHFPrefixLM,
ComposerHFT5)
from llmfoundry.models.layers.attention import (
MultiheadAttention, attn_bias_shape, build_alibi_bias, build_attn_bias,
flash_attn_fn, scaled_multihead_dot_product_attention,
Expand Down
4 changes: 1 addition & 3 deletions llmfoundry/models/hf/__init__.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,7 @@
# Copyright 2022 MosaicML LLM Foundry authors
# SPDX-License-Identifier: Apache-2.0

from llmfoundry.models.hf.hf_causal_lm import (ComposerHFCausalLM,
ComposerHFCausalLMFromPython)
from llmfoundry.models.hf.hf_causal_lm import ComposerHFCausalLM
from llmfoundry.models.hf.hf_fsdp import (prepare_hf_causal_lm_model_for_fsdp,
prepare_hf_enc_dec_model_for_fsdp,
prepare_hf_model_for_fsdp)
Expand All @@ -11,7 +10,6 @@

__all__ = [
'ComposerHFCausalLM',
'ComposerHFCausalLMFromPython',
'ComposerHFPrefixLM',
'ComposerHFT5',
'prepare_hf_causal_lm_model_for_fsdp',
Expand Down

0 comments on commit aba2921

Please sign in to comment.