Commit
monkeypatch config for mpt
dakinggg committed Aug 26, 2023
1 parent 7f373e3, commit fbe38be
Showing 2 changed files with 3 additions and 2 deletions.
scripts/inference/convert_composer_to_hf.py (2 additions, 1 deletion)
@@ -168,7 +168,8 @@ def convert_composer_to_hf(args: Namespace) -> None:
     # Register MPT auto classes so that this script works with MPT
     # This script will not work without modification for other custom models,
     # but will work for other HuggingFace causal LMs
-    AutoConfig.register('mpt', MPTConfig)
+    from transformers.models.auto.configuration_auto import CONFIG_MAPPING
+    CONFIG_MAPPING._extra_content['mpt'] = MPTConfig
     MPTConfig.register_for_auto_class()
     MPTForCausalLM.register_for_auto_class('AutoModelForCausalLM')
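The switch away from AutoConfig.register('mpt', MPTConfig) is presumably needed because transformers 4.32 claims the 'mpt' model type itself, and AutoConfig.register raises a ValueError when the name is already taken; writing directly into CONFIG_MAPPING._extra_content overrides the built-in entry with llm-foundry's config class instead. A minimal sketch of the resulting registration path, assuming MPTConfig is importable from llmfoundry as elsewhere in this script:

from llmfoundry import MPTConfig  # assumed import path, as used in llm-foundry
from transformers import AutoConfig
from transformers.models.auto.configuration_auto import CONFIG_MAPPING

# With transformers >= 4.32 the key 'mpt' already exists in the built-in
# config mapping, so AutoConfig.register('mpt', MPTConfig) would raise a
# ValueError. Writing into _extra_content overrides the built-in entry.
CONFIG_MAPPING._extra_content['mpt'] = MPTConfig

# Anything resolved through AutoConfig with model_type 'mpt' now uses
# llm-foundry's MPTConfig rather than the transformers-native class.
config = AutoConfig.for_model('mpt')
assert isinstance(config, MPTConfig)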

setup.py (1 addition, 1 deletion)
@@ -49,7 +49,7 @@
 install_requires = [
     'mosaicml[libcloud,wandb,mlflow]>=0.15.0,<0.16',
     'accelerate>=0.20,<0.21', # for HF inference `device_map`
-    'transformers>=4.31,<4.32',
+    'transformers>=4.32,<4.33',
     'mosaicml-streaming>=0.5.1,<0.6',
     'torch>=1.13.1,<=2.0.1',
     'datasets==2.10.1',
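The transformers pin moves in step with the change above: 4.32 appears to be the first release where transformers ships its own 'mpt' model type, which is what forces the override of the built-in mapping. A quick, optional sanity check of the installed version against the new range, using the packaging library that transformers already depends on:

import transformers
from packaging import version

# Check the installed version against the new pin: transformers>=4.32,<4.33.
installed = version.parse(transformers.__version__)
assert version.parse('4.32') <= installed < version.parse('4.33'), transformers.__version__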
