From fbe38be1066de16e343280f0be33e8000a2ae1a9 Mon Sep 17 00:00:00 2001
From: Daniel King
Date: Sat, 26 Aug 2023 00:13:07 +0000
Subject: [PATCH] monkeypatch config for mpt

---
 scripts/inference/convert_composer_to_hf.py | 3 ++-
 setup.py                                    | 2 +-
 2 files changed, 3 insertions(+), 2 deletions(-)

diff --git a/scripts/inference/convert_composer_to_hf.py b/scripts/inference/convert_composer_to_hf.py
index 5c4d4117c5..cdebd7ef44 100644
--- a/scripts/inference/convert_composer_to_hf.py
+++ b/scripts/inference/convert_composer_to_hf.py
@@ -168,7 +168,8 @@ def convert_composer_to_hf(args: Namespace) -> None:
     # Register MPT auto classes so that this script works with MPT
     # This script will not work without modification for other custom models,
     # but will work for other HuggingFace causal LMs
-    AutoConfig.register('mpt', MPTConfig)
+    from transformers.models.auto.configuration_auto import CONFIG_MAPPING
+    CONFIG_MAPPING._extra_content['mpt'] = MPTConfig
     MPTConfig.register_for_auto_class()
     MPTForCausalLM.register_for_auto_class('AutoModelForCausalLM')
 
diff --git a/setup.py b/setup.py
index 631c910051..0946e561d2 100644
--- a/setup.py
+++ b/setup.py
@@ -49,7 +49,7 @@ install_requires = [
     'mosaicml[libcloud,wandb,mlflow]>=0.15.0,<0.16',
     'accelerate>=0.20,<0.21',  # for HF inference `device_map`
-    'transformers>=4.31,<4.32',
+    'transformers>=4.32,<4.33',
     'mosaicml-streaming>=0.5.1,<0.6',
     'torch>=1.13.1,<=2.0.1',
     'datasets==2.10.1',