From 63ef5483035697161da188d0af4ee027ece32ad7 Mon Sep 17 00:00:00 2001 From: danbider Date: Mon, 26 Jun 2023 11:14:39 -0400 Subject: [PATCH] precommit edits models --- llmfoundry/models/mpt/modeling_mpt.py | 1 + 1 file changed, 1 insertion(+) diff --git a/llmfoundry/models/mpt/modeling_mpt.py b/llmfoundry/models/mpt/modeling_mpt.py index 2c72beb071..542a793dd3 100644 --- a/llmfoundry/models/mpt/modeling_mpt.py +++ b/llmfoundry/models/mpt/modeling_mpt.py @@ -320,6 +320,7 @@ def forward( 'prefix_mask is a required argument when MPT is configured with prefix_lm=True.' ) + # Raise a not implemented error if inputs_embeds is not None (this is an arg in huggingface transformers and we need to support it for PEFT) if inputs_embeds is not None: raise NotImplementedError(