Peft fsdp (#1520)
dakinggg committed Sep 12, 2024
1 parent 6849eaa commit 6d93260
Showing 2 changed files with 27 additions and 3 deletions.
8 changes: 5 additions & 3 deletions llmfoundry/models/hf/hf_base.py
@@ -69,7 +69,7 @@ def __init__(
         config_overrides: Optional[dict[str, Any]] = None,
         use_logits: bool = True,
         shift_labels: bool = False,
-        peft_config: Optional['PeftConfig'] = None,
+        peft_config: Optional[dict[str, Any]] = None,
         allow_embedding_resizing: bool = False,
         use_train_metrics: bool = True,
         additional_train_metrics: Optional[list] = None,
@@ -92,8 +92,6 @@ def __init__(

         model = self.transform_model(model)

-        self.prepare_inner_model(model, init_device)
-
         metrics, eval_metrics = self.build_metrics(
             use_train_metrics=use_train_metrics,
             additional_train_metrics=additional_train_metrics,
@@ -121,6 +119,10 @@ def __init__(
             should_save_peft_only=should_save_peft_only,
         )

+        # Prepare for FSDP needs to happen after the super init, so that any model
+        # architecture changes are completed
+        self.prepare_inner_model(self.model, init_device)
+
     def loss(self, outputs: ModelOutput, batch: Mapping):
         if self.config.use_return_dict:
             return outputs['loss']
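Note on the change above: peft_config is now accepted as a plain dict rather than a PeftConfig object, and prepare_inner_model is deferred until after the base-class __init__, so that PEFT wrapping has already reshaped the model before FSDP preparation runs. Below is a minimal sketch of that ordering, assuming only the public peft and transformers APIs; mark_modules_for_fsdp is a simplified, hypothetical stand-in for llm-foundry's prepare_hf_model_for_fsdp, not its actual implementation.

from peft import LoraConfig, get_peft_model
from transformers import AutoConfig, AutoModelForCausalLM

# Dict-style peft config, mirroring the new test; llm-foundry presumably builds the
# corresponding PeftConfig from it internally.
peft_config = {'peft_type': 'LORA', 'task_type': 'CAUSAL_LM'}

# Small stand-in model for illustration (the real code wraps the configured HF model).
model = AutoModelForCausalLM.from_config(AutoConfig.from_pretrained('gpt2'))

# 1) Apply PEFT first, so the LoRA modules exist before any FSDP bookkeeping.
model = get_peft_model(model, LoraConfig(task_type=peft_config['task_type']))

# 2) Only afterwards mark modules for FSDP wrapping; the LoRA modules are now visible.
def mark_modules_for_fsdp(root):
    for module in root.modules():
        if any(True for _ in module.parameters(recurse=False)):
            module._fsdp_wrap = True  # attribute asserted on by the tests below

mark_modules_for_fsdp(model)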
22 changes: 22 additions & 0 deletions tests/models/hf/test_hf_peft_wrapping.py
@@ -11,6 +11,7 @@
 from composer import Trainer
 from peft import LoraConfig, get_peft_model

+from llmfoundry.models.hf.hf_causal_lm import ComposerHFCausalLM
 from llmfoundry.models.hf.hf_fsdp import prepare_hf_model_for_fsdp
 from llmfoundry.utils.builders import build_composer_model, build_tokenizer

@@ -36,6 +37,27 @@ def test_peft_wraps():
             assert m._fsdp_wrap


+def test_causal_lm_peft_wraps():
+    model = ComposerHFCausalLM(
+        tokenizer=None,
+        pretrained_model_name_or_path='mosaicml/mpt-7b',
+        pretrained=False,
+        trust_remote_code=True,
+        config_overrides={'n_layers': 2},
+        peft_config={
+            'peft_type': 'LORA',
+            'task_type': 'CAUSAL_LM',
+        },
+    )
+
+    for n, m in model.named_modules():
+        if 'lora' in n and 'default' in n:
+            has_parameters = any(True for _ in m.parameters())
+            has_buffers = any(True for _ in m.buffers())
+            if has_parameters or has_buffers:
+                assert m._fsdp_wrap
+
+
 @pytest.mark.world_size(2)
 @pytest.mark.gpu
 @pytest.mark.parametrize(
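For reference, a quick illustration (again assuming only the public peft and transformers APIs, with a small stand-in model) of why the new test filters module names on both 'lora' and 'default': peft registers LoRA weights in ModuleDicts keyed by the adapter name, which defaults to 'default', so the matching modules carry both substrings.

from peft import LoraConfig, get_peft_model
from transformers import AutoConfig, AutoModelForCausalLM

model = AutoModelForCausalLM.from_config(AutoConfig.from_pretrained('gpt2'))
model = get_peft_model(model, LoraConfig(task_type='CAUSAL_LM'))

for name, module in model.named_modules():
    if 'lora' in name and 'default' in name:
        # e.g. base_model.model.transformer.h.0.attn.c_attn.lora_A.default
        print(name)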
