From b8320b9f1829e0dc13e9c0e9187eeedb47692d35 Mon Sep 17 00:00:00 2001
From: bigning
Date: Wed, 3 Apr 2024 12:59:11 -0700
Subject: [PATCH] Update composer/trainer/mosaic_fsdp.py

Co-authored-by: Brian <23239305+b-chu@users.noreply.github.com>
---
 composer/trainer/mosaic_fsdp.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/composer/trainer/mosaic_fsdp.py b/composer/trainer/mosaic_fsdp.py
index 454eb312a1..6db553cbd0 100644
--- a/composer/trainer/mosaic_fsdp.py
+++ b/composer/trainer/mosaic_fsdp.py
@@ -98,7 +98,7 @@ def patch_pytorch():
         _state_dict_utils._set_use_dtensor = _set_use_dtensor
 
         # Monkeypatch _flat_param.py to fix 2D with SHARD_GRAD_OP
-        # issue: https://github.com/pytorch/pytorch/issues/123272
+        # Issue: https://github.com/pytorch/pytorch/issues/123272
         from torch.distributed.fsdp import _flat_param
 
         from composer.trainer.mosaic_fsdp_utils import _same_storage
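
For context, the hunk above sits inside patch_pytorch(), which swaps PyTorch FSDP
internals for Composer's fixed versions. Below is a minimal sketch of the monkeypatch
pattern the comment describes, assuming a DTensor-aware storage comparison; the body of
_same_storage here is an illustration, not the verbatim helper shipped in
composer.trainer.mosaic_fsdp_utils.

    import torch
    from torch.distributed._tensor import DTensor
    from torch.distributed.fsdp import _flat_param


    def _same_storage(a: torch.Tensor, b: torch.Tensor) -> bool:
        # Assumed behavior: unwrap DTensors (used by 2D parallelism) to
        # their local shards before comparing underlying storage pointers,
        # avoiding the SHARD_GRAD_OP failure from pytorch/pytorch#123272.
        if isinstance(a, DTensor):
            a = a._local_tensor
        if isinstance(b, DTensor):
            b = b._local_tensor
        return a.untyped_storage().data_ptr() == b.untyped_storage().data_ptr()


    # Apply the patch: FSDP code paths that call _same_storage now pick up
    # the DTensor-aware version.
    _flat_param._same_storage = _same_storage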