fix model parallel tests to apply optimizer correctly
Summary:
The parameter naming was incorrect, as was the check for whether the "embedding_bags" or "embeddings" string existed in a parameter name.

Because of this, the optimizers defined in `apply_optimizer_in_backward_config` were not being applied.

Reviewed By: kausv

Differential Revision: D63468297
iamzainhuda authored and facebook-github-bot committed Sep 26, 2024
1 parent a4ea5d1 commit 97adae5
Showing 2 changed files with 6 additions and 6 deletions.
10 changes: 5 additions & 5 deletions torchrec/distributed/test_utils/test_model_parallel.py
@@ -187,7 +187,7 @@ def setUp(self, backend: str = "nccl") -> None:
             [
                 None,
                 {
-                    "embeddingbags": (torch.optim.SGD, {"lr": 0.01}),
+                    "embedding_bags": (torch.optim.SGD, {"lr": 0.01}),
                     "embeddings": (torch.optim.SGD, {"lr": 0.2}),
                 },
             ]
@@ -296,7 +296,7 @@ def test_sharding_dp(
             [
                 None,
                 {
-                    "embeddingbags": (torch.optim.SGD, {"lr": 0.01}),
+                    "embedding_bags": (torch.optim.SGD, {"lr": 0.01}),
                     "embeddings": (torch.optim.SGD, {"lr": 0.2}),
                 },
             ]
@@ -373,7 +373,7 @@ def test_sharding_cw(
             [
                 None,
                 {
-                    "embeddingbags": (torch.optim.SGD, {"lr": 0.01}),
+                    "embedding_bags": (torch.optim.SGD, {"lr": 0.01}),
                     "embeddings": (torch.optim.SGD, {"lr": 0.2}),
                 },
             ]
@@ -451,7 +451,7 @@ def test_sharding_twcw(
             [
                 None,
                 {
-                    "embeddingbags": (torch.optim.SGD, {"lr": 0.01}),
+                    "embedding_bags": (torch.optim.SGD, {"lr": 0.01}),
                     "embeddings": (torch.optim.SGD, {"lr": 0.2}),
                 },
             ]
@@ -529,7 +529,7 @@ def test_sharding_tw(
             [
                 None,
                 {
-                    "embeddingbags": (torch.optim.SGD, {"lr": 0.01}),
+                    "embedding_bags": (torch.optim.SGD, {"lr": 0.01}),
                     "embeddings": (torch.optim.SGD, {"lr": 0.2}),
                 },
             ]
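Note on the key rename: the keys in `apply_optimizer_in_backward_config` are matched as substrings of fully qualified parameter names, and an `EmbeddingBagCollection` registers its tables under an `embedding_bags` module attribute, so the old key "embeddingbags" (no underscore) never occurred in any parameter name and its SGD config was silently ignored. A minimal sketch, using a hypothetical parameter FQN rather than one taken from this repo:

    # A minimal sketch; the FQN below is illustrative, not from this repo.
    param_name = "sparse.ebc.embedding_bags.table_0.weight"  # hypothetical FQN

    assert "embeddingbags" not in param_name   # old key: never matched
    assert "embedding_bags" in param_name      # fixed key: matches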
2 changes: 1 addition & 1 deletion torchrec/distributed/test_utils/test_sharding.py
@@ -331,7 +331,7 @@ def sharding_single_rank_test(
                optimizer_kwargs,
            ) in apply_optimizer_in_backward_config.items():
                for name, param in global_model_named_params_as_dict.items():
-                   if name not in apply_optim_name:
+                   if apply_optim_name not in name:
                        continue
                    assert name in local_model_named_params_as_dict
                    local_param = local_model_named_params_as_dict[name]
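Why the operand order matters: the old condition `name not in apply_optim_name` asked whether the long parameter FQN was a substring of the short config key, which is essentially never true, so every parameter hit `continue` and no in-backward optimizer was ever attached. A minimal sketch of the corrected loop, with stand-in values in place of the real test fixtures:

    # A minimal sketch with stand-in values; the real test goes on to attach
    # the matched optimizer to the parameter, which this sketch elides.
    apply_optimizer_in_backward_config = {
        "embedding_bags": ("SGD", {"lr": 0.01}),  # stand-in for (torch.optim.SGD, {...})
    }
    global_model_named_params_as_dict = {
        "sparse.ebc.embedding_bags.table_0.weight": "param",  # stand-in parameter
    }

    for apply_optim_name, (optim_cls, optim_kwargs) in apply_optimizer_in_backward_config.items():
        for name, param in global_model_named_params_as_dict.items():
            # Buggy form: `name not in apply_optim_name` tests the reverse
            # containment and skips every parameter. Fixed form: skip only
            # when the config key is absent from the parameter's FQN.
            if apply_optim_name not in name:
                continue
            print(f"applying {optim_cls} with {optim_kwargs} to {name}")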
