From 8e52908af2c18702f5a4856c2099dfb4ad075e05 Mon Sep 17 00:00:00 2001
From: Shuai Yang
Date: Fri, 16 Aug 2024 13:13:34 -0700
Subject: [PATCH] Don't skip_torchrec when using torchrec PT2 pipeline (#2298)

Summary:
Pull Request resolved: https://github.com/pytorch/torchrec/pull/2298

We introduced `torch._dynamo.config.skip_torchrec` to control whether Dynamo traces into torchrec code paths. The PT2 pipeline is mainly used for torchrec PT2 compilation, so it should set `skip_torchrec` to False by default.

Reviewed By: IvanKobzarev

Differential Revision: D61219995
---
 torchrec/distributed/train_pipeline/train_pipelines.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/torchrec/distributed/train_pipeline/train_pipelines.py b/torchrec/distributed/train_pipeline/train_pipelines.py
index 8a75e8602..094b4a236 100644
--- a/torchrec/distributed/train_pipeline/train_pipelines.py
+++ b/torchrec/distributed/train_pipeline/train_pipelines.py
@@ -248,6 +248,7 @@ def progress(self, dataloader_iter: Iterator[In]) -> Out:
             torch._dynamo.config.force_unspec_int_unbacked_size_like_on_torchrec_kjt = (
                 True
             )
+            torch._dynamo.config.skip_torchrec = False
 
             # Importing only before compilation to not slow-done train_pipelines import
             torch.ops.import_module("fbgemm_gpu.sparse_ops")
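
Note: for anyone trying the flag outside the pipeline, a minimal sketch of what the change amounts to. It assumes a PyTorch build that already ships `torch._dynamo.config.skip_torchrec`, and the `nn.Linear` model is a hypothetical stand-in for the torchrec sharded modules a real PT2 train pipeline would wrap:

    import torch
    import torch.nn as nn

    # Default is True: Dynamo skips tracing into torchrec code paths.
    # The PT2 pipeline now flips this so torchrec internals are compiled
    # rather than run eagerly.
    torch._dynamo.config.skip_torchrec = False

    model = nn.Linear(8, 4)  # hypothetical stand-in for a torchrec model
    compiled = torch.compile(model)
    print(compiled(torch.randn(2, 8)).shape)  # torch.Size([2, 4])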