Skip to content

Commit

Permalink
Revert "[2/3] 3D Composability - move pp tests (pytorch#129801)"
Browse files Browse the repository at this point in the history
This reverts commit 84cd062.

Reverted pytorch#129801 on behalf of https://github.com/atalman due to Broke periodic CI: distributed/_composable/test_composability/test_pp_composability.py::ComposabilityTest::test_manual_with_data_parallel_dp_type_DDP_ScheduleClass4 [GH job link](https://github.com/pytorch/pytorch/actions/runs/10083807511/job/27882848654) [HUD commit link](https://hud.pytorch.org/pytorch/pytorch/commit/544f950d14e8ce4da0e592d9a3faac349e718151) ([comment](pytorch#129801 (comment)))
  • Loading branch information
pytorchmergebot committed Jul 25, 2024
1 parent 9c4cf86 commit d962dba
Show file tree
Hide file tree
Showing 2 changed files with 6 additions and 16 deletions.
4 changes: 3 additions & 1 deletion .ci/pytorch/multigpu-test.sh
Original file line number Diff line number Diff line change
Expand Up @@ -52,9 +52,11 @@ time python test/run_test.py --verbose -i distributed/tensor/parallel/test_tp_ra
# FSDP2 tests
time python test/run_test.py --verbose -i distributed/_composable/fsdp/test_fully_shard_training -- -k test_2d_mlp_with_nd_mesh

# Pipelining composability tests
time python test/run_test.py --verbose -i distributed/pipelining/test_composability.py

# ND composability tests
time python test/run_test.py --verbose -i distributed/_composable/test_composability/test_2d_composability
time python test/run_test.py --verbose -i distributed/_composable/test_composability/test_pp_composability

# Other tests
time python test/run_test.py --verbose -i test_cuda_primary_ctx
Expand Down
Original file line number Diff line number Diff line change
@@ -1,9 +1,12 @@
# Copyright (c) Meta Platforms, Inc. and affiliates
# Owner(s): ["oncall: distributed"]
import copy
import os
import sys
import tempfile

from model_registry import MLPModule

import torch
import torch.distributed as dist
import torch.nn as nn
Expand Down Expand Up @@ -35,21 +38,6 @@
)


# Simple two-layer perceptron used as a pipeline-stage building block.
class MLPModule(torch.nn.Module):
    """A minimal MLP: Linear -> ReLU -> Linear, all of width ``d_hid``.

    Input and output widths are identical, so stages built from this
    module can be chained directly in a pipeline.
    """

    def __init__(self, d_hid: int):
        super().__init__()
        # NOTE: submodule names (net1/relu/net2) and their construction
        # order are part of the contract — state-dict keys and the RNG
        # consumption during init depend on them.
        self.net1 = torch.nn.Linear(d_hid, d_hid)
        self.relu = torch.nn.ReLU()
        self.net2 = torch.nn.Linear(d_hid, d_hid)

    def forward(self, x):
        """Apply net1, ReLU, then net2 to *x* and return the result."""
        return self.net2(self.relu(self.net1(x)))


class ComposabilityTest(MultiProcContinousTest):
@classmethod
def backend_str(cls) -> str:
Expand Down

0 comments on commit d962dba

Please sign in to comment.