Skip to content

Commit 9c86b6b

Browse files
jataylo authored and pruthvistony committed
Skip ddp apply_optim_in_bwd tests for gloo (#1302)
To resolve https://ontrack-internal.amd.com/browse/SWDEV-403530 and https://ontrack-internal.amd.com/browse/SWDEV-419837. For more context check upstream issue pytorch#111834
1 parent c137d5a commit 9c86b6b

File tree

1 file changed

+9
-1
lines changed

1 file changed

+9
-1
lines changed

torch/testing/_internal/distributed/distributed_test.py

Lines changed: 9 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4807,7 +4807,11 @@ def _test_ddp_apply_optim_in_backward(
48074807
# set_to_none for regular optimizer to match in backward
48084808
# case.
48094809
optim.zero_grad(set_to_none=True)
4810-
4810+
4811+
@skip_but_pass_in_sandcastle_if(
4812+
BACKEND == "gloo" and HAS_TORCHVISION,
4813+
"Failing with gloo backend + torchvision due to ongoing issue https://github.com/pytorch/pytorch/issues/111834",
4814+
)
48114815
@skip_if_lt_x_gpu(2)
48124816
def test_ddp_apply_optim_in_backward(self):
48134817
for optim_cls, init_before in itertools.product(
@@ -4820,6 +4824,10 @@ def test_ddp_apply_optim_in_backward(self):
48204824
init_before=init_before,
48214825
)
48224826

4827+
@skip_but_pass_in_sandcastle_if(
4828+
BACKEND == "gloo" and HAS_TORCHVISION,
4829+
"Failing with gloo backend + torchvision due to ongoing issue https://github.com/pytorch/pytorch/issues/111834",
4830+
)
48234831
@skip_if_lt_x_gpu(2)
48244832
def test_ddp_apply_optim_in_backward_grad_as_bucket_view_false(self):
48254833
for init_before in [True, False]:

0 commit comments

Comments (0)