From 46dfe665f559f42018c7db9b3cf843b59f56eb51 Mon Sep 17 00:00:00 2001
From: Jack Taylor <108682042+jataylo@users.noreply.github.com>
Date: Mon, 23 Oct 2023 20:53:36 +0100
Subject: [PATCH] Skip ddp apply_optim_in_bwd tests for gloo

To resolve https://ontrack-internal.amd.com/browse/SWDEV-403530 and
https://ontrack-internal.amd.com/browse/SWDEV-419837.

For more context, see the upstream issue
https://github.com/pytorch/pytorch/issues/111834.
---
 .../testing/_internal/distributed/distributed_test.py | 10 +++++++++-
 1 file changed, 9 insertions(+), 1 deletion(-)

diff --git a/torch/testing/_internal/distributed/distributed_test.py b/torch/testing/_internal/distributed/distributed_test.py
index b9249ae160b8c1..492a8a7b6d1bc1 100644
--- a/torch/testing/_internal/distributed/distributed_test.py
+++ b/torch/testing/_internal/distributed/distributed_test.py
@@ -5066,7 +5066,11 @@ def _test_ddp_apply_optim_in_backward(
             # set_to_none for regular optimizer to match in backward
             # case.
             optim.zero_grad(set_to_none=True)
-
+
+    @skip_but_pass_in_sandcastle_if(
+        BACKEND == "gloo" and HAS_TORCHVISION,
+        "Failing with gloo backend + torchvision due to ongoing issue https://github.com/pytorch/pytorch/issues/111834",
+    )
     @skip_if_lt_x_gpu(2)
     def test_ddp_apply_optim_in_backward(self):
         for optim_cls, init_before in itertools.product(
@@ -5079,6 +5083,10 @@ def test_ddp_apply_optim_in_backward(self):
                 init_before=init_before,
             )
 
+    @skip_but_pass_in_sandcastle_if(
+        BACKEND == "gloo" and HAS_TORCHVISION,
+        "Failing with gloo backend + torchvision due to ongoing issue https://github.com/pytorch/pytorch/issues/111834",
+    )
     @skip_if_lt_x_gpu(2)
     def test_ddp_apply_optim_in_backward_grad_as_bucket_view_false(self):
         for init_before in [True, False]: