Commit febad68

Skip ddp apply_optim_in_bwd tests for gloo (#1302)
Resolves https://ontrack-internal.amd.com/browse/SWDEV-403530 and https://ontrack-internal.amd.com/browse/SWDEV-419837. For more context, see upstream issue pytorch#111834.
1 parent 7e8b73a commit febad68

File tree

1 file changed: +9 −1 lines changed


torch/testing/_internal/distributed/distributed_test.py

Lines changed: 9 additions & 1 deletion
@@ -5066,7 +5066,11 @@ def _test_ddp_apply_optim_in_backward(
             # set_to_none for regular optimizer to match in backward
             # case.
             optim.zero_grad(set_to_none=True)
-
+
+    @skip_but_pass_in_sandcastle_if(
+        BACKEND == "gloo" and HAS_TORCHVISION,
+        "Failing with gloo backend + torchvision due to ongoing issue https://github.com/pytorch/pytorch/issues/111834",
+    )
     @skip_if_lt_x_gpu(2)
     def test_ddp_apply_optim_in_backward(self):
         for optim_cls, init_before in itertools.product(
@@ -5079,6 +5083,10 @@ def test_ddp_apply_optim_in_backward(self):
                 init_before=init_before,
             )
 
+    @skip_but_pass_in_sandcastle_if(
+        BACKEND == "gloo" and HAS_TORCHVISION,
+        "Failing with gloo backend + torchvision due to ongoing issue https://github.com/pytorch/pytorch/issues/111834",
+    )
     @skip_if_lt_x_gpu(2)
     def test_ddp_apply_optim_in_backward_grad_as_bucket_view_false(self):
         for init_before in [True, False]:
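
For readers unfamiliar with the gating applied above: skip_but_pass_in_sandcastle_if lives in torch/testing/_internal/common_utils.py and skips a test when a condition holds, while still reporting it as passing on Meta's internal (Sandcastle) CI so the signal stays green. Below is a minimal, self-contained sketch of that pattern, not PyTorch's actual implementation; the SANDCASTLE environment-variable check, the DemoTest class, and the hard-coded BACKEND and HAS_TORCHVISION flags are illustrative assumptions.

import os
import unittest


# Simplified stand-in for skip_but_pass_in_sandcastle_if (illustrative,
# not PyTorch's real implementation). When `condition` is true, the test
# is skipped everywhere except on the internal CI, where it is replaced
# by a no-op body so the job still reports a pass.
def skip_but_pass_in_sandcastle_if(condition, reason):
    def decorator(fn):
        if not condition:
            return fn
        if os.environ.get("SANDCASTLE") == "1":  # internal-CI marker (assumption)
            def passthrough(self):
                print(f"Skipping {fn.__name__} on sandcastle: {reason}")
            return passthrough
        return unittest.skip(reason)(fn)
    return decorator


# Module-level flags standing in for the ones distributed_test.py computes.
BACKEND = os.environ.get("BACKEND", "gloo")
HAS_TORCHVISION = False  # the real file probes this with an import attempt


class DemoTest(unittest.TestCase):
    @skip_but_pass_in_sandcastle_if(
        BACKEND == "gloo" and HAS_TORCHVISION,
        "Failing with gloo backend + torchvision (pytorch/pytorch#111834)",
    )
    def test_ddp_apply_optim_in_backward(self):
        self.assertTrue(True)


if __name__ == "__main__":
    unittest.main()

Because the skip condition is evaluated at decoration time, BACKEND and HAS_TORCHVISION must already be resolved when the module is imported, which is why the real test file defines them as module-level constants.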
