Skip to content

Commit cfb673e

Browse files
authored
Fixed rocm skip import issue (#1949)
skip_if_rocm does not exist in torch/testing/_internal/common_distributed.py. Use skipIfRocm from torch/testing/_internal/common_utils.py instead.
1 parent 9f390e6 commit cfb673e

File tree

1 file changed

+2
-2
lines changed

1 file changed

+2
-2
lines changed

torch/testing/_internal/distributed/rpc/rpc_test.py

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -32,13 +32,13 @@
3232
skip_if_lt_x_gpu,
3333
captured_output,
3434
tp_transports,
35-
skip_if_rocm,
3635
)
3736
from torch.testing._internal.common_utils import (
3837
IS_MACOS,
3938
load_tests,
4039
skip_but_pass_in_sandcastle_if,
4140
get_cycles_per_ms,
41+
skipIfRocm,
4242
)
4343

4444
from torch.testing._internal.dist_utils import (
@@ -5055,7 +5055,7 @@ def test_dynamic_rpc_existing_rank_can_communicate_with_new_rank(self):
50555055

50565056
# Dynamic RPC existing ranks can communicate with new ranks using CUDA rpc
50575057
@skip_if_lt_x_gpu(2)
5058-
@skip_if_rocm
5058+
@skipIfRocm
50595059
@dist_init(setup_rpc=False)
50605060
def test_dynamic_rpc_existing_rank_can_communicate_with_new_rank_cuda(self):
50615061
initialize_pg(self.file_init_method, self.rank, self.world_size)

0 commit comments

Comments (0)