diff --git a/py/torch_tensorrt/fx/test/converters/aten_op/test_flatten_aten.py b/py/torch_tensorrt/fx/test/converters/aten_op/test_flatten_aten.py
index 64a46ce50a..69dea57efb 100644
--- a/py/torch_tensorrt/fx/test/converters/aten_op/test_flatten_aten.py
+++ b/py/torch_tensorrt/fx/test/converters/aten_op/test_flatten_aten.py
@@ -27,7 +27,7 @@ def forward(self, x):
         self.run_test(
             Flatten(start_dim, end_dim),
             inputs,
-            expected_ops={torch.ops.aten._reshape_alias.default},
+            expected_ops={torch.ops.aten.view.default},
             test_implicit_batch_dim=(start_dim != 0),
         )

diff --git a/py/torch_tensorrt/fx/test/converters/aten_op/test_reshape_aten.py b/py/torch_tensorrt/fx/test/converters/aten_op/test_reshape_aten.py
index 36cf0c1578..22a254e407 100644
--- a/py/torch_tensorrt/fx/test/converters/aten_op/test_reshape_aten.py
+++ b/py/torch_tensorrt/fx/test/converters/aten_op/test_reshape_aten.py
@@ -25,7 +25,7 @@ def forward(self, x):
         self.run_test(
             TestModule(target_shape),
             inputs,
-            expected_ops={torch.ops.aten._reshape_alias.default},
+            expected_ops={torch.ops.aten.view.default},
         )

     ## TODO: proxytensor tracer does not support output size containing -1. If dim=0 is set to -1 for dynamic batch,