diff --git a/check_binary.sh b/check_binary.sh
index 2f8d30550..269a340b4 100755
--- a/check_binary.sh
+++ b/check_binary.sh
@@ -310,9 +310,7 @@ if [[ "$PACKAGE_TYPE" == 'libtorch' ]]; then
     build_example_cpp_with_incorrect_abi simple-torch-test
   fi
 else
-  TMP_DIR=$(mktemp -d)
-  trap 'rm -rf ${TMP_DIR}' EXIT
-  pushd "${TMP_DIR}" # To attempt to not muddle results with local python path
+  pushd /tmp
   python -c 'import torch'
   python -c 'from caffe2.python import core'
   popd
@@ -329,7 +327,9 @@ if [[ "$PACKAGE_TYPE" == 'libtorch' ]]; then
 else
   if [[ "$(uname)" != 'Darwin' || "$PACKAGE_TYPE" != *wheel ]]; then
     echo "Checking that MKL is available"
+    pushd /tmp
     python -c 'import torch; exit(0 if torch.backends.mkl.is_available() else 1)'
+    popd
   fi
 fi
 
@@ -351,6 +351,7 @@ if [[ "$DESIRED_CUDA" != 'cpu' ]]; then
   if [[ "$PACKAGE_TYPE" == 'libtorch' ]]; then
     build_and_run_example_cpp check-torch-cuda
   else
+    pushd /tmp
     echo "Checking that CUDA archs are setup correctly"
     timeout 20 python -c 'import torch; torch.randn([3,5]).cuda()'
 
@@ -361,5 +362,6 @@ if [[ "$DESIRED_CUDA" != 'cpu' ]]; then
 
     echo "Checking that CuDNN is available"
     python -c 'import torch; exit(0 if torch.backends.cudnn.is_available() else 1)'
+    popd
   fi # if libtorch
 fi # if cuda