We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent b234b94 · commit 5b9b816 (Copy full SHA for 5b9b816)
benchmarks/dynamo/common.py
@@ -3002,9 +3002,12 @@ def main(runner, original_dir=None):
3002
f"--diff-branch: current branch is same as {args.diff_branch} branch, what are you diffing?"
3003
)
3004
3005
- device_count = torch.cuda.device_count()
3006
args.use_distributed = (args.ddp or args.fsdp) and args.only
3007
if args.multiprocess:
+ # NB: Do NOT query device count before CUDA initialization; we're
3008
+ # going to overwrite CUDA_VISIBLE_DEVICES and this will result in
3009
+ # https://github.com/pytorch/pytorch/issues/107300
3010
+ device_count = torch.cuda.device_count()
3011
if device_count <= 1:
3012
log.warning(
3013
"The use multiprocess flag is set but there are <= 1 devices available."
0 commit comments