Skip to content

Commit 0b097a3

Browse files
jdsgomes authored and facebook-github-bot committed
[fbsync] Turn warnings in prototype datasets tests into errors (#5540)
Summary: * fix PCAM prototype dataset * update Zip and Tar archive loader datapipes * only fail on warnings from the pytorch ecosystem * Revert "only fail on warnings from the pytorch ecosystem" This reverts commit 2bf3aa6. (Note: this ignores all push blocking failures!) Reviewed By: datumbox Differential Revision: D35216776 fbshipit-source-id: 2cf636ea5c037da771ab2c46ed6b632b4780d477
1 parent da75f5e commit 0b097a3

File tree

5 files changed

+13
-8
lines changed

5 files changed

+13
-8
lines changed

test/builtin_dataset_mocks.py

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -10,6 +10,7 @@
1010
import pathlib
1111
import pickle
1212
import random
13+
import warnings
1314
import xml.etree.ElementTree as ET
1415
from collections import defaultdict, Counter
1516

@@ -470,7 +471,10 @@ def imagenet(info, root, config):
470471
]
471472
num_children = 1
472473
synsets.extend((0, "", "", "", num_children, [], 0, 0) for _ in range(5))
473-
savemat(data_root / "meta.mat", dict(synsets=synsets))
474+
with warnings.catch_warnings():
475+
# The warning is not for savemat, but rather for some internals savemet is using
476+
warnings.filterwarnings("ignore", category=np.VisibleDeprecationWarning)
477+
savemat(data_root / "meta.mat", dict(synsets=synsets))
474478

475479
make_tar(root, devkit_root.with_suffix(".tar.gz").name, compression="gz")
476480
else: # config.split == "test"

test/test_prototype_builtin_datasets.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -35,6 +35,7 @@ def test_coverage():
3535
)
3636

3737

38+
@pytest.mark.filterwarnings("error")
3839
class TestCommon:
3940
@parametrize_dataset_mocks(DATASET_MOCKS)
4041
def test_smoke(self, test_home, dataset_mock, config):

torchvision/prototype/datasets/_builtin/imagenet.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,7 @@
1010
Mapper,
1111
Filter,
1212
Demultiplexer,
13-
TarArchiveReader,
13+
TarArchiveLoader,
1414
Enumerator,
1515
)
1616
from torchvision.prototype.datasets.utils import (
@@ -158,7 +158,7 @@ def _make_datapipe(
158158

159159
# the train archive is a tar of tars
160160
if config.split == "train":
161-
dp = TarArchiveReader(dp)
161+
dp = TarArchiveLoader(dp)
162162

163163
dp = hint_sharding(dp)
164164
dp = hint_shuffling(dp)

torchvision/prototype/datasets/_builtin/pcam.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -99,7 +99,7 @@ def _prepare_sample(self, data: Tuple[Any, Any]) -> Dict[str, Any]:
9999
image, target = data # They're both numpy arrays at this point
100100

101101
return {
102-
"image": features.Image(image),
102+
"image": features.Image(image.transpose(2, 0, 1)),
103103
"label": Label(target.item()),
104104
}
105105

torchvision/prototype/datasets/utils/_resource.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -10,8 +10,8 @@
1010
FileLister,
1111
FileOpener,
1212
IterDataPipe,
13-
ZipArchiveReader,
14-
TarArchiveReader,
13+
ZipArchiveLoader,
14+
TarArchiveLoader,
1515
RarArchiveLoader,
1616
)
1717
from torchvision.datasets.utils import (
@@ -72,8 +72,8 @@ def _loader(self, path: pathlib.Path) -> IterDataPipe[Tuple[str, IO]]:
7272
return dp
7373

7474
_ARCHIVE_LOADERS = {
75-
".tar": TarArchiveReader,
76-
".zip": ZipArchiveReader,
75+
".tar": TarArchiveLoader,
76+
".zip": ZipArchiveLoader,
7777
".rar": RarArchiveLoader,
7878
}
7979

0 commit comments

Comments (0)