Skip to content

Commit b9a239e

Browse files
committed
Test open_mfdataset_manyfiles with engine zarr
Zarr objects are folders, which seem to cause issues when closing, so added a try-except to api.py to catch failures in f.close(). Some tests failing when chunks=None because a numpy array is returned instead of a dask array.
1 parent 5571fff commit b9a239e

File tree

2 files changed

+9
-3
lines changed

2 files changed

+9
-3
lines changed

xarray/backends/api.py

+4-1
Original file line numberDiff line numberDiff line change
@@ -774,7 +774,10 @@ def __init__(self, file_objs):
774774

775775
def close(self):
776776
for f in self.file_objs:
777-
f.close()
777+
try:
778+
f.close()
779+
except AttributeError: # 'NoneType' object has no attribute 'close'
780+
pass
778781

779782

780783
def open_mfdataset(

xarray/tests/test_backends.py

+5-2
Original file line numberDiff line numberDiff line change
@@ -2507,7 +2507,7 @@ def test_write_inconsistent_chunks(self):
25072507
assert actual["y"].encoding["chunksizes"] == (100, 50)
25082508

25092509

2510-
@pytest.fixture(params=["scipy", "netcdf4", "h5netcdf", "pynio"])
2510+
@pytest.fixture(params=["scipy", "netcdf4", "h5netcdf", "pynio", "zarr"])
25112511
def readengine(request):
25122512
return request.param
25132513

@@ -2567,7 +2567,10 @@ def test_open_mfdataset_manyfiles(
25672567
# split into multiple sets of temp files
25682568
for ii in original.x.values:
25692569
subds = original.isel(x=slice(ii, ii + 1))
2570-
subds.to_netcdf(tmpfiles[ii], engine=writeengine)
2570+
if writeengine != "zarr":
2571+
subds.to_netcdf(tmpfiles[ii], engine=writeengine)
2572+
else: # if writeengine == "zarr":
2573+
subds.to_zarr(store=tmpfiles[ii])
25712574

25722575
# check that calculation on opened datasets works properly
25732576
with open_mfdataset(

0 commit comments

Comments
 (0)