From 762654a4d642f7db0c6db7f26c56c09b66ee0609 Mon Sep 17 00:00:00 2001
From: Keewis
Date: Sun, 29 Mar 2020 23:04:41 +0200
Subject: [PATCH 1/9] add a new ci that does not install dask

---
 azure-pipelines.yml                   |  2 ++
 ci/requirements/py38-without-dask.yml | 45 +++++++++++++++++++++++++++
 2 files changed, 47 insertions(+)
 create mode 100644 ci/requirements/py38-without-dask.yml

diff --git a/azure-pipelines.yml b/azure-pipelines.yml
index 8d43de7b1d5..79f37a5a4f3 100644
--- a/azure-pipelines.yml
+++ b/azure-pipelines.yml
@@ -20,6 +20,8 @@ jobs:
       conda_env: py37
     py38:
       conda_env: py38
+    py38-without-dask:
+      conda_env: py38-without-dask
     py38-upstream-dev:
       conda_env: py38
       upstream_dev: true
diff --git a/ci/requirements/py38-without-dask.yml b/ci/requirements/py38-without-dask.yml
new file mode 100644
index 00000000000..4e641a66e29
--- /dev/null
+++ b/ci/requirements/py38-without-dask.yml
@@ -0,0 +1,45 @@
+name: xarray-tests
+channels:
+  - conda-forge
+dependencies:
+  - python=3.8
+  - black
+  - boto3
+  - bottleneck
+  - cartopy
+  - cdms2
+  - cfgrib
+  - cftime
+  - coveralls
+  - flake8
+  - h5netcdf
+  - h5py
+  - hdf5
+  - hypothesis
+  - iris
+  - isort
+  - lxml  # Optional dep of pydap
+  - matplotlib
+  - mypy=0.761  # Must match .pre-commit-config.yaml
+  - nc-time-axis
+  - netcdf4
+  - numba
+  - numpy
+  - pandas
+  - pint
+  - pip
+  - pseudonetcdf
+  - pydap
+  - pynio
+  - pytest
+  - pytest-cov
+  - pytest-env
+  - rasterio
+  - scipy
+  - seaborn
+  - setuptools
+  - sparse
+  - toolz
+  - zarr
+  - pip:
+    - numbagg

From 621c41abbc6c44b314914fd4d56d7487aeee8273 Mon Sep 17 00:00:00 2001
From: Keewis
Date: Mon, 30 Mar 2020 00:04:22 +0200
Subject: [PATCH 2/9] remove iris since it depends on dask

---
 ci/requirements/py38-without-dask.yml | 1 -
 1 file changed, 1 deletion(-)

diff --git a/ci/requirements/py38-without-dask.yml b/ci/requirements/py38-without-dask.yml
index 4e641a66e29..a375d9e1e5a 100644
--- a/ci/requirements/py38-without-dask.yml
+++ b/ci/requirements/py38-without-dask.yml
@@ -16,7 +16,6 @@ dependencies:
   - h5py
   - hdf5
   - hypothesis
-  - iris
   - isort
   - lxml  # Optional dep of pydap
   - matplotlib

From daa3ebebbdc0ead9be7dca02b912d237c2bf49e5 Mon Sep 17 00:00:00 2001
From: Keewis
Date: Mon, 30 Mar 2020 00:05:28 +0200
Subject: [PATCH 3/9] rename the new CI to py38-all-but-dask

---
 azure-pipelines.yml                                   | 4 ++--
 .../{py38-without-dask.yml => py38-all-but-dask.yml} | 0
 2 files changed, 2 insertions(+), 2 deletions(-)
 rename ci/requirements/{py38-without-dask.yml => py38-all-but-dask.yml} (100%)

diff --git a/azure-pipelines.yml b/azure-pipelines.yml
index 79f37a5a4f3..1da9a06b1f3 100644
--- a/azure-pipelines.yml
+++ b/azure-pipelines.yml
@@ -20,8 +20,8 @@ jobs:
       conda_env: py37
     py38:
       conda_env: py38
-    py38-without-dask:
-      conda_env: py38-without-dask
+    py38-all-but-dask:
+      conda_env: py38-all-but-dask
     py38-upstream-dev:
       conda_env: py38
       upstream_dev: true
diff --git a/ci/requirements/py38-without-dask.yml b/ci/requirements/py38-all-but-dask.yml
similarity index 100%
rename from ci/requirements/py38-without-dask.yml
rename to ci/requirements/py38-all-but-dask.yml

From 280a14ff5298c83fcee23e1e18e3a37397856ea9 Mon Sep 17 00:00:00 2001
From: Keewis
Date: Mon, 30 Mar 2020 18:07:52 +0200
Subject: [PATCH 4/9] decorate some of the failing tests with requires_dask

---
 xarray/tests/test_backends.py | 2 ++
 xarray/tests/test_sparse.py   | 1 +
 2 files changed, 3 insertions(+)

diff --git a/xarray/tests/test_backends.py b/xarray/tests/test_backends.py
index 82fe1b38149..1541f4b33dc 100644
--- a/xarray/tests/test_backends.py
+++ b/xarray/tests/test_backends.py
@@ -3513,6 +3513,7 @@ def test_uamiv_format_read(self):
         assert_allclose(expected, actual)
         camxfile.close()

+    @requires_dask
     def test_uamiv_format_mfread(self):
         """
         Open a CAMx file and test data variables
@@ -4012,6 +4013,7 @@ def test_geotiff_tags(self):
             with xr.open_rasterio(tmp_file) as rioda:
                 assert isinstance(rioda.attrs["AREA_OR_POINT"], str)

+    @requires_dask
     def test_no_mftime(self):
         # rasterio can accept "filename" urguments that are actually urls,
         # including paths to remote files.
diff --git a/xarray/tests/test_sparse.py b/xarray/tests/test_sparse.py
index 09ab1be9af9..f3c09ba6a5f 100644
--- a/xarray/tests/test_sparse.py
+++ b/xarray/tests/test_sparse.py
@@ -837,6 +837,7 @@ def test_sparse_coords(self):
         )


+@requires_dask
 def test_chunk():
     s = sparse.COO.from_numpy(np.array([0, 0, 1, 2]))
     a = DataArray(s)
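For context on patch 4: requires_dask is one of the decorators provided by xarray's test helpers in xarray/tests/__init__.py. It is essentially a pytest.mark.skipif marker built from an import check, so decorated tests are skipped rather than failed on the new dask-free CI job. A minimal sketch of that pattern, assuming the usual _importorskip-style helper (the real definition may differ in detail):

    import importlib

    import pytest


    def _importorskip(modname):
        # Build a (has_module, skip_marker) pair from a single import check.
        try:
            importlib.import_module(modname)
            has = True
        except ImportError:
            has = False
        return has, pytest.mark.skipif(not has, reason=f"requires {modname}")


    has_dask, requires_dask = _importorskip("dask")

The has_dask flag produced alongside the marker is what the conditional xfails in patch 8 rely on.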
From b5e4ad76f9588e3db6a6981a9d21366ddc5918f7 Mon Sep 17 00:00:00 2001
From: Keewis
Date: Mon, 30 Mar 2020 18:41:07 +0200
Subject: [PATCH 5/9] decorate the zarr tests using dask or obj.chunk with
 requires_dask

---
 xarray/tests/test_backends.py | 9 +++++++++
 1 file changed, 9 insertions(+)

diff --git a/xarray/tests/test_backends.py b/xarray/tests/test_backends.py
index 1541f4b33dc..8ac6728c57e 100644
--- a/xarray/tests/test_backends.py
+++ b/xarray/tests/test_backends.py
@@ -1539,6 +1539,7 @@ def test_roundtrip_consolidated(self):
             self.check_dtypes_roundtripped(expected, actual)
             assert_identical(expected, actual)

+    @requires_dask
     def test_auto_chunk(self):
         original = create_test_data().chunk()

@@ -1556,6 +1557,7 @@ def test_auto_chunk(self):
             # chunk size should be the same as original
             assert v.chunks == original[k].chunks

+    @requires_dask
     @pytest.mark.filterwarnings("ignore:Specified Dask chunks")
     def test_manual_chunk(self):
         original = create_test_data().chunk({"dim1": 3, "dim2": 4, "dim3": 3})
@@ -1598,6 +1600,7 @@ def test_manual_chunk(self):
             assert_identical(actual, auto)
             assert_identical(actual.load(), auto.load())

+    @requires_dask
     def test_warning_on_bad_chunks(self):
         original = create_test_data().chunk({"dim1": 4, "dim2": 3, "dim3": 5})

@@ -1620,6 +1623,7 @@ def test_warning_on_bad_chunks(self):
                         assert v._in_memory == (k in actual.dims)
                 assert len(record) == 0

+    @requires_dask
     def test_deprecate_auto_chunk(self):
         original = create_test_data().chunk()
         with pytest.warns(FutureWarning):
@@ -1638,6 +1642,7 @@ def test_deprecate_auto_chunk(self):
                 # there should be no chunks
                 assert v.chunks is None

+    @requires_dask
     def test_write_uneven_dask_chunks(self):
         # regression for GH#2225
         original = create_test_data().chunk({"dim1": 3, "dim2": 4, "dim3": 3})
@@ -1662,6 +1667,7 @@ def test_chunk_encoding(self):
             with self.roundtrip(data) as actual:
                 pass

+    @requires_dask
     def test_chunk_encoding_with_dask(self):
         # These datasets DO have dask chunks. Need to check for various
         # interactions between dask and zarr chunks
@@ -1896,6 +1902,7 @@ def test_append_with_new_variable(self):
         combined["new_var"] = ds_with_new_var["new_var"]
         assert_identical(combined, xr.open_zarr(store_target))

+    @requires_dask
     def test_to_zarr_compute_false_roundtrip(self):
         from dask.delayed import Delayed

@@ -1915,6 +1922,7 @@ def test_to_zarr_compute_false_roundtrip(self):
             with self.open(store) as actual:
                 assert_identical(original, actual)

+    @requires_dask
     def test_to_zarr_append_compute_false_roundtrip(self):
         from dask.delayed import Delayed

@@ -1951,6 +1959,7 @@ def test_to_zarr_append_compute_false_roundtrip(self):
             with self.open(store) as actual:
                 assert_identical(xr.concat([ds, ds_to_append], dim="time"), actual)

+    @requires_dask
     def test_encoding_chunksizes(self):
         # regression test for GH2278
         # see also test_encoding_chunksizes_unlimited

From fa37ee6d5a3c53f50dff61caebb342b89e2e35d5 Mon Sep 17 00:00:00 2001
From: Keewis
Date: Mon, 30 Mar 2020 18:50:20 +0200
Subject: [PATCH 6/9] don't chunk if dask is not available

---
 xarray/backends/zarr.py | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/xarray/backends/zarr.py b/xarray/backends/zarr.py
index cdc74e06882..9a50be2f7b8 100644
--- a/xarray/backends/zarr.py
+++ b/xarray/backends/zarr.py
@@ -586,6 +586,12 @@ def open_zarr(
             "Instead found %s. " % chunks
         )

+    if chunks == "auto":
+        try:
+            import dask.array  # noqa
+        except ImportError:
+            chunks = None
+
     if not decode_cf:
         mask_and_scale = False
         decode_times = False
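Patch 6 lets open_zarr degrade gracefully when dask is missing: with the default chunks="auto", the dask import is attempted once and, if it fails, chunking is simply disabled instead of raising. A rough usage sketch; the store path is a placeholder and the behaviour notes are an interpretation of the patch, not text from it:

    import xarray as xr

    # "example.zarr" is a hypothetical store path used only for illustration.
    ds = xr.open_zarr("example.zarr")  # chunks="auto" is the default

    # With dask installed, the variables come back as chunked dask arrays.
    # Without dask, chunks falls back to None, so the variables are returned
    # without dask chunking instead of failing on the dask import.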
From e6ceb60109673c66f4fdfa4799db274c67821675 Mon Sep 17 00:00:00 2001
From: Keewis
Date: Mon, 30 Mar 2020 19:20:20 +0200
Subject: [PATCH 7/9] pass an indexer instead of a tuple to
 _arrayize_vectorized_indexer

---
 xarray/backends/zarr.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/xarray/backends/zarr.py b/xarray/backends/zarr.py
index 9a50be2f7b8..c262dae2811 100644
--- a/xarray/backends/zarr.py
+++ b/xarray/backends/zarr.py
@@ -56,7 +56,7 @@ def __getitem__(self, key):
             return array[key.tuple]
         elif isinstance(key, indexing.VectorizedIndexer):
             return array.vindex[
-                indexing._arrayize_vectorized_indexer(key.tuple, self.shape).tuple
+                indexing._arrayize_vectorized_indexer(key, self.shape).tuple
             ]
         else:
             assert isinstance(key, indexing.OuterIndexer)

From c5cca783690f1c73b7cef2fbdd8d0d26c8dedc37 Mon Sep 17 00:00:00 2001
From: Keewis
Date: Mon, 30 Mar 2020 23:48:54 +0200
Subject: [PATCH 8/9] xfail the remaining tests if dask is not available

---
 xarray/tests/test_backends.py | 7 +++++++
 1 file changed, 7 insertions(+)

diff --git a/xarray/tests/test_backends.py b/xarray/tests/test_backends.py
index 8ac6728c57e..a611e05599f 100644
--- a/xarray/tests/test_backends.py
+++ b/xarray/tests/test_backends.py
@@ -581,6 +581,10 @@ def test_orthogonal_indexing(self):
             actual = on_disk.isel(**indexers)
             assert_identical(expected, actual)

+    @pytest.mark.xfail(
+        not has_dask,
+        reason="the code for indexing without dask handles negative steps in slices incorrectly",
+    )
     def test_vectorized_indexing(self):
         in_memory = create_test_data()
         with self.roundtrip(in_memory) as on_disk:
@@ -3949,6 +3953,9 @@ def test_chunks(self):
             ex = expected.sel(band=1).mean(dim="x")
             assert_allclose(ac, ex)

+    @pytest.mark.xfail(
+        not has_dask, reason="without dask, a non-serializable lock is used"
+    )
     def test_pickle_rasterio(self):
         # regression test for https://github.com/pydata/xarray/issues/2121
         with create_tmp_geotiff() as (tmp_file, expected):
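Patch 7 matters once the dask-free job actually exercises vectorized indexing against zarr: indexing._arrayize_vectorized_indexer expects the indexer object itself (it reads .tuple internally), so handing it key.tuple, a plain tuple, cannot work. A small illustration of the intended call; _arrayize_vectorized_indexer is a private helper, so treat the exact behaviour shown here as an assumption:

    import numpy as np

    from xarray.core import indexing

    # A vectorized (pointwise) indexer selecting three positions along one axis.
    key = indexing.VectorizedIndexer((np.array([0, 2, 3]),))

    # The helper takes the indexer plus the array shape and returns another
    # indexer whose .tuple is suitable for zarr's vindex.
    arrayized = indexing._arrayize_vectorized_indexer(key, (5,))
    print(arrayized.tuple)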
From 8ec3af7db2ada90d3b1292e3bb5f070bf6aadebf Mon Sep 17 00:00:00 2001
From: Keewis
Date: Mon, 30 Mar 2020 23:51:30 +0200
Subject: [PATCH 9/9] update whats-new.rst

---
 doc/whats-new.rst | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/doc/whats-new.rst b/doc/whats-new.rst
index c70dfd4f3f6..f811d996076 100644
--- a/doc/whats-new.rst
+++ b/doc/whats-new.rst
@@ -63,6 +63,9 @@ Internal Changes
 - Run the ``isort`` pre-commit hook only on python source files
   and update the ``flake8`` version. (:issue:`3750`, :pull:`3711`)
   By `Justus Magin `_.
+- Add a CI job that runs the tests with every optional dependency
+  except ``dask``. (:issue:`3794`, :pull:`3919`)
+  By `Justus Magin `_.

 .. _whats-new.0.15.1: