Skip to content
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 0 additions & 2 deletions setup.cfg
Original file line number Diff line number Diff line change
Expand Up @@ -152,8 +152,6 @@ ignore =
E501 # line too long - let black worry about that
E731 # do not assign a lambda expression, use a def
W503 # line break before binary operator
per-file-ignores =
xarray/tests/*.py:F401,F811
exclude=
.eggs
doc
Expand Down
74 changes: 73 additions & 1 deletion xarray/tests/conftest.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,80 @@
import numpy as np
import pandas as pd
import pytest

from . import requires_dask
from xarray import DataArray, Dataset

from . import create_test_data, requires_dask


@pytest.fixture(params=["numpy", pytest.param("dask", marks=requires_dask)])
def backend(request):
    """Return the name of the array backend each test should run against.

    Yields "numpy" always, and "dask" only when dask is importable
    (the "dask" param carries the ``requires_dask`` skip marker).
    """
    chosen = request.param
    return chosen


@pytest.fixture(params=[1])
def ds_fixture(request, backend):
    """Build a test ``Dataset``, optionally chunked for the dask backend.

    Selected indirectly via ``request.param``:
      1 -- variables over (y, x) and (time, y)
      2 -- variables over (time, y), (time,) and (x, time)
      3 -- the shared ``create_test_data()`` dataset
    Any other param raises ``ValueError``.
    """
    if request.param == 1:
        ds = Dataset(
            {
                "z1": (["y", "x"], np.random.randn(2, 8)),
                "z2": (["time", "y"], np.random.randn(10, 2)),
            },
            {
                "x": ("x", np.linspace(0, 1.0, 8)),
                "time": ("time", np.linspace(0, 1.0, 10)),
                "c": ("y", ["a", "b"]),
                "y": range(2),
            },
        )
    elif request.param == 2:
        ds = Dataset(
            {
                "z1": (["time", "y"], np.random.randn(10, 2)),
                "z2": (["time"], np.random.randn(10)),
                "z3": (["x", "time"], np.random.randn(8, 10)),
            },
            {
                "x": ("x", np.linspace(0, 1.0, 8)),
                "time": ("time", np.linspace(0, 1.0, 10)),
                "c": ("y", ["a", "b"]),
                "y": range(2),
            },
        )
    elif request.param == 3:
        ds = create_test_data()
    else:
        raise ValueError

    # Chunk lazily for the dask backend; return as-is for numpy.
    return ds.chunk() if backend == "dask" else ds


@pytest.fixture(params=[1])
def da_fixture(request, backend):
    """Build a test ``DataArray``, optionally chunked for the dask backend.

    Selected indirectly via ``request.param``:
      1 -- random (a, time, x) array with a daily ``time`` coordinate
      2 -- 1-d ``time`` array containing NaNs
      "repeating_ints" -- tiled integer (z, y, x) array

    Raises
    ------
    ValueError
        If ``request.param`` or ``backend`` is not one of the known values.
    """
    # Use elif/else so an unknown param raises an explicit ValueError
    # (mirroring ds_fixture) instead of an UnboundLocalError on `da`.
    if request.param == 1:
        times = pd.date_range("2000-01-01", freq="1D", periods=21)
        da = DataArray(
            np.random.random((3, 21, 4)),
            dims=("a", "time", "x"),
            coords=dict(time=times),
        )
    elif request.param == 2:
        da = DataArray([0, np.nan, 1, 2, np.nan, 3, 4, 5, np.nan, 6, 7], dims="time")
    elif request.param == "repeating_ints":
        da = DataArray(
            np.tile(np.arange(12), 5).reshape(5, 4, 3),
            coords={"x": list("abc"), "y": list("defg")},
            dims=list("zyx"),
        )
    else:
        raise ValueError

    if backend == "dask":
        return da.chunk()
    elif backend == "numpy":
        return da
    else:
        raise ValueError
31 changes: 16 additions & 15 deletions xarray/tests/test_coarsen.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,18 +15,18 @@
raise_if_dask_computes,
requires_cftime,
)
from .test_dataarray import da
from .test_dataset import ds


def test_coarsen_absent_dims_error(ds) -> None:
def test_coarsen_absent_dims_error(ds_fixture) -> None:
with pytest.raises(ValueError, match=r"not found in Dataset."):
ds.coarsen(foo=2)
ds_fixture.coarsen(foo=2)


@pytest.mark.parametrize("dask", [True, False])
@pytest.mark.parametrize(("boundary", "side"), [("trim", "left"), ("pad", "right")])
def test_coarsen_dataset(ds, dask, boundary, side) -> None:
def test_coarsen_dataset(ds_fixture, dask, boundary, side) -> None:
ds = ds_fixture

if dask and has_dask:
ds = ds.chunk({"x": 4})

Expand All @@ -41,7 +41,8 @@ def test_coarsen_dataset(ds, dask, boundary, side) -> None:


@pytest.mark.parametrize("dask", [True, False])
def test_coarsen_coords(ds, dask) -> None:
def test_coarsen_coords(ds_fixture, dask) -> None:
ds = ds_fixture
if dask and has_dask:
ds = ds.chunk({"x": 4})

Expand Down Expand Up @@ -156,23 +157,23 @@ def test_coarsen_keep_attrs(funcname, argument) -> None:


@pytest.mark.slow
@pytest.mark.parametrize("ds", (1, 2), indirect=True)
@pytest.mark.parametrize("ds_fixture", (1, 2), indirect=True)
@pytest.mark.parametrize("window", (1, 2, 3, 4))
@pytest.mark.parametrize("name", ("sum", "mean", "std", "var", "min", "max", "median"))
def test_coarsen_reduce(ds, window, name) -> None:
def test_coarsen_reduce(ds_fixture, window, name) -> None:
# Use boundary="trim" to accommodate all window sizes used in tests
coarsen_obj = ds.coarsen(time=window, boundary="trim")
coarsen_obj = ds_fixture.coarsen(time=window, boundary="trim")

# add nan prefix to numpy methods to get similar behavior as bottleneck
actual = coarsen_obj.reduce(getattr(np, f"nan{name}"))
expected = getattr(coarsen_obj, name)()
assert_allclose(actual, expected)

# make sure the order of data_var are not changed.
assert list(ds.data_vars.keys()) == list(actual.data_vars.keys())
assert list(ds_fixture.data_vars.keys()) == list(actual.data_vars.keys())

# Make sure the dimension order is restored
for key, src_var in ds.data_vars.items():
for key, src_var in ds_fixture.data_vars.items():
assert src_var.dims == actual[key].dims


Expand Down Expand Up @@ -236,15 +237,15 @@ def test_coarsen_da_keep_attrs(funcname, argument) -> None:
assert result.name == "name"


@pytest.mark.parametrize("da", (1, 2), indirect=True)
@pytest.mark.parametrize("da_fixture", (1, 2), indirect=True)
@pytest.mark.parametrize("window", (1, 2, 3, 4))
@pytest.mark.parametrize("name", ("sum", "mean", "std", "max"))
def test_coarsen_da_reduce(da, window, name) -> None:
if da.isnull().sum() > 1 and window == 1:
def test_coarsen_da_reduce(da_fixture, window, name) -> None:
if da_fixture.isnull().sum() > 1 and window == 1:
pytest.skip("These parameters lead to all-NaN slices")

# Use boundary="trim" to accommodate all window sizes used in tests
coarsen_obj = da.coarsen(time=window, boundary="trim")
coarsen_obj = da_fixture.coarsen(time=window, boundary="trim")

# add nan prefix to numpy methods to get similar # behavior as bottleneck
actual = coarsen_obj.reduce(getattr(np, f"nan{name}"))
Expand Down
11 changes: 2 additions & 9 deletions xarray/tests/test_computation.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,15 +24,8 @@
unified_dim_sizes,
)
from xarray.core.pycompat import dask_version
from xarray.core.types import T_Xarray

from . import (
has_cftime,
has_dask,
raise_if_dask_computes,
requires_cftime,
requires_dask,
)

from . import has_dask, raise_if_dask_computes, requires_cftime, requires_dask


def assert_identical(a, b):
Expand Down
Loading