Skip to content
Merged
Show file tree
Hide file tree
Changes from 7 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 2 additions & 1 deletion .github/stale.yml
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,8 @@ staleLabel: stale
# Comment to post when marking as stale. Set to `false` to disable
markComment: |
In order to maintain a list of currently relevant issues, we mark issues as stale after a period of inactivity
If this issue remains relevant, please comment here; otherwise it will be marked as closed automatically

If this issue remains relevant, please comment here or remove the `stale` label; otherwise it will be marked as closed automatically.

# Comment to post when removing the stale label.
# unmarkComment: >
Expand Down
4 changes: 4 additions & 0 deletions doc/whats-new.rst
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,10 @@ Breaking changes
- Remove support for Python 2. This is the first version of xarray that is
Python 3 only. (:issue:`1876`).
By `Joe Hamman <https://github.com/jhamman>`_.
- The `compat` argument to `Dataset` and the `encoding` argument to
`DataArray` are deprecated and will be removed in a future release.
(:issue:`1188`)
By `Maximilian Roos <https://github.com/max-sixty>`_.

Enhancements
~~~~~~~~~~~~
Expand Down
2 changes: 1 addition & 1 deletion xarray/core/alignment.py
Original file line number Diff line number Diff line change
Expand Up @@ -495,7 +495,7 @@ def _broadcast_array(array):
coords = OrderedDict(array.coords)
coords.update(common_coords)
return DataArray(data, coords, data.dims, name=array.name,
attrs=array.attrs, encoding=array.encoding)
attrs=array.attrs)

def _broadcast_dataset(ds):
data_vars = OrderedDict(
Expand Down
24 changes: 12 additions & 12 deletions xarray/core/dataarray.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,15 +12,13 @@
from .alignment import align, reindex_like_indexers
from .common import AbstractArray, DataWithCoords
from .coordinates import (
DataArrayCoordinates, LevelCoordinatesSource,
assert_coordinate_consistent, remap_label_indexers)
DataArrayCoordinates, LevelCoordinatesSource, assert_coordinate_consistent,
remap_label_indexers)
from .dataset import Dataset, merge_indexes, split_indexes
from .formatting import format_item
from .indexes import default_indexes, Indexes
from .indexes import Indexes, default_indexes
from .options import OPTIONS
from .utils import (
_check_inplace, decode_numpy_dict_values, either_dict_or_kwargs,
ensure_us_time_resolution)
from .utils import _check_inplace, either_dict_or_kwargs
from .variable import (
IndexVariable, Variable, as_compatible_data, as_variable,
assert_unique_multiindex_level_names)
Expand Down Expand Up @@ -192,13 +190,15 @@ def __init__(self, data, coords=None, dims=None, name=None,
attrs : dict_like or None, optional
Attributes to assign to the new instance. By default, an empty
attribute dictionary is initialized.
encoding : dict_like or None, optional
Dictionary specifying how to encode this array's data into a
serialized format like netCDF4. Currently used keys (for netCDF)
include '_FillValue', 'scale_factor', 'add_offset', 'dtype',
'units' and 'calendar' (the latter two only for datetime arrays).
Unrecognized keys are ignored.
encoding : deprecated
"""

if encoding is not None:
warnings.warn(
'The `encoding` argument to `DataArray` is deprecated. '
'Instead, specify the encoding when writing to disk or '
'set the `encoding` attribute directly.',
FutureWarning, stacklevel=2)
if fastpath:
variable = data
assert dims is None
Expand Down
32 changes: 16 additions & 16 deletions xarray/core/dataset.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,16 +13,16 @@
import xarray as xr

from . import (
alignment, dtypes, duck_array_ops, formatting, groupby,
indexing, ops, pdcompat, resample, rolling, utils)
alignment, dtypes, duck_array_ops, formatting, groupby, indexing, ops,
pdcompat, resample, rolling, utils)
from ..coding.cftimeindex import _parse_array_of_cftime_strings
from .alignment import align
from .common import (
ALL_DIMS, DataWithCoords, ImplementsDatasetReduce,
_contains_datetime_like_objects)
from .coordinates import (
DatasetCoordinates, LevelCoordinatesSource,
assert_coordinate_consistent, remap_label_indexers)
DatasetCoordinates, LevelCoordinatesSource, assert_coordinate_consistent,
remap_label_indexers)
from .indexes import Indexes, default_indexes
from .merge import (
dataset_merge_method, dataset_update_method, merge_data_and_coords,
Expand All @@ -31,8 +31,8 @@
from .pycompat import dask_array_type
from .utils import (
Frozen, SortedKeysDict, _check_inplace, datetime_to_numeric,
decode_numpy_dict_values, either_dict_or_kwargs, ensure_us_time_resolution,
hashable, maybe_wrap_array)
decode_numpy_dict_values, either_dict_or_kwargs, hashable,
maybe_wrap_array)
from .variable import IndexVariable, Variable, as_variable, broadcast_variables

# list of attributes of pd.DatetimeIndex that are ndarrays of time info
Expand Down Expand Up @@ -324,7 +324,7 @@ class Dataset(Mapping, ImplementsDatasetReduce, DataWithCoords):
_resample_cls = resample.DatasetResample

def __init__(self, data_vars=None, coords=None, attrs=None,
compat='broadcast_equals'):
compat=None):
"""To load data from a file or file-like object, use the `open_dataset`
function.

Expand All @@ -348,16 +348,16 @@ def __init__(self, data_vars=None, coords=None, attrs=None,
name.
attrs : dict-like, optional
Global attributes to save on this dataset.
compat : {'broadcast_equals', 'equals', 'identical'}, optional
String indicating how to compare variables of the same name for
potential conflicts when initializing this dataset:

- 'broadcast_equals': all values must be equal when variables are
broadcast against each other to ensure common dimensions.
- 'equals': all values and dimensions must be the same.
- 'identical': all values, dimensions and attributes must be the
same.
compat : deprecated
"""

if compat is not None:
warnings.warn(
'The `compat` argument to Dataset is deprecated. '
'Instead, use `merge` to control how variables are combined',
FutureWarning, stacklevel=2)
else:
compat = 'broadcast_equals'
self._variables = OrderedDict()
self._coord_names = set()
self._dims = {}
Expand Down
4 changes: 2 additions & 2 deletions xarray/tests/test_dataarray.py
Original file line number Diff line number Diff line change
Expand Up @@ -258,7 +258,7 @@ def test_constructor(self):
expected = Dataset({None: (['x', 'y'], data, {'bar': 2})})[None]
assert_identical(expected, actual)

actual = DataArray(data, dims=['x', 'y'], encoding={'bar': 2})
actual = DataArray(data, dims=['x', 'y'])
expected = Dataset({None: (['x', 'y'], data, {}, {'bar': 2})})[None]
assert_identical(expected, actual)

Expand Down Expand Up @@ -296,7 +296,7 @@ def test_constructor_from_self_described(self):
expected = DataArray(data,
coords={'x': ['a', 'b'], 'y': [-1, -2]},
dims=['x', 'y'], name='foobar',
attrs={'bar': 2}, encoding={'foo': 3})
attrs={'bar': 2})
actual = DataArray(expected)
assert_identical(expected, actual)

Expand Down
4 changes: 0 additions & 4 deletions xarray/tests/test_dataset.py
Original file line number Diff line number Diff line change
Expand Up @@ -354,13 +354,9 @@ def test_constructor_pandas_single(self):
def test_constructor_compat(self):
data = OrderedDict([('x', DataArray(0, coords={'y': 1})),
('y', ('z', [1, 1, 1]))])
with pytest.raises(MergeError):
Dataset(data, compat='equals')
expected = Dataset({'x': 0}, {'y': ('z', [1, 1, 1])})
actual = Dataset(data)
assert_identical(expected, actual)
actual = Dataset(data, compat='broadcast_equals')
assert_identical(expected, actual)

data = OrderedDict([('y', ('z', [1, 1, 1])),
('x', DataArray(0, coords={'y': 1}))])
Expand Down