diff --git a/doc/whats-new.rst b/doc/whats-new.rst index 4445c9bcbe3..00bceed405e 100644 --- a/doc/whats-new.rst +++ b/doc/whats-new.rst @@ -22,7 +22,8 @@ v0.16.3 (unreleased) Breaking changes ~~~~~~~~~~~~~~~~ - +- remove deprecated ``autoclose`` kwargs from :py:func:`open_dataset` (:pull:`4725`). + By `Aureliana Barghini <https://github.com/aurghs>`_ New Features ~~~~~~~~~~~~ diff --git a/xarray/backends/api.py b/xarray/backends/api.py index fc53a8583ac..faa7e6cf3d3 100644 --- a/xarray/backends/api.py +++ b/xarray/backends/api.py @@ -1,6 +1,4 @@ import os -import pathlib -import warnings from glob import glob from io import BytesIO from numbers import Number @@ -151,7 +149,7 @@ def _get_default_engine(path: str, allow_remote: bool = False): def _autodetect_engine(filename_or_obj): if isinstance(filename_or_obj, AbstractDataStore): engine = "store" - elif isinstance(filename_or_obj, (str, pathlib.Path)): + elif isinstance(filename_or_obj, (str, Path)): engine = _get_default_engine(str(filename_or_obj), allow_remote=True) else: engine = _get_engine_from_magic_number(filename_or_obj) @@ -312,7 +310,6 @@ def open_dataset( decode_cf=True, mask_and_scale=None, decode_times=True, - autoclose=None, concat_characters=True, decode_coords=True, engine=None, @@ -352,10 +349,6 @@ def open_dataset( decode_times : bool, optional If True, decode times encoded in the standard NetCDF datetime format into datetime objects. Otherwise, leave them encoded as numbers. - autoclose : bool, optional - If True, automatically close files to avoid OS Error of too many files - being open. However, this option doesn't work with streams, e.g., - BytesIO. concat_characters : bool, optional If True, concatenate along the last dimension of character arrays to form string arrays. 
Dimensions will only be concatenated over (and @@ -435,17 +428,6 @@ def open_dataset( return apiv2.open_dataset(**kwargs) - if autoclose is not None: - warnings.warn( - "The autoclose argument is no longer used by " - "xarray.open_dataset() and is now ignored; it will be removed in " - "a future version of xarray. If necessary, you can control the " - "maximum number of simultaneous open files with " - "xarray.set_options(file_cache_maxsize=...).", - FutureWarning, - stacklevel=2, - ) - if mask_and_scale is None: mask_and_scale = not engine == "pseudonetcdf" @@ -583,7 +565,6 @@ def open_dataarray( decode_cf=True, mask_and_scale=None, decode_times=True, - autoclose=None, concat_characters=True, decode_coords=True, engine=None, @@ -699,7 +680,6 @@ def open_dataarray( decode_cf=decode_cf, mask_and_scale=mask_and_scale, decode_times=decode_times, - autoclose=autoclose, concat_characters=concat_characters, decode_coords=decode_coords, engine=engine, @@ -757,7 +737,6 @@ def open_mfdataset( data_vars="all", coords="different", combine="by_coords", - autoclose=None, parallel=False, join="outer", attrs_file=None, @@ -924,9 +903,7 @@ def open_mfdataset( combined_ids_paths = _infer_concat_order_from_positions(paths) ids, paths = (list(combined_ids_paths.keys()), list(combined_ids_paths.values())) - open_kwargs = dict( - engine=engine, chunks=chunks or {}, lock=lock, autoclose=autoclose, **kwargs - ) + open_kwargs = dict(engine=engine, chunks=chunks or {}, lock=lock, **kwargs) if parallel: import dask diff --git a/xarray/backends/apiv2.py b/xarray/backends/apiv2.py index 37a0f8c34a5..0f98291983d 100644 --- a/xarray/backends/apiv2.py +++ b/xarray/backends/apiv2.py @@ -1,5 +1,4 @@ import os -import warnings from ..core import indexing from ..core.dataset import _get_chunk, _maybe_chunk @@ -124,7 +123,6 @@ def open_dataset( concat_characters=None, decode_coords=None, drop_variables=None, - autoclose=None, backend_kwargs=None, **kwargs, ): @@ -239,16 +237,6 @@ def open_dataset( 
-------- open_mfdataset """ - if autoclose is not None: - warnings.warn( - "The autoclose argument is no longer used by " - "xarray.open_dataset() and is now ignored; it will be removed in " - "a future version of xarray. If necessary, you can control the " - "maximum number of simultaneous open files with " - "xarray.set_options(file_cache_maxsize=...).", - FutureWarning, - stacklevel=2, - ) if cache is None: cache = chunks is None diff --git a/xarray/tests/test_backends.py b/xarray/tests/test_backends.py index 1ddc16c52e4..71fd65a8614 100644 --- a/xarray/tests/test_backends.py +++ b/xarray/tests/test_backends.py @@ -1454,14 +1454,6 @@ def test_setncattr_string(self): assert_array_equal(one_element_list_of_strings, totest.attrs["bar"]) assert one_string == totest.attrs["baz"] - def test_autoclose_future_warning(self): - data = create_test_data() - with create_tmp_file() as tmp_file: - self.save(data, tmp_file) - with pytest.warns(FutureWarning): - with self.open(tmp_file, autoclose=True) as actual: - assert_identical(data, actual) - @requires_netCDF4 class TestNetCDF4AlreadyOpen: