
remove autoclose in open_dataset and related warning test #4725


Merged
5 commits merged on Dec 24, 2020
3 changes: 2 additions & 1 deletion doc/whats-new.rst
@@ -22,7 +22,8 @@ v0.16.3 (unreleased)

Breaking changes
~~~~~~~~~~~~~~~~

- Remove the deprecated ``autoclose`` kwarg from :py:func:`open_dataset` (:pull:`4725`).
  By `Aureliana Barghini <https://github.com/aurghs>`_.

New Features
~~~~~~~~~~~~
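For users hitting this breaking change, the replacement named in the removed warning is ``xarray.set_options(file_cache_maxsize=...)``. A minimal migration sketch (the file name and cache size below are illustrative, not from this PR):

```python
import xarray as xr

# Before (deprecated, removed by this PR):
# ds = xr.open_dataset("data.nc", autoclose=True)

# After: cap how many files xarray keeps open at once instead.
# file_cache_maxsize defaults to 128; 64 is just an example value.
xr.set_options(file_cache_maxsize=64)
ds = xr.open_dataset("data.nc")
```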
27 changes: 2 additions & 25 deletions xarray/backends/api.py
@@ -1,6 +1,4 @@
import os
import pathlib
import warnings
from glob import glob
from io import BytesIO
from numbers import Number
@@ -151,7 +149,7 @@ def _get_default_engine(path: str, allow_remote: bool = False):
def _autodetect_engine(filename_or_obj):
if isinstance(filename_or_obj, AbstractDataStore):
engine = "store"
elif isinstance(filename_or_obj, (str, pathlib.Path)):
elif isinstance(filename_or_obj, (str, Path)):
engine = _get_default_engine(str(filename_or_obj), allow_remote=True)
else:
engine = _get_engine_from_magic_number(filename_or_obj)
@@ -312,7 +310,6 @@ def open_dataset(
decode_cf=True,
mask_and_scale=None,
decode_times=True,
autoclose=None,
concat_characters=True,
decode_coords=True,
engine=None,
@@ -352,10 +349,6 @@
decode_times : bool, optional
If True, decode times encoded in the standard NetCDF datetime format
into datetime objects. Otherwise, leave them encoded as numbers.
autoclose : bool, optional
If True, automatically close files to avoid OS Error of too many files
being open. However, this option doesn't work with streams, e.g.,
BytesIO.
concat_characters : bool, optional
If True, concatenate along the last dimension of character arrays to
form string arrays. Dimensions will only be concatenated over (and
@@ -435,17 +428,6 @@

return apiv2.open_dataset(**kwargs)

if autoclose is not None:
warnings.warn(
"The autoclose argument is no longer used by "
"xarray.open_dataset() and is now ignored; it will be removed in "
"a future version of xarray. If necessary, you can control the "
"maximum number of simultaneous open files with "
"xarray.set_options(file_cache_maxsize=...).",
FutureWarning,
stacklevel=2,
)

if mask_and_scale is None:
mask_and_scale = not engine == "pseudonetcdf"

@@ -583,7 +565,6 @@ def open_dataarray(
decode_cf=True,
mask_and_scale=None,
decode_times=True,
autoclose=None,
concat_characters=True,
decode_coords=True,
engine=None,
@@ -699,7 +680,6 @@
decode_cf=decode_cf,
mask_and_scale=mask_and_scale,
decode_times=decode_times,
autoclose=autoclose,
concat_characters=concat_characters,
decode_coords=decode_coords,
engine=engine,
@@ -757,7 +737,6 @@ def open_mfdataset(
data_vars="all",
coords="different",
combine="by_coords",
autoclose=None,
parallel=False,
join="outer",
attrs_file=None,
@@ -924,9 +903,7 @@
combined_ids_paths = _infer_concat_order_from_positions(paths)
ids, paths = (list(combined_ids_paths.keys()), list(combined_ids_paths.values()))

open_kwargs = dict(
engine=engine, chunks=chunks or {}, lock=lock, autoclose=autoclose, **kwargs
)
open_kwargs = dict(engine=engine, chunks=chunks or {}, lock=lock, **kwargs)

if parallel:
import dask
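For context, the ``warnings.warn`` block deleted above (and again in ``apiv2.py`` below) followed the usual soft-deprecation pattern. A self-contained sketch of that pattern, using a hypothetical function rather than xarray's API:

```python
import warnings


def open_thing(path, autoclose=None):
    # Stand-in for a function with a retired kwarg, not xarray API.
    if autoclose is not None:
        # Warn rather than fail; stacklevel=2 attributes the warning
        # to the caller's line instead of this shim.
        warnings.warn(
            "autoclose is ignored and will be removed in a future version",
            FutureWarning,
            stacklevel=2,
        )
    return path


open_thing("data.nc", autoclose=True)  # emits FutureWarning at this call site
```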
12 changes: 0 additions & 12 deletions xarray/backends/apiv2.py
@@ -1,5 +1,4 @@
import os
import warnings

from ..core import indexing
from ..core.dataset import _get_chunk, _maybe_chunk
@@ -124,7 +123,6 @@ def open_dataset(
concat_characters=None,
decode_coords=None,
drop_variables=None,
autoclose=None,
backend_kwargs=None,
**kwargs,
):
@@ -239,16 +237,6 @@
--------
open_mfdataset
"""
if autoclose is not None:
warnings.warn(
"The autoclose argument is no longer used by "
"xarray.open_dataset() and is now ignored; it will be removed in "
"a future version of xarray. If necessary, you can control the "
"maximum number of simultaneous open files with "
"xarray.set_options(file_cache_maxsize=...).",
FutureWarning,
stacklevel=2,
)

if cache is None:
cache = chunks is None
8 changes: 0 additions & 8 deletions xarray/tests/test_backends.py
@@ -1454,14 +1454,6 @@ def test_setncattr_string(self):
assert_array_equal(one_element_list_of_strings, totest.attrs["bar"])
assert one_string == totest.attrs["baz"]

def test_autoclose_future_warning(self):
data = create_test_data()
with create_tmp_file() as tmp_file:
self.save(data, tmp_file)
with pytest.warns(FutureWarning):
with self.open(tmp_file, autoclose=True) as actual:
assert_identical(data, actual)


@requires_netCDF4
class TestNetCDF4AlreadyOpen:
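The deleted ``test_autoclose_future_warning`` used ``pytest.warns`` to assert that the deprecation fired; with the kwarg removed outright, there is nothing left to warn about, so the test goes too. A self-contained sketch of that testing pattern, where ``legacy_open`` is a hypothetical stand-in for the pre-removal ``open_dataset`` shim:

```python
import warnings

import pytest


def legacy_open(path, autoclose=None):
    # Hypothetical stand-in, not xarray API.
    if autoclose is not None:
        warnings.warn("autoclose is deprecated", FutureWarning, stacklevel=2)
    return path


def test_autoclose_future_warning():
    # Passes only if the FutureWarning is actually emitted.
    with pytest.warns(FutureWarning):
        legacy_open("tmp.nc", autoclose=True)
```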