From 40b249e47eb74159e907e8c28e9e451cab5f797e Mon Sep 17 00:00:00 2001 From: Deepak Cherian Date: Sat, 11 Nov 2023 21:28:30 -0700 Subject: [PATCH 1/2] Remove PseudoNetCDF --- ci/requirements/all-but-dask.yml | 1 - ci/requirements/environment-py311.yml | 1 - ci/requirements/environment-windows-py311.yml | 1 - ci/requirements/environment-windows.yml | 1 - ci/requirements/environment.yml | 1 - ci/requirements/min-all-deps.yml | 1 - doc/api-hidden.rst | 14 -- doc/api.rst | 2 - doc/getting-started-guide/installing.rst | 3 - doc/user-guide/io.rst | 21 -- doc/whats-new.rst | 4 +- pyproject.toml | 1 - xarray/backends/__init__.py | 6 - xarray/backends/api.py | 19 +- xarray/backends/pseudonetcdf_.py | 187 --------------- xarray/tests/__init__.py | 1 - xarray/tests/test_backends.py | 221 ------------------ xarray/tests/test_plugins.py | 3 - xarray/util/print_versions.py | 1 - 19 files changed, 10 insertions(+), 479 deletions(-) delete mode 100644 xarray/backends/pseudonetcdf_.py diff --git a/ci/requirements/all-but-dask.yml b/ci/requirements/all-but-dask.yml index 4645be08b83..694e33f2480 100644 --- a/ci/requirements/all-but-dask.yml +++ b/ci/requirements/all-but-dask.yml @@ -28,7 +28,6 @@ dependencies: - pandas - pint<0.21 - pip - - pseudonetcdf - pydap - pytest - pytest-cov diff --git a/ci/requirements/environment-py311.yml b/ci/requirements/environment-py311.yml index 0b9817daef3..7c5f411a868 100644 --- a/ci/requirements/environment-py311.yml +++ b/ci/requirements/environment-py311.yml @@ -32,7 +32,6 @@ dependencies: - pip - pooch - pre-commit - - pseudonetcdf - pydap - pytest - pytest-cov diff --git a/ci/requirements/environment-windows-py311.yml b/ci/requirements/environment-windows-py311.yml index 8c36c5a9fd4..957923cc4ce 100644 --- a/ci/requirements/environment-windows-py311.yml +++ b/ci/requirements/environment-windows-py311.yml @@ -28,7 +28,6 @@ dependencies: - pint<0.21 - pip - pre-commit - - pseudonetcdf - pydap - pytest - pytest-cov diff --git 
a/ci/requirements/environment-windows.yml b/ci/requirements/environment-windows.yml index efa9ccb5a9a..3e9edf6cd43 100644 --- a/ci/requirements/environment-windows.yml +++ b/ci/requirements/environment-windows.yml @@ -28,7 +28,6 @@ dependencies: - pint<0.21 - pip - pre-commit - - pseudonetcdf - pydap - pytest - pytest-cov diff --git a/ci/requirements/environment.yml b/ci/requirements/environment.yml index 6e93ab7a946..bbc05ac018c 100644 --- a/ci/requirements/environment.yml +++ b/ci/requirements/environment.yml @@ -33,7 +33,6 @@ dependencies: - pip - pooch - pre-commit - - pseudonetcdf - pydap - pytest - pytest-cov diff --git a/ci/requirements/min-all-deps.yml b/ci/requirements/min-all-deps.yml index 8400270ce1b..20a24ffd3b7 100644 --- a/ci/requirements/min-all-deps.yml +++ b/ci/requirements/min-all-deps.yml @@ -37,7 +37,6 @@ dependencies: - pandas=1.4 - pint=0.19 - pip - - pseudonetcdf=3.2 - pydap=3.3 - pytest - pytest-cov diff --git a/doc/api-hidden.rst b/doc/api-hidden.rst index c96b0aa5c3b..374fe41fde5 100644 --- a/doc/api-hidden.rst +++ b/doc/api-hidden.rst @@ -591,20 +591,6 @@ backends.H5netcdfBackendEntrypoint.guess_can_open backends.H5netcdfBackendEntrypoint.open_dataset - backends.PseudoNetCDFDataStore.close - backends.PseudoNetCDFDataStore.get_attrs - backends.PseudoNetCDFDataStore.get_dimensions - backends.PseudoNetCDFDataStore.get_encoding - backends.PseudoNetCDFDataStore.get_variables - backends.PseudoNetCDFDataStore.open - backends.PseudoNetCDFDataStore.open_store_variable - backends.PseudoNetCDFDataStore.ds - - backends.PseudoNetCDFBackendEntrypoint.description - backends.PseudoNetCDFBackendEntrypoint.url - backends.PseudoNetCDFBackendEntrypoint.guess_can_open - backends.PseudoNetCDFBackendEntrypoint.open_dataset - backends.PydapDataStore.close backends.PydapDataStore.get_attrs backends.PydapDataStore.get_dimensions diff --git a/doc/api.rst b/doc/api.rst index 51d79161578..095ef56666c 100644 --- a/doc/api.rst +++ b/doc/api.rst @@ -1117,7 +1117,6 @@ 
arguments for the ``load_store`` and ``dump_to_store`` Dataset methods: backends.NetCDF4DataStore backends.H5NetCDFStore - backends.PseudoNetCDFDataStore backends.PydapDataStore backends.ScipyDataStore backends.ZarrStore @@ -1133,7 +1132,6 @@ used filetypes in the xarray universe. backends.NetCDF4BackendEntrypoint backends.H5netcdfBackendEntrypoint - backends.PseudoNetCDFBackendEntrypoint backends.PydapBackendEntrypoint backends.ScipyBackendEntrypoint backends.StoreBackendEntrypoint diff --git a/doc/getting-started-guide/installing.rst b/doc/getting-started-guide/installing.rst index e8c498b6664..357d7ae0d4d 100644 --- a/doc/getting-started-guide/installing.rst +++ b/doc/getting-started-guide/installing.rst @@ -38,9 +38,6 @@ For netCDF and IO - `cftime `__: recommended if you want to encode/decode datetimes for non-standard calendars or dates before year 1678 or after year 2262. -- `PseudoNetCDF `__: recommended - for accessing CAMx, GEOS-Chem (bpch), NOAA ARL files, ICARTT files - (ffi1001) and many other. - `iris `__: for conversion to and from iris' Cube objects diff --git a/doc/user-guide/io.rst b/doc/user-guide/io.rst index 9656a2ba973..4edf7b3c570 100644 --- a/doc/user-guide/io.rst +++ b/doc/user-guide/io.rst @@ -1308,27 +1308,6 @@ We recommend installing PyNIO via conda:: .. _PyNIO backend is deprecated: https://github.com/pydata/xarray/issues/4491 .. _PyNIO is no longer maintained: https://github.com/NCAR/pynio/issues/53 -.. _io.PseudoNetCDF: - -Formats supported by PseudoNetCDF ---------------------------------- - -Xarray can also read CAMx, BPCH, ARL PACKED BIT, and many other file -formats supported by PseudoNetCDF_, if PseudoNetCDF is installed. -PseudoNetCDF can also provide Climate Forecasting Conventions to -CMAQ files. In addition, PseudoNetCDF can automatically register custom -readers that subclass PseudoNetCDF.PseudoNetCDFFile. PseudoNetCDF can -identify readers either heuristically, or by a format specified via a key in -`backend_kwargs`. 
- -To use PseudoNetCDF to read such files, supply -``engine='pseudonetcdf'`` to :py:func:`open_dataset`. - -Add ``backend_kwargs={'format': ''}`` where `` -options are listed on the PseudoNetCDF page. - -.. _PseudoNetCDF: https://github.com/barronh/PseudoNetCDF - CSV and other formats supported by pandas ----------------------------------------- diff --git a/doc/whats-new.rst b/doc/whats-new.rst index 157282803cc..84b40de0b4b 100644 --- a/doc/whats-new.rst +++ b/doc/whats-new.rst @@ -33,7 +33,7 @@ Breaking changes Deprecations ~~~~~~~~~~~~ - +- The PseudoNetCDF backend has been removed. By `Deepak Cherian `_. - Supplying dimension-ordered sequences to :py:meth:`DataArray.chunk` & :py:meth:`Dataset.chunk` is deprecated in favor of supplying a dictionary of dimensions, or a single ``int`` or ``"auto"`` argument covering all @@ -4526,7 +4526,7 @@ Enhancements - New PseudoNetCDF backend for many Atmospheric data formats including GEOS-Chem, CAMx, NOAA arlpacked bit and many others. See - :ref:`io.PseudoNetCDF` for more details. + ``io.PseudoNetCDF`` for more details. By `Barron Henderson `_. 
- The :py:class:`Dataset` constructor now aligns :py:class:`DataArray` diff --git a/pyproject.toml b/pyproject.toml index b16063e0370..fc3729a2451 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -109,7 +109,6 @@ module = [ "opt_einsum.*", "pandas.*", "pooch.*", - "PseudoNetCDF.*", "pydap.*", "pytest.*", "scipy.*", diff --git a/xarray/backends/__init__.py b/xarray/backends/__init__.py index cf27998b6fb..0044593b4ea 100644 --- a/xarray/backends/__init__.py +++ b/xarray/backends/__init__.py @@ -13,10 +13,6 @@ from xarray.backends.memory import InMemoryDataStore from xarray.backends.netCDF4_ import NetCDF4BackendEntrypoint, NetCDF4DataStore from xarray.backends.plugins import list_engines, refresh_engines -from xarray.backends.pseudonetcdf_ import ( - PseudoNetCDFBackendEntrypoint, - PseudoNetCDFDataStore, -) from xarray.backends.pydap_ import PydapBackendEntrypoint, PydapDataStore from xarray.backends.pynio_ import NioDataStore from xarray.backends.scipy_ import ScipyBackendEntrypoint, ScipyDataStore @@ -37,10 +33,8 @@ "ScipyDataStore", "H5NetCDFStore", "ZarrStore", - "PseudoNetCDFDataStore", "H5netcdfBackendEntrypoint", "NetCDF4BackendEntrypoint", - "PseudoNetCDFBackendEntrypoint", "PydapBackendEntrypoint", "ScipyBackendEntrypoint", "StoreBackendEntrypoint", diff --git a/xarray/backends/api.py b/xarray/backends/api.py index 27e155872de..84817745b0a 100644 --- a/xarray/backends/api.py +++ b/xarray/backends/api.py @@ -59,7 +59,7 @@ T_NetcdfEngine = Literal["netcdf4", "scipy", "h5netcdf"] T_Engine = Union[ T_NetcdfEngine, - Literal["pydap", "pynio", "pseudonetcdf", "zarr"], + Literal["pydap", "pynio", "zarr"], type[BackendEntrypoint], str, # no nice typing support for custom backends None, @@ -78,7 +78,6 @@ "pydap": backends.PydapDataStore.open, "h5netcdf": backends.H5NetCDFStore.open, "pynio": backends.NioDataStore, - "pseudonetcdf": backends.PseudoNetCDFDataStore.open, "zarr": backends.ZarrStore.open_group, } @@ -420,7 +419,7 @@ def open_dataset( scipy.io.netcdf 
(only netCDF3 supported). Byte-strings or file-like objects are opened by scipy.io.netcdf (netCDF3) or h5py (netCDF4/HDF). engine : {"netcdf4", "scipy", "pydap", "h5netcdf", "pynio", \ - "pseudonetcdf", "zarr", None}, installed backend \ + "zarr", None}, installed backend \ or subclass of xarray.backends.BackendEntrypoint, optional Engine to use when reading files. If not provided, the default engine is chosen based on available dependencies, with a preference for @@ -452,8 +451,7 @@ def open_dataset( taken from variable attributes (if they exist). If the `_FillValue` or `missing_value` attribute contains multiple values a warning will be issued and all array values matching one of the multiple values will - be replaced by NA. mask_and_scale defaults to True except for the - pseudonetcdf backend. This keyword may not be supported by all the backends. + be replaced by NA. This keyword may not be supported by all the backends. decode_times : bool, optional If True, decode times encoded in the standard NetCDF datetime format into datetime objects. Otherwise, leave them encoded as numbers. @@ -523,7 +521,7 @@ def open_dataset( relevant when using dask or another form of parallelism. By default, appropriate locks are chosen to safely read and write files with the currently active dask scheduler. Supported by "netcdf4", "h5netcdf", - "scipy", "pynio", "pseudonetcdf". + "scipy", "pynio". See engine open function for kwargs accepted by each specific engine. @@ -628,7 +626,7 @@ def open_dataarray( scipy.io.netcdf (only netCDF3 supported). Byte-strings or file-like objects are opened by scipy.io.netcdf (netCDF3) or h5py (netCDF4/HDF). engine : {"netcdf4", "scipy", "pydap", "h5netcdf", "pynio", \ - "pseudonetcdf", "zarr", None}, installed backend \ + "zarr", None}, installed backend \ or subclass of xarray.backends.BackendEntrypoint, optional Engine to use when reading files. 
If not provided, the default engine is chosen based on available dependencies, with a preference for @@ -658,8 +656,7 @@ def open_dataarray( taken from variable attributes (if they exist). If the `_FillValue` or `missing_value` attribute contains multiple values a warning will be issued and all array values matching one of the multiple values will - be replaced by NA. mask_and_scale defaults to True except for the - pseudonetcdf backend. This keyword may not be supported by all the backends. + be replaced by NA. This keyword may not be supported by all the backends. decode_times : bool, optional If True, decode times encoded in the standard NetCDF datetime format into datetime objects. Otherwise, leave them encoded as numbers. @@ -729,7 +726,7 @@ def open_dataarray( relevant when using dask or another form of parallelism. By default, appropriate locks are chosen to safely read and write files with the currently active dask scheduler. Supported by "netcdf4", "h5netcdf", - "scipy", "pynio", "pseudonetcdf". + "scipy", "pynio". See engine open function for kwargs accepted by each specific engine. @@ -869,7 +866,7 @@ def open_mfdataset( You can find the file-name from which each dataset was loaded in ``ds.encoding["source"]``. engine : {"netcdf4", "scipy", "pydap", "h5netcdf", "pynio", \ - "pseudonetcdf", "zarr", None}, installed backend \ + "zarr", None}, installed backend \ or subclass of xarray.backends.BackendEntrypoint, optional Engine to use when reading files. 
If not provided, the default engine is chosen based on available dependencies, with a preference for diff --git a/xarray/backends/pseudonetcdf_.py b/xarray/backends/pseudonetcdf_.py deleted file mode 100644 index 71cdd3199e0..00000000000 --- a/xarray/backends/pseudonetcdf_.py +++ /dev/null @@ -1,187 +0,0 @@ -from __future__ import annotations - -from collections.abc import Iterable -from typing import TYPE_CHECKING, Any - -import numpy as np - -from xarray.backends.common import ( - BACKEND_ENTRYPOINTS, - AbstractDataStore, - BackendArray, - BackendEntrypoint, - _normalize_path, -) -from xarray.backends.file_manager import CachingFileManager -from xarray.backends.locks import HDF5_LOCK, NETCDFC_LOCK, combine_locks, ensure_lock -from xarray.backends.store import StoreBackendEntrypoint -from xarray.core import indexing -from xarray.core.utils import Frozen, FrozenDict, close_on_error -from xarray.core.variable import Variable - -if TYPE_CHECKING: - import os - from io import BufferedIOBase - - from xarray.core.dataset import Dataset - -# psuedonetcdf can invoke netCDF libraries internally -PNETCDF_LOCK = combine_locks([HDF5_LOCK, NETCDFC_LOCK]) - - -class PncArrayWrapper(BackendArray): - def __init__(self, variable_name, datastore): - self.datastore = datastore - self.variable_name = variable_name - array = self.get_array() - self.shape = array.shape - self.dtype = np.dtype(array.dtype) - - def get_array(self, needs_lock=True): - ds = self.datastore._manager.acquire(needs_lock) - return ds.variables[self.variable_name] - - def __getitem__(self, key): - return indexing.explicit_indexing_adapter( - key, self.shape, indexing.IndexingSupport.OUTER_1VECTOR, self._getitem - ) - - def _getitem(self, key): - with self.datastore.lock: - array = self.get_array(needs_lock=False) - return array[key] - - -class PseudoNetCDFDataStore(AbstractDataStore): - """Store for accessing datasets via PseudoNetCDF""" - - @classmethod - def open(cls, filename, lock=None, mode=None, 
**format_kwargs): - from PseudoNetCDF import pncopen - - keywords = {"kwargs": format_kwargs} - # only include mode if explicitly passed - if mode is not None: - keywords["mode"] = mode - - if lock is None: - lock = PNETCDF_LOCK - - manager = CachingFileManager(pncopen, filename, lock=lock, **keywords) - return cls(manager, lock) - - def __init__(self, manager, lock=None): - self._manager = manager - self.lock = ensure_lock(lock) - - @property - def ds(self): - return self._manager.acquire() - - def open_store_variable(self, name, var): - data = indexing.LazilyIndexedArray(PncArrayWrapper(name, self)) - attrs = {k: getattr(var, k) for k in var.ncattrs()} - return Variable(var.dimensions, data, attrs) - - def get_variables(self): - return FrozenDict( - (k, self.open_store_variable(k, v)) for k, v in self.ds.variables.items() - ) - - def get_attrs(self): - return Frozen({k: getattr(self.ds, k) for k in self.ds.ncattrs()}) - - def get_dimensions(self): - return Frozen(self.ds.dimensions) - - def get_encoding(self): - return { - "unlimited_dims": { - k for k in self.ds.dimensions if self.ds.dimensions[k].isunlimited() - } - } - - def close(self): - self._manager.close() - - -class PseudoNetCDFBackendEntrypoint(BackendEntrypoint): - """ - Backend for netCDF-like data formats in the air quality field - based on the PseudoNetCDF package. - - It can open: - - CAMx - - RACM2 box-model outputs - - Kinetic Pre-Processor outputs - - ICARTT Data files (ffi1001) - - CMAQ Files - - GEOS-Chem Binary Punch/NetCDF files - - and many more - - This backend is not selected by default for any files, so make - sure to specify ``engine="pseudonetcdf"`` in ``open_dataset``. 
- - For more information about the underlying library, visit: - https://pseudonetcdf.readthedocs.io - - See Also - -------- - backends.PseudoNetCDFDataStore - """ - - description = ( - "Open many atmospheric science data formats using PseudoNetCDF in Xarray" - ) - url = "https://docs.xarray.dev/en/stable/generated/xarray.backends.PseudoNetCDFBackendEntrypoint.html" - - # *args and **kwargs are not allowed in open_backend_dataset_ kwargs, - # unless the open_dataset_parameters are explicitly defined like this: - open_dataset_parameters = ( - "filename_or_obj", - "mask_and_scale", - "decode_times", - "concat_characters", - "decode_coords", - "drop_variables", - "use_cftime", - "decode_timedelta", - "mode", - "lock", - ) - - def open_dataset( - self, - filename_or_obj: str | os.PathLike[Any] | BufferedIOBase | AbstractDataStore, - mask_and_scale=False, - decode_times=True, - concat_characters=True, - decode_coords=True, - drop_variables: str | Iterable[str] | None = None, - use_cftime=None, - decode_timedelta=None, - mode=None, - lock=None, - **format_kwargs, - ) -> Dataset: - filename_or_obj = _normalize_path(filename_or_obj) - store = PseudoNetCDFDataStore.open( - filename_or_obj, lock=lock, mode=mode, **format_kwargs - ) - - store_entrypoint = StoreBackendEntrypoint() - with close_on_error(store): - ds = store_entrypoint.open_dataset( - store, - mask_and_scale=mask_and_scale, - decode_times=decode_times, - concat_characters=concat_characters, - decode_coords=decode_coords, - drop_variables=drop_variables, - use_cftime=use_cftime, - decode_timedelta=decode_timedelta, - ) - return ds - - -BACKEND_ENTRYPOINTS["pseudonetcdf"] = ("PseudoNetCDF", PseudoNetCDFBackendEntrypoint) diff --git a/xarray/tests/__init__.py b/xarray/tests/__init__.py index 07ba0be6a8c..4f9c188e6a6 100644 --- a/xarray/tests/__init__.py +++ b/xarray/tests/__init__.py @@ -67,7 +67,6 @@ def _importorskip( has_netCDF4, requires_netCDF4 = _importorskip("netCDF4") has_h5netcdf, requires_h5netcdf = 
_importorskip("h5netcdf") has_pynio, requires_pynio = _importorskip("Nio") -has_pseudonetcdf, requires_pseudonetcdf = _importorskip("PseudoNetCDF") has_cftime, requires_cftime = _importorskip("cftime") has_dask, requires_dask = _importorskip("dask") has_bottleneck, requires_bottleneck = _importorskip("bottleneck") diff --git a/xarray/tests/test_backends.py b/xarray/tests/test_backends.py index 73352c3f7e1..a0823e9ec96 100644 --- a/xarray/tests/test_backends.py +++ b/xarray/tests/test_backends.py @@ -71,7 +71,6 @@ requires_h5netcdf, requires_iris, requires_netCDF4, - requires_pseudonetcdf, requires_pydap, requires_pynio, requires_scipy, @@ -4440,226 +4439,6 @@ def test_weakrefs(self) -> None: assert_identical(actual, expected) -@requires_pseudonetcdf -@pytest.mark.filterwarnings("ignore:IOAPI_ISPH is assumed to be 6370000") -class TestPseudoNetCDFFormat: - def open(self, path, **kwargs): - return open_dataset(path, engine="pseudonetcdf", **kwargs) - - @contextlib.contextmanager - def roundtrip( - self, data, save_kwargs=None, open_kwargs=None, allow_cleanup_failure=False - ): - if save_kwargs is None: - save_kwargs = {} - if open_kwargs is None: - open_kwargs = {} - with create_tmp_file(allow_cleanup_failure=allow_cleanup_failure) as path: - self.save(data, path, **save_kwargs) - with self.open(path, **open_kwargs) as ds: - yield ds - - def test_ict_format(self) -> None: - """ - Open a CAMx file and test data variables - """ - stdattr = { - "fill_value": -9999.0, - "missing_value": -9999, - "scale": 1, - "llod_flag": -8888, - "llod_value": "N/A", - "ulod_flag": -7777, - "ulod_value": "N/A", - } - - def myatts(**attrs): - outattr = stdattr.copy() - outattr.update(attrs) - return outattr - - input = { - "coords": {}, - "attrs": { - "fmt": "1001", - "n_header_lines": 29, - "PI_NAME": "Henderson, Barron", - "ORGANIZATION_NAME": "U.S. 
EPA", - "SOURCE_DESCRIPTION": "Example file with artificial data", - "MISSION_NAME": "JUST_A_TEST", - "VOLUME_INFO": "1, 1", - "SDATE": "2018, 04, 27", - "WDATE": "2018, 04, 27", - "TIME_INTERVAL": "0", - "INDEPENDENT_VARIABLE_DEFINITION": "Start_UTC", - "INDEPENDENT_VARIABLE": "Start_UTC", - "INDEPENDENT_VARIABLE_UNITS": "Start_UTC", - "ULOD_FLAG": "-7777", - "ULOD_VALUE": "N/A", - "LLOD_FLAG": "-8888", - "LLOD_VALUE": ("N/A, N/A, N/A, N/A, 0.025"), - "OTHER_COMMENTS": ( - "www-air.larc.nasa.gov/missions/etc/" + "IcarttDataFormat.htm" - ), - "REVISION": "R0", - "R0": "No comments for this revision.", - "TFLAG": "Start_UTC", - }, - "dims": {"POINTS": 4}, - "data_vars": { - "Start_UTC": { - "data": [43200.0, 46800.0, 50400.0, 50400.0], - "dims": ("POINTS",), - "attrs": myatts(units="Start_UTC", standard_name="Start_UTC"), - }, - "lat": { - "data": [41.0, 42.0, 42.0, 42.0], - "dims": ("POINTS",), - "attrs": myatts(units="degrees_north", standard_name="lat"), - }, - "lon": { - "data": [-71.0, -72.0, -73.0, -74.0], - "dims": ("POINTS",), - "attrs": myatts(units="degrees_east", standard_name="lon"), - }, - "elev": { - "data": [5.0, 15.0, 20.0, 25.0], - "dims": ("POINTS",), - "attrs": myatts(units="meters", standard_name="elev"), - }, - "TEST_ppbv": { - "data": [1.2345, 2.3456, 3.4567, 4.5678], - "dims": ("POINTS",), - "attrs": myatts(units="ppbv", standard_name="TEST_ppbv"), - }, - "TESTM_ppbv": { - "data": [2.22, -9999.0, -7777.0, -8888.0], - "dims": ("POINTS",), - "attrs": myatts( - units="ppbv", standard_name="TESTM_ppbv", llod_value=0.025 - ), - }, - }, - } - chkfile = Dataset.from_dict(input) - with open_example_dataset( - "example.ict", engine="pseudonetcdf", backend_kwargs={"format": "ffi1001"} - ) as ictfile: - assert_identical(ictfile, chkfile) - - def test_ict_format_write(self) -> None: - fmtkw = {"format": "ffi1001"} - with open_example_dataset( - "example.ict", engine="pseudonetcdf", backend_kwargs=fmtkw - ) as expected: - with self.roundtrip( - expected, 
save_kwargs=fmtkw, open_kwargs={"backend_kwargs": fmtkw} - ) as actual: - assert_identical(expected, actual) - - def test_uamiv_format_read(self) -> None: - """ - Open a CAMx file and test data variables - """ - - camxfile = open_example_dataset( - "example.uamiv", engine="pseudonetcdf", backend_kwargs={"format": "uamiv"} - ) - data = np.arange(20, dtype="f").reshape(1, 1, 4, 5) - expected = xr.Variable( - ("TSTEP", "LAY", "ROW", "COL"), - data, - dict(units="ppm", long_name="O3".ljust(16), var_desc="O3".ljust(80)), - ) - actual = camxfile.variables["O3"] - assert_allclose(expected, actual) - - data = np.array([[[2002154, 0]]], dtype="i") - expected = xr.Variable( - ("TSTEP", "VAR", "DATE-TIME"), - data, - dict( - long_name="TFLAG".ljust(16), - var_desc="TFLAG".ljust(80), - units="DATE-TIME".ljust(16), - ), - ) - actual = camxfile.variables["TFLAG"] - assert_allclose(expected, actual) - camxfile.close() - - @requires_dask - def test_uamiv_format_mfread(self) -> None: - """ - Open a CAMx file and test data variables - """ - - camxfile = open_example_mfdataset( - ["example.uamiv", "example.uamiv"], - engine="pseudonetcdf", - concat_dim="TSTEP", - combine="nested", - backend_kwargs={"format": "uamiv"}, - ) - - data1 = np.arange(20, dtype="f").reshape(1, 1, 4, 5) - data = np.concatenate([data1] * 2, axis=0) - expected = xr.Variable( - ("TSTEP", "LAY", "ROW", "COL"), - data, - dict(units="ppm", long_name="O3".ljust(16), var_desc="O3".ljust(80)), - ) - actual = camxfile.variables["O3"] - assert_allclose(expected, actual) - - data = np.array([[[2002154, 0]]], dtype="i").repeat(2, 0) - attrs = dict( - long_name="TFLAG".ljust(16), - var_desc="TFLAG".ljust(80), - units="DATE-TIME".ljust(16), - ) - dims = ("TSTEP", "VAR", "DATE-TIME") - expected = xr.Variable(dims, data, attrs) - actual = camxfile.variables["TFLAG"] - assert_allclose(expected, actual) - camxfile.close() - - @pytest.mark.xfail(reason="Flaky; see GH3711") - def test_uamiv_format_write(self) -> None: - fmtkw = 
{"format": "uamiv"} - - expected = open_example_dataset( - "example.uamiv", engine="pseudonetcdf", backend_kwargs=fmtkw - ) - with self.roundtrip( - expected, - save_kwargs=fmtkw, - open_kwargs={"backend_kwargs": fmtkw}, - allow_cleanup_failure=True, - ) as actual: - assert_identical(expected, actual) - - expected.close() - - def save(self, dataset, path, **save_kwargs): - import PseudoNetCDF as pnc - - pncf = pnc.PseudoNetCDFFile() - pncf.dimensions = { - k: pnc.PseudoNetCDFDimension(pncf, k, v) for k, v in dataset.dims.items() - } - pncf.variables = { - k: pnc.PseudoNetCDFVariable( - pncf, k, v.dtype.char, v.dims, values=v.data[...], **v.attrs - ) - for k, v in dataset.variables.items() - } - for pk, pv in dataset.attrs.items(): - setattr(pncf, pk, pv) - - pnc.pncwrite(pncf, path, **save_kwargs) - - class TestEncodingInvalid: def test_extract_nc4_variable_encoding(self) -> None: var = xr.Variable(("x",), [1, 2, 3], {}, {"foo": "bar"}) diff --git a/xarray/tests/test_plugins.py b/xarray/tests/test_plugins.py index 441f16f4dca..1af255d30bb 100644 --- a/xarray/tests/test_plugins.py +++ b/xarray/tests/test_plugins.py @@ -15,7 +15,6 @@ from xarray.tests import ( has_h5netcdf, has_netCDF4, - has_pseudonetcdf, has_pydap, has_pynio, has_scipy, @@ -222,7 +221,6 @@ def test_lazy_import() -> None: blacklisted = [ "h5netcdf", "netCDF4", - "PseudoNetCDF", "pydap", "Nio", "scipy", @@ -279,7 +277,6 @@ def test_list_engines() -> None: assert ("scipy" in engines) == has_scipy assert ("h5netcdf" in engines) == has_h5netcdf assert ("netcdf4" in engines) == has_netCDF4 - assert ("pseudonetcdf" in engines) == has_pseudonetcdf assert ("pydap" in engines) == has_pydap assert ("zarr" in engines) == has_zarr assert ("pynio" in engines) == has_pynio diff --git a/xarray/util/print_versions.py b/xarray/util/print_versions.py index e4984def498..4b7f28cb34b 100755 --- a/xarray/util/print_versions.py +++ b/xarray/util/print_versions.py @@ -107,7 +107,6 @@ def show_versions(file=sys.stdout): 
("zarr", lambda mod: mod.__version__), ("cftime", lambda mod: mod.__version__), ("nc_time_axis", lambda mod: mod.__version__), - ("PseudoNetCDF", lambda mod: mod.__version__), ("iris", lambda mod: mod.__version__), ("bottleneck", lambda mod: mod.__version__), ("dask", lambda mod: mod.__version__), From 2525050d01f46afb3ab71608f6dd2d5b7ccff4a0 Mon Sep 17 00:00:00 2001 From: Deepak Cherian Date: Mon, 13 Nov 2023 14:30:58 -0700 Subject: [PATCH 2/2] Fix syntax --- doc/whats-new.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/whats-new.rst b/doc/whats-new.rst index fd86a2a412e..4da1d45a3dd 100644 --- a/doc/whats-new.rst +++ b/doc/whats-new.rst @@ -34,7 +34,7 @@ Breaking changes Deprecations ~~~~~~~~~~~~ -- The PseudoNetCDF backend has been removed. By `Deepak Cherian `_. +- The PseudoNetCDF backend has been removed. By `Deepak Cherian `_. - Supplying dimension-ordered sequences to :py:meth:`DataArray.chunk` & :py:meth:`Dataset.chunk` is deprecated in favor of supplying a dictionary of dimensions, or a single ``int`` or ``"auto"`` argument covering all