@@ -1,33 +1,32 @@
 from __future__ import absolute_import, division, print_function

 import os.path
+import warnings
 from glob import glob
 from io import BytesIO
 from numbers import Number
-import warnings

 import numpy as np

 from .. import Dataset, backends, conventions
 from ..core import indexing
-from ..core.combine import _infer_concat_order_from_positions, _auto_combine
+from ..core.combine import _auto_combine, _infer_concat_order_from_positions
 from ..core.pycompat import basestring, path_type
-from ..core.utils import close_on_error, is_remote_uri, is_grib_path
+from ..core.utils import close_on_error, is_grib_path, is_remote_uri
 from .common import ArrayWriter
 from .locks import _get_scheduler

-
 DATAARRAY_NAME = '__xarray_dataarray_name__'
 DATAARRAY_VARIABLE = '__xarray_dataarray_variable__'


 def _get_default_engine_remote_uri():
     try:
-        import netCDF4
+        import netCDF4  # noqa
         engine = 'netcdf4'
     except ImportError:  # pragma: no cover
         try:
-            import pydap  # flake8: noqa
+            import pydap  # noqa
             engine = 'pydap'
         except ImportError:
             raise ValueError('netCDF4 or pydap is required for accessing '
@@ -38,12 +37,12 @@ def _get_default_engine_remote_uri():
 def _get_default_engine_grib():
     msgs = []
     try:
-        import Nio  # flake8: noqa
+        import Nio  # noqa
         msgs += ["set engine='pynio' to access GRIB files with PyNIO"]
     except ImportError:  # pragma: no cover
         pass
     try:
-        import cfgrib  # flake8: noqa
+        import cfgrib  # noqa
         msgs += ["set engine='cfgrib' to access GRIB files with cfgrib"]
     except ImportError:  # pragma: no cover
         pass
@@ -56,7 +55,7 @@ def _get_default_engine_grib():

 def _get_default_engine_gz():
     try:
-        import scipy  # flake8: noqa
+        import scipy  # noqa
         engine = 'scipy'
     except ImportError:  # pragma: no cover
         raise ValueError('scipy is required for accessing .gz files')
@@ -65,11 +64,11 @@ def _get_default_engine_gz():

 def _get_default_engine_netcdf():
     try:
-        import netCDF4  # flake8: noqa
+        import netCDF4  # noqa
         engine = 'netcdf4'
     except ImportError:  # pragma: no cover
         try:
-            import scipy.io.netcdf  # flake8: noqa
+            import scipy.io.netcdf  # noqa
             engine = 'scipy'
         except ImportError:
             raise ValueError('cannot read or write netCDF files without '
@@ -579,7 +578,7 @@ def open_mfdataset(paths, chunks=None, concat_dim=_CONCAT_DIM_DEFAULT,

     .. [1] http://xarray.pydata.org/en/stable/dask.html
     .. [2] http://xarray.pydata.org/en/stable/dask.html#chunking-and-performance
-    """
+    """  # noqa
     if isinstance(paths, basestring):
         if is_remote_uri(paths):
             raise ValueError(
@@ -642,11 +641,12 @@ def open_mfdataset(paths, chunks=None, concat_dim=_CONCAT_DIM_DEFAULT,
             # Discard ordering because it should be redone from coordinates
             ids = False

-        combined = _auto_combine(datasets, concat_dims=concat_dims,
-                                 compat=compat,
-                                 data_vars=data_vars, coords=coords,
-                                 infer_order_from_coords=infer_order_from_coords,
-                                 ids=ids)
+        combined = _auto_combine(
+            datasets, concat_dims=concat_dims,
+            compat=compat,
+            data_vars=data_vars, coords=coords,
+            infer_order_from_coords=infer_order_from_coords,
+            ids=ids)
     except ValueError:
         for ds in datasets:
             ds.close()
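
For context, the reformatted `_auto_combine` call sits at the heart of `open_mfdataset`, which opens each file lazily and delegates the merge/concat logic as shown above. A minimal usage sketch, assuming hypothetical file names and a `time` concatenation dimension (neither appears in this commit):

```python
import xarray as xr

# Hypothetical example: open a glob of netCDF files that share a 'time'
# dimension. open_mfdataset opens each file lazily, then combines them
# via the _auto_combine path touched by this diff.
ds = xr.open_mfdataset('measurements_*.nc', concat_dim='time',
                       data_vars='minimal', coords='minimal')
print(ds)
```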