Skip to content

Commit 2667deb

Browse files
max-sixty authored and shoyer committed
Flake fixed (pydata#2629)
* add ignores * test_combine * isort * fixes * odd interaction between pytest fixture loop and flake * fix
1 parent d8d87d2 commit 2667deb

36 files changed

+179
-177
lines changed

setup.cfg

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,10 @@ testpaths=xarray/tests
88
[flake8]
99
max-line-length=79
1010
ignore=
11-
W503
11+
E402 # module level import not at top of file
12+
E731 # do not assign a lambda expression, use a def
13+
W503 # line break before binary operator
14+
W504 # line break after binary operator
1215
exclude=
1316
doc/
1417

versioneer.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,4 @@
1+
# flake8: noqa
12

23
# Version: 0.18
34

xarray/backends/api.py

Lines changed: 17 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -1,33 +1,32 @@
11
from __future__ import absolute_import, division, print_function
22

33
import os.path
4+
import warnings
45
from glob import glob
56
from io import BytesIO
67
from numbers import Number
7-
import warnings
88

99
import numpy as np
1010

1111
from .. import Dataset, backends, conventions
1212
from ..core import indexing
13-
from ..core.combine import _infer_concat_order_from_positions, _auto_combine
13+
from ..core.combine import _auto_combine, _infer_concat_order_from_positions
1414
from ..core.pycompat import basestring, path_type
15-
from ..core.utils import close_on_error, is_remote_uri, is_grib_path
15+
from ..core.utils import close_on_error, is_grib_path, is_remote_uri
1616
from .common import ArrayWriter
1717
from .locks import _get_scheduler
1818

19-
2019
DATAARRAY_NAME = '__xarray_dataarray_name__'
2120
DATAARRAY_VARIABLE = '__xarray_dataarray_variable__'
2221

2322

2423
def _get_default_engine_remote_uri():
2524
try:
26-
import netCDF4
25+
import netCDF4 # noqa
2726
engine = 'netcdf4'
2827
except ImportError: # pragma: no cover
2928
try:
30-
import pydap # flake8: noqa
29+
import pydap # noqa
3130
engine = 'pydap'
3231
except ImportError:
3332
raise ValueError('netCDF4 or pydap is required for accessing '
@@ -38,12 +37,12 @@ def _get_default_engine_remote_uri():
3837
def _get_default_engine_grib():
3938
msgs = []
4039
try:
41-
import Nio # flake8: noqa
40+
import Nio # noqa
4241
msgs += ["set engine='pynio' to access GRIB files with PyNIO"]
4342
except ImportError: # pragma: no cover
4443
pass
4544
try:
46-
import cfgrib # flake8: noqa
45+
import cfgrib # noqa
4746
msgs += ["set engine='cfgrib' to access GRIB files with cfgrib"]
4847
except ImportError: # pragma: no cover
4948
pass
@@ -56,7 +55,7 @@ def _get_default_engine_grib():
5655

5756
def _get_default_engine_gz():
5857
try:
59-
import scipy # flake8: noqa
58+
import scipy # noqa
6059
engine = 'scipy'
6160
except ImportError: # pragma: no cover
6261
raise ValueError('scipy is required for accessing .gz files')
@@ -65,11 +64,11 @@ def _get_default_engine_gz():
6564

6665
def _get_default_engine_netcdf():
6766
try:
68-
import netCDF4 # flake8: noqa
67+
import netCDF4 # noqa
6968
engine = 'netcdf4'
7069
except ImportError: # pragma: no cover
7170
try:
72-
import scipy.io.netcdf # flake8: noqa
71+
import scipy.io.netcdf # noqa
7372
engine = 'scipy'
7473
except ImportError:
7574
raise ValueError('cannot read or write netCDF files without '
@@ -579,7 +578,7 @@ def open_mfdataset(paths, chunks=None, concat_dim=_CONCAT_DIM_DEFAULT,
579578
580579
.. [1] http://xarray.pydata.org/en/stable/dask.html
581580
.. [2] http://xarray.pydata.org/en/stable/dask.html#chunking-and-performance
582-
"""
581+
""" # noqa
583582
if isinstance(paths, basestring):
584583
if is_remote_uri(paths):
585584
raise ValueError(
@@ -642,11 +641,12 @@ def open_mfdataset(paths, chunks=None, concat_dim=_CONCAT_DIM_DEFAULT,
642641
# Discard ordering because it should be redone from coordinates
643642
ids = False
644643

645-
combined = _auto_combine(datasets, concat_dims=concat_dims,
646-
compat=compat,
647-
data_vars=data_vars, coords=coords,
648-
infer_order_from_coords=infer_order_from_coords,
649-
ids=ids)
644+
combined = _auto_combine(
645+
datasets, concat_dims=concat_dims,
646+
compat=compat,
647+
data_vars=data_vars, coords=coords,
648+
infer_order_from_coords=infer_order_from_coords,
649+
ids=ids)
650650
except ValueError:
651651
for ds in datasets:
652652
ds.close()

xarray/backends/cfgrib_.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@
66
from ..core import indexing
77
from ..core.utils import Frozen, FrozenOrderedDict
88
from .common import AbstractDataStore, BackendArray
9-
from .locks import ensure_lock, SerializableLock
9+
from .locks import SerializableLock, ensure_lock
1010

1111
# FIXME: Add a dedicated lock, even if ecCodes is supposed to be thread-safe
1212
# in most circumstances. See:

xarray/backends/file_manager.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,6 @@
77
from .locks import acquire
88
from .lru_cache import LRUCache
99

10-
1110
# Global cache for storing open files.
1211
FILE_CACHE = LRUCache(
1312
OPTIONS['file_cache_maxsize'], on_evict=lambda k, v: v.close())

xarray/backends/netCDF4_.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -14,9 +14,9 @@
1414
from ..core.utils import FrozenOrderedDict, close_on_error, is_remote_uri
1515
from .common import (
1616
BackendArray, WritableCFDataStore, find_root, robust_getitem)
17-
from .locks import (NETCDFC_LOCK, HDF5_LOCK,
18-
combine_locks, ensure_lock, get_write_lock)
1917
from .file_manager import CachingFileManager, DummyFileManager
18+
from .locks import (
19+
HDF5_LOCK, NETCDFC_LOCK, combine_locks, ensure_lock, get_write_lock)
2020
from .netcdf3 import encode_nc3_attr_value, encode_nc3_variable
2121

2222
# This lookup table maps from dtype.byteorder to a readable endian

xarray/backends/netcdf3.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,7 @@
99

1010
# Special characters that are permitted in netCDF names except in the
1111
# 0th position of the string
12-
_specialchars = '_.@+- !"#$%&\()*,:;<=>?[]^`{|}~'
12+
_specialchars = '_.@+- !"#$%&\\()*,:;<=>?[]^`{|}~'
1313

1414
# The following are reserved names in CDL and may not be used as names of
1515
# variables, dimension, attributes

xarray/backends/pseudonetcdf_.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,6 @@
1010
from .file_manager import CachingFileManager
1111
from .locks import HDF5_LOCK, NETCDFC_LOCK, combine_locks, ensure_lock
1212

13-
1413
# psuedonetcdf can invoke netCDF libraries internally
1514
PNETCDF_LOCK = combine_locks([HDF5_LOCK, NETCDFC_LOCK])
1615

xarray/backends/pynio_.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -8,8 +8,7 @@
88
from .common import AbstractDataStore, BackendArray
99
from .file_manager import CachingFileManager
1010
from .locks import (
11-
HDF5_LOCK, NETCDFC_LOCK, combine_locks, ensure_lock, SerializableLock)
12-
11+
HDF5_LOCK, NETCDFC_LOCK, SerializableLock, combine_locks, ensure_lock)
1312

1413
# PyNIO can invoke netCDF libraries internally
1514
# Add a dedicated lock just in case NCL as well isn't thread-safe.

xarray/backends/rasterio_.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,6 @@
1111
from .file_manager import CachingFileManager
1212
from .locks import SerializableLock
1313

14-
1514
# TODO: should this be GDAL_LOCK instead?
1615
RASTERIO_LOCK = SerializableLock()
1716

xarray/backends/scipy_.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -11,8 +11,8 @@
1111
from ..core.pycompat import OrderedDict, basestring, iteritems
1212
from ..core.utils import Frozen, FrozenOrderedDict
1313
from .common import BackendArray, WritableCFDataStore
14-
from .locks import ensure_lock, get_write_lock
1514
from .file_manager import CachingFileManager, DummyFileManager
15+
from .locks import ensure_lock, get_write_lock
1616
from .netcdf3 import (
1717
encode_nc3_attr_value, encode_nc3_variable, is_valid_nc3_name)
1818

xarray/backends/zarr.py

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@
88
from ..core import indexing
99
from ..core.pycompat import OrderedDict, integer_types, iteritems
1010
from ..core.utils import FrozenOrderedDict, HiddenKeyDict
11-
from .common import AbstractWritableDataStore, ArrayWriter, BackendArray
11+
from .common import AbstractWritableDataStore, BackendArray
1212

1313
# need some special secret attributes to tell us the dimensions
1414
_DIMENSION_KEY = '_ARRAY_DIMENSIONS'
@@ -237,7 +237,8 @@ def open_group(cls, store, mode='r', synchronizer=None, group=None,
237237
"#installation" % min_zarr)
238238

239239
if consolidated or consolidate_on_close:
240-
if LooseVersion(zarr.__version__) <= '2.2.1.dev2': # pragma: no cover
240+
if LooseVersion(
241+
zarr.__version__) <= '2.2.1.dev2': # pragma: no cover
241242
raise NotImplementedError("Zarr version 2.2.1.dev2 or greater "
242243
"is required by for consolidated "
243244
"metadata.")

xarray/coding/cftime_offsets.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -419,7 +419,7 @@ def __apply__(self, other):
419419

420420

421421
_FREQUENCY_CONDITION = '|'.join(_FREQUENCIES.keys())
422-
_PATTERN = '^((?P<multiple>\d+)|())(?P<freq>({0}))$'.format(
422+
_PATTERN = r'^((?P<multiple>\d+)|())(?P<freq>({0}))$'.format(
423423
_FREQUENCY_CONDITION)
424424

425425

@@ -726,10 +726,10 @@ def cftime_range(start=None, end=None, periods=None, freq='D',
726726
raise ValueError("Closed must be either 'left', 'right' or None")
727727

728728
if (not left_closed and len(dates) and
729-
start is not None and dates[0] == start):
729+
start is not None and dates[0] == start):
730730
dates = dates[1:]
731731
if (not right_closed and len(dates) and
732-
end is not None and dates[-1] == end):
732+
end is not None and dates[-1] == end):
733733
dates = dates[:-1]
734734

735735
return CFTimeIndex(dates, name=name)

xarray/coding/cftimeindex.py

Lines changed: 9 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -51,7 +51,7 @@
5151
from xarray.core import pycompat
5252
from xarray.core.utils import is_scalar
5353

54-
from .times import cftime_to_nptime, infer_calendar_name, _STANDARD_CALENDARS
54+
from .times import _STANDARD_CALENDARS, cftime_to_nptime, infer_calendar_name
5555

5656

5757
def named(name, pattern):
@@ -68,13 +68,13 @@ def trailing_optional(xs):
6868
return xs[0] + optional(trailing_optional(xs[1:]))
6969

7070

71-
def build_pattern(date_sep='\-', datetime_sep='T', time_sep='\:'):
72-
pieces = [(None, 'year', '\d{4}'),
73-
(date_sep, 'month', '\d{2}'),
74-
(date_sep, 'day', '\d{2}'),
75-
(datetime_sep, 'hour', '\d{2}'),
76-
(time_sep, 'minute', '\d{2}'),
77-
(time_sep, 'second', '\d{2}')]
71+
def build_pattern(date_sep=r'\-', datetime_sep=r'T', time_sep=r'\:'):
72+
pieces = [(None, 'year', r'\d{4}'),
73+
(date_sep, 'month', r'\d{2}'),
74+
(date_sep, 'day', r'\d{2}'),
75+
(datetime_sep, 'hour', r'\d{2}'),
76+
(time_sep, 'minute', r'\d{2}'),
77+
(time_sep, 'second', r'\d{2}')]
7878
pattern_list = []
7979
for sep, name, sub_pattern in pieces:
8080
pattern_list.append((sep if sep else '') + named(name, sub_pattern))
@@ -152,6 +152,7 @@ def get_date_field(datetimes, field):
152152

153153
def _field_accessor(name, docstring=None):
154154
"""Adapted from pandas.tseries.index._field_accessor"""
155+
155156
def f(self):
156157
return get_date_field(self._data, name)
157158

xarray/core/combine.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
from __future__ import absolute_import, division, print_function
22

3-
import warnings
43
import itertools
4+
import warnings
55
from collections import Counter
66

77
import pandas as pd
@@ -378,7 +378,7 @@ def _infer_concat_order_from_positions(datasets, concat_dims):
378378
tile_id, ds = list(combined_ids.items())[0]
379379
n_dims = len(tile_id)
380380
if concat_dims == _CONCAT_DIM_DEFAULT or concat_dims is None:
381-
concat_dims = [concat_dims]*n_dims
381+
concat_dims = [concat_dims] * n_dims
382382
else:
383383
if len(concat_dims) != n_dims:
384384
raise ValueError("concat_dims has length {} but the datasets "
@@ -533,8 +533,8 @@ def _auto_combine(datasets, concat_dims, compat, data_vars, coords,
533533
if not ids:
534534
# Determine tile_IDs by structure of input in N-D
535535
# (i.e. ordering in list-of-lists)
536-
combined_ids, concat_dims = _infer_concat_order_from_positions\
537-
(datasets, concat_dims)
536+
combined_ids, concat_dims = _infer_concat_order_from_positions(
537+
datasets, concat_dims)
538538
else:
539539
# Already sorted so just use the ids already passed
540540
combined_ids = OrderedDict(zip(ids, datasets))

0 commit comments

Comments (0)