Flake fixed (pydata#2629)
* add ignores

* test_combine

* isort

* fixes

* odd interaction between pytest fixture loop and flake

* fix
max-sixty authored and shoyer committed Dec 25, 2018
1 parent d8d87d2 commit 2667deb
Showing 36 changed files with 179 additions and 177 deletions.
5 changes: 4 additions & 1 deletion setup.cfg
@@ -8,7 +8,10 @@ testpaths=xarray/tests
 [flake8]
 max-line-length=79
 ignore=
-    W503
+    E402  # module level import not at top of file
+    E731  # do not assign a lambda expression, use a def
+    W503  # line break before binary operator
+    W504  # line break after binary operator
 exclude=
     doc/
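
For context, the newly ignored codes flag patterns like those below. A minimal illustrative sketch (not xarray code), showing the per-line # noqa alternative to a config-wide ignore:

    import sys

    print(sys.version)  # executable code before an import triggers E402

    # per-line alternative to the config-wide ignore:
    import os  # noqa: E402

    # E731: flake8 prefers a def over a lambda bound to a name
    square = lambda x: x * x  # noqa: E731
    print(square(4))

Ignoring W503 and W504 together is a common pairing: pycodestyle can flag a line break on either side of a binary operator, so any wrapped expression would otherwise trip one of the two.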

1 change: 1 addition & 0 deletions versioneer.py
@@ -1,3 +1,4 @@
+# flake8: noqa

 # Version: 0.18

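The file-level directive added above differs from the inline suppressions elsewhere in this commit: a # flake8: noqa comment disables checking for an entire file (the right tool for vendored code like versioneer.py), while a bare # noqa silences a single line. A minimal sketch of the two forms, assuming flake8 3.x semantics (they would normally live in separate files):

    # flake8: noqa
    # With the directive above on a line of its own, flake8 skips this
    # whole module.

    import collections  # noqa
    # A bare "# noqa" instead suppresses warnings on that one line, e.g.
    # an import kept only as an availability probe.
    print(collections.OrderedDict)

This is also why the diff rewrites lines like "import pydap  # flake8: noqa" to "import pydap  # noqa": on flake8 3.x the former silently disabled linting for the whole module, not just that import.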
34 changes: 17 additions & 17 deletions xarray/backends/api.py
@@ -1,33 +1,32 @@
 from __future__ import absolute_import, division, print_function

 import os.path
+import warnings
 from glob import glob
 from io import BytesIO
 from numbers import Number
-import warnings

 import numpy as np

 from .. import Dataset, backends, conventions
 from ..core import indexing
-from ..core.combine import _infer_concat_order_from_positions, _auto_combine
+from ..core.combine import _auto_combine, _infer_concat_order_from_positions
 from ..core.pycompat import basestring, path_type
-from ..core.utils import close_on_error, is_remote_uri, is_grib_path
+from ..core.utils import close_on_error, is_grib_path, is_remote_uri
 from .common import ArrayWriter
 from .locks import _get_scheduler

-
 DATAARRAY_NAME = '__xarray_dataarray_name__'
 DATAARRAY_VARIABLE = '__xarray_dataarray_variable__'


 def _get_default_engine_remote_uri():
     try:
-        import netCDF4
+        import netCDF4  # noqa
         engine = 'netcdf4'
     except ImportError:  # pragma: no cover
         try:
-            import pydap  # flake8: noqa
+            import pydap  # noqa
             engine = 'pydap'
         except ImportError:
             raise ValueError('netCDF4 or pydap is required for accessing '
@@ -38,12 +37,12 @@ def _get_default_engine_remote_uri():
 def _get_default_engine_grib():
     msgs = []
     try:
-        import Nio  # flake8: noqa
+        import Nio  # noqa
         msgs += ["set engine='pynio' to access GRIB files with PyNIO"]
     except ImportError:  # pragma: no cover
         pass
     try:
-        import cfgrib  # flake8: noqa
+        import cfgrib  # noqa
         msgs += ["set engine='cfgrib' to access GRIB files with cfgrib"]
     except ImportError:  # pragma: no cover
         pass
Expand All @@ -56,7 +55,7 @@ def _get_default_engine_grib():

def _get_default_engine_gz():
try:
import scipy # flake8: noqa
import scipy # noqa
engine = 'scipy'
except ImportError: # pragma: no cover
raise ValueError('scipy is required for accessing .gz files')
@@ -65,11 +64,11 @@ def _get_default_engine_gz():

 def _get_default_engine_netcdf():
     try:
-        import netCDF4  # flake8: noqa
+        import netCDF4  # noqa
         engine = 'netcdf4'
     except ImportError:  # pragma: no cover
         try:
-            import scipy.io.netcdf  # flake8: noqa
+            import scipy.io.netcdf  # noqa
             engine = 'scipy'
         except ImportError:
             raise ValueError('cannot read or write netCDF files without '
@@ -579,7 +578,7 @@ def open_mfdataset(paths, chunks=None, concat_dim=_CONCAT_DIM_DEFAULT,
     .. [1] http://xarray.pydata.org/en/stable/dask.html
     .. [2] http://xarray.pydata.org/en/stable/dask.html#chunking-and-performance
-    """
+    """  # noqa
     if isinstance(paths, basestring):
         if is_remote_uri(paths):
             raise ValueError(
@@ -642,11 +641,12 @@ def open_mfdataset(paths, chunks=None, concat_dim=_CONCAT_DIM_DEFAULT,
             # Discard ordering because it should be redone from coordinates
             ids = False

-        combined = _auto_combine(datasets, concat_dims=concat_dims,
-                                 compat=compat,
-                                 data_vars=data_vars, coords=coords,
-                                 infer_order_from_coords=infer_order_from_coords,
-                                 ids=ids)
+        combined = _auto_combine(
+            datasets, concat_dims=concat_dims,
+            compat=compat,
+            data_vars=data_vars, coords=coords,
+            infer_order_from_coords=infer_order_from_coords,
+            ids=ids)
     except ValueError:
         for ds in datasets:
             ds.close()
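
The _auto_combine rewrap above swaps paren-aligned continuation lines, where the long infer_order_from_coords keyword ran past 79 columns, for a hanging indent. A generic sketch of the two styles (stand-in names, not xarray's API):

    def some_function(a, b, c):  # illustrative stub
        return a + b + c

    # aligned with the opening parenthesis: the continuation column is
    # fixed by the call name, so long arguments can overrun E501's limit
    result = some_function(1, 2,
                           3)

    # hanging indent: break right after the parenthesis and indent one
    # level; the wrap no longer depends on the length of the call name
    result = some_function(
        1, 2,
        3)
    print(result)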
2 changes: 1 addition & 1 deletion xarray/backends/cfgrib_.py
@@ -6,7 +6,7 @@
 from ..core import indexing
 from ..core.utils import Frozen, FrozenOrderedDict
 from .common import AbstractDataStore, BackendArray
-from .locks import ensure_lock, SerializableLock
+from .locks import SerializableLock, ensure_lock

 # FIXME: Add a dedicated lock, even if ecCodes is supposed to be thread-safe
 # in most circumstances. See:
1 change: 0 additions & 1 deletion xarray/backends/file_manager.py
@@ -7,7 +7,6 @@
 from .locks import acquire
 from .lru_cache import LRUCache

-
 # Global cache for storing open files.
 FILE_CACHE = LRUCache(
     OPTIONS['file_cache_maxsize'], on_evict=lambda k, v: v.close())
4 changes: 2 additions & 2 deletions xarray/backends/netCDF4_.py
@@ -14,9 +14,9 @@
 from ..core.utils import FrozenOrderedDict, close_on_error, is_remote_uri
 from .common import (
     BackendArray, WritableCFDataStore, find_root, robust_getitem)
-from .locks import (NETCDFC_LOCK, HDF5_LOCK,
-                    combine_locks, ensure_lock, get_write_lock)
 from .file_manager import CachingFileManager, DummyFileManager
+from .locks import (
+    HDF5_LOCK, NETCDFC_LOCK, combine_locks, ensure_lock, get_write_lock)
 from .netcdf3 import encode_nc3_attr_value, encode_nc3_variable

 # This lookup table maps from dtype.byteorder to a readable endian
2 changes: 1 addition & 1 deletion xarray/backends/netcdf3.py
@@ -9,7 +9,7 @@

 # Special characters that are permitted in netCDF names except in the
 # 0th position of the string
-_specialchars = '_.@+- !"#$%&\()*,:;<=>?[]^`{|}~'
+_specialchars = '_.@+- !"#$%&\\()*,:;<=>?[]^`{|}~'

 # The following are reserved names in CDL and may not be used as names of
 # variables, dimension, attributes
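
The _specialchars change fixes an invalid escape sequence: \( is not a recognized Python escape (newer interpreters warn, and flake8 reports W605), so the backslash is doubled. A raw string would produce the same value; a quick check mirroring the fixed line:

    s_escaped = '_.@+- !"#$%&\\()*,:;<=>?[]^`{|}~'  # doubled backslash
    s_raw = r'_.@+- !"#$%&\()*,:;<=>?[]^`{|}~'      # raw string, same value
    assert s_escaped == s_raw
    assert '\\' in s_escaped  # the value contains a single real backslash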
1 change: 0 additions & 1 deletion xarray/backends/pseudonetcdf_.py
@@ -10,7 +10,6 @@
 from .file_manager import CachingFileManager
 from .locks import HDF5_LOCK, NETCDFC_LOCK, combine_locks, ensure_lock

-
 # psuedonetcdf can invoke netCDF libraries internally
 PNETCDF_LOCK = combine_locks([HDF5_LOCK, NETCDFC_LOCK])

3 changes: 1 addition & 2 deletions xarray/backends/pynio_.py
@@ -8,8 +8,7 @@
 from .common import AbstractDataStore, BackendArray
 from .file_manager import CachingFileManager
 from .locks import (
-    HDF5_LOCK, NETCDFC_LOCK, combine_locks, ensure_lock, SerializableLock)
-
+    HDF5_LOCK, NETCDFC_LOCK, SerializableLock, combine_locks, ensure_lock)

 # PyNIO can invoke netCDF libraries internally
 # Add a dedicated lock just in case NCL as well isn't thread-safe.
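
The reordering above (HDF5_LOCK, NETCDFC_LOCK, SerializableLock, combine_locks, ensure_lock) matches isort's default order_by_type behavior: CONSTANT_CASE names first, then CamelCase classes, then lowercase functions. A rough approximation of that classification, for illustration only:

    names = ['ensure_lock', 'HDF5_LOCK', 'SerializableLock',
             'combine_locks', 'NETCDFC_LOCK']

    def type_rank(name):
        # crude stand-in for isort's real classifier
        if name.isupper():
            return 0  # constant
        if name[0].isupper():
            return 1  # class
        return 2      # function

    print(sorted(names, key=lambda n: (type_rank(n), n)))
    # ['HDF5_LOCK', 'NETCDFC_LOCK', 'SerializableLock', 'combine_locks',
    #  'ensure_lock']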
1 change: 0 additions & 1 deletion xarray/backends/rasterio_.py
@@ -11,7 +11,6 @@
 from .file_manager import CachingFileManager
 from .locks import SerializableLock

-
 # TODO: should this be GDAL_LOCK instead?
 RASTERIO_LOCK = SerializableLock()

2 changes: 1 addition & 1 deletion xarray/backends/scipy_.py
@@ -11,8 +11,8 @@
 from ..core.pycompat import OrderedDict, basestring, iteritems
 from ..core.utils import Frozen, FrozenOrderedDict
 from .common import BackendArray, WritableCFDataStore
-from .locks import ensure_lock, get_write_lock
 from .file_manager import CachingFileManager, DummyFileManager
+from .locks import ensure_lock, get_write_lock
 from .netcdf3 import (
     encode_nc3_attr_value, encode_nc3_variable, is_valid_nc3_name)
5 changes: 3 additions & 2 deletions xarray/backends/zarr.py
@@ -8,7 +8,7 @@
 from ..core import indexing
 from ..core.pycompat import OrderedDict, integer_types, iteritems
 from ..core.utils import FrozenOrderedDict, HiddenKeyDict
-from .common import AbstractWritableDataStore, ArrayWriter, BackendArray
+from .common import AbstractWritableDataStore, BackendArray

 # need some special secret attributes to tell us the dimensions
 _DIMENSION_KEY = '_ARRAY_DIMENSIONS'
@@ -237,7 +237,8 @@ def open_group(cls, store, mode='r', synchronizer=None, group=None,
                              "#installation" % min_zarr)

         if consolidated or consolidate_on_close:
-            if LooseVersion(zarr.__version__) <= '2.2.1.dev2':  # pragma: no cover
+            if LooseVersion(
+                    zarr.__version__) <= '2.2.1.dev2':  # pragma: no cover
                 raise NotImplementedError("Zarr version 2.2.1.dev2 or greater "
                                           "is required by for consolidated "
                                           "metadata.")
6 changes: 3 additions & 3 deletions xarray/coding/cftime_offsets.py
@@ -419,7 +419,7 @@ def __apply__(self, other):


 _FREQUENCY_CONDITION = '|'.join(_FREQUENCIES.keys())
-_PATTERN = '^((?P<multiple>\d+)|())(?P<freq>({0}))$'.format(
+_PATTERN = r'^((?P<multiple>\d+)|())(?P<freq>({0}))$'.format(
     _FREQUENCY_CONDITION)


@@ -726,10 +726,10 @@ def cftime_range(start=None, end=None, periods=None, freq='D',
         raise ValueError("Closed must be either 'left', 'right' or None")

     if (not left_closed and len(dates) and
-        start is not None and dates[0] == start):
+            start is not None and dates[0] == start):
         dates = dates[1:]
     if (not right_closed and len(dates) and
-        end is not None and dates[-1] == end):
+            end is not None and dates[-1] == end):
         dates = dates[:-1]

     return CFTimeIndex(dates, name=name)
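
_PATTERN gains an r prefix because \d is not a valid string escape; it happens to reach the re module intact on current interpreters, but a raw string states the intent and avoids W605. A simplified stand-in for the pattern (the real code builds the freq alternatives from _FREQUENCIES):

    import re

    pattern = r'^((?P<multiple>\d+)|())(?P<freq>(D|H))$'  # simplified
    match = re.match(pattern, '24H')
    assert match is not None
    assert match.group('multiple') == '24'
    assert match.group('freq') == 'H'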
17 changes: 9 additions & 8 deletions xarray/coding/cftimeindex.py
@@ -51,7 +51,7 @@
 from xarray.core import pycompat
 from xarray.core.utils import is_scalar

-from .times import cftime_to_nptime, infer_calendar_name, _STANDARD_CALENDARS
+from .times import _STANDARD_CALENDARS, cftime_to_nptime, infer_calendar_name


 def named(name, pattern):
@@ -68,13 +68,13 @@ def trailing_optional(xs):
     return xs[0] + optional(trailing_optional(xs[1:]))


-def build_pattern(date_sep='\-', datetime_sep='T', time_sep='\:'):
-    pieces = [(None, 'year', '\d{4}'),
-              (date_sep, 'month', '\d{2}'),
-              (date_sep, 'day', '\d{2}'),
-              (datetime_sep, 'hour', '\d{2}'),
-              (time_sep, 'minute', '\d{2}'),
-              (time_sep, 'second', '\d{2}')]
+def build_pattern(date_sep=r'\-', datetime_sep=r'T', time_sep=r'\:'):
+    pieces = [(None, 'year', r'\d{4}'),
+              (date_sep, 'month', r'\d{2}'),
+              (date_sep, 'day', r'\d{2}'),
+              (datetime_sep, 'hour', r'\d{2}'),
+              (time_sep, 'minute', r'\d{2}'),
+              (time_sep, 'second', r'\d{2}')]
     pattern_list = []
     for sep, name, sub_pattern in pieces:
         pattern_list.append((sep if sep else '') + named(name, sub_pattern))
@@ -152,6 +152,7 @@ def get_date_field(datetimes, field):

 def _field_accessor(name, docstring=None):
     """Adapted from pandas.tseries.index._field_accessor"""
+
     def f(self):
         return get_date_field(self._data, name)

8 changes: 4 additions & 4 deletions xarray/core/combine.py
@@ -1,7 +1,7 @@
 from __future__ import absolute_import, division, print_function

-import warnings
 import itertools
+import warnings
 from collections import Counter

 import pandas as pd
@@ -378,7 +378,7 @@ def _infer_concat_order_from_positions(datasets, concat_dims):
     tile_id, ds = list(combined_ids.items())[0]
     n_dims = len(tile_id)
     if concat_dims == _CONCAT_DIM_DEFAULT or concat_dims is None:
-        concat_dims = [concat_dims]*n_dims
+        concat_dims = [concat_dims] * n_dims
     else:
         if len(concat_dims) != n_dims:
             raise ValueError("concat_dims has length {} but the datasets "
@@ -533,8 +533,8 @@ def _auto_combine(datasets, concat_dims, compat, data_vars, coords,
     if not ids:
         # Determine tile_IDs by structure of input in N-D
         # (i.e. ordering in list-of-lists)
-        combined_ids, concat_dims = _infer_concat_order_from_positions\
-            (datasets, concat_dims)
+        combined_ids, concat_dims = _infer_concat_order_from_positions(
+            datasets, concat_dims)
     else:
         # Already sorted so just use the ids already passed
         combined_ids = OrderedDict(zip(ids, datasets))
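
The _infer_concat_order_from_positions fix replaces a backslash continuation, which stranded the argument list on its own line away from the function name, with the idiomatic break inside the parentheses. A sketch with a stub helper (names illustrative):

    def infer_order(datasets, dims):  # stub for the real helper
        return list(datasets), list(dims)

    # discouraged: the backslash continuation separates the name from its
    # opening parenthesis, which flake8 rejects
    # combined, dims = infer_order\
    #     (['a', 'b'], ['x'])

    # idiomatic: open the parenthesis first, then wrap the arguments
    combined, dims = infer_order(
        ['a', 'b'], ['x'])
    print(combined, dims)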