TYP: Upgrade to mypy 0.920 (pandas-dev#44936)
phofl authored Dec 22, 2021
1 parent 3a4821e commit 0e58307
Showing 13 changed files with 33 additions and 29 deletions.
2 changes: 1 addition & 1 deletion doc/source/whatsnew/v1.4.0.rst
@@ -397,7 +397,7 @@ If installed, we now require:
 +-----------------+-----------------+----------+---------+
 | pytest (dev)    | 6.0             |          |         |
 +-----------------+-----------------+----------+---------+
-| mypy (dev)      | 0.910           |          | X       |
+| mypy (dev)      | 0.920           |          | X       |
 +-----------------+-----------------+----------+---------+
 
 For `optional libraries <https://pandas.pydata.org/docs/getting_started/install.html>`_ the general recommendation is to use the latest version.
2 changes: 1 addition & 1 deletion environment.yml
@@ -24,7 +24,7 @@ dependencies:
   - flake8-bugbear=21.3.2 # used by flake8, find likely bugs
   - flake8-comprehensions=3.1.0 # used by flake8, linting of unnecessary comprehensions
   - isort>=5.2.1 # check that imports are in the right order
-  - mypy=0.910
+  - mypy=0.920
   - pre-commit>=2.9.2
   - pycodestyle # used by flake8
   - pyupgrade
7 changes: 4 additions & 3 deletions pandas/compat/pickle_compat.py
@@ -194,8 +194,8 @@ def __new__(cls) -> DataFrame:  # type: ignore[misc]
 # our Unpickler sub-class to override methods and some dispatcher
 # functions for compat and uses a non-public class of the pickle module.
 
-# error: Name 'pkl._Unpickler' is not defined
-class Unpickler(pkl._Unpickler):  # type: ignore[name-defined]
+
+class Unpickler(pkl._Unpickler):
     def find_class(self, module, name):
         # override superclass
         key = (module, name)
@@ -266,7 +266,8 @@ def load(fh, encoding: str | None = None, is_verbose: bool = False):
             up = Unpickler(fh, encoding=encoding)
         else:
             up = Unpickler(fh)
-        up.is_verbose = is_verbose
+        # "Unpickler" has no attribute "is_verbose" [attr-defined]
+        up.is_verbose = is_verbose  # type: ignore[attr-defined]
 
         return up.load()
     except (ValueError, TypeError):
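
The ignore moves rather than disappears here: the stubs bundled with mypy 0.920 evidently declare pickle._Unpickler, so subclassing it is fine, while assigning the undeclared is_verbose attribute now needs attr-defined. A minimal standalone sketch of the same pattern (not part of the commit; pandas' real find_class does module/class remapping rather than deferring):

import io
import pickle


class Unpickler(pickle._Unpickler):  # the pure-Python unpickler, i.e. pkl._Unpickler above
    def find_class(self, module, name):
        # defer to the superclass; pandas overrides this to remap legacy paths
        return super().find_class(module, name)


buf = io.BytesIO(pickle.dumps({"a": 1}))
up = Unpickler(buf)
up.is_verbose = False  # type: ignore[attr-defined]  # attribute not declared in the stub
print(up.load())  # {'a': 1}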
5 changes: 1 addition & 4 deletions pandas/core/base.py
@@ -527,10 +527,7 @@ def to_numpy(
               dtype='datetime64[ns]')
         """
         if is_extension_array_dtype(self.dtype):
-            # error: Too many arguments for "to_numpy" of "ExtensionArray"
-            return self.array.to_numpy(  # type: ignore[call-arg]
-                dtype, copy=copy, na_value=na_value, **kwargs
-            )
+            return self.array.to_numpy(dtype, copy=copy, na_value=na_value, **kwargs)
         elif kwargs:
             bad_keys = list(kwargs.keys())[0]
             raise TypeError(
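
A small usage sketch (not part of the commit) of the call the dropped ignore used to cover: Series.to_numpy forwarding dtype and na_value to the extension array's to_numpy on the branch above.

import numpy as np
import pandas as pd

ser = pd.Series([1, 2, None], dtype="Int64")
# dispatches to ser.array.to_numpy(...) via the extension-array branch above
out = ser.to_numpy(dtype="float64", na_value=np.nan)
print(out)  # [ 1.  2. nan]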
3 changes: 1 addition & 2 deletions pandas/core/frame.py
@@ -2634,8 +2634,7 @@ def to_stata(
             # Specifying the version is only supported for UTF8 (118 or 119)
             kwargs["version"] = version
 
-        # mypy: Too many arguments for "StataWriter"
-        writer = statawriter(  # type: ignore[call-arg]
+        writer = statawriter(
             path,
             self,
             convert_dates=convert_dates,
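
A small usage sketch (not part of the commit; the DataFrame and buffer are made up) of the path exercised above, where a version of 118 or 119 is forwarded to the writer through kwargs["version"]:

import io
import pandas as pd

df = pd.DataFrame({"x": [1, 2], "y": ["a", "b"]})
buf = io.BytesIO()
df.to_stata(buf, version=118)  # takes the kwargs["version"] branch shown in the hunk
print(len(buf.getvalue()) > 0)  # True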
4 changes: 3 additions & 1 deletion pandas/core/indexes/base.py
@@ -443,7 +443,9 @@ def __new__(
             return Index._simple_new(data, name=name)
 
         elif is_ea_or_datetimelike_dtype(data_dtype):
-            klass = cls._dtype_to_subclass(data_dtype)
+            # Argument 1 to "_dtype_to_subclass" of "Index" has incompatible type
+            # "Optional[Any]"; expected "Union[dtype[Any], ExtensionDtype]" [arg-type]
+            klass = cls._dtype_to_subclass(data_dtype)  # type: ignore[arg-type]
             if klass is not Index:
                 result = klass(data, copy=copy, name=name, **kwargs)
                 if dtype is not None:
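
data_dtype appears to come from getattr(data, "dtype", None) earlier in __new__, and the three-argument getattr with a None default is typed as Optional[Any], which is what the ignored error reports. A generic sketch of that pattern (not pandas code; HasDtype and dtype_to_kind are hypothetical stand-ins); the same getattr-with-None-default shape drives the new ignores in pandas/io/pytables.py below:

class HasDtype:
    dtype = "int64"


def dtype_to_kind(dtype: str) -> str:
    # hypothetical stand-in for a helper that requires a real str
    return dtype.rstrip("0123456789")


data = HasDtype()
data_dtype = getattr(data, "dtype", None)  # typeshed: Any | None
kind = dtype_to_kind(data_dtype)  # type: ignore[arg-type]  # Optional[Any] vs str
print(kind)  # int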
4 changes: 3 additions & 1 deletion pandas/core/indexes/multi.py
@@ -327,7 +327,9 @@ def __new__(
         result._set_levels(levels, copy=copy, validate=False)
         result._set_codes(codes, copy=copy, validate=False)
 
-        result._names = [None] * len(levels)
+        # Incompatible types in assignment (expression has type "List[None]",
+        # variable has type "FrozenList") [assignment]
+        result._names = [None] * len(levels)  # type: ignore[assignment]
         if names is not None:
             # handles name validation
             result._set_names(names)
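
The assignment ignore is needed because _names is surfaced as a FrozenList while the default built here is a plain list. A small usage sketch (not part of the commit) of that default, one None per level:

import pandas as pd

mi = pd.MultiIndex.from_arrays([[1, 2], ["a", "b"]])
print(mi.names)  # FrozenList([None, None])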
5 changes: 4 additions & 1 deletion pandas/core/window/rolling.py
@@ -1127,7 +1127,10 @@ def _apply(
         -------
         y : type of input
         """
-        window = self._scipy_weight_generator(self.window, **kwargs)
+        # "None" not callable [misc]
+        window = self._scipy_weight_generator(  # type: ignore[misc]
+            self.window, **kwargs
+        )
         offset = (len(window) - 1) // 2 if self.center else 0
 
         def homogeneous_func(values: np.ndarray):
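
The "None" not callable error suggests _scipy_weight_generator defaults to None on the shared base class and only becomes a real callable on the scipy-backed Window subclass. A generic sketch of that declared-as-None hook pattern (not pandas code; weight_generator, configure and apply are hypothetical names):

from typing import Callable, List, Optional

# hypothetical hook, filled in by set-up code before use
weight_generator: Optional[Callable[[int], List[float]]] = None


def configure() -> None:
    global weight_generator
    weight_generator = lambda n: [1.0] * n


def apply(n: int) -> List[float]:
    # mypy only sees the declared Optional type here, hence an ignore like the one above
    return weight_generator(n)  # type: ignore[misc]


configure()
print(apply(3))  # [1.0, 1.0, 1.0]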
10 changes: 4 additions & 6 deletions pandas/io/common.py
@@ -714,12 +714,10 @@ def get_handle(
 
         # BZ Compression
         elif compression == "bz2":
-            handle = bz2.BZ2File(
-                # Argument 1 to "BZ2File" has incompatible type "Union[str,
-                # Union[IO[Any], RawIOBase, BufferedIOBase, TextIOBase, TextIOWrapper,
-                # mmap]]"; expected "Union[Union[str, bytes, _PathLike[str],
-                # _PathLike[bytes]], IO[bytes]]"
-                handle,  # type: ignore[arg-type]
+            # No overload variant of "BZ2File" matches argument types
+            # "Union[str, BaseBuffer]", "str", "Dict[str, Any]"
+            handle = bz2.BZ2File(  # type: ignore[call-overload]
+                handle,
                 mode=ioargs.mode,
                 **compression_args,
             )
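
The error code changes because the newer stubs model BZ2File's constructor with overloads (path-like vs binary file object), as the new message itself shows, so the mismatch is now reported as call-overload rather than arg-type. A small usage sketch (not part of the commit) of the two accepted forms:

import bz2
import io

# file-object form, as used by get_handle above
raw = io.BytesIO()
with bz2.BZ2File(raw, mode="wb") as fh:
    fh.write(b"hello")
raw.seek(0)
with bz2.BZ2File(raw, mode="rb") as fh:
    print(fh.read())  # b'hello'

# the path form would be bz2.BZ2File("data.bz2", mode="rb")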
3 changes: 1 addition & 2 deletions pandas/io/parsers/readers.py
@@ -1170,8 +1170,7 @@ def _make_engine(self, engine="c"):
             raise ValueError(
                 f"Unknown engine: {engine} (valid options are {mapping.keys()})"
             )
-        # error: Too many arguments for "ParserBase"
-        return mapping[engine](self.f, **self.options)  # type: ignore[call-arg]
+        return mapping[engine](self.f, **self.options)
 
     def _failover_to_python(self):
         raise AbstractMethodError(self)
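
mapping holds the parser classes keyed by engine name, and the dropped ignore covered calling whichever class is selected. A small usage sketch (not part of the commit) of the user-facing switch that this dispatch serves:

import io
import pandas as pd

csv = io.StringIO("a,b\n1,2\n3,4\n")
df = pd.read_csv(csv, engine="python")  # engine picks the class looked up in `mapping`
print(df.shape)  # (2, 2)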
11 changes: 8 additions & 3 deletions pandas/io/pytables.py
@@ -3351,8 +3351,11 @@ def validate(self, other):
             if sv != ov:
 
                 # show the error for the specific axes
-                for i, sax in enumerate(sv):
-                    oax = ov[i]
+                # Argument 1 to "enumerate" has incompatible type
+                # "Optional[Any]"; expected "Iterable[Any]" [arg-type]
+                for i, sax in enumerate(sv):  # type: ignore[arg-type]
+                    # Value of type "Optional[Any]" is not indexable [index]
+                    oax = ov[i]  # type: ignore[index]
                     if sax != oax:
                         raise ValueError(
                             f"invalid combination of [{c}] on appending data "
@@ -3592,7 +3595,9 @@ def f(i, c):
             # TODO: why kind_attr here?
             values = getattr(table_attrs, f"{adj_name}_kind", None)
             dtype = getattr(table_attrs, f"{adj_name}_dtype", None)
-            kind = _dtype_to_kind(dtype)
+            # Argument 1 to "_dtype_to_kind" has incompatible type
+            # "Optional[Any]"; expected "str" [arg-type]
+            kind = _dtype_to_kind(dtype)  # type: ignore[arg-type]
 
             md = self.read_metadata(c)
             # TODO: figure out why these two versions of `meta` dont always match.
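
Both new ignores in this file trace back to values fetched with a None default, which mypy types as Optional[Any]: iterating one and indexing the other. A generic sketch of those two error shapes (not pandas code; the dict and its keys are made up):

from typing import Dict, List, Optional

table: Dict[str, List[str]] = {"self_axes": ["a", "b"], "other_axes": ["a", "c"]}
sv: Optional[List[str]] = table.get("self_axes")
ov: Optional[List[str]] = table.get("other_axes")

for i, sax in enumerate(sv):  # type: ignore[arg-type]  # Optional is not Iterable
    oax = ov[i]  # type: ignore[index]  # Optional is not indexable
    if sax != oax:
        print(f"mismatch at position {i}: {sax} != {oax}")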
4 changes: 1 addition & 3 deletions pandas/plotting/_matplotlib/core.py
@@ -1671,9 +1671,7 @@ def blank_labeler(label, value):
         if labels is not None:
             blabels = [blank_labeler(left, value) for left, value in zip(labels, y)]
         else:
-            # error: Incompatible types in assignment (expression has type "None",
-            # variable has type "List[Any]")
-            blabels = None  # type: ignore[assignment]
+            blabels = None
         results = ax.pie(y, labels=blabels, **kwds)
 
         if kwds.get("autopct", None) is not None:
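
Dropping the ignore suggests mypy 0.920 now infers a type for blabels that spans both branches; either way, labels=None is valid for Axes.pie, which is what the else branch relies on. A small usage sketch (not part of the commit):

from matplotlib.figure import Figure

fig = Figure()
ax = fig.subplots()
ax.pie([3, 1, 2], labels=None)  # labels may be a list of strings or None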
2 changes: 1 addition & 1 deletion requirements-dev.txt
@@ -12,7 +12,7 @@ flake8==3.9.2
 flake8-bugbear==21.3.2
 flake8-comprehensions==3.1.0
 isort>=5.2.1
-mypy==0.910
+mypy==0.920
 pre-commit>=2.9.2
 pycodestyle
 pyupgrade
